This commit is contained in:
Iliyan Angelov
2025-11-16 20:05:08 +02:00
parent 98ccd5b6ff
commit 48353cde9c
118 changed files with 9488 additions and 1336 deletions

View File

@@ -1,38 +1,63 @@
# NOTE(review): the original chunk contained merged diff residue — a stale
# duplicate `create_engine` import and a superseded os.getenv()-based DB
# configuration alongside the new settings-based one. Only the current
# (settings-based) configuration is kept below.
from sqlalchemy import create_engine, event
# NOTE(review): sqlalchemy.ext.declarative.declarative_base is deprecated in
# SQLAlchemy 1.4+/2.0 in favour of sqlalchemy.orm.declarative_base — confirm
# the installed version before migrating.
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
import os
from dotenv import load_dotenv
from sqlalchemy.pool import QueuePool
from .settings import settings
from .logging_config import get_logger

# Populate the process environment from a local .env file before any
# configuration value is read.
load_dotenv()

# Module-level logger used by the connection-pool event listeners below.
logger = get_logger(__name__)

# Database configuration using settings
DATABASE_URL = settings.database_url
# Enhanced engine configuration for enterprise use.
# NOTE(review): the original chunk interleaved the pre-change keyword
# arguments (pool_recycle=300, pool_size=5, max_overflow=10, a second
# `echo=` and a missing trailing comma) with the new ones, which is a
# syntax error and duplicate-kwarg error; only the new values are kept.
engine = create_engine(
    DATABASE_URL,
    poolclass=QueuePool,
    pool_pre_ping=True,            # Verify connections before using
    pool_recycle=3600,             # Recycle connections after 1 hour
    pool_size=10,                  # Number of connections to maintain
    max_overflow=20,               # Additional connections beyond pool_size
    echo=settings.is_development,  # Log SQL queries in development
    future=True,                   # Use SQLAlchemy 2.0 style
    connect_args={
        "charset": "utf8mb4",
        "connect_timeout": 10
    }
)
# Event listeners for connection pool monitoring
@event.listens_for(engine, "connect")
def receive_connect(dbapi_conn, connection_record):
    """Log each newly established DBAPI connection.

    Renamed from the misleading `set_sqlite_pragma` — the engine is
    MySQL and no pragma (or any connection state) is set here; the
    handler only emits a debug log. The name is referenced solely via
    the decorator registration, so the rename is caller-safe.
    """
    logger.debug("New database connection established")
# Fired when a pooled connection is handed out to application code.
@event.listens_for(engine, "checkout")
def receive_checkout(dbapi_conn, connection_record, connection_proxy):
    """Log connection checkout (debug-level only; no state is modified)."""
    logger.debug("Connection checked out from pool")
# Fired when a connection is returned to the pool after use.
@event.listens_for(engine, "checkin")
def receive_checkin(dbapi_conn, connection_record):
    """Log connection checkin (debug-level only; no state is modified)."""
    logger.debug("Connection returned to pool")
# Session factory bound to the pooled engine; sessions require explicit
# commit and do not autoflush.
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
# Declarative base class that the application's ORM models inherit from.
Base = declarative_base()
# Dependency to get DB session
def get_db():
    """Yield a database session for the duration of one request.

    On any exception the session is rolled back and the error re-raised;
    the session is closed unconditionally once the caller is done.
    """
    session = SessionLocal()
    try:
        yield session
    except Exception:
        session.rollback()
        raise
    finally:
        session.close()

View File

@@ -0,0 +1,96 @@
"""
Enterprise-grade structured logging configuration
"""
import logging
import sys
from logging.handlers import RotatingFileHandler
from pathlib import Path
from typing import Optional
from .settings import settings
def setup_logging(
    log_level: Optional[str] = None,
    log_file: Optional[str] = None,
    enable_file_logging: bool = True
) -> logging.Logger:
    """Configure the root logger with console and rotating-file handlers.

    Args:
        log_level: Logging level name (DEBUG, INFO, WARNING, ERROR,
            CRITICAL); falls back to ``settings.LOG_LEVEL``.
        log_file: Path to the log file; falls back to ``settings.LOG_FILE``.
        enable_file_logging: Whether file logging may be enabled at all.

    Returns:
        The configured root logger.
    """
    # Resolve configuration, preferring explicit arguments over settings.
    level_name = log_level or settings.LOG_LEVEL
    target_file = log_file or settings.LOG_FILE
    numeric_level = getattr(logging, level_name.upper(), logging.INFO)

    # Ensure the log directory exists before a file handler opens it.
    if enable_file_logging and target_file:
        Path(target_file).parent.mkdir(parents=True, exist_ok=True)

    detailed_fmt = logging.Formatter(
        fmt='%(asctime)s | %(levelname)-8s | %(name)s | %(funcName)s:%(lineno)d | %(message)s',
        datefmt='%Y-%m-%d %H:%M:%S'
    )
    simple_fmt = logging.Formatter(
        fmt='%(asctime)s | %(levelname)-8s | %(message)s',
        datefmt='%H:%M:%S'
    )

    # Reset the root logger so repeated calls do not duplicate handlers.
    root = logging.getLogger()
    root.setLevel(numeric_level)
    root.handlers.clear()

    # Console handler is always attached; development gets the short format.
    console = logging.StreamHandler(sys.stdout)
    console.setLevel(numeric_level)
    console.setFormatter(simple_fmt if settings.is_development else detailed_fmt)
    root.addHandler(console)

    # Rotating file handler — skipped in development to avoid file-watcher
    # reload loops triggered by the growing log file.
    if enable_file_logging and target_file and not settings.is_development:
        file_handler = RotatingFileHandler(
            target_file,
            maxBytes=settings.LOG_MAX_BYTES,
            backupCount=settings.LOG_BACKUP_COUNT,
            encoding='utf-8'
        )
        file_handler.setLevel(numeric_level)
        file_handler.setFormatter(detailed_fmt)
        root.addHandler(file_handler)

    # Quieten noisy third-party loggers; uvicorn access logs stay at INFO
    # only while developing.
    access_level = logging.INFO if settings.is_development else logging.WARNING
    for noisy_name, noisy_level in (
        ("uvicorn", logging.INFO),
        ("uvicorn.access", access_level),
        ("sqlalchemy.engine", logging.WARNING),
        ("slowapi", logging.WARNING),
    ):
        logging.getLogger(noisy_name).setLevel(noisy_level)

    return root
def get_logger(name: str) -> logging.Logger:
    """Return the logger registered under *name* (typically ``__name__``).

    Thin wrapper around :func:`logging.getLogger` kept as the single
    entry point so call sites stay decoupled from the logging module.
    """
    logger_instance = logging.getLogger(name)
    return logger_instance

View File

@@ -0,0 +1,119 @@
"""
Enterprise-grade configuration management using Pydantic Settings
"""
import os
from typing import List
from urllib.parse import quote_plus

from pydantic import Field
from pydantic_settings import BaseSettings, SettingsConfigDict
class Settings(BaseSettings):
    """Application settings with environment variable support.

    Values are read from process environment variables and an optional
    ``.env`` file (UTF-8, case-insensitive keys, unknown keys ignored);
    each field's default applies when neither source provides a value.
    """
    model_config = SettingsConfigDict(
        env_file=".env",
        env_file_encoding="utf-8",
        case_sensitive=False,
        extra="ignore"
    )

    # Application
    APP_NAME: str = Field(default="Hotel Booking API", description="Application name")
    APP_VERSION: str = Field(default="1.0.0", description="Application version")
    ENVIRONMENT: str = Field(default="development", description="Environment: development, staging, production")
    DEBUG: bool = Field(default=False, description="Debug mode")
    API_V1_PREFIX: str = Field(default="/api/v1", description="API v1 prefix")

    # Server
    HOST: str = Field(default="0.0.0.0", description="Server host")
    PORT: int = Field(default=8000, description="Server port")

    # Database
    DB_USER: str = Field(default="root", description="Database user")
    DB_PASS: str = Field(default="", description="Database password")
    DB_NAME: str = Field(default="hotel_db", description="Database name")
    DB_HOST: str = Field(default="localhost", description="Database host")
    DB_PORT: str = Field(default="3306", description="Database port")

    # Security
    JWT_SECRET: str = Field(default="dev-secret-key-change-in-production-12345", description="JWT secret key")
    JWT_ALGORITHM: str = Field(default="HS256", description="JWT algorithm")
    JWT_ACCESS_TOKEN_EXPIRE_MINUTES: int = Field(default=30, description="JWT access token expiration in minutes")
    JWT_REFRESH_TOKEN_EXPIRE_DAYS: int = Field(default=7, description="JWT refresh token expiration in days")

    # CORS
    CLIENT_URL: str = Field(default="http://localhost:5173", description="Frontend client URL")
    CORS_ORIGINS: List[str] = Field(
        default_factory=lambda: [
            "http://localhost:5173",
            "http://localhost:3000",
            "http://127.0.0.1:5173"
        ],
        description="Allowed CORS origins"
    )

    # Rate Limiting
    RATE_LIMIT_ENABLED: bool = Field(default=True, description="Enable rate limiting")
    RATE_LIMIT_PER_MINUTE: int = Field(default=60, description="Requests per minute per IP")

    # Logging
    LOG_LEVEL: str = Field(default="INFO", description="Log level: DEBUG, INFO, WARNING, ERROR, CRITICAL")
    LOG_FILE: str = Field(default="logs/app.log", description="Log file path")
    LOG_MAX_BYTES: int = Field(default=10485760, description="Max log file size (10MB)")
    LOG_BACKUP_COUNT: int = Field(default=5, description="Number of backup log files")

    # Email
    SMTP_HOST: str = Field(default="smtp.gmail.com", description="SMTP host")
    SMTP_PORT: int = Field(default=587, description="SMTP port")
    SMTP_USER: str = Field(default="", description="SMTP username")
    SMTP_PASSWORD: str = Field(default="", description="SMTP password")
    SMTP_FROM_EMAIL: str = Field(default="", description="From email address")
    SMTP_FROM_NAME: str = Field(default="Hotel Booking", description="From name")

    # File Upload
    UPLOAD_DIR: str = Field(default="uploads", description="Upload directory")
    MAX_UPLOAD_SIZE: int = Field(default=5242880, description="Max upload size in bytes (5MB)")
    ALLOWED_EXTENSIONS: List[str] = Field(
        default_factory=lambda: ["jpg", "jpeg", "png", "gif", "webp"],
        description="Allowed file extensions"
    )

    # Redis (for caching)
    REDIS_ENABLED: bool = Field(default=False, description="Enable Redis caching")
    REDIS_HOST: str = Field(default="localhost", description="Redis host")
    REDIS_PORT: int = Field(default=6379, description="Redis port")
    REDIS_DB: int = Field(default=0, description="Redis database number")
    REDIS_PASSWORD: str = Field(default="", description="Redis password")

    # Request Timeout
    REQUEST_TIMEOUT: int = Field(default=30, description="Request timeout in seconds")

    # Health Check
    HEALTH_CHECK_INTERVAL: int = Field(default=30, description="Health check interval in seconds")

    @property
    def database_url(self) -> str:
        """Construct the SQLAlchemy database URL.

        User and password are URL-encoded so credentials containing
        reserved characters (``@``, ``:``, ``/``, ``%``) still produce a
        valid URL; plain credentials are unchanged by the encoding.
        """
        user = quote_plus(self.DB_USER)
        password = quote_plus(self.DB_PASS)
        return f"mysql+pymysql://{user}:{password}@{self.DB_HOST}:{self.DB_PORT}/{self.DB_NAME}"

    @property
    def is_production(self) -> bool:
        """Check if running in production (case-insensitive match)."""
        return self.ENVIRONMENT.lower() == "production"

    @property
    def is_development(self) -> bool:
        """Check if running in development (case-insensitive match)."""
        return self.ENVIRONMENT.lower() == "development"

    @property
    def redis_url(self) -> str:
        """Construct the Redis URL, URL-encoding the password if one is set."""
        if self.REDIS_PASSWORD:
            return f"redis://:{quote_plus(self.REDIS_PASSWORD)}@{self.REDIS_HOST}:{self.REDIS_PORT}/{self.REDIS_DB}"
        return f"redis://{self.REDIS_HOST}:{self.REDIS_PORT}/{self.REDIS_DB}"
# Global settings instance, created once at import time and shared by
# every module that does `from .settings import settings`.
settings = Settings()