This commit is contained in:
Iliyan Angelov
2025-11-19 12:27:01 +02:00
parent 2043ac897c
commit 34b4c969d4
469 changed files with 26870 additions and 8329 deletions

View File

@@ -0,0 +1,31 @@
"""add_mfa_fields_to_users
Revision ID: 08e2f866e131
Revises: add_badges_to_page_content
Create Date: 2025-11-19 11:13:30.376194
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '08e2f866e131'
down_revision = 'add_badges_to_page_content'
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Add the MFA columns (enabled flag, TOTP secret, backup codes) to users."""
    # server_default='0' keeps existing rows valid for the NOT NULL flag.
    mfa_columns = [
        sa.Column('mfa_enabled', sa.Boolean(), nullable=False, server_default='0'),
        sa.Column('mfa_secret', sa.String(255), nullable=True),
        sa.Column('mfa_backup_codes', sa.Text(), nullable=True),
    ]
    for column in mfa_columns:
        op.add_column('users', column)
def downgrade() -> None:
    """Drop the MFA columns from users, in reverse order of their creation."""
    for column_name in ('mfa_backup_codes', 'mfa_secret', 'mfa_enabled'):
        op.drop_column('users', column_name)

View File

@@ -0,0 +1,51 @@
"""add_paypal_payment_method
Revision ID: d9aff6c5f0d4
Revises: 08e2f866e131
Create Date: 2025-11-19 12:07:50.703320
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision = 'd9aff6c5f0d4'
down_revision = '08e2f866e131'
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Extend payments.payment_method ENUM with the 'paypal' value.

    MySQL stores the ENUM value list in the column definition itself, so the
    column must be redefined with the full list (which preserves existing
    rows). Other backends are left untouched: PostgreSQL enums are managed
    separately and SQLite does not enforce enum constraints.
    """
    bind = op.get_bind()
    if bind.dialect.name != 'mysql':
        # Nothing to do for non-MySQL dialects.
        return
    # Re-declare the ENUM with every existing value plus 'paypal'.
    op.execute(
        "ALTER TABLE payments MODIFY COLUMN payment_method ENUM('cash', 'credit_card', 'debit_card', 'bank_transfer', 'e_wallet', 'stripe', 'paypal') NOT NULL"
    )
    # ### end Alembic commands ###
def downgrade() -> None:
    """Remove 'paypal' from the payment_method ENUM (MySQL only).

    NOTE: the ALTER will fail if any row still holds the 'paypal' value —
    migrate or re-map existing PayPal payments before downgrading.
    """
    bind = op.get_bind()
    if bind.dialect.name != 'mysql':
        return
    op.execute(
        "ALTER TABLE payments MODIFY COLUMN payment_method ENUM('cash', 'credit_card', 'debit_card', 'bank_transfer', 'e_wallet', 'stripe') NOT NULL"
    )
    # ### end Alembic commands ###

View File

@@ -3,7 +3,7 @@ uvicorn[standard]==0.24.0
python-dotenv==1.0.0
sqlalchemy==2.0.23
pymysql==1.1.0
cryptography==41.0.7
cryptography>=41.0.7
python-jose[cryptography]==3.3.0
bcrypt==4.1.2
python-multipart==0.0.6
@@ -17,6 +17,9 @@ aiosmtplib==3.0.1
jinja2==3.1.2
alembic==1.12.1
stripe>=13.2.0
paypal-checkout-serversdk>=1.0.3
pyotp==2.9.0
qrcode[pil]==7.4.2
# Enterprise features (optional but recommended)
# redis==5.0.1 # Uncomment if using Redis caching

View File

@@ -96,6 +96,11 @@ class Settings(BaseSettings):
STRIPE_PUBLISHABLE_KEY: str = Field(default="", description="Stripe publishable key")
STRIPE_WEBHOOK_SECRET: str = Field(default="", description="Stripe webhook secret")
# PayPal Payment Gateway
PAYPAL_CLIENT_ID: str = Field(default="", description="PayPal client ID")
PAYPAL_CLIENT_SECRET: str = Field(default="", description="PayPal client secret")
PAYPAL_MODE: str = Field(default="sandbox", description="PayPal mode: sandbox or live")
@property
def database_url(self) -> str:
"""Construct database URL"""

View File

@@ -12,6 +12,7 @@ class PaymentMethod(str, enum.Enum):
bank_transfer = "bank_transfer"
e_wallet = "e_wallet"
stripe = "stripe"
paypal = "paypal"
class PaymentType(str, enum.Enum):

View File

@@ -17,6 +17,9 @@ class User(Base):
avatar = Column(String(255), nullable=True)
currency = Column(String(3), nullable=False, default='VND') # ISO 4217 currency code
is_active = Column(Boolean, nullable=False, default=True)
mfa_enabled = Column(Boolean, nullable=False, default=False)
mfa_secret = Column(String(255), nullable=True) # TOTP secret key (encrypted in production)
mfa_backup_codes = Column(Text, nullable=True) # JSON array of backup codes (hashed)
created_at = Column(DateTime, default=datetime.utcnow, nullable=False)
updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False)

View File

@@ -1,6 +1,10 @@
from fastapi import APIRouter, Depends, HTTPException, status, Cookie, Response
from fastapi import APIRouter, Depends, HTTPException, status, Cookie, Response, Request, UploadFile, File
from fastapi.responses import JSONResponse
from sqlalchemy.orm import Session
from pathlib import Path
import aiofiles
import uuid
import os
from ..config.database import get_db
from ..services.auth_service import auth_service
@@ -12,7 +16,11 @@ from ..schemas.auth import (
ResetPasswordRequest,
AuthResponse,
TokenResponse,
MessageResponse
MessageResponse,
MFAInitResponse,
EnableMFARequest,
VerifyMFARequest,
MFAStatusResponse
)
from ..middleware.auth import get_current_user
from ..models.user import User
@@ -20,6 +28,22 @@ from ..models.user import User
router = APIRouter(prefix="/auth", tags=["auth"])
def get_base_url(request: Request) -> str:
    """Resolve the server's base URL.

    Prefers the SERVER_URL environment variable; falls back to an http URL
    built from the request's Host header (default 'localhost:8000').
    """
    configured = os.getenv("SERVER_URL")
    if configured:
        return configured
    host = request.headers.get('host', 'localhost:8000')
    return f"http://{host}"
def normalize_image_url(image_url: str, base_url: str) -> str:
    """Return an absolute URL for *image_url* under *base_url*.

    Empty/None values and already-absolute http(s) URLs pass through
    unchanged; relative paths are joined to base_url with exactly one slash.
    """
    if not image_url:
        return image_url
    if image_url.startswith(('http://', 'https://')):
        return image_url
    separator = '' if image_url.startswith('/') else '/'
    return f"{base_url}{separator}{image_url}"
@router.post("/register", status_code=status.HTTP_201_CREATED)
async def register(
request: RegisterRequest,
@@ -79,9 +103,18 @@ async def login(
db=db,
email=request.email,
password=request.password,
remember_me=request.rememberMe or False
remember_me=request.rememberMe or False,
mfa_token=request.mfaToken
)
# Check if MFA is required
if result.get("requires_mfa"):
return {
"status": "success",
"requires_mfa": True,
"user_id": result["user_id"]
}
# Set refresh token as HttpOnly cookie
max_age = 7 * 24 * 60 * 60 if request.rememberMe else 1 * 24 * 60 * 60
response.set_cookie(
@@ -104,7 +137,7 @@ async def login(
}
except ValueError as e:
error_message = str(e)
status_code = status.HTTP_401_UNAUTHORIZED if "Invalid email or password" in error_message else status.HTTP_400_BAD_REQUEST
status_code = status.HTTP_401_UNAUTHORIZED if "Invalid email or password" in error_message or "Invalid MFA token" in error_message else status.HTTP_400_BAD_REQUEST
return JSONResponse(
status_code=status_code,
content={
@@ -260,3 +293,229 @@ async def reset_password(
detail=str(e)
)
# MFA Routes
from ..services.mfa_service import mfa_service
from ..config.settings import settings
@router.get("/mfa/init")
async def init_mfa(
    current_user: User = Depends(get_current_user),
    db: Session = Depends(get_db)
):
    """Start MFA enrollment: issue a fresh TOTP secret and provisioning QR code."""
    # Re-enrolling while MFA is active is rejected outright.
    if current_user.mfa_enabled:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="MFA is already enabled"
        )
    try:
        secret = mfa_service.generate_secret()
        # Fall back to a generic issuer label when APP_NAME is not configured.
        app_name = getattr(settings, 'APP_NAME', 'Hotel Booking')
        qr_code = mfa_service.generate_qr_code(secret, current_user.email, app_name)
    except Exception as e:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Error initializing MFA: {str(e)}"
        )
    return {
        "status": "success",
        "data": {
            "secret": secret,
            "qr_code": qr_code
        }
    }
@router.post("/mfa/enable")
async def enable_mfa(
    request: EnableMFARequest,
    current_user: User = Depends(get_current_user),
    db: Session = Depends(get_db)
):
    """Enable MFA for the current user once the verification token checks out."""
    try:
        _, backup_codes = mfa_service.enable_mfa(
            db=db,
            user_id=current_user.id,
            secret=request.secret,
            verification_token=request.verification_token
        )
    except ValueError as e:
        # Validation failures (bad secret / bad token) map to 400.
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=str(e)
        )
    except Exception as e:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Error enabling MFA: {str(e)}"
        )
    # Backup codes are returned once here so the user can store them.
    return {
        "status": "success",
        "message": "MFA enabled successfully",
        "data": {
            "backup_codes": backup_codes
        }
    }
@router.post("/mfa/disable")
async def disable_mfa(
    current_user: User = Depends(get_current_user),
    db: Session = Depends(get_db)
):
    """Turn MFA off for the current user."""
    try:
        mfa_service.disable_mfa(db=db, user_id=current_user.id)
    except ValueError as e:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=str(e)
        )
    except Exception as e:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Error disabling MFA: {str(e)}"
        )
    return {
        "status": "success",
        "message": "MFA disabled successfully"
    }
@router.get("/mfa/status", response_model=MFAStatusResponse)
async def get_mfa_status(
    current_user: User = Depends(get_current_user),
    db: Session = Depends(get_db)
):
    """Return the MFA status payload for the current user."""
    try:
        return mfa_service.get_mfa_status(db=db, user_id=current_user.id)
    except ValueError as e:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=str(e)
        )
    except Exception as e:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Error getting MFA status: {str(e)}"
        )
@router.post("/mfa/regenerate-backup-codes")
async def regenerate_backup_codes(
    current_user: User = Depends(get_current_user),
    db: Session = Depends(get_db)
):
    """Issue a fresh set of MFA backup codes, invalidating the old ones."""
    try:
        backup_codes = mfa_service.regenerate_backup_codes(db=db, user_id=current_user.id)
    except ValueError as e:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=str(e)
        )
    except Exception as e:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Error regenerating backup codes: {str(e)}"
        )
    return {
        "status": "success",
        "message": "Backup codes regenerated successfully",
        "data": {
            "backup_codes": backup_codes
        }
    }
@router.post("/avatar/upload")
async def upload_avatar(
    request: Request,
    image: UploadFile = File(...),
    current_user: User = Depends(get_current_user),
    db: Session = Depends(get_db)
):
    """Upload and persist the current user's avatar.

    Validates that the upload is an image of at most 2MB, stores it under
    ``uploads/avatars`` with a collision-free name, best-effort deletes the
    previous avatar file, and saves the new relative URL on the user record.

    Returns the relative URL, an absolute URL, and a summary of the user.
    Raises 400 for invalid type/size and 500 for unexpected failures.
    """
    try:
        # Validate file type via the client-supplied content type.
        if not image.content_type or not image.content_type.startswith('image/'):
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail="File must be an image"
            )
        # Validate file size (max 2MB); the upload is read fully into memory.
        content = await image.read()
        if len(content) > 2 * 1024 * 1024:  # 2MB
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail="Avatar file size must be less than 2MB"
            )
        # Create the uploads directory (relative to the project root).
        upload_dir = Path(__file__).parent.parent.parent / "uploads" / "avatars"
        upload_dir.mkdir(parents=True, exist_ok=True)
        # Delete the old avatar file if one exists; failures are ignored on
        # purpose so a stale file never blocks a new upload.
        if current_user.avatar:
            old_avatar_path = Path(__file__).parent.parent.parent / current_user.avatar.lstrip('/')
            if old_avatar_path.exists() and old_avatar_path.is_file():
                try:
                    old_avatar_path.unlink()
                except Exception:
                    pass  # best-effort cleanup only
        # Generate a unique filename; default to .png when the upload has no
        # recognizable extension.
        ext = Path(image.filename).suffix or '.png'
        filename = f"avatar-{current_user.id}-{uuid.uuid4()}{ext}"
        file_path = upload_dir / filename
        # Save file asynchronously.
        async with aiofiles.open(file_path, 'wb') as f:
            await f.write(content)
        # BUG FIX: the stored URL must point at the file just written —
        # previously a literal placeholder string was interpolated here,
        # so the saved avatar file was never referenced.
        image_url = f"/uploads/avatars/{filename}"
        current_user.avatar = image_url
        db.commit()
        db.refresh(current_user)
        # Build the absolute URL for clients that need one.
        base_url = get_base_url(request)
        full_url = normalize_image_url(image_url, base_url)
        return {
            "status": "success",
            "message": "Avatar uploaded successfully",
            "data": {
                "avatar_url": image_url,
                "full_url": full_url,
                "user": {
                    "id": current_user.id,
                    "name": current_user.full_name,
                    "email": current_user.email,
                    "phone": current_user.phone,
                    "avatar": image_url,
                    "role": current_user.role.name if current_user.role else "customer"
                }
            }
        }
    except HTTPException:
        raise
    except Exception as e:
        db.rollback()
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Error uploading avatar: {str(e)}"
        )

View File

@@ -202,6 +202,16 @@ async def create_booking(
):
"""Create new booking"""
try:
import logging
logger = logging.getLogger(__name__)
# Validate that booking_data is a dict
if not isinstance(booking_data, dict):
logger.error(f"Invalid booking_data type: {type(booking_data)}, value: {booking_data}")
raise HTTPException(status_code=400, detail="Invalid request body. Expected JSON object.")
logger.info(f"Received booking request from user {current_user.id}: {booking_data}")
room_id = booking_data.get("room_id")
check_in_date = booking_data.get("check_in_date")
check_out_date = booking_data.get("check_out_date")
@@ -210,8 +220,21 @@ async def create_booking(
notes = booking_data.get("notes")
payment_method = booking_data.get("payment_method", "cash")
if not all([room_id, check_in_date, check_out_date, total_price]):
raise HTTPException(status_code=400, detail="Missing required booking fields")
# Detailed validation with specific error messages
missing_fields = []
if not room_id:
missing_fields.append("room_id")
if not check_in_date:
missing_fields.append("check_in_date")
if not check_out_date:
missing_fields.append("check_out_date")
if total_price is None:
missing_fields.append("total_price")
if missing_fields:
error_msg = f"Missing required booking fields: {', '.join(missing_fields)}"
logger.error(error_msg)
raise HTTPException(status_code=400, detail=error_msg)
# Check if room exists
room = db.query(Room).filter(Room.id == room_id).first()
@@ -250,15 +273,15 @@ async def create_booking(
booking_number = generate_booking_number()
# Determine if deposit is required
# Cash requires deposit, Stripe doesn't require deposit (full payment or deposit handled via payment flow)
# Cash requires deposit, Stripe and PayPal don't require deposit (full payment or deposit handled via payment flow)
requires_deposit = payment_method == "cash"
deposit_percentage = 20 if requires_deposit else 0
deposit_amount = (float(total_price) * deposit_percentage) / 100 if requires_deposit else 0
# For Stripe, booking can be confirmed immediately after payment
# For Stripe and PayPal, booking can be confirmed immediately after payment
initial_status = BookingStatus.pending
if payment_method == "stripe":
# Will be confirmed after successful Stripe payment
if payment_method in ["stripe", "paypal"]:
# Will be confirmed after successful payment
initial_status = BookingStatus.pending
# Create booking
@@ -279,19 +302,32 @@ async def create_booking(
db.add(booking)
db.flush()
# Create payment record if Stripe payment method is selected
if payment_method == "stripe":
# Create payment record if Stripe or PayPal payment method is selected
if payment_method in ["stripe", "paypal"]:
from ..models.payment import Payment, PaymentMethod, PaymentStatus, PaymentType
if payment_method == "stripe":
payment_method_enum = PaymentMethod.stripe
elif payment_method == "paypal":
payment_method_enum = PaymentMethod.paypal
else:
# This shouldn't happen, but just in case
logger.warning(f"Unexpected payment_method: {payment_method}, defaulting to stripe")
payment_method_enum = PaymentMethod.stripe
logger.info(f"Creating payment for booking {booking.id} with payment_method: {payment_method} -> enum: {payment_method_enum.value}")
payment = Payment(
booking_id=booking.id,
amount=total_price,
payment_method=PaymentMethod.stripe,
payment_method=payment_method_enum,
payment_type=PaymentType.full,
payment_status=PaymentStatus.pending,
payment_date=None,
)
db.add(payment)
db.flush()
logger.info(f"Payment created: ID={payment.id}, method={payment.payment_method.value if hasattr(payment.payment_method, 'value') else payment.payment_method}")
# Create deposit payment if required (for cash method)
# Note: For cash payments, deposit is paid on arrival, so we don't create a pending payment record
@@ -301,7 +337,7 @@ async def create_booking(
services = booking_data.get("services", [])
if services:
from ..models.service import Service
from ..models.service_usage import ServiceUsage
# ServiceUsage is already imported at the top of the file
for service_item in services:
service_id = service_item.get("service_id")
@@ -354,8 +390,10 @@ async def create_booking(
except Exception as e:
# Log error but don't fail booking creation if invoice creation fails
import logging
import traceback
logger = logging.getLogger(__name__)
logger.error(f"Failed to create invoice for booking {booking.id}: {str(e)}")
logger.error(f"Traceback: {traceback.format_exc()}")
# Fetch with relations for proper serialization (eager load payments and service_usages)
from sqlalchemy.orm import joinedload, selectinload
@@ -369,12 +407,25 @@ async def create_booking(
payment_status_from_payments = "unpaid"
if booking.payments:
latest_payment = sorted(booking.payments, key=lambda p: p.created_at, reverse=True)[0]
payment_method_from_payments = latest_payment.payment_method.value if isinstance(latest_payment.payment_method, PaymentMethod) else latest_payment.payment_method
# Safely extract payment method value
if isinstance(latest_payment.payment_method, PaymentMethod):
payment_method_from_payments = latest_payment.payment_method.value
elif hasattr(latest_payment.payment_method, 'value'):
payment_method_from_payments = latest_payment.payment_method.value
else:
payment_method_from_payments = str(latest_payment.payment_method)
logger.info(f"Booking {booking.id} - Latest payment method: {payment_method_from_payments}, raw: {latest_payment.payment_method}")
if latest_payment.payment_status == PaymentStatus.completed:
payment_status_from_payments = "paid"
elif latest_payment.payment_status == PaymentStatus.refunded:
payment_status_from_payments = "refunded"
# Use payment_method from payments if available, otherwise fall back to request payment_method
final_payment_method = payment_method_from_payments if payment_method_from_payments else payment_method
logger.info(f"Booking {booking.id} - Final payment_method: {final_payment_method} (from_payments: {payment_method_from_payments}, request: {payment_method})")
# Serialize booking properly
booking_dict = {
"id": booking.id,
@@ -386,7 +437,7 @@ async def create_booking(
"guest_count": booking.num_guests,
"total_price": float(booking.total_price) if booking.total_price else 0.0,
"status": booking.status.value if isinstance(booking.status, BookingStatus) else booking.status,
"payment_method": payment_method_from_payments or payment_method,
"payment_method": final_payment_method,
"payment_status": payment_status_from_payments,
"deposit_paid": booking.deposit_paid,
"requires_deposit": booking.requires_deposit,
@@ -408,7 +459,7 @@ async def create_booking(
"id": p.id,
"booking_id": p.booking_id,
"amount": float(p.amount) if p.amount else 0.0,
"payment_method": p.payment_method.value if isinstance(p.payment_method, PaymentMethod) else p.payment_method,
"payment_method": p.payment_method.value if isinstance(p.payment_method, PaymentMethod) else (p.payment_method.value if hasattr(p.payment_method, 'value') else str(p.payment_method)),
"payment_type": p.payment_type.value if isinstance(p.payment_type, PaymentType) else p.payment_type,
"deposit_percentage": p.deposit_percentage,
"payment_status": p.payment_status.value if isinstance(p.payment_status, PaymentStatus) else p.payment_status,
@@ -495,6 +546,11 @@ async def create_booking(
except HTTPException:
raise
except Exception as e:
import logging
import traceback
logger = logging.getLogger(__name__)
logger.error(f"Error creating booking (payment_method: {payment_method}): {str(e)}")
logger.error(f"Traceback: {traceback.format_exc()}")
db.rollback()
raise HTTPException(status_code=500, detail=str(e))
@@ -530,17 +586,33 @@ async def get_booking_by_id(
# Determine payment_method and payment_status from payments
# Get latest payment efficiently (already loaded via joinedload)
payment_method = None
import logging
logger = logging.getLogger(__name__)
payment_method_from_payments = None
payment_status = "unpaid"
if booking.payments:
# Find latest payment (payments are already loaded, so this is fast)
latest_payment = max(booking.payments, key=lambda p: p.created_at if p.created_at else datetime.min)
payment_method = latest_payment.payment_method.value if isinstance(latest_payment.payment_method, PaymentMethod) else latest_payment.payment_method
# Safely extract payment method value
if isinstance(latest_payment.payment_method, PaymentMethod):
payment_method_from_payments = latest_payment.payment_method.value
elif hasattr(latest_payment.payment_method, 'value'):
payment_method_from_payments = latest_payment.payment_method.value
else:
payment_method_from_payments = str(latest_payment.payment_method)
logger.info(f"Get booking {id} - Latest payment method: {payment_method_from_payments}, raw: {latest_payment.payment_method}")
if latest_payment.payment_status == PaymentStatus.completed:
payment_status = "paid"
elif latest_payment.payment_status == PaymentStatus.refunded:
payment_status = "refunded"
# Use payment_method from payments, fallback to "cash" if no payments
final_payment_method = payment_method_from_payments if payment_method_from_payments else "cash"
logger.info(f"Get booking {id} - Final payment_method: {final_payment_method}")
booking_dict = {
"id": booking.id,
"booking_number": booking.booking_number,
@@ -551,7 +623,7 @@ async def get_booking_by_id(
"guest_count": booking.num_guests, # Frontend expects guest_count
"total_price": float(booking.total_price) if booking.total_price else 0.0,
"status": booking.status.value if isinstance(booking.status, BookingStatus) else booking.status,
"payment_method": payment_method or "cash",
"payment_method": final_payment_method,
"payment_status": payment_status,
"deposit_paid": booking.deposit_paid,
"requires_deposit": booking.requires_deposit,
@@ -605,7 +677,7 @@ async def get_booking_by_id(
{
"id": p.id,
"amount": float(p.amount) if p.amount else 0.0,
"payment_method": p.payment_method.value if isinstance(p.payment_method, PaymentMethod) else p.payment_method,
"payment_method": p.payment_method.value if isinstance(p.payment_method, PaymentMethod) else (p.payment_method.value if hasattr(p.payment_method, 'value') else str(p.payment_method)),
"payment_status": p.payment_status.value if isinstance(p.payment_status, PaymentStatus) else p.payment_status,
}
for p in booking.payments

View File

@@ -13,6 +13,7 @@ from ..models.booking import Booking, BookingStatus
from ..utils.mailer import send_email
from ..utils.email_templates import payment_confirmation_email_template
from ..services.stripe_service import StripeService
from ..services.paypal_service import PayPalService
router = APIRouter(prefix="/payments", tags=["payments"])
@@ -588,3 +589,187 @@ async def stripe_webhook(
except Exception as e:
db.rollback()
raise HTTPException(status_code=500, detail=str(e))
@router.post("/paypal/create-order")
async def create_paypal_order(
    order_data: dict,
    current_user: User = Depends(get_current_user),
    db: Session = Depends(get_db)
):
    """Create a PayPal order for a booking.

    Expects ``order_data`` with ``booking_id`` and ``amount`` (optionally
    ``currency``, ``return_url``, ``cancel_url``). Credentials stored via the
    admin panel (DB settings) take precedence over environment settings.
    Returns the PayPal order id, status, and the approval URL the client
    must redirect the payer to.
    """
    try:
        # Check if PayPal is configured: DB-stored admin settings win,
        # environment-backed settings are the fallback.
        from ..services.paypal_service import get_paypal_client_id, get_paypal_client_secret
        client_id = get_paypal_client_id(db)
        if not client_id:
            client_id = settings.PAYPAL_CLIENT_ID
        client_secret = get_paypal_client_secret(db)
        if not client_secret:
            client_secret = settings.PAYPAL_CLIENT_SECRET
        if not client_id or not client_secret:
            raise HTTPException(
                status_code=500,
                detail="PayPal is not configured. Please configure PayPal settings in Admin Panel or set PAYPAL_CLIENT_ID and PAYPAL_CLIENT_SECRET environment variables."
            )
        booking_id = order_data.get("booking_id")
        # float() raises ValueError on a non-numeric amount -> 400 below.
        amount = float(order_data.get("amount", 0))
        currency = order_data.get("currency", "USD")
        if not booking_id or amount <= 0:
            raise HTTPException(
                status_code=400,
                detail="booking_id and amount are required"
            )
        # Validate amount against the hard cap enforced by this endpoint.
        if amount > 100000:
            raise HTTPException(
                status_code=400,
                detail=f"Amount ${amount:,.2f} exceeds PayPal's maximum of $100,000. Please contact support for large payments."
            )
        # Verify booking exists and user has access.
        booking = db.query(Booking).filter(Booking.id == booking_id).first()
        if not booking:
            raise HTTPException(status_code=404, detail="Booking not found")
        # NOTE(review): role_id 1 is presumably the admin role — confirm
        # against the roles table before relying on this check.
        if current_user.role_id != 1 and booking.user_id != current_user.id:
            raise HTTPException(status_code=403, detail="Forbidden")
        # Get return URLs from the request body, or fall back to the client
        # app's default PayPal return/cancel routes.
        client_url = settings.CLIENT_URL or os.getenv("CLIENT_URL", "http://localhost:5173")
        return_url = order_data.get("return_url", f"{client_url}/payment/paypal/return")
        cancel_url = order_data.get("cancel_url", f"{client_url}/payment/paypal/cancel")
        # Create the PayPal order via the service layer; metadata values are
        # stringified as PayPal metadata fields are string-typed.
        order = PayPalService.create_order(
            amount=amount,
            currency=currency,
            metadata={
                "booking_id": str(booking_id),
                "booking_number": booking.booking_number,
                "user_id": str(current_user.id),
                "description": f"Hotel Booking Payment - {booking.booking_number}",
                "return_url": return_url,
                "cancel_url": cancel_url,
            },
            db=db
        )
        # Without an approval URL the client cannot complete the flow.
        if not order.get("approval_url"):
            raise HTTPException(
                status_code=500,
                detail="Failed to create PayPal order. Approval URL is missing."
            )
        return {
            "status": "success",
            "message": "PayPal order created successfully",
            "data": {
                "order_id": order["id"],
                "approval_url": order["approval_url"],
                "status": order["status"],
            }
        }
    except HTTPException:
        raise
    except ValueError as e:
        # Covers bad numeric input and service-level validation errors.
        import logging
        logger = logging.getLogger(__name__)
        logger.error(f"PayPal order creation error: {str(e)}")
        raise HTTPException(status_code=400, detail=str(e))
    except Exception as e:
        import logging
        logger = logging.getLogger(__name__)
        logger.error(f"Unexpected error creating PayPal order: {str(e)}", exc_info=True)
        raise HTTPException(status_code=500, detail=str(e))
@router.post("/paypal/capture")
async def capture_paypal_payment(
    payment_data: dict,
    current_user: User = Depends(get_current_user),
    db: Session = Depends(get_db)
):
    """Capture an approved PayPal order and confirm the associated payment.

    Expects ``payment_data`` with ``order_id`` (required) and optionally
    ``booking_id``. Delegates capture/persistence to
    ``PayPalService.confirm_payment`` and then sends a best-effort
    confirmation email. Raises 400 for validation errors and 500 otherwise.
    """
    try:
        order_id = payment_data.get("order_id")
        booking_id = payment_data.get("booking_id")
        if not order_id:
            raise HTTPException(
                status_code=400,
                detail="order_id is required"
            )
        # Confirm payment (this commits the transaction internally).
        payment = PayPalService.confirm_payment(
            order_id=order_id,
            db=db,
            booking_id=booking_id
        )
        # Ensure the transaction is committed; swallowing here is deliberate
        # because the service may already have committed the session.
        try:
            db.commit()
        except Exception:
            pass
        # Re-read the booking so the response reflects post-payment state.
        booking = db.query(Booking).filter(Booking.id == payment["booking_id"]).first()
        if booking:
            db.refresh(booking)
        # Send payment confirmation email (non-blocking: failures are only
        # logged, never surfaced to the payer).
        if booking and booking.user:
            try:
                client_url = settings.CLIENT_URL or os.getenv("CLIENT_URL", "http://localhost:5173")
                email_html = payment_confirmation_email_template(
                    booking_number=booking.booking_number,
                    guest_name=booking.user.full_name,
                    amount=payment["amount"],
                    payment_method="paypal",
                    transaction_id=payment["transaction_id"],
                    client_url=client_url
                )
                await send_email(
                    to=booking.user.email,
                    subject=f"Payment Confirmed - {booking.booking_number}",
                    html=email_html
                )
            except Exception as e:
                import logging
                logger = logging.getLogger(__name__)
                logger.warning(f"Failed to send payment confirmation email: {e}")
        return {
            "status": "success",
            "message": "Payment confirmed successfully",
            "data": {
                "payment": payment,
                "booking": {
                    "id": booking.id if booking else None,
                    "booking_number": booking.booking_number if booking else None,
                    "status": booking.status.value if booking else None,
                }
            }
        }
    except HTTPException:
        # Roll back any partial work before propagating the HTTP error.
        db.rollback()
        raise
    except ValueError as e:
        import logging
        logger = logging.getLogger(__name__)
        logger.error(f"PayPal payment confirmation error: {str(e)}")
        db.rollback()
        raise HTTPException(status_code=400, detail=str(e))
    except Exception as e:
        import logging
        logger = logging.getLogger(__name__)
        logger.error(f"Unexpected error confirming PayPal payment: {str(e)}", exc_info=True)
        db.rollback()
        raise HTTPException(status_code=500, detail=str(e))

View File

@@ -122,17 +122,80 @@ async def search_available_rooms(
request: Request,
from_date: str = Query(..., alias="from"),
to_date: str = Query(..., alias="to"),
roomId: Optional[int] = Query(None, alias="roomId"),
type: Optional[str] = Query(None),
capacity: Optional[int] = Query(None),
page: int = Query(1, ge=1),
limit: int = Query(12, ge=1, le=100),
db: Session = Depends(get_db)
):
"""Search for available rooms"""
"""Search for available rooms or check specific room availability"""
try:
check_in = datetime.fromisoformat(from_date.replace('Z', '+00:00'))
check_out = datetime.fromisoformat(to_date.replace('Z', '+00:00'))
# Parse dates - handle both date-only and datetime formats
try:
if 'T' in from_date or 'Z' in from_date or '+' in from_date:
check_in = datetime.fromisoformat(from_date.replace('Z', '+00:00'))
else:
check_in = datetime.strptime(from_date, '%Y-%m-%d')
except ValueError:
raise HTTPException(status_code=400, detail=f"Invalid from date format: {from_date}")
try:
if 'T' in to_date or 'Z' in to_date or '+' in to_date:
check_out = datetime.fromisoformat(to_date.replace('Z', '+00:00'))
else:
check_out = datetime.strptime(to_date, '%Y-%m-%d')
except ValueError:
raise HTTPException(status_code=400, detail=f"Invalid to date format: {to_date}")
# If checking a specific room, handle it differently
if roomId:
# Check if room exists
room = db.query(Room).filter(Room.id == roomId).first()
if not room:
raise HTTPException(status_code=404, detail="Room not found")
# Check if room is available
if room.status != RoomStatus.available:
return {
"status": "success",
"data": {
"available": False,
"message": "Room is not available",
"room_id": roomId
}
}
# Check for overlapping bookings
overlapping = db.query(Booking).filter(
and_(
Booking.room_id == roomId,
Booking.status != BookingStatus.cancelled,
Booking.check_in_date < check_out,
Booking.check_out_date > check_in
)
).first()
if overlapping:
return {
"status": "success",
"data": {
"available": False,
"message": "Room is already booked for the selected dates",
"room_id": roomId
}
}
return {
"status": "success",
"data": {
"available": True,
"message": "Room is available",
"room_id": roomId
}
}
# Original search functionality
if check_in >= check_out:
raise HTTPException(
status_code=400,

View File

@@ -323,6 +323,162 @@ async def update_stripe_settings(
raise HTTPException(status_code=500, detail=str(e))
@router.get("/paypal")
async def get_paypal_settings(
    current_user: User = Depends(authorize_roles("admin")),
    db: Session = Depends(get_db)
):
    """Get PayPal payment settings (Admin only).

    Reads the client id, client secret, and mode from SystemSettings rows
    and returns them along with presence flags and a masked secret.
    """
    try:
        client_id_setting = db.query(SystemSettings).filter(
            SystemSettings.key == "paypal_client_id"
        ).first()
        client_secret_setting = db.query(SystemSettings).filter(
            SystemSettings.key == "paypal_client_secret"
        ).first()
        mode_setting = db.query(SystemSettings).filter(
            SystemSettings.key == "paypal_mode"
        ).first()
        # Mask secret for security (only show last 4 characters).
        def mask_key(key_value: str) -> str:
            if not key_value or len(key_value) < 4:
                return ""
            return "*" * (len(key_value) - 4) + key_value[-4:]
        result = {
            "paypal_client_id": "",
            "paypal_client_secret": "",
            "paypal_mode": "sandbox",
            "paypal_client_secret_masked": "",
            "has_client_id": False,
            "has_client_secret": False,
        }
        if client_id_setting:
            result["paypal_client_id"] = client_id_setting.value
            result["has_client_id"] = bool(client_id_setting.value)
            result["updated_at"] = client_id_setting.updated_at.isoformat() if client_id_setting.updated_at else None
            result["updated_by"] = client_id_setting.updated_by.full_name if client_id_setting.updated_by else None
        if client_secret_setting:
            # NOTE(review): the raw client secret is returned here in
            # "paypal_client_secret" even though a masked variant is also
            # computed — confirm the admin UI actually needs the plaintext
            # value; if not, this should return only the masked form.
            result["paypal_client_secret"] = client_secret_setting.value
            result["paypal_client_secret_masked"] = mask_key(client_secret_setting.value)
            result["has_client_secret"] = bool(client_secret_setting.value)
        if mode_setting:
            # Guard against a stored empty-string mode.
            result["paypal_mode"] = mode_setting.value or "sandbox"
        return {
            "status": "success",
            "data": result
        }
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
@router.put("/paypal")
async def update_paypal_settings(
paypal_data: dict,
current_user: User = Depends(authorize_roles("admin")),
db: Session = Depends(get_db)
):
"""Update PayPal payment settings (Admin only)"""
try:
client_id = paypal_data.get("paypal_client_id", "").strip()
client_secret = paypal_data.get("paypal_client_secret", "").strip()
mode = paypal_data.get("paypal_mode", "sandbox").strip().lower()
# Validate mode
if mode and mode not in ["sandbox", "live"]:
raise HTTPException(
status_code=400,
detail="Invalid PayPal mode. Must be 'sandbox' or 'live'"
)
# Update or create client ID setting
if client_id:
setting = db.query(SystemSettings).filter(
SystemSettings.key == "paypal_client_id"
).first()
if setting:
setting.value = client_id
setting.updated_by_id = current_user.id
else:
setting = SystemSettings(
key="paypal_client_id",
value=client_id,
description="PayPal client ID for processing payments",
updated_by_id=current_user.id
)
db.add(setting)
# Update or create client secret setting
if client_secret:
setting = db.query(SystemSettings).filter(
SystemSettings.key == "paypal_client_secret"
).first()
if setting:
setting.value = client_secret
setting.updated_by_id = current_user.id
else:
setting = SystemSettings(
key="paypal_client_secret",
value=client_secret,
description="PayPal client secret for processing payments",
updated_by_id=current_user.id
)
db.add(setting)
# Update or create mode setting
if mode:
setting = db.query(SystemSettings).filter(
SystemSettings.key == "paypal_mode"
).first()
if setting:
setting.value = mode
setting.updated_by_id = current_user.id
else:
setting = SystemSettings(
key="paypal_mode",
value=mode,
description="PayPal mode: sandbox or live",
updated_by_id=current_user.id
)
db.add(setting)
db.commit()
# Return masked values
def mask_key(key_value: str) -> str:
if not key_value or len(key_value) < 4:
return ""
return "*" * (len(key_value) - 4) + key_value[-4:]
return {
"status": "success",
"message": "PayPal settings updated successfully",
"data": {
"paypal_client_id": client_id if client_id else "",
"paypal_client_secret": client_secret if client_secret else "",
"paypal_mode": mode,
"paypal_client_secret_masked": mask_key(client_secret) if client_secret else "",
"has_client_id": bool(client_id),
"has_client_secret": bool(client_secret),
}
}
except HTTPException:
raise
except Exception as e:
db.rollback()
raise HTTPException(status_code=500, detail=str(e))
@router.get("/smtp")
async def get_smtp_settings(
current_user: User = Depends(authorize_roles("admin")),

View File

@@ -31,6 +31,7 @@ class LoginRequest(BaseModel):
email: EmailStr
password: str
rememberMe: Optional[bool] = False
mfaToken: Optional[str] = None
class RefreshTokenRequest(BaseModel):
@@ -85,3 +86,23 @@ class MessageResponse(BaseModel):
status: str
message: str
class MFAInitResponse(BaseModel):
    """Response for starting MFA enrollment: the new secret plus its QR code."""
    secret: str  # TOTP secret; the client echoes it back in EnableMFARequest
    qr_code: str  # Base64 data URL
class EnableMFARequest(BaseModel):
    """Request to turn MFA on: the secret plus a code proving enrollment worked."""
    secret: str  # secret previously issued during MFA initialization
    verification_token: str  # current code from the authenticator app
class VerifyMFARequest(BaseModel):
    """Request to answer an MFA challenge with a TOTP code or a backup code."""
    token: str  # TOTP code, or a backup code when is_backup_code is True
    is_backup_code: Optional[bool] = False  # True when `token` is a one-time backup code
class MFAStatusResponse(BaseModel):
    """Summary of a user's current MFA state."""
    mfa_enabled: bool
    backup_codes_count: int  # number of backup codes still stored for the user

View File

@@ -144,8 +144,8 @@ class AuthService:
"refreshToken": tokens["refreshToken"]
}
async def login(self, db: Session, email: str, password: str, remember_me: bool = False) -> dict:
"""Login user"""
async def login(self, db: Session, email: str, password: str, remember_me: bool = False, mfa_token: str = None) -> dict:
"""Login user with optional MFA verification"""
# Find user with role and password
user = db.query(User).filter(User.email == email).first()
if not user:
@@ -158,6 +158,21 @@ class AuthService:
if not self.verify_password(password, user.password):
raise ValueError("Invalid email or password")
# Check if MFA is enabled
if user.mfa_enabled:
if not mfa_token:
# Return special response indicating MFA is required
return {
"requires_mfa": True,
"user_id": user.id
}
# Verify MFA token
from ..services.mfa_service import mfa_service
is_backup_code = len(mfa_token) == 8 # Backup codes are 8 characters
if not mfa_service.verify_mfa(db, user.id, mfa_token, is_backup_code):
raise ValueError("Invalid MFA token")
# Generate tokens
tokens = self.generate_tokens(user.id)

View File

@@ -66,7 +66,8 @@ class InvoiceService:
booking = db.query(Booking).options(
selectinload(Booking.service_usages).selectinload("service"),
selectinload(Booking.room).selectinload("room_type")
selectinload(Booking.room).selectinload("room_type"),
selectinload(Booking.payments)
).filter(Booking.id == booking_id).first()
if not booking:
raise ValueError("Booking not found")
@@ -82,6 +83,10 @@ class InvoiceService:
# Initial subtotal is booking total (room + services)
subtotal = float(booking.total_price)
# Calculate tax and total amounts
tax_amount = (subtotal - discount_amount) * (tax_rate / 100)
total_amount = subtotal + tax_amount - discount_amount
# Calculate amount paid from completed payments
amount_paid = sum(
float(p.amount) for p in booking.payments
@@ -134,6 +139,7 @@ class InvoiceService:
)
db.add(invoice)
db.flush() # Flush to get invoice.id before creating invoice items
# Create invoice items from booking
# Calculate room price (total_price includes services, so subtract services)

View File

@@ -0,0 +1,299 @@
"""
Multi-Factor Authentication (MFA) Service
Handles TOTP-based MFA functionality
"""
import pyotp
import qrcode
import secrets
import hashlib
import json
import base64
import io
from typing import List, Optional, Dict, Tuple
from sqlalchemy.orm import Session
from ..models.user import User
import logging
logger = logging.getLogger(__name__)
class MFAService:
"""Service for managing Multi-Factor Authentication"""
@staticmethod
def generate_secret() -> str:
"""Generate a new TOTP secret"""
return pyotp.random_base32()
@staticmethod
def generate_qr_code(secret: str, email: str, app_name: str = "Hotel Booking") -> str:
"""
Generate QR code data URL for TOTP setup
Args:
secret: TOTP secret key
email: User's email address
app_name: Application name for the authenticator app
Returns:
Base64 encoded QR code image data URL
"""
# Create provisioning URI for authenticator apps
totp_uri = pyotp.totp.TOTP(secret).provisioning_uri(
name=email,
issuer_name=app_name
)
# Generate QR code
qr = qrcode.QRCode(
version=1,
error_correction=qrcode.constants.ERROR_CORRECT_L,
box_size=10,
border=4,
)
qr.add_data(totp_uri)
qr.make(fit=True)
# Create image
img = qr.make_image(fill_color="black", back_color="white")
# Convert to base64 data URL
buffer = io.BytesIO()
img.save(buffer, format='PNG')
img_data = base64.b64encode(buffer.getvalue()).decode()
return f"data:image/png;base64,{img_data}"
@staticmethod
def generate_backup_codes(count: int = 10) -> List[str]:
"""
Generate backup codes for MFA recovery
Args:
count: Number of backup codes to generate (default: 10)
Returns:
List of backup codes (8-character alphanumeric)
"""
codes = []
for _ in range(count):
# Generate 8-character alphanumeric code
code = secrets.token_urlsafe(6).upper()[:8]
codes.append(code)
return codes
@staticmethod
def hash_backup_code(code: str) -> str:
"""
Hash a backup code for storage (SHA-256)
Args:
code: Plain backup code
Returns:
Hashed backup code
"""
return hashlib.sha256(code.encode()).hexdigest()
@staticmethod
def verify_backup_code(code: str, hashed_codes: List[str]) -> bool:
"""
Verify if a backup code matches any hashed code
Args:
code: Plain backup code to verify
hashed_codes: List of hashed backup codes
Returns:
True if code matches, False otherwise
"""
code_hash = MFAService.hash_backup_code(code)
return code_hash in hashed_codes
@staticmethod
def verify_totp(token: str, secret: str) -> bool:
"""
Verify a TOTP token
Args:
token: 6-digit TOTP token from authenticator app
secret: User's TOTP secret
Returns:
True if token is valid, False otherwise
"""
try:
totp = pyotp.TOTP(secret)
# Allow tokens from current and previous/next time window for clock skew
return totp.verify(token, valid_window=1)
except Exception as e:
logger.error(f"Error verifying TOTP: {str(e)}")
return False
@staticmethod
def enable_mfa(
db: Session,
user_id: int,
secret: str,
verification_token: str
) -> Tuple[bool, List[str]]:
"""
Enable MFA for a user after verifying the token
Args:
db: Database session
user_id: User ID
secret: TOTP secret
verification_token: Token from authenticator app to verify
Returns:
Tuple of (success, backup_codes)
"""
user = db.query(User).filter(User.id == user_id).first()
if not user:
raise ValueError("User not found")
# Verify the token before enabling
if not MFAService.verify_totp(verification_token, secret):
raise ValueError("Invalid verification token")
# Generate backup codes
backup_codes = MFAService.generate_backup_codes()
hashed_codes = [MFAService.hash_backup_code(code) for code in backup_codes]
# Update user
user.mfa_enabled = True
user.mfa_secret = secret
user.mfa_backup_codes = json.dumps(hashed_codes)
db.commit()
# Return plain backup codes (only shown once)
return True, backup_codes
@staticmethod
def disable_mfa(db: Session, user_id: int) -> bool:
"""
Disable MFA for a user
Args:
db: Database session
user_id: User ID
Returns:
True if successful
"""
user = db.query(User).filter(User.id == user_id).first()
if not user:
raise ValueError("User not found")
user.mfa_enabled = False
user.mfa_secret = None
user.mfa_backup_codes = None
db.commit()
return True
@staticmethod
def verify_mfa(
db: Session,
user_id: int,
token: str,
is_backup_code: bool = False
) -> bool:
"""
Verify MFA token or backup code for a user
Args:
db: Database session
user_id: User ID
token: TOTP token or backup code
is_backup_code: Whether the token is a backup code
Returns:
True if verification successful, False otherwise
"""
user = db.query(User).filter(User.id == user_id).first()
if not user:
raise ValueError("User not found")
if not user.mfa_enabled or not user.mfa_secret:
raise ValueError("MFA is not enabled for this user")
if is_backup_code:
# Verify backup code
if not user.mfa_backup_codes:
return False
hashed_codes = json.loads(user.mfa_backup_codes)
if not MFAService.verify_backup_code(token, hashed_codes):
return False
# Remove used backup code
code_hash = MFAService.hash_backup_code(token)
hashed_codes.remove(code_hash)
user.mfa_backup_codes = json.dumps(hashed_codes) if hashed_codes else None
db.commit()
return True
else:
# Verify TOTP token
return MFAService.verify_totp(token, user.mfa_secret)
@staticmethod
def regenerate_backup_codes(db: Session, user_id: int) -> List[str]:
"""
Regenerate backup codes for a user
Args:
db: Database session
user_id: User ID
Returns:
List of new backup codes (plain, shown once)
"""
user = db.query(User).filter(User.id == user_id).first()
if not user:
raise ValueError("User not found")
if not user.mfa_enabled:
raise ValueError("MFA is not enabled for this user")
# Generate new backup codes
backup_codes = MFAService.generate_backup_codes()
hashed_codes = [MFAService.hash_backup_code(code) for code in backup_codes]
user.mfa_backup_codes = json.dumps(hashed_codes)
db.commit()
# Return plain backup codes (only shown once)
return backup_codes
@staticmethod
def get_mfa_status(db: Session, user_id: int) -> Dict:
"""
Get MFA status for a user
Args:
db: Database session
user_id: User ID
Returns:
Dictionary with MFA status information
"""
user = db.query(User).filter(User.id == user_id).first()
if not user:
raise ValueError("User not found")
backup_codes_count = 0
if user.mfa_backup_codes:
backup_codes_count = len(json.loads(user.mfa_backup_codes))
return {
"mfa_enabled": user.mfa_enabled,
"backup_codes_count": backup_codes_count
}
# Create singleton instance
mfa_service = MFAService()

View File

@@ -0,0 +1,429 @@
"""
PayPal payment service for processing PayPal payments
"""
from paypalcheckoutsdk.core import PayPalHttpClient, SandboxEnvironment, LiveEnvironment
from paypalcheckoutsdk.orders import OrdersCreateRequest, OrdersGetRequest, OrdersCaptureRequest
from paypalcheckoutsdk.payments import CapturesRefundRequest
from typing import Optional, Dict, Any
from ..config.settings import settings
from ..models.payment import Payment, PaymentMethod, PaymentType, PaymentStatus
from ..models.booking import Booking, BookingStatus
from ..models.system_settings import SystemSettings
from sqlalchemy.orm import Session
from datetime import datetime
import json
def get_paypal_client_id(db: Session) -> Optional[str]:
    """Resolve the PayPal client ID: database setting first, then environment."""
    try:
        row = (
            db.query(SystemSettings)
            .filter(SystemSettings.key == "paypal_client_id")
            .first()
        )
        if row and row.value:
            return row.value
    except Exception:
        # DB unavailable or table missing -- fall through to env config.
        pass
    return settings.PAYPAL_CLIENT_ID if settings.PAYPAL_CLIENT_ID else None
def get_paypal_client_secret(db: Session) -> Optional[str]:
    """Resolve the PayPal client secret: database setting first, then environment."""
    try:
        row = (
            db.query(SystemSettings)
            .filter(SystemSettings.key == "paypal_client_secret")
            .first()
        )
        if row and row.value:
            return row.value
    except Exception:
        # DB unavailable or table missing -- fall through to env config.
        pass
    return settings.PAYPAL_CLIENT_SECRET if settings.PAYPAL_CLIENT_SECRET else None
def get_paypal_mode(db: Session) -> str:
    """Resolve the PayPal mode (sandbox/live): database first, then environment."""
    try:
        row = (
            db.query(SystemSettings)
            .filter(SystemSettings.key == "paypal_mode")
            .first()
        )
        if row and row.value:
            return row.value
    except Exception:
        # DB unavailable or table missing -- fall through to env config.
        pass
    return settings.PAYPAL_MODE if settings.PAYPAL_MODE else "sandbox"
def get_paypal_client(db: Optional[Session] = None) -> PayPalHttpClient:
    """
    Build a configured PayPal HTTP client.

    Credentials are resolved from the database when a session is given,
    with environment settings filling in anything missing.

    Args:
        db: Optional database session to get credentials from database

    Returns:
        PayPalHttpClient instance

    Raises:
        ValueError: if no client id/secret can be resolved.
    """
    if db:
        client_id = get_paypal_client_id(db)
        client_secret = get_paypal_client_secret(db)
        mode = get_paypal_mode(db)
    else:
        client_id = None
        client_secret = None
        mode = "sandbox"

    # Environment fallbacks for anything the database did not provide.
    client_id = client_id or settings.PAYPAL_CLIENT_ID
    client_secret = client_secret or settings.PAYPAL_CLIENT_SECRET
    mode = mode or (settings.PAYPAL_MODE or "sandbox")

    if not client_id or not client_secret:
        raise ValueError("PayPal credentials are not configured")

    # Pick the environment class by mode; anything except "live" is sandbox.
    env_cls = LiveEnvironment if mode.lower() == "live" else SandboxEnvironment
    return PayPalHttpClient(env_cls(client_id=client_id, client_secret=client_secret))
class PayPalService:
    """Service for handling PayPal payments"""
    @staticmethod
    def create_order(
        amount: float,
        currency: str = "USD",
        metadata: Optional[Dict[str, Any]] = None,
        db: Optional[Session] = None
    ) -> Dict[str, Any]:
        """
        Create a PayPal order
        Args:
            amount: Payment amount in currency units
            currency: Currency code (default: USD)
            metadata: Additional metadata to attach to the order
            db: Optional database session to get credentials from database
        Returns:
            Order object with approval URL and order ID
        Raises:
            ValueError: for invalid amounts or any PayPal API failure
        """
        client = get_paypal_client(db)
        # Validate amount
        if amount <= 0:
            raise ValueError("Amount must be greater than 0")
        if amount > 100000:
            raise ValueError(f"Amount ${amount:,.2f} exceeds PayPal's maximum of $100,000")
        # Create order request
        request = OrdersCreateRequest()
        request.prefer("return=representation")
        # Build order body
        # NOTE(review): custom_id/return_url/cancel_url are sent as null when
        # metadata is absent -- confirm PayPal accepts nulls for these fields.
        order_data = {
            "intent": "CAPTURE",
            "purchase_units": [
                {
                    "amount": {
                        "currency_code": currency.upper(),
                        # PayPal expects the value as a string with 2 decimals
                        "value": f"{amount:.2f}"
                    },
                    "description": metadata.get("description", "Hotel Booking Payment") if metadata else "Hotel Booking Payment",
                    "custom_id": metadata.get("booking_id") if metadata else None,
                }
            ],
            "application_context": {
                "brand_name": "Hotel Booking",
                "landing_page": "BILLING",
                "user_action": "PAY_NOW",
                "return_url": metadata.get("return_url") if metadata else None,
                "cancel_url": metadata.get("cancel_url") if metadata else None,
            }
        }
        # Add metadata if provided
        if metadata:
            order_data["purchase_units"][0]["invoice_id"] = metadata.get("booking_number")
        request.request_body(order_data)
        try:
            response = client.execute(request)
            order = response.result
            # Extract approval URL (the link the buyer is redirected to)
            approval_url = None
            for link in order.links:
                if link.rel == "approve":
                    approval_url = link.href
                    break
            return {
                "id": order.id,
                "status": order.status,
                "approval_url": approval_url,
                "amount": amount,
                "currency": currency.upper(),
            }
        except Exception as e:
            error_msg = str(e)
            # Try to extract more details from PayPal error
            if hasattr(e, 'message'):
                error_msg = e.message
            elif hasattr(e, 'details') and e.details:
                error_msg = json.dumps(e.details)
            raise ValueError(f"PayPal error: {error_msg}")
    @staticmethod
    def get_order(
        order_id: str,
        db: Optional[Session] = None
    ) -> Dict[str, Any]:
        """
        Retrieve an order by ID
        Args:
            order_id: PayPal order ID
            db: Optional database session to get credentials from database
        Returns:
            Order object
        Raises:
            ValueError: for any PayPal API failure
        """
        client = get_paypal_client(db)
        request = OrdersGetRequest(order_id)
        try:
            response = client.execute(request)
            order = response.result
            # Extract amount from purchase units (defaults if none present)
            amount = 0.0
            currency = "USD"
            if order.purchase_units and len(order.purchase_units) > 0:
                amount_str = order.purchase_units[0].amount.value
                currency = order.purchase_units[0].amount.currency_code
                amount = float(amount_str)
            return {
                "id": order.id,
                "status": order.status,
                "amount": amount,
                "currency": currency,
                "create_time": order.create_time,
                "update_time": order.update_time,
            }
        except Exception as e:
            error_msg = str(e)
            if hasattr(e, 'message'):
                error_msg = e.message
            raise ValueError(f"PayPal error: {error_msg}")
    @staticmethod
    def capture_order(
        order_id: str,
        db: Optional[Session] = None
    ) -> Dict[str, Any]:
        """
        Capture a PayPal order
        Args:
            order_id: PayPal order ID
            db: Optional database session to get credentials from database
        Returns:
            Capture details
        Raises:
            ValueError: for any PayPal API failure
        """
        client = get_paypal_client(db)
        request = OrdersCaptureRequest(order_id)
        request.prefer("return=representation")
        try:
            response = client.execute(request)
            order = response.result
            # Extract capture details from the first capture of the first
            # purchase unit; fall back to order-level status otherwise.
            capture_id = None
            amount = 0.0
            currency = "USD"
            status = order.status
            if order.purchase_units and len(order.purchase_units) > 0:
                payments = order.purchase_units[0].payments
                if payments and payments.captures and len(payments.captures) > 0:
                    capture = payments.captures[0]
                    capture_id = capture.id
                    amount_str = capture.amount.value
                    currency = capture.amount.currency_code
                    amount = float(amount_str)
                    status = capture.status
            return {
                "order_id": order.id,
                "capture_id": capture_id,
                "status": status,
                "amount": amount,
                "currency": currency,
            }
        except Exception as e:
            error_msg = str(e)
            if hasattr(e, 'message'):
                error_msg = e.message
            raise ValueError(f"PayPal error: {error_msg}")
    @staticmethod
    def confirm_payment(
        order_id: str,
        db: Session,
        booking_id: Optional[int] = None
    ) -> Dict[str, Any]:
        """
        Confirm a payment and update database records
        Args:
            order_id: PayPal order ID
            db: Database session
            booking_id: Optional booking ID for metadata lookup
        Returns:
            Payment record dictionary
        Raises:
            ValueError: if the booking is missing, the capture is in an
                invalid state, or any downstream error occurs
        """
        try:
            # First capture the order
            capture_data = PayPalService.capture_order(order_id, db)
            # Get order details to extract booking_id from metadata if not provided
            if not booking_id:
                order_data = PayPalService.get_order(order_id, db)
                # Try to get booking_id from custom_id in purchase_units
                # Note: We'll need to store booking_id in the order metadata when creating
                # For now, we'll require booking_id to be passed
                if not booking_id:
                    raise ValueError("Booking ID is required")
            booking = db.query(Booking).filter(Booking.id == booking_id).first()
            if not booking:
                raise ValueError("Booking not found")
            # Check capture status
            capture_status = capture_data.get("status")
            if capture_status not in ["COMPLETED", "PENDING"]:
                raise ValueError(f"Payment capture not in a valid state. Status: {capture_status}")
            # Find existing payment or create new one
            # First try to find by transaction_id (for already captured payments)
            payment = db.query(Payment).filter(
                Payment.booking_id == booking_id,
                Payment.transaction_id == order_id,
                Payment.payment_method == PaymentMethod.paypal
            ).first()
            # If not found, try to find pending PayPal payment for this booking
            if not payment:
                payment = db.query(Payment).filter(
                    Payment.booking_id == booking_id,
                    Payment.payment_method == PaymentMethod.paypal,
                    Payment.payment_status == PaymentStatus.pending
                ).order_by(Payment.created_at.desc()).first()
            amount = capture_data["amount"]
            capture_id = capture_data.get("capture_id")
            if payment:
                # Update existing payment
                if capture_status == "COMPLETED":
                    payment.payment_status = PaymentStatus.completed
                    payment.payment_date = datetime.utcnow()
                # If pending, keep as pending
                payment.amount = amount
                if capture_id:
                    # Store both ids so later lookups by order or capture work
                    payment.transaction_id = f"{order_id}|{capture_id}"
            else:
                # Create new payment record
                payment_type = PaymentType.full
                if booking.requires_deposit and not booking.deposit_paid:
                    payment_type = PaymentType.deposit
                payment_status_enum = PaymentStatus.completed if capture_status == "COMPLETED" else PaymentStatus.pending
                payment_date = datetime.utcnow() if capture_status == "COMPLETED" else None
                transaction_id = f"{order_id}|{capture_id}" if capture_id else order_id
                payment = Payment(
                    booking_id=booking_id,
                    amount=amount,
                    payment_method=PaymentMethod.paypal,
                    payment_type=payment_type,
                    payment_status=payment_status_enum,
                    transaction_id=transaction_id,
                    payment_date=payment_date,
                    notes=f"PayPal payment - Order: {order_id}, Capture: {capture_id} (Status: {capture_status})",
                )
                db.add(payment)
            # Commit payment first
            db.commit()
            db.refresh(payment)
            # Update booking status only if payment is completed
            if payment.payment_status == PaymentStatus.completed:
                db.refresh(booking)
                if payment.payment_type == PaymentType.deposit:
                    booking.deposit_paid = True
                    if booking.status == BookingStatus.pending:
                        booking.status = BookingStatus.confirmed
                elif payment.payment_type == PaymentType.full:
                    # Confirm the booking once completed payments cover the total
                    total_paid = sum(
                        float(p.amount) for p in booking.payments
                        if p.payment_status == PaymentStatus.completed
                    )
                    if total_paid >= float(booking.total_price) or float(payment.amount) >= float(booking.total_price):
                        booking.status = BookingStatus.confirmed
                db.commit()
                db.refresh(booking)
            # Safely get enum values
            def get_enum_value(enum_obj):
                if enum_obj is None:
                    return None
                if isinstance(enum_obj, (PaymentMethod, PaymentType, PaymentStatus)):
                    return enum_obj.value
                return enum_obj
            return {
                "id": payment.id,
                "booking_id": payment.booking_id,
                "amount": float(payment.amount) if payment.amount else 0.0,
                "payment_method": get_enum_value(payment.payment_method),
                "payment_type": get_enum_value(payment.payment_type),
                "payment_status": get_enum_value(payment.payment_status),
                "transaction_id": payment.transaction_id,
                "payment_date": payment.payment_date.isoformat() if payment.payment_date else None,
            }
        except ValueError as e:
            db.rollback()
            raise
        except Exception as e:
            import traceback
            error_details = traceback.format_exc()
            error_msg = str(e) if str(e) else f"{type(e).__name__}: {repr(e)}"
            print(f"Error in confirm_payment: {error_msg}")
            print(f"Traceback: {error_details}")
            db.rollback()
            raise ValueError(f"Error confirming payment: {error_msg}")

266
Backend/venv/bin/prichunkpng Executable file
View File

@@ -0,0 +1,266 @@
#!/home/gnx/Desktop/Hotel-Booking/Backend/venv/bin/python3
# prichunkpng
# Chunk editing tool.
"""
Make a new PNG by adding, delete, or replacing particular chunks.
"""
import argparse
import collections
# https://docs.python.org/2.7/library/io.html
import io
import re
import string
import struct
import sys
import zlib
# Local module.
import png
Chunk = collections.namedtuple("Chunk", "type content")
class ArgumentError(Exception):
    """A user problem with the command arguments."""
    # Raised by process() for malformed option values (e.g. --physical);
    # distinct from argparse's own error handling.
def process(out, args):
    """Process the PNG file args.input to the output, chunk by chunk.
    Chunks can be inserted, removed, replaced, or sometimes edited.
    Chunks are specified by their 4 byte Chunk Type;
    see https://www.w3.org/TR/2003/REC-PNG-20031110/#5Chunk-layout .
    The chunks in args.delete will be removed from the stream.
    The chunks in args.chunk will be inserted into the stream
    with their contents taken from the named files.
    Other options on the args object will create particular
    ancillary chunks.
    .gamma -> gAMA chunk
    .sigbit -> sBIT chunk
    Chunk types need not be official PNG chunks at all.
    Non-standard chunks can be created.
    """
    # Convert options to chunks in the args.chunk list
    if args.gamma:
        # gAMA stores gamma as a 4-byte big-endian int of 100000 * gamma
        v = int(round(1e5 * args.gamma))
        bs = io.BytesIO(struct.pack(">I", v))
        args.chunk.insert(0, Chunk(b"gAMA", bs))
    if args.sigbit:
        # One unsigned byte per channel
        v = struct.pack("%dB" % len(args.sigbit), *args.sigbit)
        bs = io.BytesIO(v)
        args.chunk.insert(0, Chunk(b"sBIT", bs))
    if args.iccprofile:
        # http://www.w3.org/TR/PNG/#11iCCP
        # Fixed profile name + null separator + compression method 0,
        # then the zlib-compressed profile bytes.
        v = b"a color profile\x00\x00" + zlib.compress(args.iccprofile.read())
        bs = io.BytesIO(v)
        args.chunk.insert(0, Chunk(b"iCCP", bs))
    if args.transparent:
        # https://www.w3.org/TR/2003/REC-PNG-20031110/#11tRNS
        v = struct.pack(">%dH" % len(args.transparent), *args.transparent)
        bs = io.BytesIO(v)
        args.chunk.insert(0, Chunk(b"tRNS", bs))
    if args.background:
        # https://www.w3.org/TR/2003/REC-PNG-20031110/#11bKGD
        v = struct.pack(">%dH" % len(args.background), *args.background)
        bs = io.BytesIO(v)
        args.chunk.insert(0, Chunk(b"bKGD", bs))
    if args.physical:
        # https://www.w3.org/TR/PNG/#11pHYs
        numbers = re.findall(r"(\d+\.?\d*)", args.physical)
        if len(numbers) not in {1, 2}:
            raise ArgumentError("One or two numbers are required for --physical")
        xppu = float(numbers[0])
        if len(numbers) == 1:
            # A single number means square pixels: same density both axes
            yppu = xppu
        else:
            yppu = float(numbers[1])
        unit_spec = 0
        if args.physical.endswith("dpi"):
            # Convert from DPI to Pixels Per Metre
            # 1 inch is 0.0254 metres
            l = 0.0254
            xppu /= l
            yppu /= l
            unit_spec = 1
        elif args.physical.endswith("ppm"):
            unit_spec = 1
        v = struct.pack("!LLB", round(xppu), round(yppu), unit_spec)
        bs = io.BytesIO(v)
        args.chunk.insert(0, Chunk(b"pHYs", bs))
    # Create:
    # - a set of chunks to delete
    # - a dict of chunks to replace
    # - a list of chunk to add
    delete = set(args.delete)
    # The set of chunks to replace are those where the specification says
    # that there should be at most one of them.
    replacing = set([b"gAMA", b"pHYs", b"sBIT", b"PLTE", b"tRNS", b"sPLT", b"IHDR"])
    replace = dict()
    add = []
    for chunk in args.chunk:
        if chunk.type in replacing:
            replace[chunk.type] = chunk
        else:
            add.append(chunk)
    input = png.Reader(file=args.input)
    return png.write_chunks(out, edit_chunks(input.chunks(), delete, replace, add))
def edit_chunks(chunks, delete, replace, add):
    """
    Yield the chunk stream with edits applied.

    Chunks whose type is in `delete` are dropped.  The first occurrence of
    a type in `replace` is swapped for the replacement's contents.  Any
    replacements never matched, and all `add` chunks, are emitted just
    before the first IDAT chunk.

    Subtle: the new chunks are file-like, so their payload has to come
    from `.content.read()`.
    """
    for chunk_type, payload in chunks:
        if chunk_type in delete:
            continue
        replacement = replace.pop(chunk_type, None)
        if replacement is not None:
            yield chunk_type, replacement.content.read()
            continue
        if chunk_type == b"IDAT" and replace:
            # Flush replacements whose original never appeared, so they
            # still land before the image data.
            for pending in replace.values():
                yield pending.type, pending.content.read()
            replace = dict()
        if chunk_type == b"IDAT" and add:
            # All brand-new chunks go immediately before the image data.
            for pending in add:
                yield pending.type, pending.content.read()
            add = []
        yield chunk_type, payload
def chunk_name(s):
    """
    Type check a chunk name option value and return it as bytes.
    """
    # Chunk types are exactly four ASCII letters; see
    # https://www.w3.org/TR/2003/REC-PNG-20031110/#table51
    if len(s) != 4 or not all(c in string.ascii_letters for c in s):
        raise ValueError("Chunk name must be 4 ASCII letters")
    return s.encode("ascii")
def comma_list(s):
    """
    Parse `s`, a comma-separated list of whole numbers, into a tuple of int.
    """
    return tuple(map(int, s.split(",")))
def hex_color(s):
    """
    Type check and convert a hex colour string to a tuple of channel values.

    An optional leading '#' is allowed.  1, 2, or 4 digits give a single
    grey value; 3 digits are doubled to 6; 6 or 12 digits give an RGB
    triple with 2 or 4 digits per channel.
    """
    digits = s[1:] if s.startswith("#") else s
    if len(digits) not in (1, 2, 3, 4, 6, 12) or not set(digits) <= set(string.hexdigits):
        raise ValueError("colour must be 1,2,3,4,6, or 12 hex-digits")
    if len(digits) == 3:
        # Expand 4-bit-per-channel shorthand by repeating each digit.
        digits = "".join(2 * c for c in digits)
    if len(digits) in (1, 2, 4):
        # Single grey value.
        return (int(digits, 16),)
    # 6 or 12 digits: three channels of equal width.
    width = len(digits) // 3
    return tuple(
        int(digits[i : i + width], 16) for i in range(0, len(digits), width)
    )
def main(argv=None):
    """CLI entry point: parse options, then rewrite the PNG chunk stream.

    Args:
        argv: Full argument vector (program name first); defaults to sys.argv.
    """
    if argv is None:
        argv = sys.argv
    # Drop the program name before handing the rest to argparse.
    argv = argv[1:]
    parser = argparse.ArgumentParser()
    parser.add_argument("--gamma", type=float, help="Gamma value for gAMA chunk")
    parser.add_argument(
        "--physical",
        type=str,
        metavar="x[,y][dpi|ppm]",
        help="specify intended pixel size or aspect ratio",
    )
    parser.add_argument(
        "--sigbit",
        type=comma_list,
        metavar="D[,D[,D[,D]]]",
        help="Number of significant bits in each channel",
    )
    parser.add_argument(
        "--iccprofile",
        metavar="file.iccp",
        type=argparse.FileType("rb"),
        help="add an ICC Profile from a file",
    )
    parser.add_argument(
        "--transparent",
        type=hex_color,
        metavar="#RRGGBB",
        help="Specify the colour that is transparent (tRNS chunk)",
    )
    parser.add_argument(
        "--background",
        type=hex_color,
        metavar="#RRGGBB",
        help="background colour for bKGD chunk",
    )
    parser.add_argument(
        "--delete",
        action="append",
        default=[],
        type=chunk_name,
        help="delete the chunk",
    )
    parser.add_argument(
        "--chunk",
        action="append",
        nargs=2,
        default=[],
        type=str,
        help="insert chunk, taking contents from file",
    )
    parser.add_argument(
        "input", nargs="?", default="-", type=png.cli_open, metavar="PNG"
    )
    args = parser.parse_args(argv)
    # Reprocess the chunk arguments, converting each pair into a Chunk.
    # Each --chunk gives (type, filename); the file stays open and is read
    # lazily by edit_chunks().
    args.chunk = [
        Chunk(chunk_name(type), open(path, "rb")) for type, path in args.chunk
    ]
    return process(png.binary_stdout(), args)
if __name__ == "__main__":
    main()

81
Backend/venv/bin/pricolpng Executable file
View File

@@ -0,0 +1,81 @@
#!/home/gnx/Desktop/Hotel-Booking/Backend/venv/bin/python3
# http://www.python.org/doc/2.4.4/lib/module-itertools.html
import itertools
import sys
import png
Description = """Join PNG images in a column top-to-bottom."""
class FormatError(Exception):
    """
    Some problem with the image format.
    """
    # Raised by join_col() when an input image's width does not match the
    # first image's width.
def join_col(out, l):
    """
    Join the list of images.
    All input images must be same width and
    have the same number of channels.
    They are joined top-to-bottom.
    `out` is the (open file) destination for the output image.
    `l` should be a list of open files (the input image files).

    Raises FormatError if an image's width differs from the first one.
    """
    # Counters used only for error reporting.
    image = 0
    stream = 0
    # When the first image is read, this will be the reference width,
    # which must be the same for all images.
    width = None
    # Total height (accumulated as images are read).
    height = 0
    # Accumulated rows.
    rows = []
    for f in l:
        stream += 1
        # Each stream may contain several concatenated PNGs; keep reading
        # until the stream is exhausted.
        while True:
            im = png.Reader(file=f)
            try:
                im.preamble()
            except EOFError:
                break
            image += 1
            if not width:
                width = im.width
            elif width != im.width:
                raise FormatError('Image %d in stream %d has width %d; does not match %d.' %
                  (image, stream, im.width, width))
            height += im.height
            # Various bugs here because different numbers of channels and depths go wrong.
            w, h, p, info = im.asDirect()
            rows.extend(p)
    # Alarmingly re-use the last info object.
    # NOTE(review): the output metadata comes from the *last* image only;
    # inputs with differing depth/channels will be mis-described -- known quirk.
    tinfo = dict(info)
    del tinfo['size']
    w = png.Writer(width, height, **tinfo)
    w.write(out, rows)
def main(argv):
    """CLI entry point: join the named PNG inputs top-to-bottom onto stdout.

    Args:
        argv: Full argument vector, sys.argv-style (program name first).
    """
    import argparse
    parser = argparse.ArgumentParser(description=Description)
    parser.add_argument(
        "input", nargs="*", default="-", type=png.cli_open, metavar="PNG"
    )
    # Bug fix: honour the argv parameter instead of silently re-reading
    # sys.argv. Behaviour is unchanged for the main(sys.argv) call below.
    args = parser.parse_args(argv[1:])
    return join_col(png.binary_stdout(), args.input)
if __name__ == '__main__':
    main(sys.argv)

254
Backend/venv/bin/priditherpng Executable file
View File

@@ -0,0 +1,254 @@
#!/home/gnx/Desktop/Hotel-Booking/Backend/venv/bin/python3
# pipdither
# Error Diffusing image dithering.
# Now with serpentine scanning.
# See http://www.efg2.com/Lab/Library/ImageProcessing/DHALF.TXT
# http://www.python.org/doc/2.4.4/lib/module-bisect.html
from bisect import bisect_left
import png
def dither(
    out,
    input,
    bitdepth=1,
    linear=False,
    defaultgamma=1.0,
    targetgamma=None,
    cutoff=0.5,  # see :cutoff:default
):
    """Dither the input PNG `inp` into an image with a smaller bit depth
    and write the result image onto `out`. `bitdepth` specifies the bit
    depth of the new image.

    Normally the source image gamma is honoured (the image is
    converted into a linear light space before being dithered), but
    if the `linear` argument is true then the image is treated as
    being linear already: no gamma conversion is done (this is
    quicker, and if you don't care much about accuracy, it won't
    matter much).

    Images with no gamma indication (no ``gAMA`` chunk) are normally
    treated as linear (gamma = 1.0), but often it can be better
    to assume a different gamma value: For example continuous tone
    photographs intended for presentation on the web often carry
    an implicit assumption of being encoded with a gamma of about
    0.45 (because that's what you get if you just "blat the pixels"
    onto a PC framebuffer), so ``defaultgamma=0.45`` might be a
    good idea. `defaultgamma` does not override a gamma value
    specified in the file itself: It is only used when the file
    does not specify a gamma.

    If you (pointlessly) specify both `linear` and `defaultgamma`,
    `linear` wins.

    The gamma of the output image is, by default, the same as the input
    image. The `targetgamma` argument can be used to specify a
    different gamma for the output image. This effectively recodes the
    image to a different gamma, dithering as we go. The gamma specified
    is the exponent used to encode the output file (and appears in the
    output PNG's ``gAMA`` chunk); it is usually less than 1.
    """
    # Encoding is what happened when the PNG was made (and also what
    # happens when we output the PNG). Decoding is what we do to the
    # source PNG in order to process it.
    # The dithering algorithm is not completely general; it
    # can only do bit depth reduction, not arbitrary palette changes.
    import operator  # NOTE(review): unused import, kept as-is.

    maxval = 2 ** bitdepth - 1
    r = png.Reader(file=input)
    _, _, pixels, info = r.asDirect()
    planes = info["planes"]
    # :todo: make an Exception
    # Only single-channel (greyscale, no alpha) sources are supported.
    assert planes == 1
    width = info["size"][0]
    sourcemaxval = 2 ** info["bitdepth"] - 1
    if linear:
        gamma = 1
    else:
        # `or` also replaces an explicit gamma of 0/None with the default.
        gamma = info.get("gamma") or defaultgamma
    # Calculate an effective gamma for input and output;
    # then build tables using those.
    # `gamma` (whether it was obtained from the input file or an
    # assumed value) is the encoding gamma.
    # We need the decoding gamma, which is the reciprocal.
    decode = 1.0 / gamma
    # `targetdecode` is the assumed gamma that is going to be used
    # to decoding the target PNG.
    # Note that even though we will _encode_ the target PNG we
    # still need the decoding gamma, because
    # the table we use maps from PNG pixel value to linear light level.
    if targetgamma is None:
        targetdecode = decode
    else:
        targetdecode = 1.0 / targetgamma
    # Table mapping source pixel value -> linear light level.
    incode = build_decode_table(sourcemaxval, decode)
    # For encoding, we still build a decode table, because we
    # use it inverted (searching with bisect).
    outcode = build_decode_table(maxval, targetdecode)
    # The table used for choosing output codes. These values represent
    # the cutoff points between two adjacent output codes.
    # The cutoff parameter can be varied between 0 and 1 to
    # preferentially choose lighter (when cutoff > 0.5) or
    # darker (when cutoff < 0.5) values.
    # :cutoff:default: The default for this used to be 0.75, but
    # testing by drj on 2021-07-30 showed that this produces
    # banding when dithering left-to-right gradients;
    # test with:
    # priforgepng grl | priditherpng | kitty icat
    choosecode = list(zip(outcode[1:], outcode))
    p = cutoff
    # Interpolate each cutoff between the two adjacent output levels.
    choosecode = [x[0] * p + x[1] * (1.0 - p) for x in choosecode]
    # Prepend warm-up copies of the first row so the error registers
    # settle before real output begins; removed again afterwards.
    rows = repeat_header(pixels)
    dithered_rows = run_dither(incode, choosecode, outcode, width, rows)
    dithered_rows = remove_header(dithered_rows)
    info["bitdepth"] = bitdepth
    info["gamma"] = 1.0 / targetdecode
    w = png.Writer(**info)
    w.write(out, dithered_rows)
def build_decode_table(maxval, gamma):
    """Return a decoding lookup table of length ``maxval + 1`` that maps
    each pixel code to its linear light level in [0.0, 1.0], applying the
    decoding exponent `gamma`.
    """
    assert maxval == int(maxval)
    assert maxval > 0
    scale = 1.0 / maxval
    if gamma != 1.0:
        return [(scale * code) ** gamma for code in range(maxval + 1)]
    return [scale * code for code in range(maxval + 1)]
def run_dither(incode, choosecode, outcode, width, rows):
    """
    Run a serpentine dither over `rows`, yielding one output row
    (a list of target code indexes) per input row.
    Using the incode and choosecode tables: `incode` maps source pixel
    values to linear levels, `choosecode` holds the bisection cutoffs
    between adjacent output codes, `outcode` maps output codes back to
    linear levels. Scan direction alternates row by row (serpentine),
    using Sierra "Filter Lite" error diffusion.
    """
    # Errors diffused downwards (into next row)
    ed = [0.0] * width
    flipped = False
    for row in rows:
        # Convert to linear...
        row = [incode[v] for v in row]
        # Add errors...
        row = [e + v for e, v in zip(ed, row)]
        if flipped:
            # Reverse so the inner loop always scans "left to right"
            # in its own coordinate system.
            row = row[::-1]
        targetrow = [0] * width
        for i, v in enumerate(row):
            # `it` will be the index of the chosen target colour;
            it = bisect_left(choosecode, v)
            targetrow[i] = it
            t = outcode[it]
            # err is the error that needs distributing.
            err = v - t
            # Sierra "Filter Lite" distributes * 2
            # as per this diagram. 1 1
            ef = err * 0.5
            # :todo: consider making rows one wider at each end and
            # removing "if"s
            if i + 1 < width:
                # Half of the error goes to the next pixel in this row.
                row[i + 1] += ef
            ef *= 0.5
            # A quarter each to the pixel below and below-behind.
            ed[i] = ef
            if i:
                ed[i - 1] += ef
        if flipped:
            # Undo the reversal for both the diffused errors and output.
            ed = ed[::-1]
            targetrow = targetrow[::-1]
        yield targetrow
        flipped = not flipped
# Number of duplicated warm-up rows inserted by repeat_header and
# stripped again by remove_header.
WARMUP_ROWS = 32


def repeat_header(rows):
    """Yield the rows of `rows`, with the first row repeated an extra
    WARMUP_ROWS times so the dither's error registers can "warm up".
    Yields nothing when `rows` is empty.
    """
    for first in rows:
        # Emit the first row once, plus WARMUP_ROWS repeats.
        for _ in range(WARMUP_ROWS + 1):
            yield first
        break
    yield from rows
def remove_header(rows):
    """Drop from the iterator `rows` the WARMUP_ROWS duplicate rows
    that repeat_header inserted, then yield the rest.
    """
    skipped = 0
    while skipped < WARMUP_ROWS:
        next(rows)
        skipped += 1
    yield from rows
def main(argv=None):
    """Command-line entry point: parse options and dither the input PNG
    onto binary stdout.
    """
    import sys

    # https://docs.python.org/3.5/library/argparse.html
    import argparse

    parser = argparse.ArgumentParser()
    if argv is None:
        argv = sys.argv
    # Split off the program name; only the remaining words are parsed.
    progname, *args = argv
    parser.add_argument("--bitdepth", type=int, default=1, help="bitdepth of output")
    parser.add_argument(
        "--cutoff",
        type=float,
        default=0.5,
        help="cutoff to select adjacent output values",
    )
    parser.add_argument(
        "--defaultgamma",
        type=float,
        default=1.0,
        help="gamma value to use when no gamma in input",
    )
    parser.add_argument("--linear", action="store_true", help="force linear input")
    parser.add_argument(
        "--targetgamma",
        type=float,
        help="gamma to use in output (target), defaults to input gamma",
    )
    parser.add_argument(
        "input", nargs="?", default="-", type=png.cli_open, metavar="PNG"
    )
    ns = parser.parse_args(args)
    # Every argparse destination matches a dither() parameter name, so the
    # namespace can be splatted straight into the call.
    return dither(png.binary_stdout(), **vars(ns))


if __name__ == "__main__":
    main()

275
Backend/venv/bin/priforgepng Executable file
View File

@@ -0,0 +1,275 @@
#!/home/gnx/Desktop/Hotel-Booking/Backend/venv/bin/python3
# priforgepng
"""Forge PNG image from raw computation."""
from array import array
from fractions import Fraction
import argparse
import re
import sys
import png
# Pattern generators. Each is sampled per pixel by yield_fun_rows();
# the arguments are Fractions in (0, 1) (pixel centres) and the return
# value is an intensity in [0, 1].
# NOTE: the docstrings below are printed verbatim by `--list`
# (see patterns()), so they are user-visible output — do not edit them
# casually.
def gen_glr(x):
    """Gradient Left to Right"""
    return x


def gen_grl(x):
    """Gradient Right to Left"""
    return 1 - x


def gen_gtb(x, y):
    """Gradient Top to Bottom"""
    return y


def gen_gbt(x, y):
    """Gradient Bottom to Top"""
    return 1.0 - y


def gen_rtl(x, y):
    """Radial gradient, centred at Top-Left"""
    # 1 minus the distance from the origin, clamped at 0.
    return max(1 - (float(x) ** 2 + float(y) ** 2) ** 0.5, 0.0)


def gen_rctr(x, y):
    """Radial gradient, centred at Centre"""
    # Shift the coordinate system so the centre becomes the origin.
    return gen_rtl(float(x) - 0.5, float(y) - 0.5)


def gen_rtr(x, y):
    """Radial gradient, centred at Top-Right"""
    return gen_rtl(1.0 - float(x), y)


def gen_rbl(x, y):
    """Radial gradient, centred at Bottom-Left"""
    return gen_rtl(x, 1.0 - float(y))


def gen_rbr(x, y):
    """Radial gradient, centred at Bottom-Right"""
    return gen_rtl(1.0 - float(x), 1.0 - float(y))
def stripe(x, n):
    # 0/1 square wave: divides [0, 1) into n bands and returns the
    # parity of the band containing x.
    return int(x * n) & 1


# NOTE: the docstrings below are user-visible via `--list`.
def gen_vs2(x):
    """2 Vertical Stripes"""
    return stripe(x, 2)


def gen_vs4(x):
    """4 Vertical Stripes"""
    return stripe(x, 4)


def gen_vs10(x):
    """10 Vertical Stripes"""
    return stripe(x, 10)


def gen_hs2(x, y):
    """2 Horizontal Stripes"""
    return stripe(float(y), 2)


def gen_hs4(x, y):
    """4 Horizontal Stripes"""
    return stripe(float(y), 4)


def gen_hs10(x, y):
    """10 Horizontal Stripes"""
    return stripe(float(y), 10)


def gen_slr(x, y):
    """10 diagonal stripes, rising from Left to Right"""
    # x + y is constant along anti-diagonals.
    return stripe(x + y, 10)


def gen_srl(x, y):
    """10 diagonal stripes, rising from Right to Left"""
    # 1 + x - y stays in [0, 2] and is constant along diagonals.
    return stripe(1 + x - y, 10)
def checker(x, y, n):
    # XOR of the horizontal and vertical stripe parities gives an
    # n-by-n checkerboard.
    return stripe(x, n) ^ stripe(y, n)


# NOTE: the docstrings below are user-visible via `--list`.
def gen_ck8(x, y):
    """8 by 8 checkerboard"""
    return checker(x, y, 8)


def gen_ck15(x, y):
    """15 by 15 checkerboard"""
    return checker(x, y, 15)


def gen_zero(x):
    """All zero (black)"""
    return 0


def gen_one(x):
    """All one (white)"""
    return 1
def yield_fun_rows(size, bitdepth, pattern):
    """
    Create a single channel (monochrome) test pattern.
    Yield each row in turn as an array of sample values in
    [0, 2**bitdepth - 1]. `size` is (width, height); `pattern` is the
    name of a gen_* pattern (resolved via pattern_function).
    """
    width, height = size
    maxval = 2 ** bitdepth - 1
    if maxval > 255:
        typecode = "H"
    else:
        typecode = "B"
    pfun = pattern_function(pattern)
    # The coordinates are an integer + 0.5,
    # effectively sampling each pixel at its centre.
    # This is morally better, and produces all 256 sample values
    # in a 256-pixel wide gradient.
    # We make a list of x coordinates here and re-use it,
    # because Fraction instances are slow to allocate.
    xs = [Fraction(x, 2 * width) for x in range(1, 2 * width, 2)]
    # The general case is a function in x and y,
    # but if the function only takes an x argument,
    # it's handled in a special case that is a lot faster.
    if n_args(pfun) == 2:
        for y in range(height):
            a = array(typecode)
            # Fraction(y + 0.5) converts the float exactly before dividing.
            fy = Fraction(Fraction(y + 0.5), height)
            for fx in xs:
                a.append(int(round(maxval * pfun(fx, fy))))
            yield a
        return
    # For functions in x only, it's a _lot_ faster
    # to generate a single row and repeatedly yield it
    # NOTE(review): the *same* array object is yielded for every row,
    # so consumers must not mutate rows in place.
    a = array(typecode)
    for fx in xs:
        a.append(int(round(maxval * pfun(x=fx))))
    for y in range(height):
        yield a
    return
def generate(args):
    """
    Create a PNG test image and write the file to stdout.
    `args` should be an argparse Namespace instance or similar.
    When several patterns are given, one complete PNG stream per
    pattern is written to stdout, back to back.
    """
    size = args.size
    bitdepth = args.depth
    out = png.binary_stdout()
    for pattern in args.pattern:
        rows = yield_fun_rows(size, bitdepth, pattern)
        writer = png.Writer(
            size[0], size[1], bitdepth=bitdepth, greyscale=True, alpha=False
        )
        writer.write(out, rows)
def n_args(fun):
    """Return how many positional parameters `fun` declares."""
    code_object = fun.__code__
    return code_object.co_argcount
def pattern_function(pattern):
    """From `pattern`, a string,
    return the gen_* function for that pattern (matched
    case-insensitively against the part after "gen_").

    Raises ValueError when no generator matches; previously an
    unknown pattern silently returned None, which surfaced later as a
    confusing AttributeError in n_args().
    """
    lpat = pattern.lower()
    for name, fun in globals().items():
        parts = name.split("_")
        if parts[0] != "gen":
            continue
        if parts[1] == lpat:
            return fun
    raise ValueError("unknown pattern %r" % pattern)
def patterns():
    """
    Yield (name, description) for every gen_* pattern in this module;
    the description is the generator's docstring.
    """
    for name, fun in globals().items():
        parts = name.split("_")
        if parts[0] != "gen":
            continue
        yield parts[1], fun.__doc__
def dimensions(s):
    """
    Typecheck the --size option: one number ("64") or two
    comma separated numbers ("64,40"). Returns [width, height],
    duplicating a single number into both slots.
    """
    found = re.findall(r"\d+", s)
    if len(found) not in (1, 2):
        raise ValueError("%r should be width or width,height" % s)
    if len(found) == 1:
        found = found * 2
    assert len(found) == 2
    return [int(v) for v in found]
def main(argv=None):
    """Command-line entry point: list patterns (--list) or write the
    requested test-pattern PNG(s) to stdout.
    """
    if argv is None:
        argv = sys.argv
    parser = argparse.ArgumentParser(description="Forge greyscale PNG patterns")
    parser.add_argument(
        "-l", "--list", action="store_true", help="print list of patterns and exit"
    )
    parser.add_argument(
        "-d", "--depth", default=8, type=int, metavar="N", help="N bits per pixel"
    )
    parser.add_argument(
        "-s",
        "--size",
        default=[256, 256],
        type=dimensions,
        metavar="w[,h]",
        help="width and height of the image in pixels",
    )
    parser.add_argument("pattern", nargs="*", help="name of pattern")
    args = parser.parse_args(argv[1:])
    if args.list:
        # Pattern descriptions come from the gen_* docstrings.
        for name, doc in sorted(patterns()):
            print(name, doc, sep="\t")
        return
    if not args.pattern:
        parser.error("--list or pattern is required")
    return generate(args)


if __name__ == "__main__":
    main()

72
Backend/venv/bin/prigreypng Executable file
View File

@@ -0,0 +1,72 @@
#!/home/gnx/Desktop/Hotel-Booking/Backend/venv/bin/python3
# prigreypng
# Convert image to grey (L, or LA), but only if that involves no colour change.
import argparse
import array
import png
def as_grey(out, inp):
    """
    Convert image to greyscale, but only when no colour change.

    This works by using the input G channel (green) as
    the output L channel (luminance) and
    checking that every pixel is grey as we go.
    A non-grey pixel will raise an error.

    `inp` is an open PNG file; `out` receives the converted PNG.
    Raises ValueError on the first row containing a non-grey pixel.
    """
    r = png.Reader(file=inp)
    _, _, rows, info = r.asDirect()
    if info["greyscale"]:
        # Already greyscale: pass straight through.
        w = png.Writer(**info)
        return w.write(out, rows)
    # Non-greyscale means RGB (3 planes) or RGBA (4 planes);
    # dropping R and B leaves L or LA.
    planes = info["planes"]
    targetplanes = planes - 2
    alpha = info["alpha"]
    width, height = info["size"]
    typecode = "BH"[info["bitdepth"] > 8]
    # Values per target row
    vpr = width * targetplanes

    def iterasgrey():
        for i, row in enumerate(rows):
            row = array.array(typecode, row)
            targetrow = array.array(typecode, [0] * vpr)
            # Copy G (and possibly A) channel.
            # BUG FIX: the G channel sits at offset 1 of each pixel.
            # The previous code sliced offset 0 (the R channel), which
            # also made the grey check below compare R against itself
            # (trivially equal), so G was never inspected at all.
            green = row[1::planes]
            if alpha:
                targetrow[0::2] = green
                targetrow[1::2] = row[3::4]
            else:
                targetrow = green
            # Check that the R and B channels match G.
            if green != row[0::planes] or green != row[2::planes]:
                raise ValueError("Row %i contains non-grey pixel." % i)
            yield targetrow

    info["greyscale"] = True
    del info["planes"]
    w = png.Writer(**info)
    return w.write(out, iterasgrey())
def main(argv=None):
    """Command-line entry point: lossless grey conversion of the input
    PNG onto binary stdout.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "input", nargs="?", default="-", type=png.cli_open, metavar="PNG"
    )
    # NOTE(review): `argv` is accepted but ignored; parse_args() always
    # reads sys.argv.
    args = parser.parse_args()
    return as_grey(png.binary_stdout(), args.input)


if __name__ == "__main__":
    import sys

    sys.exit(main())

111
Backend/venv/bin/pripalpng Executable file
View File

@@ -0,0 +1,111 @@
#!/home/gnx/Desktop/Hotel-Booking/Backend/venv/bin/python3
# pripalpng
"""Convert to Palette PNG (without changing colours)"""
import argparse
import collections
# https://docs.python.org/2.7/library/io.html
import io
import string
import zlib
# Local module.
import png
def make_inverse_palette(rows, channels):
    """
    Build the inverse palette: a dict mapping each distinct pixel
    tuple to its palette index, indexes assigned in first-seen order.
    """
    palette = {}
    for row in rows:
        for pixel in png.group(row, channels):
            if pixel not in palette:
                palette[pixel] = len(palette)
    return palette
def palette_convert(out, inp, palette_file):
    """
    Convert PNG image in `inp` to use a palette, colour type 3,
    and write converted image to `out`.
    `palette_file` is a file descriptor for the palette to use.
    If `palette_file` is None, then `inp` is used as the palette.
    """
    # After this swap, `palette_file` always holds the file the palette
    # is built from, and `inp` is None when input and palette are the
    # same file.
    if palette_file is None:
        inp, palette_file = palette_file, inp
    reader = png.Reader(file=palette_file)
    w, h, rows, info = asRGBorA8(reader)
    channels = info["planes"]
    if not inp:
        # Same file serves as both palette and input: materialise the
        # rows so they can be iterated a second time below.
        rows = list(rows)
    palette_map = make_inverse_palette(rows, channels)
    if inp:
        # Separate input file: re-read rows/info from it.
        reader = png.Reader(file=inp)
        w, h, rows, info = asRGBorA8(reader)
        channels = info["planes"]
    # Default for colours not in palette is to use last entry.
    last = len(palette_map) - 1

    def map_pixel(p):
        # Palette index for pixel tuple `p` (fallback: last entry).
        return palette_map.get(p, last)

    def convert_rows():
        for row in rows:
            yield [map_pixel(p) for p in png.group(row, channels)]

    # Make a palette by sorting the pixels according to their index.
    palette = sorted(palette_map.keys(), key=palette_map.get)
    pal_info = dict(size=info["size"], palette=palette)
    w = png.Writer(**pal_info)
    w.write(out, convert_rows())
def asRGBorA8(reader):
    """
    Return (width, height, rows, info) from `reader`, converted to
    8-bit RGB, or 8-bit RGBA when the original has an alpha channel.
    """
    _, _, _, info = reader.read()
    return reader.asRGBA8() if info["alpha"] else reader.asRGB8()
def main(argv=None):
    """Command-line entry point: palettise the input PNG and write the
    result to binary stdout.
    """
    import sys
    import re  # NOTE(review): unused import, kept as-is.

    if argv is None:
        argv = sys.argv
    argv = argv[1:]
    parser = argparse.ArgumentParser(description=__doc__)
    # Optional separate palette source; defaults to the input itself.
    parser.add_argument("--palette", type=png.cli_open)
    parser.add_argument(
        "input", nargs="?", default="-", type=png.cli_open, metavar="PNG"
    )
    args = parser.parse_args(argv)
    palette_convert(png.binary_stdout(), args.input, args.palette)


if __name__ == "__main__":
    main()

355
Backend/venv/bin/pripamtopng Executable file
View File

@@ -0,0 +1,355 @@
#!/home/gnx/Desktop/Hotel-Booking/Backend/venv/bin/python3
# pripamtopng
#
# Python Raster Image PAM to PNG
import array
import struct
import sys
import png
Description = """Convert NetPBM PAM/PNM format files to PNG."""
def read_pam_header(infile):
    """
    Read (the rest of a) PAM header.
    `infile` should be positioned immediately after the initial 'P7' line
    (at the beginning of the second line).
    Returns are as for `read_pnm_header`:
    a (b"P7", width, height, depth, maxval) tuple.
    Raises EOFError on a blank line (premature end) and png.Error on a
    missing or invalid required field.
    """
    # Unlike PBM, PGM, and PPM, we can read the header a line at a time.
    header = dict()
    while True:
        line = infile.readline().strip()
        if line == b"ENDHDR":
            break
        if not line:
            raise EOFError("PAM ended prematurely")
        # BUG FIX: `line` is bytes, so `line[0]` is an int and the old
        # test `line[0] == b"#"` was always False — comment lines were
        # parsed as fields and "#comment" lines crashed with IndexError.
        if line.startswith(b"#"):
            continue
        line = line.split(None, 1)
        key = line[0]
        # Repeated keys accumulate their values, space-separated.
        if key not in header:
            header[key] = line[1]
        else:
            header[key] += b" " + line[1]
    required = [b"WIDTH", b"HEIGHT", b"DEPTH", b"MAXVAL"]
    required_str = b", ".join(required).decode("ascii")
    result = []
    for token in required:
        if token not in header:
            raise png.Error("PAM file must specify " + required_str)
        try:
            x = int(header[token])
        except ValueError:
            raise png.Error(required_str + " must all be valid integers")
        if x <= 0:
            raise png.Error(required_str + " must all be positive integers")
        result.append(x)
    return (b"P7",) + tuple(result)
def read_pnm_header(infile):
    """
    Read a PNM header, returning (format,width,height,depth,maxval).
    Also reads a PAM header (by using a helper function).
    `width` and `height` are in pixels.
    `depth` is the number of channels in the image;
    for PBM and PGM it is synthesized as 1, for PPM as 3;
    for PAM images it is read from the header.
    `maxval` is synthesized (as 1) for PBM images.
    """
    # Generally, see http://netpbm.sourceforge.net/doc/ppm.html
    # and http://netpbm.sourceforge.net/doc/pam.html
    # Technically 'P7' must be followed by a newline,
    # so by using rstrip() we are being liberal in what we accept.
    # I think this is acceptable.
    magic = infile.read(3).rstrip()
    if magic == b"P7":
        # PAM header parsing is completely different.
        return read_pam_header(infile)
    # Expected number of tokens in header (3 for P4, 4 for P6)
    expected = 4
    pbm = (b"P1", b"P4")
    if magic in pbm:
        expected = 3
    header = [magic]
    # We must read the rest of the header byte by byte because
    # the final whitespace character may not be a newline.
    # Of course all PNM files in the wild use a newline at this point,
    # but we are strong and so we avoid
    # the temptation to use readline.
    # `bs` accumulates the bytes of the token being lexed;
    # `backs` is a pushback buffer for bytes read one step too far.
    bs = bytearray()
    backs = bytearray()

    def next():
        # Return the next header byte, preferring pushed-back bytes.
        # (Deliberately shadows the builtin `next` within this scope.)
        if backs:
            c = bytes(backs[0:1])
            del backs[0]
        else:
            c = infile.read(1)
        if not c:
            raise png.Error("premature EOF reading PNM header")
        bs.extend(c)
        return c

    def backup():
        """Push last byte of token onto front of backs."""
        backs.insert(0, bs[-1])
        del bs[-1]

    def ignore():
        # Discard the bytes accumulated so far (whitespace/comments).
        del bs[:]

    def tokens():
        # Drive the two-state lexer (lexInit / lexNumber) as a
        # generator of numeric tokens.
        ls = lexInit
        while True:
            token, ls = ls()
            if token:
                yield token

    def lexInit():
        c = next()
        # Skip comments
        if b"#" <= c <= b"#":
            while c not in b"\n\r":
                c = next()
            ignore()
            return None, lexInit
        # Skip whitespace (that precedes a token)
        if c.isspace():
            ignore()
            return None, lexInit
        if not c.isdigit():
            raise png.Error("unexpected byte %r found in header" % c)
        return None, lexNumber

    def lexNumber():
        # According to the specification it is legal to have comments
        # that appear in the middle of a token.
        # I've never seen it; and,
        # it's a bit awkward to code good lexers in Python (no goto).
        # So we break on such cases.
        c = next()
        while c.isdigit():
            c = next()
        # The byte that ended the number belongs to the next token.
        backup()
        token = bs[:]
        ignore()
        return token, lexInit

    for token in tokens():
        # All "tokens" are decimal integers, so convert them here.
        header.append(int(token))
        if len(header) == expected:
            break
    # Exactly one whitespace byte must separate header and pixel data.
    final = next()
    if not final.isspace():
        raise png.Error("expected header to end with whitespace, not %r" % final)
    if magic in pbm:
        # synthesize a MAXVAL
        header.append(1)
    depth = (1, 3)[magic == b"P6"]
    return header[0], header[1], header[2], depth, header[3]
def convert_pnm_plain(w, infile, outfile):
    """
    Convert a plain (ASCII) PNM file into a PNG file, using the
    dimensions already configured on the writer object `w`.
    Works for plain PGM formats; see convert_pnm_binary for the
    binary counterpart.
    """
    w.write(outfile, scan_rows_from_file_plain(infile, w.width, w.height, w.planes))
def scan_rows_from_file_plain(infile, width, height, planes):
    """
    Generate rows of integer sample values from the plain
    ("Netpbm-like" ASCII) data in `infile`, which must be positioned
    just after the header. Each yielded row holds `width * planes`
    values; at most `height` rows are produced.

    Text lines need not align with pixel rows: values are buffered
    until a full row is available, and any spare trailing values are
    silently discarded once `height` rows have been emitted.
    """
    # Values per row
    vpr = width * planes
    pending = []
    emitted = 0
    for text_line in infile:
        pending.extend(int(tok) for tok in text_line.split())
        # Strip complete rows off the front of the buffer.
        while len(pending) >= vpr:
            row, pending = pending[:vpr], pending[vpr:]
            yield row
            emitted += 1
            if emitted >= height:
                # Diagnostic here if there are spare values?
                return
    # Diagnostic here for early EOF?
def convert_pnm_binary(w, infile, outfile):
    """
    Convert a binary PNM file (raw PGM, PPM, or PAM pixel data) into a
    PNG file, using the dimensions and bit depth already configured on
    the writer object `w`.
    """
    w.write(
        outfile,
        scan_rows_from_file(infile, w.width, w.height, w.planes, w.bitdepth),
    )
def scan_rows_from_file(infile, width, height, planes, bitdepth):
    """
    Generate rows of sample values from the binary ("Netpbm-like")
    data in `infile`, which must be positioned at the first pixel.
    Each yielded row is an array of `width * planes` values;
    `bitdepth` selects between 1-byte samples and big-endian
    2-byte samples (only 8 and 16 are supported).
    """
    # Values per row
    vpr = width * planes
    if bitdepth > 8:
        assert bitdepth == 16
        fmt = ">%dH" % vpr
        bpr = 2 * vpr  # bytes per row
        for _ in range(height):
            yield array.array("H", struct.unpack(fmt, infile.read(bpr)))
    else:
        for _ in range(height):
            yield array.array("B", infile.read(vpr))
def parse_args(args):
    """
    Create a parser and parse the command line arguments.
    `args` is the argument list without the program name.
    Returns the argparse Namespace.
    """
    from argparse import ArgumentParser

    parser = ArgumentParser(description=Description)
    version = "%(prog)s " + png.__version__
    parser.add_argument("--version", action="version", version=version)
    parser.add_argument(
        "-c",
        "--compression",
        type=int,
        metavar="level",
        help="zlib compression level (0-9)",
    )
    parser.add_argument(
        "input",
        nargs="?",
        default="-",
        type=png.cli_open,
        metavar="PAM/PNM",
        help="input PAM/PNM file to convert",
    )
    args = parser.parse_args(args)
    return args
def main(argv=None):
    """Command-line entry point: convert the PAM/PNM input to PNG on
    binary stdout. Returns None; raises NotImplementedError for
    unsupported formats or maxvals.
    """
    if argv is None:
        argv = sys.argv
    args = parse_args(argv[1:])
    # Prepare input and output files
    infile = args.input
    # Call after parsing, so that --version and --help work.
    outfile = png.binary_stdout()
    # Encode PNM to PNG
    format, width, height, depth, maxval = read_pnm_header(infile)
    # NOTE(review): P1/P3 are rejected here even though the `plain`
    # test below mentions them, so those branches are unreachable.
    ok_formats = (b"P2", b"P5", b"P6", b"P7")
    if format not in ok_formats:
        raise NotImplementedError("file format %s not supported" % format)
    # The NetPBM depth (number of channels) completely
    # determines the PNG format.
    # Observe:
    # - L, LA, RGB, RGBA are the 4 modes supported by PNG;
    # - they correspond to 1, 2, 3, 4 channels respectively.
    # We use the number of channels in the source image to
    # determine which one we have.
    # We ignore the NetPBM image type and the PAM TUPLTYPE.
    greyscale = depth <= 2
    pamalpha = depth in (2, 4)
    # Only maxvals of the form 2**n - 1 map directly onto PNG bitdepths.
    supported = [2 ** x - 1 for x in range(1, 17)]
    try:
        mi = supported.index(maxval)
    except ValueError:
        raise NotImplementedError(
            "input maxval (%s) not in supported list %s" % (maxval, str(supported))
        )
    bitdepth = mi + 1
    writer = png.Writer(
        width,
        height,
        greyscale=greyscale,
        bitdepth=bitdepth,
        alpha=pamalpha,
        compression=args.compression,
    )
    plain = format in (b"P1", b"P2", b"P3")
    if plain:
        convert_pnm_plain(writer, infile, outfile)
    else:
        convert_pnm_binary(writer, infile, outfile)


if __name__ == "__main__":
    try:
        sys.exit(main())
    except png.Error as e:
        print(e, file=sys.stderr)
        sys.exit(99)

540
Backend/venv/bin/priplan9topng Executable file
View File

@@ -0,0 +1,540 @@
#!/home/gnx/Desktop/Hotel-Booking/Backend/venv/bin/python3
# Imported from //depot/prj/plan9topam/master/code/plan9topam.py#4 on
# 2009-06-15.
"""Command line tool to convert from Plan 9 image format to PNG format.
Plan 9 image format description:
https://plan9.io/magic/man2html/6/image
Where possible this tool will use unbuffered read() calls,
so that when finished the file offset is exactly at the end of
the image data.
This is useful for Plan9 subfont files which place font metric
data immediately after the image.
"""
# Test materials
# asset/left.bit is a Plan 9 image file, a leftwards facing Glenda.
# Other materials have to be scrounged from the internet.
# https://plan9.io/sources/plan9/sys/games/lib/sokoban/images/cargo.bit
import array
import collections
import io
# http://www.python.org/doc/2.3.5/lib/module-itertools.html
import itertools
import os
# http://www.python.org/doc/2.3.5/lib/module-re.html
import re
import struct
# http://www.python.org/doc/2.3.5/lib/module-sys.html
import sys
# https://docs.python.org/3/library/tarfile.html
import tarfile
# https://pypi.org/project/pypng/
import png
# internal
import prix
class Error(Exception):
    """Raised when the input cannot be parsed as a Plan 9 image."""
def block(s, n):
    """Chunk the sequence `s` into n-tuples; any trailing partial
    chunk is dropped (standard zip-of-one-iterator grouping idiom).
    """
    shared = iter(s)
    return zip(*((shared,) * n))
def plan9_as_image(inp):
    """Represent a Plan 9 image file as a png.Image instance, so
    that it can be written as a PNG file.
    Works with compressed input files and may work with uncompressed files.
    `inp` is an open binary file object.
    """
    # Use inp.raw if available.
    # This avoids buffering and means that when the image is processed,
    # the resulting input stream is cued up exactly at the end
    # of the image.
    inp = getattr(inp, "raw", inp)
    info, blocks = plan9_open_image(inp)
    rows, infodict = plan9_image_rows(blocks, info)
    return png.Image(rows, infodict)
def plan9_open_image(inp):
    """Open a Plan9 image file (`inp` should be an already open
    file object), and return (`info`, `blocks`) pair.
    `info` should be a Plan9 5-tuple;
    `blocks` is the input, and it should yield (`row`, `data`)
    pairs (see :meth:`pixmeta`).
    """
    # The first 11 bytes distinguish the compressed format, which
    # starts with the literal line "compressed\n".
    r = inp.read(11)
    if r == b"compressed\n":
        info, blocks = decompress(inp)
    else:
        # Since Python 3, there is a good chance that this path
        # doesn't work.
        info, blocks = glue(inp, r)
    return info, blocks
def glue(f, r):
    """Return an (info, stream) pair for an uncompressed image, where
    `r` holds the initial bytes of the 60-byte metadata already read
    from the stream `f`.
    """
    remaining = f.read(60 - len(r))
    return (meta(r + remaining), f)
def meta(r):
    """Parse the 60-byte metadata bytestring `r` from a Plan 9 image
    file and return [*chan*, *minx*, *miny*, *limx*, *limy*]
    (channel string followed by four ints).

    As per https://plan9.io/magic/man2html/6/image the metadata
    comprises 5 blank-separated words; each happens to start at a
    multiple of 12, but this routine only relies on the separators.
    """
    words = r.split()
    # :todo: raise FormatError
    if 5 != len(words):
        raise Error("Expected 5 space-separated words in metadata")
    return [words[0]] + [int(w) for w in words[1:]]
def bitdepthof(chan):
    """Return the bitdepth for a Plan9 pixel format string.

    The result is the largest per-channel depth among the colour/alpha
    channels; padding channels ("x") are ignored.
    """
    maxd = 0
    for c in re.findall(rb"[a-z]\d*", chan):
        # `chan` is bytes, so `c[0]` is an int; the previous test
        # `c[0] != "x"` compared int to str and was always True,
        # wrongly counting "x" padding channels into the depth.
        if not c.startswith(b"x"):
            maxd = max(maxd, int(c[1:]))
    return maxd
def maxvalof(chan):
    """Return the netpbm MAXVAL for a Plan9 pixel format string:
    all-ones at the format's bitdepth.
    """
    return (1 << bitdepthof(chan)) - 1
def plan9_image_rows(blocks, metadata):
    """
    Convert (uncompressed) Plan 9 image file to pair of (*rows*, *info*).
    This is intended to be used by PyPNG format.
    *info* is the image info (metadata) returned in a dictionary,
    *rows* is an iterator that yields each row in
    boxed row flat pixel format.
    `blocks`, should be an iterator of (`row`, `data`) pairs.
    """
    chan, minx, miny, limx, limy = metadata
    rows = limy - miny
    width = limx - minx
    # Channel count excludes "x" (padding) channels.
    nchans = len(re.findall(b"[a-wyz]", chan))
    alpha = b"a" in chan
    # Iverson's convention for the win!
    # (bool `alpha` is used as 0/1 in the subtraction.)
    ncolour = nchans - alpha
    greyscale = ncolour == 1
    bitdepth = bitdepthof(chan)
    maxval = maxvalof(chan)
    # PNG style info dict.
    # NOTE: this local `meta` shadows the module-level meta() function.
    meta = dict(
        size=(width, rows),
        bitdepth=bitdepth,
        greyscale=greyscale,
        alpha=alpha,
        planes=nchans,
    )
    arraycode = "BH"[bitdepth > 8]
    # unpack() yields one pixel (n-list) at a time; block() groups
    # `width` pixels into a row, which is flattened into an array.
    return (
        map(
            lambda x: array.array(arraycode, itertools.chain(*x)),
            block(unpack(blocks, rows, width, chan, maxval), width),
        ),
        meta,
    )
def unpack(f, rows, width, chan, maxval):
    """Unpack `f` into pixels.
    `chan` describes the pixel format using
    the Plan9 syntax ("k8", "r8g8b8", and so on).
    Assumes the pixel format has a total channel bit depth
    that is either a multiple or a divisor of 8
    (the Plan9 image specification requires this).
    `f` should be an iterator that returns blocks of input such that
    each block contains a whole number of pixels.
    The return value is an iterator that yields each pixel as an n-list
    of channel values scaled to `maxval`; padding ("x") channels are
    dropped, matching the channel count computed by plan9_image_rows.
    """

    def mask(w):
        """An integer, to be used as a mask, with bottom `w` bits set to 1."""
        return (1 << w) - 1

    def deblock(f, depth, width):
        """A "packer" used to convert multiple bytes into single pixels.
        `depth` is the pixel depth in bits (>= 8), `width` is the row width in
        pixels (unused here).
        """
        w = depth // 8
        i = 0
        for block in f:
            # NOTE(review): the loop variable `i` is reassigned by the
            # `for`, so the `i += w` below is dead; kept as-is.
            for i in range(len(block) // w):
                p = block[w * i : w * (i + 1)]
                i += w
                # Convert little-endian p to integer x
                x = 0
                s = 1  # scale
                for j in p:
                    x += s * j
                    s <<= 8
                yield x

    def bitfunge(f, depth, width):
        """A "packer" used to convert single bytes into multiple pixels.
        Depth is the pixel depth (< 8), width is the row width in pixels.
        """
        assert 8 / depth == 8 // depth
        for block in f:
            col = 0
            for x in block:
                for j in range(8 // depth):
                    # High bits beyond `depth` are masked off downstream.
                    yield x >> (8 - depth)
                    col += 1
                    if col == width:
                        # A row-end forces a new byte even if
                        # we haven't consumed all of the current byte.
                        # Effectively rows are bit-padded to make
                        # a whole number of bytes.
                        col = 0
                        break
                    x <<= depth

    # number of bits in each channel
    bits = [int(d) for d in re.findall(rb"\d+", chan)]
    # colr of each channel
    # (r, g, b, k for actual colours, and
    # a, m, x for alpha, map-index, and unused)
    colr = re.findall(b"[a-z]", chan)
    depth = sum(bits)
    # Select a "packer" that either:
    # - gathers multiple bytes into a single pixel (for depth >= 8); or,
    # - splits bytes into several pixels (for depth < 8).
    if depth >= 8:
        assert depth % 8 == 0
        packer = deblock
    else:
        assert 8 % depth == 0
        packer = bitfunge
    for x in packer(f, depth, width):
        # x is the pixel as an unsigned integer
        o = []
        # This is a bit yucky.
        # Extract each channel from the _most_ significant part of x.
        for b, col in zip(bits, colr):
            v = (x >> (depth - b)) & mask(b)
            x <<= b
            # BUG FIX: `colr` holds bytes, so the previous comparison
            # `col != "x"` (bytes vs str) was always True and padding
            # channels were scaled and emitted into every pixel.
            if col != b"x":
                # scale to maxval
                v = v * float(maxval) / mask(b)
                v = int(v + 0.5)
                o.append(v)
        yield o
def decompress(f):
    """Decompress a Plan 9 image file.
    The binary file object `f` must already be positioned just past the
    initial "compressed" marker line. Returns (`info`, `blocks`) where
    `info` is the 5-element Plan 9 image metadata and `blocks` is an
    iterator yielding one (row, data) pair per compressed block.
    """
    info = meta(f.read(60))
    # info[4] is limy, the row count bound for decompression.
    return info, decomprest(f, info[4])
def decomprest(f, rows):
    """Yield the decompressed data of successive blocks from `f` until
    the file reports that `rows` rows have been produced.
    """
    reached = 0
    while reached < rows:
        reached, data = deblock(f)
        yield data
def deblock(f):
    """Decompress a single block from a compressed Plan 9 image file.

    Each block starts with 2 decimal strings of 12 bytes each: the
    cumulative row count reached by this block, then the byte size of
    the compressed payload that follows.

    Returns a single ``(row, data)`` pair where `row` is the total
    number of rows processed so far (according to the file format) and
    `data` is the decompressed data for this block.  (The old docstring
    claimed a sequence of pairs was yielded; this function returns
    exactly one pair — see `decomprest` for the iteration.)

    Raises `Error` when the declared size is outside the 0..6000 range
    the format permits.
    """
    row = int(f.read(12))
    size = int(f.read(12))
    if not (0 <= size <= 6000):
        raise Error("block has invalid size; not a Plan 9 image file?")
    # Since each block is at most 6000 bytes we may as well read it all in
    # one go.
    d = f.read(size)
    i = 0
    o = []
    while i < size:
        x = d[i]
        i += 1
        if x & 0x80:
            # High bit set: literal run of (x & 0x7F) + 1 bytes follows.
            x = (x & 0x7F) + 1
            lit = d[i : i + x]
            i += x
            o.extend(lit)
            continue
        # x's high-order bit is 0: a back-reference (copy) run.
        length = (x >> 2) + 3
        # Offset is made from bottom 2 bits of x and 8 bits of next byte.
        #     MSByte                LSByte
        # +---------------------+-------------------------+
        # | - - - - - - | x1 x0 | d7 d6 d5 d4 d3 d2 d1 d0 |
        # +-----------------------------------------------+
        # Had to discover by inspection which way round the bits go,
        # because https://plan9.io/magic/man2html/6/image doesn't say
        # that x's 2 bits are most significant.
        offset = (x & 3) << 8
        offset |= d[i]
        i += 1
        # Note: complement operator neatly maps (0 to 1023) to (-1 to
        # -1024). Adding len(o) gives a (non-negative) offset into o from
        # which to start indexing.
        offset = ~offset + len(o)
        if offset < 0:
            raise Error(
                "byte offset indexes off the beginning of "
                "the output buffer; not a Plan 9 image file?"
            )
        # Copy byte-by-byte so a run may overlap its own output (the
        # copied region can include bytes appended by this very loop).
        for j in range(length):
            o.append(o[offset + j])
    return row, bytes(o)
# One glyph record from a Plan 9 subfont: `x` is the glyph's left edge in
# the source image; `top`/`bottom` bound its rows; `left` is left padding
# in the target glyph; `width` is the advance width (0 = undefined glyph).
# See the long comment in font_copy for the full layout description.
FontChar = collections.namedtuple("FontChar", "x top bottom left width")
def font_copy(inp, image, out, control):
    """
    Convert a Plan 9 font (`inp`, `image`) to a series of PNG images,
    and write them out as a tar file to the file object `out`.
    Write a text control file out to the file object `control`.
    Each valid glyph in the font becomes a single PNG image;
    the output is a tar file of all the images.
    A Plan 9 font consists of a Plan 9 image immediately
    followed by font data.
    The image for the font should be the `image` argument,
    the file containing the rest of the font data should be the
    file object `inp` which should be cued up to the start of
    the font data that immediately follows the image.
    https://plan9.io/magic/man2html/6/font
    """
    # The format is a little unusual, and isn't completely
    # clearly documented.
    # Each 6-byte structure (see FontChar above) defines
    # a rectangular region of the image that is used for each
    # glyph.
    # The source image region that is used may be strictly
    # smaller than the rectangle for the target glyph.
    # This seems like a micro-optimisation.
    # For each glyph,
    # rows above `top` and below `bottom` will not be copied
    # from the source (they can be assumed to be blank).
    # No space is saved in the source image, since the rows must
    # be present.
    # `x` is always non-decreasing, so the glyphs appear strictly
    # left-to-image in the source image.
    # The x of the next glyph is used to
    # infer the width of the source rectangle.
    # `top` and `bottom` give the y-coordinate of the top- and
    # bottom- sides of the rectangle in both source and targets.
    # `left` is the x-coordinate of the left-side of the
    # rectangle in the target glyph. (equivalently, the amount
    # of padding that should be added on the left).
    # `width` is the advance-width of the glyph; by convention
    # it is 0 for an undefined glyph.
    name = getattr(inp, "name", "*subfont*name*not*supplied*")
    # 36-byte text header holding 3 whitespace-separated integers:
    # glyph count, glyph height, and ascent (baseline position).
    header = inp.read(36)
    n, height, ascent = [int(x) for x in header.split()]
    print("baseline", name, ascent, file=control, sep=",")
    # Read n+1 records: the extra record exists so chs[i + 1].x can
    # supply the source width of the last real glyph (see below).
    chs = []
    for i in range(n + 1):
        bs = inp.read(6)
        ch = FontChar(*struct.unpack("<HBBBB", bs))
        chs.append(ch)
    # "w|" writes an uncompressed tar as a non-seekable stream to `out`.
    tar = tarfile.open(mode="w|", fileobj=out)
    # Start at 0, increment for every image output
    # (recall that not every input glyph has an output image)
    output_index = 0
    for i in range(n):
        ch = chs[i]
        if ch.width == 0:
            # Advance width 0 marks an undefined glyph: no output.
            continue
        print("png", "index", output_index, "glyph", name, i, file=control, sep=",")
        info = dict(image.info, size=(ch.width, height))
        target = new_image(info)
        source_width = chs[i + 1].x - ch.x
        rect = ((ch.left, ch.top), (ch.left + source_width, ch.bottom))
        image_draw(target, rect, image, (ch.x, ch.top))
        # :todo: add source, glyph, and baseline data here (as a
        # private tag?)
        o = io.BytesIO()
        target.write(o)
        binary_size = o.tell()
        o.seek(0)
        # gettarinfo stats `inp` only to borrow file metadata (mtime,
        # ownership); the size is overridden below to match the
        # in-memory PNG just rendered.
        tarinfo = tar.gettarinfo(arcname="%s/glyph%d.png" % (name, i), fileobj=inp)
        tarinfo.size = binary_size
        tar.addfile(tarinfo, fileobj=o)
        output_index += 1
    tar.close()
def new_image(info):
    """Return a fresh, all-zero png.Image described by `info`.

    `info` must contain "size" (width, height) and "planes"; each row
    is a distinct list of width*planes zero values.
    """
    width, height = info["size"]
    # Values per row.
    vpr = width * info["planes"]
    # Direct comprehension instead of the previous lambda-assigned row
    # factory (PEP 8 discourages assigning lambdas to names); each row
    # is still a separate list.
    rows = [[0] * vpr for _ in range(height)]
    return png.Image(rows, info)
def image_draw(target, rect, source, point):
    """Copy a rectangle of pixels from `source` into `target`.

    The pixel at `point` in `source` is aligned with the top-left
    corner of `rect` in `target`; the pixels inside `rect` are then
    overwritten with the corresponding pixels from `source`.

    Both images must expose indexable `rows` (not streamed) and agree
    on plane count and bitdepth.  No clipping or channel/colour
    conversion is attempted (maybe later?).
    """
    if target.info["planes"] != source.info["planes"]:
        raise NotImplementedError(
            "source and target must have the same number of planes"
        )
    if target.info["bitdepth"] != source.info["bitdepth"]:
        raise NotImplementedError("source and target must have the same bitdepth")
    (left, top), (right, bottom) = rect
    planes = source.info["planes"]
    # Width of the copied region, in values (pixels * planes).
    values_per_row = (right - left) * planes
    src_x, src_y = point
    src_lo = src_x * planes
    src_hi = src_lo + values_per_row
    dst_lo = left * planes
    dst_hi = dst_lo + values_per_row
    for dy in range(bottom - top):
        segment = source.rows[src_y + dy][src_lo:src_hi]
        target.rows[top + dy][dst_lo:dst_hi] = segment
def main(argv=None):
    """CLI entry point: convert a Plan 9 image (or subfont) to PNG.

    `argv` is the argument list to parse (None means sys.argv[1:]).
    Writes a PNG — or, with --font, a tar of per-glyph PNGs — to
    binary stdout.
    """
    import argparse

    parser = argparse.ArgumentParser(description="Convert Plan9 image to PNG")
    parser.add_argument(
        "input",
        nargs="?",
        default="-",
        type=png.cli_open,
        metavar="image",
        help="image file in Plan 9 format",
    )
    parser.add_argument(
        "--control",
        # os.devnull is the documented name for the null device;
        # os.path.devnull is an undocumented alias with the same value.
        default=os.devnull,
        type=argparse.FileType("w"),
        metavar="ControlCSV",
        help="(when using --font) write a control CSV file to named file",
    )
    parser.add_argument(
        "--font",
        action="store_true",
        help="process as Plan 9 subfont: output a tar file of PNGs",
    )
    # Bug fix: honour the argv parameter (it was previously ignored);
    # argparse falls back to sys.argv[1:] when argv is None, so the
    # default behaviour is unchanged.
    args = parser.parse_args(argv)
    image = plan9_as_image(args.input)
    image.stream()
    if not args.font:
        image.write(png.binary_stdout())
    else:
        font_copy(args.input, image, png.binary_stdout(), args.control)
if __name__ == "__main__":
sys.exit(main())

33
Backend/venv/bin/pripnglsch Executable file
View File

@@ -0,0 +1,33 @@
#!/home/gnx/Desktop/Hotel-Booking/Backend/venv/bin/python3
# pripnglsch
# PNG List Chunks
import png
def list_chunks(out, inp):
    """Print one line per chunk of the PNG `inp`: chunk type, payload
    length, and a hex preview (truncated with "..." past 26 bytes)."""
    reader = png.Reader(file=inp)
    for tag, payload in reader.chunks():
        if len(payload) <= 28:
            preview = " " + payload.hex()
        else:
            preview = " " + payload[:26].hex() + "..."
        print("%s %10d%s" % (tag.decode("ascii"), len(payload), preview), file=out)
def main(argv=None):
    """Parse the command line and list the input PNG's chunks on stdout."""
    import argparse
    import sys

    ap = argparse.ArgumentParser()
    ap.add_argument("input", nargs="?", default="-", type=png.cli_open, metavar="PNG")
    ns = ap.parse_args()
    return list_chunks(sys.stdout, ns.input)
if __name__ == "__main__":
main()

101
Backend/venv/bin/pripngtopam Executable file
View File

@@ -0,0 +1,101 @@
#!/home/gnx/Desktop/Hotel-Booking/Backend/venv/bin/python3
import struct
import png
def write_pnm(file, plain, rows, meta):
    """
    Write a Netpbm PNM (or PAM) image.

    *file*  binary output file object;
    *plain* (a bool) true to emit the plain/ASCII variant — not
            possible for PAM;
    *rows*  an iterator over the rows of values;
    *meta*  the PyPNG-style info dictionary.

    Plane count fixes the container: 1 -> PGM, 2 -> PAM (grey+alpha),
    3 -> PPM, 4 -> PAM (RGBA).
    """
    info = dict(meta)
    info["maxval"] = 2 ** info["bitdepth"] - 1
    info["width"], info["height"] = info["size"]
    planes = info["planes"]
    # Assume inputs are from a PNG file.
    assert planes in (1, 2, 3, 4)
    if planes in (1, 3):
        if planes == 1:
            # PGM.  Even if maxval is 1 we use PGM instead of PBM,
            # to avoid converting data.
            magic = "P2" if plain else "P5"
        else:
            # PPM.
            magic = "P3" if plain else "P6"
        header = "{magic} {width:d} {height:d} {maxval:d}\n".format(magic=magic, **info)
    else:
        # PAM; see http://netpbm.sourceforge.net/doc/pam.html
        if plain:
            raise Exception("PAM (%d-plane) does not support plain format" % planes)
        tupltype = "GRAYSCALE_ALPHA" if planes == 2 else "RGB_ALPHA"
        header = (
            "P7\nWIDTH {width:d}\nHEIGHT {height:d}\n"
            "DEPTH {planes:d}\nMAXVAL {maxval:d}\n"
            "TUPLTYPE {tupltype}\nENDHDR\n".format(tupltype=tupltype, **info)
        )
    file.write(header.encode("ascii"))
    # Values per row.
    vpr = planes * info["width"]
    if plain:
        for row in rows:
            file.write(b" ".join(b"%d" % v for v in row))
            file.write(b"\n")
    else:
        # struct format: big-endian, one code per value in the row;
        # 16-bit samples need "H", 8-bit fit in "B".
        code = "H" if info["maxval"] > 0xFF else "B"
        fmt = ">%d%s" % (vpr, code)
        for row in rows:
            file.write(struct.pack(fmt, *row))
    file.flush()
def main(argv=None):
    """Read a PNG (file argument or stdin) and emit PNM/PAM on stdout."""
    import argparse

    ap = argparse.ArgumentParser(description="Convert PNG to PAM")
    ap.add_argument("--plain", action="store_true")
    ap.add_argument("input", nargs="?", default="-", type=png.cli_open, metavar="PNG")
    ns = ap.parse_args()
    # Decode the PNG to straightforward rows, then re-encode as PNM/PAM.
    reader = png.Reader(file=ns.input)
    _, _, rows, info = reader.asDirect()
    write_pnm(png.binary_stdout(), ns.plain, rows, info)
if __name__ == "__main__":
import sys
sys.exit(main())

71
Backend/venv/bin/prirowpng Executable file
View File

@@ -0,0 +1,71 @@
#!/home/gnx/Desktop/Hotel-Booking/Backend/venv/bin/python3
# http://www.python.org/doc/2.4.4/lib/module-itertools.html
import itertools
import sys
import png
Description = """Join PNG images in a row left-to-right."""
class FormatError(Exception):
    """
    Some problem with the image format.

    Raised by join_row when an input image's height does not match
    the first image's height.
    """
def join_row(out, l):
    """
    Concatenate the input images left-to-right.

    `l` is a list of open input files; every image must have the same
    height and the same number of channels.  The joined PNG is written
    to the open file `out`.
    """
    readers = [png.Reader(file=f) for f in l]
    # Ewgh, side effects: preamble() populates .width / .height.
    for rdr in readers:
        rdr.preamble()
    # The first image supplies the reference height.
    height = readers[0].height
    # Total output width; heights are checked along the way.
    width = 0
    for i, rdr in enumerate(readers):
        if rdr.height != height:
            raise FormatError('Image %d, height %d, does not match %d.' %
                              (i, rdr.height, height))
        width += rdr.width
    # Various bugs here because different numbers of channels and depths go wrong.
    pixel, info = zip(*[rdr.asDirect()[2:4] for rdr in readers])
    tinfo = dict(info[0])
    del tinfo['size']
    writer = png.Writer(width, height, **tinfo)

    def iter_all_rows():
        # Each iteration yields one row from every input, chained into
        # a single output row.  list() is required here to hasten the
        # lazy row building; not sure if that's a bug in PyPNG or not.
        for row_tuple in zip(*pixel):
            yield list(itertools.chain(*row_tuple))

    writer.write(out, iter_all_rows())
def main(argv):
    """CLI entry point.

    `argv` is a full argv list (argv[0] is the program name, as passed
    from the __main__ guard).
    """
    import argparse

    parser = argparse.ArgumentParser(description=Description)
    parser.add_argument(
        "input", nargs="*", default="-", type=png.cli_open, metavar="PNG"
    )
    # Bug fix: parse the argv we were given instead of always reading
    # sys.argv; behaviour is unchanged for the main(sys.argv) caller.
    args = parser.parse_args(argv[1:])
    # NOTE(review): with nargs="*", argparse passes the string default
    # "-" through `type` once, yielding a single file object rather
    # than a list — join_row would then iterate it line-by-line.  This
    # looks wrong for the no-argument case; confirm intended behaviour.
    return join_row(png.binary_stdout(), args.input)
if __name__ == '__main__':
main(sys.argv)

215
Backend/venv/bin/priweavepng Executable file
View File

@@ -0,0 +1,215 @@
#!/home/gnx/Desktop/Hotel-Booking/Backend/venv/bin/python3
# priweavepng
# Weave selected channels from input PNG files into
# a multi-channel output PNG.
import collections
import re
from array import array
import png
"""
priweavepng file1.png [file2.png ...]
The `priweavepng` tool combines channels from the input images and
weaves a selection of those channels into an output image.
Conceptually an intermediate image is formed consisting of
all channels of all input images in the order given on the command line
and in the order of each channel in its image.
Then from 1 to 4 channels are selected and
an image is output with those channels.
The limit on the number of selected channels is
imposed by the PNG image format.
The `-c n` option selects channel `n`.
Further channels can be selected either by repeating the `-c` option,
or using a comma separated list.
For example `-c 3,2,1` will select channels 3, 2, and 1 in that order;
if the input is an RGB PNG, this will swop the Red and Blue channels.
The order is significant, the order in which the options are given is
the order of the output channels.
It is permissible, and sometimes useful
(for example, grey to colour expansion, see below),
to repeat the same channel.
If no `-c` option is used the default is
to select all of the input channels, up to the first 4.
`priweavepng` does not care about the meaning of the channels
and treats them as a matrix of values.
The number of output channels determines the colour mode of the PNG file:
L (1-channel, Grey), LA (2-channel, Grey+Alpha),
RGB (3-channel, Red+Green+Blue), RGBA (4-channel, Red+Green+Blue+Alpha).
The `priweavepng` tool can be used for a variety of
channel building, swopping, and extraction effects:
Combine 3 grayscale images into RGB colour:
priweavepng grey1.png grey2.png grey3.png
Swop Red and Blue channels in colour image:
priweavepng -c 3 -c 2 -c 1 rgb.png
Extract Green channel as a greyscale image:
priweavepng -c 2 rgb.png
Convert a greyscale image to a colour image (all grey):
priweavepng -c 1 -c 1 -c 1 grey.png
Add alpha mask from a separate (greyscale) image:
priweavepng rgb.png grey.png
Extract alpha mask into a separate (greyscale) image:
priweavepng -c 4 rgba.png
Steal alpha mask from second file and add to first.
Note that the intermediate image in this example has 7 channels:
priweavepng -c 1 -c 2 -c 3 -c 7 rgb.png rgba.png
Take Green channel from 3 successive colour images to make a new RGB image:
priweavepng -c 2 -c 5 -c 8 rgb1.png rgb2.png rgb3.png
"""
# An input image: `rows` is a fully materialised list of rows and
# `info` is its PyPNG info dictionary.
Image = collections.namedtuple("Image", "rows info")
# For each channel in the intermediate raster,
# model:
# - image: the input image (0-based);
# - i: the channel index within that image (0-based);
# - bitdepth: the bitdepth of this channel.
Channel = collections.namedtuple("Channel", "image i bitdepth")
class Error(Exception):
    """Problem with the command line or the input images."""
    pass
def weave(out, args):
    """Stack the input PNG files and extract channels
    into a single output PNG.

    `out` is an open binary output file.  `args` carries `input`
    (a list of paths), `channel` (1-based channel selections, possibly
    empty) and `interlace` (bool), as built by main().

    Raises `Error` for missing input or a selection of more than 4
    channels, and NotImplementedError when input sizes differ.
    """
    paths = args.input
    if len(paths) < 1:
        raise Error("Required input is missing.")
    # List of Image instances
    images = []
    # Channel map. Maps from channel number (starting from 1)
    # to an (image_index, channel_index) pair.
    channel_map = dict()
    channel = 1
    for image_index, path in enumerate(paths):
        inp = png.cli_open(path)
        rows, info = png.Reader(file=inp).asDirect()[2:]
        # Materialise the rows so they can be iterated more than once.
        rows = list(rows)
        image = Image(rows, info)
        images.append(image)
        # A later version of PyPNG may intelligently support
        # PNG files with heterogenous bitdepths.
        # For now, assumes bitdepth of all channels in image
        # is the same.
        channel_bitdepth = (image.info["bitdepth"],) * image.info["planes"]
        for i in range(image.info["planes"]):
            channel_map[channel + i] = Channel(image_index, i, channel_bitdepth[i])
        channel += image.info["planes"]
    assert channel - 1 == sum(image.info["planes"] for image in images)
    # If no channels, select up to first 4 as default.
    if not args.channel:
        args.channel = range(1, channel)[:4]
    out_channels = len(args.channel)
    if not (0 < out_channels <= 4):
        raise Error("Too many channels selected (must be 1 to 4)")
    # PNG colour mode follows from the output channel count.
    alpha = out_channels in (2, 4)
    greyscale = out_channels in (1, 2)
    # NOTE(review): this is a *tuple* of per-image bitdepths and is
    # passed to png.Writer(bitdepth=...) below — confirm PyPNG accepts
    # a tuple here; a single int looks more usual.
    bitdepth = tuple(image.info["bitdepth"] for image in images)
    # "B" (8-bit values) unless any source needs 16-bit values.
    arraytype = "BH"[max(bitdepth) > 8]
    size = [image.info["size"] for image in images]
    # Currently, fail unless all images same size.
    if len(set(size)) > 1:
        raise NotImplementedError("Cannot cope when sizes differ - sorry!")
    size = size[0]
    # Values per row, of output image
    vpr = out_channels * size[0]
    def weave_row_iter():
        """
        Yield each woven row in turn.
        """
        # The zip call creates an iterator that yields
        # a tuple with each element containing the next row
        # for each of the input images.
        for row_tuple in zip(*(image.rows for image in images)):
            # output row
            row = array(arraytype, [0] * vpr)
            # for each output channel select correct input channel
            for out_channel_i, selection in enumerate(args.channel):
                channel = channel_map[selection]
                # incoming row (make it an array)
                irow = array(arraytype, row_tuple[channel.image])
                n = images[channel.image].info["planes"]
                # Strided slice assignment interleaves this input
                # channel into the output row.
                row[out_channel_i::out_channels] = irow[channel.i :: n]
            yield row
    w = png.Writer(
        size[0],
        size[1],
        greyscale=greyscale,
        alpha=alpha,
        bitdepth=bitdepth,
        interlace=args.interlace,
    )
    w.write(out, weave_row_iter())
def comma_list(s):
    """argparse `type=` helper: pull every run of digits out of *s* and
    return them as a list of ints (so "3,2,1" -> [3, 2, 1])."""
    return list(map(int, re.findall(r"\d+", s)))
def main(argv=None):
    """Parse the command line and run the weave."""
    import argparse
    import itertools
    import sys

    arglist = (sys.argv if argv is None else argv)[1:]
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "-c",
        "--channel",
        action="append",
        type=comma_list,
        help="list of channels to extract",
    )
    parser.add_argument("--interlace", action="store_true", help="write interlaced PNG")
    parser.add_argument("input", nargs="+")
    args = parser.parse_args(arglist)
    if args.channel:
        # Each -c option may itself be a comma list; flatten to one
        # sequence of channel numbers.
        args.channel = list(itertools.chain.from_iterable(args.channel))
    return weave(png.binary_stdout(), args)
if __name__ == "__main__":
main()

7
Backend/venv/bin/qr Executable file
View File

@@ -0,0 +1,7 @@
#!/home/gnx/Desktop/Hotel-Booking/Backend/venv/bin/python3
import sys
from qrcode.console_scripts import main
if __name__ == '__main__':
    # NOTE(review): console-script wrappers on Windows run as foo.exe;
    # stripping the suffix presumably makes argv[0] show the bare
    # command name in qrcode's CLI output — confirm.
    if sys.argv[0].endswith('.exe'):
        sys.argv[0] = sys.argv[0][:-4]
    sys.exit(main())

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,31 @@
# Copyright (C) AB Strakt
# See LICENSE for details.
"""
pyOpenSSL - A simple wrapper around the OpenSSL library
"""
from OpenSSL import SSL, crypto
from OpenSSL.version import (
__author__,
__copyright__,
__email__,
__license__,
__summary__,
__title__,
__uri__,
__version__,
)
__all__ = [
"SSL",
"__author__",
"__copyright__",
"__email__",
"__license__",
"__summary__",
"__title__",
"__uri__",
"__version__",
"crypto",
]

View File

@@ -0,0 +1,129 @@
from __future__ import annotations
import os
import sys
import warnings
from typing import Any, Callable, NoReturn, Union
from cryptography.hazmat.bindings.openssl.binding import Binding
if sys.version_info >= (3, 9):
StrOrBytesPath = Union[str, bytes, os.PathLike[str], os.PathLike[bytes]]
else:
StrOrBytesPath = Union[str, bytes, os.PathLike]
binding = Binding()
ffi = binding.ffi
lib: Any = binding.lib
# This is a special CFFI allocator that does not bother to zero its memory
# after allocation. This has vastly better performance on large allocations and
# so should be used whenever we don't need the memory zeroed out.
no_zero_allocator = ffi.new_allocator(should_clear_after_alloc=False)
def text(charp: Any) -> str:
    """
    Get a native :class:`str` representing the given CFFI ``char*``.

    :param charp: A C-style string represented using CFFI (a NULL
        pointer yields the empty string).
    :return: :class:`str`
    """
    return ffi.string(charp).decode("utf-8") if charp else ""
def exception_from_error_queue(exception_type: type[Exception]) -> NoReturn:
    """
    Convert an OpenSSL library failure into a Python exception.

    A failing native OpenSSL call signals the error through its return
    value and pushes error codes onto a queue associated with the
    current thread; this helper drains that queue and raises
    *exception_type* carrying every (library, function, reason)
    message triple.
    """
    errors = []
    while (code := lib.ERR_get_error()) != 0:
        errors.append(
            (
                text(lib.ERR_lib_error_string(code)),
                text(lib.ERR_func_error_string(code)),
                text(lib.ERR_reason_error_string(code)),
            )
        )
    raise exception_type(errors)
def make_assert(error: type[Exception]) -> Callable[[bool], Any]:
    """
    Build an assertion helper bound to *error*: the returned function
    raises (via :func:`exception_from_error_queue`) whenever its
    argument is anything other than ``True``, and otherwise returns
    ``None``.
    """

    def openssl_assert(ok: bool) -> None:
        """
        If *ok* is not True, retrieve the error from OpenSSL and raise it.
        """
        if ok is not True:
            exception_from_error_queue(error)

    return openssl_assert
def path_bytes(s: StrOrBytesPath) -> bytes:
    """
    Convert a Python path to the :py:class:`bytes` form expected by
    OpenSSL APIs that accept a filename.

    :param s: A path (anything acceptable to :func:`os.fspath`).
    :return: An instance of :py:class:`bytes`.
    """
    fs = os.fspath(s)
    if isinstance(fs, bytes):
        return fs
    return fs.encode(sys.getfilesystemencoding())
def byte_string(s: str) -> bytes:
    """Encode *s* one character per byte via the "charmap" codec."""
    return s.encode("charmap")
# A marker object to observe whether some optional arguments are passed any
# value or not.
UNSPECIFIED = object()

_TEXT_WARNING = "str for {0} is no longer accepted, use bytes"


def text_to_bytes_and_warn(label: str, obj: Any) -> Any:
    """
    Pass non-text values through unchanged.  If ``obj`` is text, emit a
    :class:`DeprecationWarning` naming *label* (the parameter ``obj``
    came from, so a developer can find and correct the call site) and
    return the UTF-8 encoding of the text instead.
    """
    if not isinstance(obj, str):
        return obj
    warnings.warn(
        _TEXT_WARNING.format(label),
        category=DeprecationWarning,
        stacklevel=3,
    )
    return obj.encode("utf-8")

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,40 @@
import ssl
import sys
import cffi
import cryptography
import OpenSSL.SSL
from . import version
_env_info = """\
pyOpenSSL: {pyopenssl}
cryptography: {cryptography}
cffi: {cffi}
cryptography's compiled against OpenSSL: {crypto_openssl_compile}
cryptography's linked OpenSSL: {crypto_openssl_link}
Python's OpenSSL: {python_openssl}
Python executable: {python}
Python version: {python_version}
Platform: {platform}
sys.path: {sys_path}""".format(
pyopenssl=version.__version__,
crypto_openssl_compile=OpenSSL._util.ffi.string(
OpenSSL._util.lib.OPENSSL_VERSION_TEXT,
).decode("ascii"),
crypto_openssl_link=OpenSSL.SSL.SSLeay_version(
OpenSSL.SSL.SSLEAY_VERSION
).decode("ascii"),
python_openssl=getattr(ssl, "OPENSSL_VERSION", "n/a"),
cryptography=cryptography.__version__,
cffi=cffi.__version__,
python=sys.executable,
python_version=sys.version,
platform=sys.platform,
sys_path=sys.path,
)
if __name__ == "__main__":
print(_env_info)

View File

@@ -0,0 +1,50 @@
"""
PRNG management routines, thin wrappers.
"""
from __future__ import annotations
import warnings
from OpenSSL._util import lib as _lib
warnings.warn(
"OpenSSL.rand is deprecated - you should use os.urandom instead",
DeprecationWarning,
stacklevel=3,
)
def add(buffer: bytes, entropy: int) -> None:
    """
    Mix bytes from *buffer* into the PRNG state.

    The *entropy* argument is (the lower bound of) an estimate of how much
    randomness is contained in *buffer*, measured in bytes.
    For more information, see e.g. :rfc:`1750`.
    This function is only relevant if you are forking Python processes and
    need to reseed the CSPRNG after fork.

    :param buffer: Buffer with random data.
    :param entropy: The entropy (in bytes) measurement of the buffer.
    :return: :obj:`None`
    :raises TypeError: if *buffer* is not bytes or *entropy* is not an int.
    """
    if not isinstance(buffer, bytes):
        raise TypeError("buffer must be a byte string")
    if not isinstance(entropy, int):
        raise TypeError("entropy must be an integer")
    # Thin wrapper over OpenSSL's RAND_add.
    _lib.RAND_add(buffer, len(buffer), entropy)
def status() -> int:
    """
    Check whether the PRNG has been seeded with enough data.

    Thin wrapper over OpenSSL's ``RAND_status``.

    :return: 1 if the PRNG is seeded enough, 0 otherwise.
    """
    return _lib.RAND_status()

View File

@@ -0,0 +1,28 @@
# Copyright (C) AB Strakt
# Copyright (C) Jean-Paul Calderone
# See LICENSE for details.
"""
pyOpenSSL - A simple wrapper around the OpenSSL library
"""
# Package metadata constants; re-exported by OpenSSL/__init__.py and
# consumed by packaging tools.
__all__ = [
    "__author__",
    "__copyright__",
    "__email__",
    "__license__",
    "__summary__",
    "__title__",
    "__uri__",
    "__version__",
]
__version__ = "25.3.0"
__title__ = "pyOpenSSL"
__uri__ = "https://pyopenssl.org/"
__summary__ = "Python wrapper module around the OpenSSL library"
__author__ = "The pyOpenSSL developers"
__email__ = "cryptography-dev@python.org"
__license__ = "Apache License, Version 2.0"
__copyright__ = f"Copyright 2001-2025 {__author__}"

View File

@@ -1,173 +0,0 @@
cryptography-41.0.7.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
cryptography-41.0.7.dist-info/LICENSE,sha256=Pgx8CRqUi4JTO6mP18u0BDLW8amsv4X1ki0vmak65rs,197
cryptography-41.0.7.dist-info/LICENSE.APACHE,sha256=qsc7MUj20dcRHbyjIJn2jSbGRMaBOuHk8F9leaomY_4,11360
cryptography-41.0.7.dist-info/LICENSE.BSD,sha256=YCxMdILeZHndLpeTzaJ15eY9dz2s0eymiSMqtwCPtPs,1532
cryptography-41.0.7.dist-info/METADATA,sha256=h4C2cL9sbR7ObF6jD7hUT7xOfSvzZBli6AmX-vngctA,5159
cryptography-41.0.7.dist-info/RECORD,,
cryptography-41.0.7.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
cryptography-41.0.7.dist-info/WHEEL,sha256=Bnup3_Y_tMShHsCuO2E9NdrjRJkTtSD1dYVt3WSGhpU,112
cryptography-41.0.7.dist-info/top_level.txt,sha256=KNaT-Sn2K4uxNaEbe6mYdDn3qWDMlp4y-MtWfB73nJc,13
cryptography/__about__.py,sha256=uPXMbbcptt7EzZ_jllGRx0pVdMn-NBsAM4L74hOv-b0,445
cryptography/__init__.py,sha256=iVPlBlXWTJyiFeRedxcbMPhyHB34viOM10d72vGnWuE,364
cryptography/__pycache__/__about__.cpython-312.pyc,,
cryptography/__pycache__/__init__.cpython-312.pyc,,
cryptography/__pycache__/exceptions.cpython-312.pyc,,
cryptography/__pycache__/fernet.cpython-312.pyc,,
cryptography/__pycache__/utils.cpython-312.pyc,,
cryptography/exceptions.py,sha256=EHe7XM2_OtdOM1bZE0ci-4GUhtOlEQ6fQXhK2Igf0qA,1118
cryptography/fernet.py,sha256=TVZy4Dtkpl7kWIpvuKcNldE95IEjTQ0MfHgRsLdnDSM,6886
cryptography/hazmat/__init__.py,sha256=5IwrLWrVp0AjEr_4FdWG_V057NSJGY_W4egNNsuct0g,455
cryptography/hazmat/__pycache__/__init__.cpython-312.pyc,,
cryptography/hazmat/__pycache__/_oid.cpython-312.pyc,,
cryptography/hazmat/_oid.py,sha256=gxhMHKpu9Xsi6uHCGZ_-soYMXj_izOIFaxjUKWbCPeE,14441
cryptography/hazmat/backends/__init__.py,sha256=O5jvKFQdZnXhKeqJ-HtulaEL9Ni7mr1mDzZY5kHlYhI,361
cryptography/hazmat/backends/__pycache__/__init__.cpython-312.pyc,,
cryptography/hazmat/backends/openssl/__init__.py,sha256=p3jmJfnCag9iE5sdMrN6VvVEu55u46xaS_IjoI0SrmA,305
cryptography/hazmat/backends/openssl/__pycache__/__init__.cpython-312.pyc,,
cryptography/hazmat/backends/openssl/__pycache__/aead.cpython-312.pyc,,
cryptography/hazmat/backends/openssl/__pycache__/backend.cpython-312.pyc,,
cryptography/hazmat/backends/openssl/__pycache__/ciphers.cpython-312.pyc,,
cryptography/hazmat/backends/openssl/__pycache__/cmac.cpython-312.pyc,,
cryptography/hazmat/backends/openssl/__pycache__/decode_asn1.cpython-312.pyc,,
cryptography/hazmat/backends/openssl/__pycache__/ec.cpython-312.pyc,,
cryptography/hazmat/backends/openssl/__pycache__/rsa.cpython-312.pyc,,
cryptography/hazmat/backends/openssl/__pycache__/utils.cpython-312.pyc,,
cryptography/hazmat/backends/openssl/aead.py,sha256=s3zXcVQf0COIOuOzI8usebWpznGnyZ7GhnmlJYu7QXA,15967
cryptography/hazmat/backends/openssl/backend.py,sha256=491FCrjeOG7S9bXskUosirXFP84ntwAQ-U0BxcibtqM,73321
cryptography/hazmat/backends/openssl/ciphers.py,sha256=lxWrvnufudsDI2bpwNs2c8XLILbAE2j2rMSD1nhnPVg,10358
cryptography/hazmat/backends/openssl/cmac.py,sha256=pHgQOIRfR4cIDa5ltcKFtgjqPTXbOLyRQmmqv9JlbUk,3035
cryptography/hazmat/backends/openssl/decode_asn1.py,sha256=kz6gys8wuJhrx4QyU6enYx7UatNHr0LB3TI1jH3oQ54,1148
cryptography/hazmat/backends/openssl/ec.py,sha256=GKzh3mZKvgsM1jqM88-4XikHHalpV-Efyskclt8yxYg,11474
cryptography/hazmat/backends/openssl/rsa.py,sha256=P_ak-2zvA6VBt_P0ldzTSCUkcjo2GhYt_HLn8CVvWtE,21825
cryptography/hazmat/backends/openssl/utils.py,sha256=UoguO26QzwN4lsMAltsIrgAlbi3SOeSrexZs1-QPNu8,2190
cryptography/hazmat/bindings/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180
cryptography/hazmat/bindings/__pycache__/__init__.cpython-312.pyc,,
cryptography/hazmat/bindings/_rust.abi3.so,sha256=qkbrd72TN7vk0ivAz_VE-ZefNyDxCLwLSiAzhgMF8-Q,13787648
cryptography/hazmat/bindings/_rust/__init__.pyi,sha256=IumK7zP9Ko3HjLLb5hwZiY2rbfmfsuyTZLLcHOMvSdk,981
cryptography/hazmat/bindings/_rust/_openssl.pyi,sha256=mpNJLuYLbCVrd5i33FBTmWwL_55Dw7JPkSLlSX9Q7oI,230
cryptography/hazmat/bindings/_rust/asn1.pyi,sha256=9CyI-grOsLQB_hfnhJPoG9dNOdJ7Zg6B0iUpzCowh44,592
cryptography/hazmat/bindings/_rust/exceptions.pyi,sha256=exXr2xw_0pB1kk93cYbM3MohbzoUkjOms1ZMUi0uQZE,640
cryptography/hazmat/bindings/_rust/ocsp.pyi,sha256=RzVaLkY0y9L8W8opAL_uVD8bySKxP23pSQtEbLOStXI,905
cryptography/hazmat/bindings/_rust/openssl/__init__.pyi,sha256=j764U4RRBZbDuOfjQxRqU7rCf74kgM-3AnTIjLdRy3E,970
cryptography/hazmat/bindings/_rust/openssl/dh.pyi,sha256=0FVY1t5qM9HV_ZKDIcdJI2a72i1fHKyTvYIJb5UnH4M,896
cryptography/hazmat/bindings/_rust/openssl/dsa.pyi,sha256=43in4PCsm2kz_H7RQFLBKqhDsUmb4yWop6dpYeVDg-4,764
cryptography/hazmat/bindings/_rust/openssl/ed25519.pyi,sha256=E2GXAgibfRGqKxskH8MfZI8gHFoMJJOTjG7Elg2gOww,629
cryptography/hazmat/bindings/_rust/openssl/ed448.pyi,sha256=pk_kx5Biq8O53d2joOT-cXuwCrbFPicV7iaqYdeiIAI,603
cryptography/hazmat/bindings/_rust/openssl/hashes.pyi,sha256=J8HoN0GdtPcjRAfNHr5Elva_nkmQfq63L75_z9dd8Uc,573
cryptography/hazmat/bindings/_rust/openssl/hmac.pyi,sha256=ZmLJ73pmxcZFC1XosWEiXMRYtvJJor3ZLdCQOJu85Cw,662
cryptography/hazmat/bindings/_rust/openssl/kdf.pyi,sha256=wPS5c7NLspM2632II0I4iH1RSxZvSRtBOVqmpyQATfk,544
cryptography/hazmat/bindings/_rust/openssl/poly1305.pyi,sha256=9iogF7Q4i81IkOS-IMXp6HvxFF_3cNy_ucrAjVQnn14,540
cryptography/hazmat/bindings/_rust/openssl/x25519.pyi,sha256=-1F5QDZfrdhmDLKTeSERuuDUHBTV-EhxIYk9mjpwcG4,616
cryptography/hazmat/bindings/_rust/openssl/x448.pyi,sha256=SdL4blscYBEvuWY4SuNAY1s5zFaGj38eQ-bulVBZvFg,590
cryptography/hazmat/bindings/_rust/pkcs7.pyi,sha256=VkTC78wjJgb_qrboOYIFPuFZ3W46zsr6zsxnlrOMwao,460
cryptography/hazmat/bindings/_rust/x509.pyi,sha256=j6AbXBZSXeJHLSrXnaapbiPfle-znfk9uJUa_zqxgy4,1878
cryptography/hazmat/bindings/openssl/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180
cryptography/hazmat/bindings/openssl/__pycache__/__init__.cpython-312.pyc,,
cryptography/hazmat/bindings/openssl/__pycache__/_conditional.cpython-312.pyc,,
cryptography/hazmat/bindings/openssl/__pycache__/binding.cpython-312.pyc,,
cryptography/hazmat/bindings/openssl/_conditional.py,sha256=DeECq7AKguhs390ZmxgItdqPLzyrKGJk-3KlHJMkXoY,9098
cryptography/hazmat/bindings/openssl/binding.py,sha256=0x3kzvq2grHu4gbbgEIzEVrX6unp71EEs1hx0o-uuOM,6696
cryptography/hazmat/primitives/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180
cryptography/hazmat/primitives/__pycache__/__init__.cpython-312.pyc,,
cryptography/hazmat/primitives/__pycache__/_asymmetric.cpython-312.pyc,,
cryptography/hazmat/primitives/__pycache__/_cipheralgorithm.cpython-312.pyc,,
cryptography/hazmat/primitives/__pycache__/_serialization.cpython-312.pyc,,
cryptography/hazmat/primitives/__pycache__/cmac.cpython-312.pyc,,
cryptography/hazmat/primitives/__pycache__/constant_time.cpython-312.pyc,,
cryptography/hazmat/primitives/__pycache__/hashes.cpython-312.pyc,,
cryptography/hazmat/primitives/__pycache__/hmac.cpython-312.pyc,,
cryptography/hazmat/primitives/__pycache__/keywrap.cpython-312.pyc,,
cryptography/hazmat/primitives/__pycache__/padding.cpython-312.pyc,,
cryptography/hazmat/primitives/__pycache__/poly1305.cpython-312.pyc,,
cryptography/hazmat/primitives/_asymmetric.py,sha256=RhgcouUB6HTiFDBrR1LxqkMjpUxIiNvQ1r_zJjRG6qQ,532
cryptography/hazmat/primitives/_cipheralgorithm.py,sha256=7LPkpw-DrgyvmBMUjvXeBvojVZPtXhFgfelUftnxPGw,1093
cryptography/hazmat/primitives/_serialization.py,sha256=U0DU0ZzOLJppCQsh9EJH6vGYoHotBolfNyRyx3wr1l0,5216
cryptography/hazmat/primitives/asymmetric/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180
cryptography/hazmat/primitives/asymmetric/__pycache__/__init__.cpython-312.pyc,,
cryptography/hazmat/primitives/asymmetric/__pycache__/dh.cpython-312.pyc,,
cryptography/hazmat/primitives/asymmetric/__pycache__/dsa.cpython-312.pyc,,
cryptography/hazmat/primitives/asymmetric/__pycache__/ec.cpython-312.pyc,,
cryptography/hazmat/primitives/asymmetric/__pycache__/ed25519.cpython-312.pyc,,
cryptography/hazmat/primitives/asymmetric/__pycache__/ed448.cpython-312.pyc,,
cryptography/hazmat/primitives/asymmetric/__pycache__/padding.cpython-312.pyc,,
cryptography/hazmat/primitives/asymmetric/__pycache__/rsa.cpython-312.pyc,,
cryptography/hazmat/primitives/asymmetric/__pycache__/types.cpython-312.pyc,,
cryptography/hazmat/primitives/asymmetric/__pycache__/utils.cpython-312.pyc,,
cryptography/hazmat/primitives/asymmetric/__pycache__/x25519.cpython-312.pyc,,
cryptography/hazmat/primitives/asymmetric/__pycache__/x448.cpython-312.pyc,,
cryptography/hazmat/primitives/asymmetric/dh.py,sha256=XsthqjvExWWOyePs0PxT4MestU9QeGuL-Hx7fWzTguQ,7013
cryptography/hazmat/primitives/asymmetric/dsa.py,sha256=aaTY7EMLTzaWs-jhOMpMAfa2GnfhoqsCKZPKAs35L40,8263
cryptography/hazmat/primitives/asymmetric/ec.py,sha256=L1WoWPYevJ6Pk2T1etbnHbvr6AeXFccckPNNiyUVoNM,12867
cryptography/hazmat/primitives/asymmetric/ed25519.py,sha256=wl2NCCP4bZdUCqZGMkOOd6eaxjU1vXPAIwzUuFPE__w,3489
cryptography/hazmat/primitives/asymmetric/ed448.py,sha256=2MCJ87qcyCCsjj0OvrfWFxPX8CgaC3d0mr78bt_vDIY,3440
cryptography/hazmat/primitives/asymmetric/padding.py,sha256=6p8Ojiax_2tcm1aTnNOAkinriCJ67nSTxugg34f-hzk,2717
cryptography/hazmat/primitives/asymmetric/rsa.py,sha256=vxvOryF00WL8mZQv9bs_-LlgobYLiPYfX246_j_ICtA,11623
cryptography/hazmat/primitives/asymmetric/types.py,sha256=LnsOJym-wmPUJ7Knu_7bCNU3kIiELCd6krOaW_JU08I,2996
cryptography/hazmat/primitives/asymmetric/utils.py,sha256=DPTs6T4F-UhwzFQTh-1fSEpQzazH2jf2xpIro3ItF4o,790
cryptography/hazmat/primitives/asymmetric/x25519.py,sha256=8YJAIaU7w09jTnPU_cLwd98fMHIECgfA3R7P3Ktv-CA,3437
cryptography/hazmat/primitives/asymmetric/x448.py,sha256=y-Yj-rgciiuH1g6FJLZftvAqgOnzT1on9gCisru7vBc,3358
cryptography/hazmat/primitives/ciphers/__init__.py,sha256=kAyb9NSczqTrCWj0HEoVp3Cxo7AHW8ibPFQz-ZHsOtA,680
cryptography/hazmat/primitives/ciphers/__pycache__/__init__.cpython-312.pyc,,
cryptography/hazmat/primitives/ciphers/__pycache__/aead.cpython-312.pyc,,
cryptography/hazmat/primitives/ciphers/__pycache__/algorithms.cpython-312.pyc,,
cryptography/hazmat/primitives/ciphers/__pycache__/base.cpython-312.pyc,,
cryptography/hazmat/primitives/ciphers/__pycache__/modes.cpython-312.pyc,,
cryptography/hazmat/primitives/ciphers/aead.py,sha256=DY7qKmbt0bgB1GB7i-fQrbjEfwFG8wfUfVHvc7DA2YY,12067
cryptography/hazmat/primitives/ciphers/algorithms.py,sha256=SCDskXc9xyzsz0NjND6tAX8t17jYTbUB2sww1ub9GuY,5000
cryptography/hazmat/primitives/ciphers/base.py,sha256=PqNDltHdDxBhLhgtfO707H07sSOLA6ZVwjZlalOJTAo,8286
cryptography/hazmat/primitives/ciphers/modes.py,sha256=YJQXi4PJGIIZ1rgchbMH47Ed-YiUcUSjLPEOuV8rgGE,8361
cryptography/hazmat/primitives/cmac.py,sha256=YaeWksCYaqVoqf9zHRThAJ95ZvPUioAOfXwZUWiPzD8,2065
cryptography/hazmat/primitives/constant_time.py,sha256=xdunWT0nf8OvKdcqUhhlFKayGp4_PgVJRU2W1wLSr_A,422
cryptography/hazmat/primitives/hashes.py,sha256=VJpnbK2sQN2bEqwRTOoCB4nuxYx5CnqFiScMJNyhsrI,5115
cryptography/hazmat/primitives/hmac.py,sha256=RpB3z9z5skirCQrm7zQbtnp9pLMnAjrlTUvKqF5aDDc,423
cryptography/hazmat/primitives/kdf/__init__.py,sha256=4XibZnrYq4hh5xBjWiIXzaYW6FKx8hPbVaa_cB9zS64,750
cryptography/hazmat/primitives/kdf/__pycache__/__init__.cpython-312.pyc,,
cryptography/hazmat/primitives/kdf/__pycache__/concatkdf.cpython-312.pyc,,
cryptography/hazmat/primitives/kdf/__pycache__/hkdf.cpython-312.pyc,,
cryptography/hazmat/primitives/kdf/__pycache__/kbkdf.cpython-312.pyc,,
cryptography/hazmat/primitives/kdf/__pycache__/pbkdf2.cpython-312.pyc,,
cryptography/hazmat/primitives/kdf/__pycache__/scrypt.cpython-312.pyc,,
cryptography/hazmat/primitives/kdf/__pycache__/x963kdf.cpython-312.pyc,,
cryptography/hazmat/primitives/kdf/concatkdf.py,sha256=wGYWgILmxQWnCPkbAH1RpsCHrdKgmYrCEVrCvXVGCo8,3726
cryptography/hazmat/primitives/kdf/hkdf.py,sha256=bBYr1yUIbOlJIEd6ZoLYcXm_yd-H54An9kNcFIJ3kbo,3045
cryptography/hazmat/primitives/kdf/kbkdf.py,sha256=qPL6TmDUmkus6CW3ylTJfG8N8egZhjQOyXrSyLLpnak,9232
cryptography/hazmat/primitives/kdf/pbkdf2.py,sha256=1CCH9Q5gXUpnZd3c8d8bCXgpJ3s2hZZGBnuG7FH1waM,2012
cryptography/hazmat/primitives/kdf/scrypt.py,sha256=4QONhjxA_ZtuQtQ7QV3FnbB8ftrFnM52B4HPfV7hFys,2354
cryptography/hazmat/primitives/kdf/x963kdf.py,sha256=S3B4Enk2Yxj9txpairotaXkavuZqQ6t6MB5a28U02ek,2002
cryptography/hazmat/primitives/keywrap.py,sha256=Qb_N2V_E1Dti5VtDXnrtTYtJDZ8aMpur8BY5yxrXclg,5678
cryptography/hazmat/primitives/padding.py,sha256=8pCeLaqwQPSGf51j06U5C_INvgYWVWPv3m9mxUERGmU,6242
cryptography/hazmat/primitives/poly1305.py,sha256=P5EPQV-RB_FJPahpg01u0Ts4S_PnAmsroxIGXbGeRRo,355
cryptography/hazmat/primitives/serialization/__init__.py,sha256=6ZlL3EicEzoGdMOat86w8y_XICCnlHdCjFI97rMxRDg,1653
cryptography/hazmat/primitives/serialization/__pycache__/__init__.cpython-312.pyc,,
cryptography/hazmat/primitives/serialization/__pycache__/base.cpython-312.pyc,,
cryptography/hazmat/primitives/serialization/__pycache__/pkcs12.cpython-312.pyc,,
cryptography/hazmat/primitives/serialization/__pycache__/pkcs7.cpython-312.pyc,,
cryptography/hazmat/primitives/serialization/__pycache__/ssh.cpython-312.pyc,,
cryptography/hazmat/primitives/serialization/base.py,sha256=VZjIIqnbb-x38qpg2Wf_IxZvqjsgcEzNQtQoeJiQfpw,1986
cryptography/hazmat/primitives/serialization/pkcs12.py,sha256=NOzFxArlZhdjfgfugs8nERho1eyaxujXKGUKINchek4,6767
cryptography/hazmat/primitives/serialization/pkcs7.py,sha256=BCvlPubXQOunb76emISK89PX9qXcBQI2CRPNe85VTZk,7392
cryptography/hazmat/primitives/serialization/ssh.py,sha256=aLCYLPY3W1kerfCwadn5aYNzwcwIQl9c7RcsB8CKfuc,51027
cryptography/hazmat/primitives/twofactor/__init__.py,sha256=tmMZGB-g4IU1r7lIFqASU019zr0uPp_wEBYcwdDCKCA,258
cryptography/hazmat/primitives/twofactor/__pycache__/__init__.cpython-312.pyc,,
cryptography/hazmat/primitives/twofactor/__pycache__/hotp.cpython-312.pyc,,
cryptography/hazmat/primitives/twofactor/__pycache__/totp.cpython-312.pyc,,
cryptography/hazmat/primitives/twofactor/hotp.py,sha256=uZ0PSKYDZOL0aAobiw1Zd2HD0W2Ei1niUNC2v7Tnpc8,3010
cryptography/hazmat/primitives/twofactor/totp.py,sha256=cMbWlAapOM1SfezEx9MoMHpCW9ingNXCg6OsGv4T8jc,1473
cryptography/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
cryptography/utils.py,sha256=DfdXc9M4kmAboE2a0pPiISt5LVnW-jhhXURy8nDHae0,4018
cryptography/x509/__init__.py,sha256=DzZE8bR-3iiVi3Wrcq7-g5Pm64fCr5aqsTNyi_rjJu0,7870
cryptography/x509/__pycache__/__init__.cpython-312.pyc,,
cryptography/x509/__pycache__/base.cpython-312.pyc,,
cryptography/x509/__pycache__/certificate_transparency.cpython-312.pyc,,
cryptography/x509/__pycache__/extensions.cpython-312.pyc,,
cryptography/x509/__pycache__/general_name.cpython-312.pyc,,
cryptography/x509/__pycache__/name.cpython-312.pyc,,
cryptography/x509/__pycache__/ocsp.cpython-312.pyc,,
cryptography/x509/__pycache__/oid.cpython-312.pyc,,
cryptography/x509/base.py,sha256=FbS6EFE3uJ3O-zbFPRjsO6DckrNSN5TJNZMJcnzUWFQ,35677
cryptography/x509/certificate_transparency.py,sha256=6HvzAD0dlSQVxy6tnDhGj0-pisp1MaJ9bxQNRr92inI,2261
cryptography/x509/extensions.py,sha256=rFEcfZiFvcONs1ot03d68dAMK2U75w0s3g9mhyWBRcI,68365
cryptography/x509/general_name.py,sha256=zm8GxNgVJuLD6rN488c5zdHhxp5gUxeRzw8enZMWDQ0,7868
cryptography/x509/name.py,sha256=aZ2dpsinhkza3eTxT1vNmWuFMQ7fmcA0hs4npgnkf9Q,14855
cryptography/x509/ocsp.py,sha256=48iW7xbZ9mZLELSEl7Wwjb4vYhOQ3KcNtqgKsAb_UD0,18534
cryptography/x509/oid.py,sha256=fFosjGsnIB_w_0YrzZv1ggkSVwZl7xmY0zofKZNZkDA,829

View File

@@ -1,5 +0,0 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.42.0)
Root-Is-Purelib: false
Tag: cp37-abi3-manylinux_2_28_x86_64

View File

@@ -1,18 +1,8 @@
Metadata-Version: 2.1
Metadata-Version: 2.4
Name: cryptography
Version: 41.0.7
Summary: cryptography is a package which provides cryptographic recipes and primitives to Python developers.
Author-email: The Python Cryptographic Authority and individual contributors <cryptography-dev@python.org>
License: Apache-2.0 OR BSD-3-Clause
Project-URL: homepage, https://github.com/pyca/cryptography
Project-URL: documentation, https://cryptography.io/
Project-URL: source, https://github.com/pyca/cryptography/
Project-URL: issues, https://github.com/pyca/cryptography/issues
Project-URL: changelog, https://cryptography.io/en/latest/changelog/
Version: 46.0.3
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: Apache Software License
Classifier: License :: OSI Approved :: BSD License
Classifier: Natural Language :: English
Classifier: Operating System :: MacOS :: MacOS X
Classifier: Operating System :: POSIX
@@ -22,46 +12,62 @@ Classifier: Operating System :: Microsoft :: Windows
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3 :: Only
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: 3.12
Classifier: Programming Language :: Python :: 3.13
Classifier: Programming Language :: Python :: 3.14
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: PyPy
Classifier: Programming Language :: Python :: Free Threading :: 3 - Stable
Classifier: Topic :: Security :: Cryptography
Requires-Python: >=3.7
Description-Content-Type: text/x-rst
Requires-Dist: cffi>=1.14 ; python_full_version == '3.8.*' and platform_python_implementation != 'PyPy'
Requires-Dist: cffi>=2.0.0 ; python_full_version >= '3.9' and platform_python_implementation != 'PyPy'
Requires-Dist: typing-extensions>=4.13.2 ; python_full_version < '3.11'
Requires-Dist: bcrypt>=3.1.5 ; extra == 'ssh'
Requires-Dist: nox[uv]>=2024.4.15 ; extra == 'nox'
Requires-Dist: cryptography-vectors==46.0.3 ; extra == 'test'
Requires-Dist: pytest>=7.4.0 ; extra == 'test'
Requires-Dist: pytest-benchmark>=4.0 ; extra == 'test'
Requires-Dist: pytest-cov>=2.10.1 ; extra == 'test'
Requires-Dist: pytest-xdist>=3.5.0 ; extra == 'test'
Requires-Dist: pretend>=0.7 ; extra == 'test'
Requires-Dist: certifi>=2024 ; extra == 'test'
Requires-Dist: pytest-randomly ; extra == 'test-randomorder'
Requires-Dist: sphinx>=5.3.0 ; extra == 'docs'
Requires-Dist: sphinx-rtd-theme>=3.0.0 ; extra == 'docs'
Requires-Dist: sphinx-inline-tabs ; extra == 'docs'
Requires-Dist: pyenchant>=3 ; extra == 'docstest'
Requires-Dist: readme-renderer>=30.0 ; extra == 'docstest'
Requires-Dist: sphinxcontrib-spelling>=7.3.1 ; extra == 'docstest'
Requires-Dist: build>=1.0.0 ; extra == 'sdist'
Requires-Dist: ruff>=0.11.11 ; extra == 'pep8test'
Requires-Dist: mypy>=1.14 ; extra == 'pep8test'
Requires-Dist: check-sdist ; extra == 'pep8test'
Requires-Dist: click>=8.0.1 ; extra == 'pep8test'
Provides-Extra: ssh
Provides-Extra: nox
Provides-Extra: test
Provides-Extra: test-randomorder
Provides-Extra: docs
Provides-Extra: docstest
Provides-Extra: sdist
Provides-Extra: pep8test
License-File: LICENSE
License-File: LICENSE.APACHE
License-File: LICENSE.BSD
Requires-Dist: cffi >=1.12
Provides-Extra: docs
Requires-Dist: sphinx >=5.3.0 ; extra == 'docs'
Requires-Dist: sphinx-rtd-theme >=1.1.1 ; extra == 'docs'
Provides-Extra: docstest
Requires-Dist: pyenchant >=1.6.11 ; extra == 'docstest'
Requires-Dist: twine >=1.12.0 ; extra == 'docstest'
Requires-Dist: sphinxcontrib-spelling >=4.0.1 ; extra == 'docstest'
Provides-Extra: nox
Requires-Dist: nox ; extra == 'nox'
Provides-Extra: pep8test
Requires-Dist: black ; extra == 'pep8test'
Requires-Dist: ruff ; extra == 'pep8test'
Requires-Dist: mypy ; extra == 'pep8test'
Requires-Dist: check-sdist ; extra == 'pep8test'
Provides-Extra: sdist
Requires-Dist: build ; extra == 'sdist'
Provides-Extra: ssh
Requires-Dist: bcrypt >=3.1.5 ; extra == 'ssh'
Provides-Extra: test
Requires-Dist: pytest >=6.2.0 ; extra == 'test'
Requires-Dist: pytest-benchmark ; extra == 'test'
Requires-Dist: pytest-cov ; extra == 'test'
Requires-Dist: pytest-xdist ; extra == 'test'
Requires-Dist: pretend ; extra == 'test'
Provides-Extra: test-randomorder
Requires-Dist: pytest-randomly ; extra == 'test-randomorder'
Summary: cryptography is a package which provides cryptographic recipes and primitives to Python developers.
Author-email: The Python Cryptographic Authority and individual contributors <cryptography-dev@python.org>
License-Expression: Apache-2.0 OR BSD-3-Clause
Requires-Python: >=3.8, !=3.9.0, !=3.9.1
Description-Content-Type: text/x-rst; charset=UTF-8
Project-URL: homepage, https://github.com/pyca/cryptography
Project-URL: documentation, https://cryptography.io/
Project-URL: source, https://github.com/pyca/cryptography/
Project-URL: issues, https://github.com/pyca/cryptography/issues
Project-URL: changelog, https://cryptography.io/en/latest/changelog/
pyca/cryptography
=================
@@ -74,13 +80,12 @@ pyca/cryptography
:target: https://cryptography.io
:alt: Latest Docs
.. image:: https://github.com/pyca/cryptography/workflows/CI/badge.svg?branch=main
:target: https://github.com/pyca/cryptography/actions?query=workflow%3ACI+branch%3Amain
.. image:: https://github.com/pyca/cryptography/actions/workflows/ci.yml/badge.svg
:target: https://github.com/pyca/cryptography/actions/workflows/ci.yml?query=branch%3Amain
``cryptography`` is a package which provides cryptographic recipes and
primitives to Python developers. Our goal is for it to be your "cryptographic
standard library". It supports Python 3.7+ and PyPy3 7.3.10+.
standard library". It supports Python 3.8+ and PyPy3 7.3.11+.
``cryptography`` includes both high level recipes and low level interfaces to
common cryptographic algorithms such as symmetric ciphers, message digests, and
@@ -131,3 +136,4 @@ documentation.
.. _`issue tracker`: https://github.com/pyca/cryptography/issues
.. _`cryptography-dev`: https://mail.python.org/mailman/listinfo/cryptography-dev
.. _`security reporting`: https://cryptography.io/en/latest/security/

View File

@@ -0,0 +1,180 @@
cryptography-46.0.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
cryptography-46.0.3.dist-info/METADATA,sha256=bx2LyCEmOVUC8FH5hsGEZewWPiZoIIYTq0hM9mu9r4s,5748
cryptography-46.0.3.dist-info/RECORD,,
cryptography-46.0.3.dist-info/WHEEL,sha256=jkxrJemT4jZpYSr-u9xPalWqoow8benNmiXfjKXLlJw,108
cryptography-46.0.3.dist-info/licenses/LICENSE,sha256=Pgx8CRqUi4JTO6mP18u0BDLW8amsv4X1ki0vmak65rs,197
cryptography-46.0.3.dist-info/licenses/LICENSE.APACHE,sha256=qsc7MUj20dcRHbyjIJn2jSbGRMaBOuHk8F9leaomY_4,11360
cryptography-46.0.3.dist-info/licenses/LICENSE.BSD,sha256=YCxMdILeZHndLpeTzaJ15eY9dz2s0eymiSMqtwCPtPs,1532
cryptography/__about__.py,sha256=QCLxNH_Abbygdc9RQGpUmrK14Wp3Cl_SEiB2byLwyxo,445
cryptography/__init__.py,sha256=mthuUrTd4FROCpUYrTIqhjz6s6T9djAZrV7nZ1oMm2o,364
cryptography/__pycache__/__about__.cpython-312.pyc,,
cryptography/__pycache__/__init__.cpython-312.pyc,,
cryptography/__pycache__/exceptions.cpython-312.pyc,,
cryptography/__pycache__/fernet.cpython-312.pyc,,
cryptography/__pycache__/utils.cpython-312.pyc,,
cryptography/exceptions.py,sha256=835EWILc2fwxw-gyFMriciC2SqhViETB10LBSytnDIc,1087
cryptography/fernet.py,sha256=3Cvxkh0KJSbX8HbnCHu4wfCW7U0GgfUA3v_qQ8a8iWc,6963
cryptography/hazmat/__init__.py,sha256=5IwrLWrVp0AjEr_4FdWG_V057NSJGY_W4egNNsuct0g,455
cryptography/hazmat/__pycache__/__init__.cpython-312.pyc,,
cryptography/hazmat/__pycache__/_oid.cpython-312.pyc,,
cryptography/hazmat/_oid.py,sha256=p8ThjwJB56Ci_rAIrjyJ1f8VjgD6e39es2dh8JIUBOw,17240
cryptography/hazmat/asn1/__init__.py,sha256=hS_EWx3wVvZzfbCcNV8hzcDnyMM8H-BhIoS1TipUosk,293
cryptography/hazmat/asn1/__pycache__/__init__.cpython-312.pyc,,
cryptography/hazmat/asn1/__pycache__/asn1.cpython-312.pyc,,
cryptography/hazmat/asn1/asn1.py,sha256=eMEThEXa19LQjcyVofgHsW6tsZnjp3ddH7bWkkcxfLM,3860
cryptography/hazmat/backends/__init__.py,sha256=O5jvKFQdZnXhKeqJ-HtulaEL9Ni7mr1mDzZY5kHlYhI,361
cryptography/hazmat/backends/__pycache__/__init__.cpython-312.pyc,,
cryptography/hazmat/backends/openssl/__init__.py,sha256=p3jmJfnCag9iE5sdMrN6VvVEu55u46xaS_IjoI0SrmA,305
cryptography/hazmat/backends/openssl/__pycache__/__init__.cpython-312.pyc,,
cryptography/hazmat/backends/openssl/__pycache__/backend.cpython-312.pyc,,
cryptography/hazmat/backends/openssl/backend.py,sha256=tV5AxBoFJ2GfA0DMWSY-0TxQJrpQoexzI9R4Kybb--4,10215
cryptography/hazmat/bindings/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180
cryptography/hazmat/bindings/__pycache__/__init__.cpython-312.pyc,,
cryptography/hazmat/bindings/_rust.abi3.so,sha256=4bUN0J2p_ZQMdgmAc9eL0VMj_lgbTsHUmX4doekVIJ4,12955672
cryptography/hazmat/bindings/_rust/__init__.pyi,sha256=KhqLhXFPArPzzJ7DYO9Fl8FoXB_BagAd_r4Dm_Ze9Xo,1257
cryptography/hazmat/bindings/_rust/_openssl.pyi,sha256=mpNJLuYLbCVrd5i33FBTmWwL_55Dw7JPkSLlSX9Q7oI,230
cryptography/hazmat/bindings/_rust/asn1.pyi,sha256=BrGjC8J6nwuS-r3EVcdXJB8ndotfY9mbQYOfpbPG0HA,354
cryptography/hazmat/bindings/_rust/declarative_asn1.pyi,sha256=2ECFmYue1EPkHEE2Bm7aLwkjB0mSUTpr23v9MN4pri4,892
cryptography/hazmat/bindings/_rust/exceptions.pyi,sha256=exXr2xw_0pB1kk93cYbM3MohbzoUkjOms1ZMUi0uQZE,640
cryptography/hazmat/bindings/_rust/ocsp.pyi,sha256=VPVWuKHI9EMs09ZLRYAGvR0Iz0mCMmEzXAkgJHovpoM,4020
cryptography/hazmat/bindings/_rust/openssl/__init__.pyi,sha256=iOAMDyHoNwwCSZfZzuXDr64g4GpGUeDgEN-LjXqdrBM,1522
cryptography/hazmat/bindings/_rust/openssl/aead.pyi,sha256=4Nddw6-ynzIB3w2W86WvkGKTLlTDk_6F5l54RHCuy3E,2688
cryptography/hazmat/bindings/_rust/openssl/ciphers.pyi,sha256=LhPzHWSXJq4grAJXn6zSvSSdV-aYIIscHDwIPlJGGPs,1315
cryptography/hazmat/bindings/_rust/openssl/cmac.pyi,sha256=nPH0X57RYpsAkRowVpjQiHE566ThUTx7YXrsadmrmHk,564
cryptography/hazmat/bindings/_rust/openssl/dh.pyi,sha256=Z3TC-G04-THtSdAOPLM1h2G7ml5bda1ElZUcn5wpuhk,1564
cryptography/hazmat/bindings/_rust/openssl/dsa.pyi,sha256=qBtkgj2albt2qFcnZ9UDrhzoNhCVO7HTby5VSf1EXMI,1299
cryptography/hazmat/bindings/_rust/openssl/ec.pyi,sha256=zJy0pRa5n-_p2dm45PxECB_-B6SVZyNKfjxFDpPqT38,1691
cryptography/hazmat/bindings/_rust/openssl/ed25519.pyi,sha256=VXfXd5G6hUivg399R1DYdmW3eTb0EebzDTqjRC2gaRw,532
cryptography/hazmat/bindings/_rust/openssl/ed448.pyi,sha256=Yx49lqdnjsD7bxiDV1kcaMrDktug5evi5a6zerMiy2s,514
cryptography/hazmat/bindings/_rust/openssl/hashes.pyi,sha256=OWZvBx7xfo_HJl41Nc--DugVyCVPIprZ3HlOPTSWH9g,984
cryptography/hazmat/bindings/_rust/openssl/hmac.pyi,sha256=BXZn7NDjL3JAbYW0SQ8pg1iyC5DbQXVhUAiwsi8DFR8,702
cryptography/hazmat/bindings/_rust/openssl/kdf.pyi,sha256=xXfFBb9QehHfDtEaxV_65Z0YK7NquOVIChpTLkgAs_k,2029
cryptography/hazmat/bindings/_rust/openssl/keys.pyi,sha256=teIt8M6ZEMJrn4s3W0UnW0DZ-30Jd68WnSsKKG124l0,912
cryptography/hazmat/bindings/_rust/openssl/poly1305.pyi,sha256=_SW9NtQ5FDlAbdclFtWpT4lGmxKIKHpN-4j8J2BzYfQ,585
cryptography/hazmat/bindings/_rust/openssl/rsa.pyi,sha256=2OQCNSXkxgc-3uw1xiCCloIQTV6p9_kK79Yu0rhZgPc,1364
cryptography/hazmat/bindings/_rust/openssl/x25519.pyi,sha256=ewn4GpQyb7zPwE-ni7GtyQgMC0A1mLuqYsSyqv6nI_s,523
cryptography/hazmat/bindings/_rust/openssl/x448.pyi,sha256=juTZTmli8jO_5Vcufg-vHvx_tCyezmSLIh_9PU3TczI,505
cryptography/hazmat/bindings/_rust/pkcs12.pyi,sha256=vEEd5wDiZvb8ZGFaziLCaWLzAwoG_tvPUxLQw5_uOl8,1605
cryptography/hazmat/bindings/_rust/pkcs7.pyi,sha256=txGBJijqZshEcqra6byPNbnisIdlxzOSIHP2hl9arPs,1601
cryptography/hazmat/bindings/_rust/test_support.pyi,sha256=PPhld-WkO743iXFPebeG0LtgK0aTzGdjcIsay1Gm5GE,757
cryptography/hazmat/bindings/_rust/x509.pyi,sha256=n9X0IQ6ICbdIi-ExdCFZoBgeY6njm3QOVAVZwDQdnbk,9784
cryptography/hazmat/bindings/openssl/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180
cryptography/hazmat/bindings/openssl/__pycache__/__init__.cpython-312.pyc,,
cryptography/hazmat/bindings/openssl/__pycache__/_conditional.cpython-312.pyc,,
cryptography/hazmat/bindings/openssl/__pycache__/binding.cpython-312.pyc,,
cryptography/hazmat/bindings/openssl/_conditional.py,sha256=DMOpA_XN4l70zTc5_J9DpwlbQeUBRTWpfIJ4yRIn1-U,5791
cryptography/hazmat/bindings/openssl/binding.py,sha256=x8eocEmukO4cm7cHqfVmOoYY7CCXdoF1v1WhZQt9neo,4610
cryptography/hazmat/decrepit/__init__.py,sha256=wHCbWfaefa-fk6THSw9th9fJUsStJo7245wfFBqmduA,216
cryptography/hazmat/decrepit/__pycache__/__init__.cpython-312.pyc,,
cryptography/hazmat/decrepit/ciphers/__init__.py,sha256=wHCbWfaefa-fk6THSw9th9fJUsStJo7245wfFBqmduA,216
cryptography/hazmat/decrepit/ciphers/__pycache__/__init__.cpython-312.pyc,,
cryptography/hazmat/decrepit/ciphers/__pycache__/algorithms.cpython-312.pyc,,
cryptography/hazmat/decrepit/ciphers/algorithms.py,sha256=YrKgHS4MfwWaMmPBYRymRRlC0phwWp9ycICFezeJPGk,2595
cryptography/hazmat/primitives/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180
cryptography/hazmat/primitives/__pycache__/__init__.cpython-312.pyc,,
cryptography/hazmat/primitives/__pycache__/_asymmetric.cpython-312.pyc,,
cryptography/hazmat/primitives/__pycache__/_cipheralgorithm.cpython-312.pyc,,
cryptography/hazmat/primitives/__pycache__/_serialization.cpython-312.pyc,,
cryptography/hazmat/primitives/__pycache__/cmac.cpython-312.pyc,,
cryptography/hazmat/primitives/__pycache__/constant_time.cpython-312.pyc,,
cryptography/hazmat/primitives/__pycache__/hashes.cpython-312.pyc,,
cryptography/hazmat/primitives/__pycache__/hmac.cpython-312.pyc,,
cryptography/hazmat/primitives/__pycache__/keywrap.cpython-312.pyc,,
cryptography/hazmat/primitives/__pycache__/padding.cpython-312.pyc,,
cryptography/hazmat/primitives/__pycache__/poly1305.cpython-312.pyc,,
cryptography/hazmat/primitives/_asymmetric.py,sha256=RhgcouUB6HTiFDBrR1LxqkMjpUxIiNvQ1r_zJjRG6qQ,532
cryptography/hazmat/primitives/_cipheralgorithm.py,sha256=Eh3i7lwedHfi0eLSsH93PZxQKzY9I6lkK67vL4V5tOc,1522
cryptography/hazmat/primitives/_serialization.py,sha256=chgPCSF2jxI2Cr5gB-qbWXOvOfupBh4CARS0KAhv9AM,5123
cryptography/hazmat/primitives/asymmetric/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180
cryptography/hazmat/primitives/asymmetric/__pycache__/__init__.cpython-312.pyc,,
cryptography/hazmat/primitives/asymmetric/__pycache__/dh.cpython-312.pyc,,
cryptography/hazmat/primitives/asymmetric/__pycache__/dsa.cpython-312.pyc,,
cryptography/hazmat/primitives/asymmetric/__pycache__/ec.cpython-312.pyc,,
cryptography/hazmat/primitives/asymmetric/__pycache__/ed25519.cpython-312.pyc,,
cryptography/hazmat/primitives/asymmetric/__pycache__/ed448.cpython-312.pyc,,
cryptography/hazmat/primitives/asymmetric/__pycache__/padding.cpython-312.pyc,,
cryptography/hazmat/primitives/asymmetric/__pycache__/rsa.cpython-312.pyc,,
cryptography/hazmat/primitives/asymmetric/__pycache__/types.cpython-312.pyc,,
cryptography/hazmat/primitives/asymmetric/__pycache__/utils.cpython-312.pyc,,
cryptography/hazmat/primitives/asymmetric/__pycache__/x25519.cpython-312.pyc,,
cryptography/hazmat/primitives/asymmetric/__pycache__/x448.cpython-312.pyc,,
cryptography/hazmat/primitives/asymmetric/dh.py,sha256=0v_vEFFz5pQ1QG-FkWDyvgv7IfuVZSH5Q6LyFI5A8rg,3645
cryptography/hazmat/primitives/asymmetric/dsa.py,sha256=Ld_bbbqQFz12dObHxIkzEQzX0SWWP41RLSWkYSaKhqE,4213
cryptography/hazmat/primitives/asymmetric/ec.py,sha256=Vf5ig2PcS3PVnsb5N49Kx1uIkFBJyhg4BWXThDz5cug,12999
cryptography/hazmat/primitives/asymmetric/ed25519.py,sha256=jZW5cs472wXXV3eB0sE1b8w64gdazwwU0_MT5UOTiXs,3700
cryptography/hazmat/primitives/asymmetric/ed448.py,sha256=yAetgn2f2JYf0BO8MapGzXeThsvSMG5LmUCrxVOidAA,3729
cryptography/hazmat/primitives/asymmetric/padding.py,sha256=vQ6l6gOg9HqcbOsvHrSiJRVLdEj9L4m4HkRGYziTyFA,2854
cryptography/hazmat/primitives/asymmetric/rsa.py,sha256=ZnKOo2f34MCCOupC03Y1uR-_jiSG5IrelHEmxaME3D4,8303
cryptography/hazmat/primitives/asymmetric/types.py,sha256=LnsOJym-wmPUJ7Knu_7bCNU3kIiELCd6krOaW_JU08I,2996
cryptography/hazmat/primitives/asymmetric/utils.py,sha256=DPTs6T4F-UhwzFQTh-1fSEpQzazH2jf2xpIro3ItF4o,790
cryptography/hazmat/primitives/asymmetric/x25519.py,sha256=_4nQeZ3yJ3Lg0RpXnaqA-1yt6vbx1F-wzLcaZHwSpeE,3613
cryptography/hazmat/primitives/asymmetric/x448.py,sha256=WKBLtuVfJqiBRro654fGaQAlvsKbqbNkK7c4A_ZCdV0,3642
cryptography/hazmat/primitives/ciphers/__init__.py,sha256=eyEXmjk6_CZXaOPYDr7vAYGXr29QvzgWL2-4CSolLFs,680
cryptography/hazmat/primitives/ciphers/__pycache__/__init__.cpython-312.pyc,,
cryptography/hazmat/primitives/ciphers/__pycache__/aead.cpython-312.pyc,,
cryptography/hazmat/primitives/ciphers/__pycache__/algorithms.cpython-312.pyc,,
cryptography/hazmat/primitives/ciphers/__pycache__/base.cpython-312.pyc,,
cryptography/hazmat/primitives/ciphers/__pycache__/modes.cpython-312.pyc,,
cryptography/hazmat/primitives/ciphers/aead.py,sha256=Fzlyx7w8KYQakzDp1zWgJnIr62zgZrgVh1u2h4exB54,634
cryptography/hazmat/primitives/ciphers/algorithms.py,sha256=Q7ZJwcsx83Mgxv5y7r6CyJKSdsOwC-my-5A67-ma2vw,3407
cryptography/hazmat/primitives/ciphers/base.py,sha256=aBC7HHBBoixebmparVr0UlODs3VD0A7B6oz_AaRjDv8,4253
cryptography/hazmat/primitives/ciphers/modes.py,sha256=20stpwhDtbAvpH0SMf9EDHIciwmTF-JMBUOZ9bU8WiQ,8318
cryptography/hazmat/primitives/cmac.py,sha256=sz_s6H_cYnOvx-VNWdIKhRhe3Ymp8z8J0D3CBqOX3gg,338
cryptography/hazmat/primitives/constant_time.py,sha256=xdunWT0nf8OvKdcqUhhlFKayGp4_PgVJRU2W1wLSr_A,422
cryptography/hazmat/primitives/hashes.py,sha256=M8BrlKB3U6DEtHvWTV5VRjpteHv1kS3Zxm_Bsk04cr8,5184
cryptography/hazmat/primitives/hmac.py,sha256=RpB3z9z5skirCQrm7zQbtnp9pLMnAjrlTUvKqF5aDDc,423
cryptography/hazmat/primitives/kdf/__init__.py,sha256=4XibZnrYq4hh5xBjWiIXzaYW6FKx8hPbVaa_cB9zS64,750
cryptography/hazmat/primitives/kdf/__pycache__/__init__.cpython-312.pyc,,
cryptography/hazmat/primitives/kdf/__pycache__/argon2.cpython-312.pyc,,
cryptography/hazmat/primitives/kdf/__pycache__/concatkdf.cpython-312.pyc,,
cryptography/hazmat/primitives/kdf/__pycache__/hkdf.cpython-312.pyc,,
cryptography/hazmat/primitives/kdf/__pycache__/kbkdf.cpython-312.pyc,,
cryptography/hazmat/primitives/kdf/__pycache__/pbkdf2.cpython-312.pyc,,
cryptography/hazmat/primitives/kdf/__pycache__/scrypt.cpython-312.pyc,,
cryptography/hazmat/primitives/kdf/__pycache__/x963kdf.cpython-312.pyc,,
cryptography/hazmat/primitives/kdf/argon2.py,sha256=UFDNXG0v-rw3DqAQTB1UQAsQC2M5Ejg0k_6OCyhLKus,460
cryptography/hazmat/primitives/kdf/concatkdf.py,sha256=Ua8KoLXXnzgsrAUmHpyKymaPt8aPRP0EHEaBz7QCQ9I,3737
cryptography/hazmat/primitives/kdf/hkdf.py,sha256=M0lAEfRoc4kpp4-nwDj9yB-vNZukIOYEQrUlWsBNn9o,543
cryptography/hazmat/primitives/kdf/kbkdf.py,sha256=oZepvo4evhKkkJQWRDwaPoIbyTaFmDc5NPimxg6lfKg,9165
cryptography/hazmat/primitives/kdf/pbkdf2.py,sha256=1WIwhELR0w8ztTpTu8BrFiYWmK3hUfJq08I79TxwieE,1957
cryptography/hazmat/primitives/kdf/scrypt.py,sha256=XyWUdUUmhuI9V6TqAPOvujCSMGv1XQdg0a21IWCmO-U,590
cryptography/hazmat/primitives/kdf/x963kdf.py,sha256=zLTcF665QFvXX2f8TS7fmBZTteXpFjKahzfjjQcCJyw,1999
cryptography/hazmat/primitives/keywrap.py,sha256=XV4Pj2fqSeD-RqZVvY2cA3j5_7RwJSFygYuLfk2ujCo,5650
cryptography/hazmat/primitives/padding.py,sha256=QT-U-NvV2eQGO1wVPbDiNGNSc9keRDS-ig5cQOrLz0E,1865
cryptography/hazmat/primitives/poly1305.py,sha256=P5EPQV-RB_FJPahpg01u0Ts4S_PnAmsroxIGXbGeRRo,355
cryptography/hazmat/primitives/serialization/__init__.py,sha256=Q7uTgDlt7n3WfsMT6jYwutC6DIg_7SEeoAm1GHZ5B5E,1705
cryptography/hazmat/primitives/serialization/__pycache__/__init__.cpython-312.pyc,,
cryptography/hazmat/primitives/serialization/__pycache__/base.cpython-312.pyc,,
cryptography/hazmat/primitives/serialization/__pycache__/pkcs12.cpython-312.pyc,,
cryptography/hazmat/primitives/serialization/__pycache__/pkcs7.cpython-312.pyc,,
cryptography/hazmat/primitives/serialization/__pycache__/ssh.cpython-312.pyc,,
cryptography/hazmat/primitives/serialization/base.py,sha256=ikq5MJIwp_oUnjiaBco_PmQwOTYuGi-XkYUYHKy8Vo0,615
cryptography/hazmat/primitives/serialization/pkcs12.py,sha256=mS9cFNG4afzvseoc5e1MWoY2VskfL8N8Y_OFjl67luY,5104
cryptography/hazmat/primitives/serialization/pkcs7.py,sha256=5OR_Tkysxaprn4FegvJIfbep9rJ9wok6FLWvWwQ5-Mg,13943
cryptography/hazmat/primitives/serialization/ssh.py,sha256=hPV5obFznz0QhFfXFPOeQ8y6MsurA0xVMQiLnLESEs8,53700
cryptography/hazmat/primitives/twofactor/__init__.py,sha256=tmMZGB-g4IU1r7lIFqASU019zr0uPp_wEBYcwdDCKCA,258
cryptography/hazmat/primitives/twofactor/__pycache__/__init__.cpython-312.pyc,,
cryptography/hazmat/primitives/twofactor/__pycache__/hotp.cpython-312.pyc,,
cryptography/hazmat/primitives/twofactor/__pycache__/totp.cpython-312.pyc,,
cryptography/hazmat/primitives/twofactor/hotp.py,sha256=ivZo5BrcCGWLsqql4nZV0XXCjyGPi_iHfDFltGlOJwk,3256
cryptography/hazmat/primitives/twofactor/totp.py,sha256=m5LPpRL00kp4zY8gTjr55Hfz9aMlPS53kHmVkSQCmdY,1652
cryptography/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
cryptography/utils.py,sha256=bZAjFC5KVpfmF29qS_18vvpW3mKxmdiRALcusHhTTkg,4301
cryptography/x509/__init__.py,sha256=xloN0swseNx-m2WFZmCA17gOoxQWqeU82UVjEdJBePQ,8257
cryptography/x509/__pycache__/__init__.cpython-312.pyc,,
cryptography/x509/__pycache__/base.cpython-312.pyc,,
cryptography/x509/__pycache__/certificate_transparency.cpython-312.pyc,,
cryptography/x509/__pycache__/extensions.cpython-312.pyc,,
cryptography/x509/__pycache__/general_name.cpython-312.pyc,,
cryptography/x509/__pycache__/name.cpython-312.pyc,,
cryptography/x509/__pycache__/ocsp.cpython-312.pyc,,
cryptography/x509/__pycache__/oid.cpython-312.pyc,,
cryptography/x509/__pycache__/verification.cpython-312.pyc,,
cryptography/x509/base.py,sha256=OrmTw3y8B6AE_nGXQPN8x9kq-d7rDWeH13gCq6T6D6U,27997
cryptography/x509/certificate_transparency.py,sha256=JqoOIDhlwInrYMFW6IFn77WJ0viF-PB_rlZV3vs9MYc,797
cryptography/x509/extensions.py,sha256=QxYrqR6SF1qzR9ZraP8wDiIczlEVlAFuwDRVcltB6Tk,77724
cryptography/x509/general_name.py,sha256=sP_rV11Qlpsk4x3XXGJY_Mv0Q_s9dtjeLckHsjpLQoQ,7836
cryptography/x509/name.py,sha256=ty0_xf0LnHwZAdEf-d8FLO1K4hGqx_7DsD3CHwoLJiY,15101
cryptography/x509/ocsp.py,sha256=Yey6NdFV1MPjop24Mj_VenjEpg3kUaMopSWOK0AbeBs,12699
cryptography/x509/oid.py,sha256=BUzgXXGVWilkBkdKPTm9R4qElE9gAGHgdYPMZAp7PJo,931
cryptography/x509/verification.py,sha256=gR2C2c-XZQtblZhT5T5vjSKOtCb74ef2alPVmEcwFlM,958

View File

@@ -0,0 +1,5 @@
Wheel-Version: 1.0
Generator: maturin (1.9.4)
Root-Is-Purelib: false
Tag: cp311-abi3-manylinux_2_34_x86_64

View File

@@ -5,13 +5,13 @@
from __future__ import annotations
__all__ = [
"__version__",
"__author__",
"__copyright__",
"__version__",
]
__version__ = "41.0.7"
__version__ = "46.0.3"
__author__ = "The Python Cryptographic Authority and individual contributors"
__copyright__ = f"Copyright 2013-2023 {__author__}"
__copyright__ = f"Copyright 2013-2025 {__author__}"

View File

@@ -7,7 +7,7 @@ from __future__ import annotations
from cryptography.__about__ import __author__, __copyright__, __version__
__all__ = [
"__version__",
"__author__",
"__copyright__",
"__version__",
]

View File

@@ -15,9 +15,7 @@ _Reasons = rust_exceptions._Reasons
class UnsupportedAlgorithm(Exception):
def __init__(
self, message: str, reason: typing.Optional[_Reasons] = None
) -> None:
def __init__(self, message: str, reason: _Reasons | None = None) -> None:
super().__init__(message)
self._reason = reason
@@ -44,7 +42,7 @@ class InvalidSignature(Exception):
class InternalError(Exception):
def __init__(
self, msg: str, err_code: typing.List[rust_openssl.OpenSSLError]
self, msg: str, err_code: list[rust_openssl.OpenSSLError]
) -> None:
super().__init__(msg)
self.err_code = err_code

View File

@@ -9,6 +9,7 @@ import binascii
import os
import time
import typing
from collections.abc import Iterable
from cryptography import utils
from cryptography.exceptions import InvalidSignature
@@ -27,7 +28,7 @@ _MAX_CLOCK_SKEW = 60
class Fernet:
def __init__(
self,
key: typing.Union[bytes, str],
key: bytes | str,
backend: typing.Any = None,
) -> None:
try:
@@ -80,9 +81,7 @@ class Fernet:
hmac = h.finalize()
return base64.urlsafe_b64encode(basic_parts + hmac)
def decrypt(
self, token: typing.Union[bytes, str], ttl: typing.Optional[int] = None
) -> bytes:
def decrypt(self, token: bytes | str, ttl: int | None = None) -> bytes:
timestamp, data = Fernet._get_unverified_token_data(token)
if ttl is None:
time_info = None
@@ -91,7 +90,7 @@ class Fernet:
return self._decrypt_data(data, timestamp, time_info)
def decrypt_at_time(
self, token: typing.Union[bytes, str], ttl: int, current_time: int
self, token: bytes | str, ttl: int, current_time: int
) -> bytes:
if ttl is None:
raise ValueError(
@@ -100,16 +99,14 @@ class Fernet:
timestamp, data = Fernet._get_unverified_token_data(token)
return self._decrypt_data(data, timestamp, (ttl, current_time))
def extract_timestamp(self, token: typing.Union[bytes, str]) -> int:
def extract_timestamp(self, token: bytes | str) -> int:
timestamp, data = Fernet._get_unverified_token_data(token)
# Verify the token was not tampered with.
self._verify_signature(data)
return timestamp
@staticmethod
def _get_unverified_token_data(
token: typing.Union[bytes, str]
) -> typing.Tuple[int, bytes]:
def _get_unverified_token_data(token: bytes | str) -> tuple[int, bytes]:
if not isinstance(token, (str, bytes)):
raise TypeError("token must be bytes or str")
@@ -139,7 +136,7 @@ class Fernet:
self,
data: bytes,
timestamp: int,
time_info: typing.Optional[typing.Tuple[int, int]],
time_info: tuple[int, int] | None,
) -> bytes:
if time_info is not None:
ttl, current_time = time_info
@@ -172,7 +169,7 @@ class Fernet:
class MultiFernet:
def __init__(self, fernets: typing.Iterable[Fernet]):
def __init__(self, fernets: Iterable[Fernet]):
fernets = list(fernets)
if not fernets:
raise ValueError(
@@ -186,7 +183,7 @@ class MultiFernet:
def encrypt_at_time(self, msg: bytes, current_time: int) -> bytes:
return self._fernets[0].encrypt_at_time(msg, current_time)
def rotate(self, msg: typing.Union[bytes, str]) -> bytes:
def rotate(self, msg: bytes | str) -> bytes:
timestamp, data = Fernet._get_unverified_token_data(msg)
for f in self._fernets:
try:
@@ -200,9 +197,7 @@ class MultiFernet:
iv = os.urandom(16)
return self._fernets[0]._encrypt_from_parts(p, timestamp, iv)
def decrypt(
self, msg: typing.Union[bytes, str], ttl: typing.Optional[int] = None
) -> bytes:
def decrypt(self, msg: bytes | str, ttl: int | None = None) -> bytes:
for f in self._fernets:
try:
return f.decrypt(msg, ttl)
@@ -211,7 +206,7 @@ class MultiFernet:
raise InvalidToken
def decrypt_at_time(
self, msg: typing.Union[bytes, str], ttl: int, current_time: int
self, msg: bytes | str, ttl: int, current_time: int
) -> bytes:
for f in self._fernets:
try:
@@ -219,3 +214,11 @@ class MultiFernet:
except InvalidToken:
pass
raise InvalidToken
def extract_timestamp(self, msg: bytes | str) -> int:
for f in self._fernets:
try:
return f.extract_timestamp(msg)
except InvalidToken:
pass
raise InvalidToken

View File

@@ -4,8 +4,6 @@
from __future__ import annotations
import typing
from cryptography.hazmat.bindings._rust import (
ObjectIdentifier as ObjectIdentifier,
)
@@ -16,6 +14,7 @@ class ExtensionOID:
SUBJECT_DIRECTORY_ATTRIBUTES = ObjectIdentifier("2.5.29.9")
SUBJECT_KEY_IDENTIFIER = ObjectIdentifier("2.5.29.14")
KEY_USAGE = ObjectIdentifier("2.5.29.15")
PRIVATE_KEY_USAGE_PERIOD = ObjectIdentifier("2.5.29.16")
SUBJECT_ALTERNATIVE_NAME = ObjectIdentifier("2.5.29.17")
ISSUER_ALTERNATIVE_NAME = ObjectIdentifier("2.5.29.18")
BASIC_CONSTRAINTS = ObjectIdentifier("2.5.29.19")
@@ -41,6 +40,7 @@ class ExtensionOID:
PRECERT_POISON = ObjectIdentifier("1.3.6.1.4.1.11129.2.4.3")
SIGNED_CERTIFICATE_TIMESTAMPS = ObjectIdentifier("1.3.6.1.4.1.11129.2.4.5")
MS_CERTIFICATE_TEMPLATE = ObjectIdentifier("1.3.6.1.4.1.311.21.7")
ADMISSIONS = ObjectIdentifier("1.3.36.8.3.3")
class OCSPExtensionOID:
@@ -60,6 +60,7 @@ class NameOID:
LOCALITY_NAME = ObjectIdentifier("2.5.4.7")
STATE_OR_PROVINCE_NAME = ObjectIdentifier("2.5.4.8")
STREET_ADDRESS = ObjectIdentifier("2.5.4.9")
ORGANIZATION_IDENTIFIER = ObjectIdentifier("2.5.4.97")
ORGANIZATION_NAME = ObjectIdentifier("2.5.4.10")
ORGANIZATIONAL_UNIT_NAME = ObjectIdentifier("2.5.4.11")
SERIAL_NUMBER = ObjectIdentifier("2.5.4.5")
@@ -123,9 +124,7 @@ class SignatureAlgorithmOID:
GOSTR3410_2012_WITH_3411_2012_512 = ObjectIdentifier("1.2.643.7.1.1.3.3")
_SIG_OIDS_TO_HASH: typing.Dict[
ObjectIdentifier, typing.Optional[hashes.HashAlgorithm]
] = {
_SIG_OIDS_TO_HASH: dict[ObjectIdentifier, hashes.HashAlgorithm | None] = {
SignatureAlgorithmOID.RSA_WITH_MD5: hashes.MD5(),
SignatureAlgorithmOID.RSA_WITH_SHA1: hashes.SHA1(),
SignatureAlgorithmOID._RSA_WITH_SHA1: hashes.SHA1(),
@@ -157,6 +156,33 @@ _SIG_OIDS_TO_HASH: typing.Dict[
}
class HashAlgorithmOID:
SHA1 = ObjectIdentifier("1.3.14.3.2.26")
SHA224 = ObjectIdentifier("2.16.840.1.101.3.4.2.4")
SHA256 = ObjectIdentifier("2.16.840.1.101.3.4.2.1")
SHA384 = ObjectIdentifier("2.16.840.1.101.3.4.2.2")
SHA512 = ObjectIdentifier("2.16.840.1.101.3.4.2.3")
SHA3_224 = ObjectIdentifier("1.3.6.1.4.1.37476.3.2.1.99.7.224")
SHA3_256 = ObjectIdentifier("1.3.6.1.4.1.37476.3.2.1.99.7.256")
SHA3_384 = ObjectIdentifier("1.3.6.1.4.1.37476.3.2.1.99.7.384")
SHA3_512 = ObjectIdentifier("1.3.6.1.4.1.37476.3.2.1.99.7.512")
SHA3_224_NIST = ObjectIdentifier("2.16.840.1.101.3.4.2.7")
SHA3_256_NIST = ObjectIdentifier("2.16.840.1.101.3.4.2.8")
SHA3_384_NIST = ObjectIdentifier("2.16.840.1.101.3.4.2.9")
SHA3_512_NIST = ObjectIdentifier("2.16.840.1.101.3.4.2.10")
class PublicKeyAlgorithmOID:
DSA = ObjectIdentifier("1.2.840.10040.4.1")
EC_PUBLIC_KEY = ObjectIdentifier("1.2.840.10045.2.1")
RSAES_PKCS1_v1_5 = ObjectIdentifier("1.2.840.113549.1.1.1")
RSASSA_PSS = ObjectIdentifier("1.2.840.113549.1.1.10")
X25519 = ObjectIdentifier("1.3.101.110")
X448 = ObjectIdentifier("1.3.101.111")
ED25519 = ObjectIdentifier("1.3.101.112")
ED448 = ObjectIdentifier("1.3.101.113")
class ExtendedKeyUsageOID:
SERVER_AUTH = ObjectIdentifier("1.3.6.1.5.5.7.3.1")
CLIENT_AUTH = ObjectIdentifier("1.3.6.1.5.5.7.3.2")
@@ -168,9 +194,20 @@ class ExtendedKeyUsageOID:
SMARTCARD_LOGON = ObjectIdentifier("1.3.6.1.4.1.311.20.2.2")
KERBEROS_PKINIT_KDC = ObjectIdentifier("1.3.6.1.5.2.3.5")
IPSEC_IKE = ObjectIdentifier("1.3.6.1.5.5.7.3.17")
BUNDLE_SECURITY = ObjectIdentifier("1.3.6.1.5.5.7.3.35")
CERTIFICATE_TRANSPARENCY = ObjectIdentifier("1.3.6.1.4.1.11129.2.4.4")
class OtherNameFormOID:
PERMANENT_IDENTIFIER = ObjectIdentifier("1.3.6.1.5.5.7.8.3")
HW_MODULE_NAME = ObjectIdentifier("1.3.6.1.5.5.7.8.4")
DNS_SRV = ObjectIdentifier("1.3.6.1.5.5.7.8.7")
NAI_REALM = ObjectIdentifier("1.3.6.1.5.5.7.8.8")
SMTP_UTF8_MAILBOX = ObjectIdentifier("1.3.6.1.5.5.7.8.9")
ACP_NODE_NAME = ObjectIdentifier("1.3.6.1.5.5.7.8.10")
BUNDLE_EID = ObjectIdentifier("1.3.6.1.5.5.7.8.11")
class AuthorityInformationAccessOID:
CA_ISSUERS = ObjectIdentifier("1.3.6.1.5.5.7.48.2")
OCSP = ObjectIdentifier("1.3.6.1.5.5.7.48.1")
@@ -228,7 +265,7 @@ _OID_NAMES = {
SignatureAlgorithmOID.RSA_WITH_SHA256: "sha256WithRSAEncryption",
SignatureAlgorithmOID.RSA_WITH_SHA384: "sha384WithRSAEncryption",
SignatureAlgorithmOID.RSA_WITH_SHA512: "sha512WithRSAEncryption",
SignatureAlgorithmOID.RSASSA_PSS: "RSASSA-PSS",
SignatureAlgorithmOID.RSASSA_PSS: "rsassaPss",
SignatureAlgorithmOID.ECDSA_WITH_SHA1: "ecdsa-with-SHA1",
SignatureAlgorithmOID.ECDSA_WITH_SHA224: "ecdsa-with-SHA224",
SignatureAlgorithmOID.ECDSA_WITH_SHA256: "ecdsa-with-SHA256",
@@ -248,6 +285,24 @@ _OID_NAMES = {
SignatureAlgorithmOID.GOSTR3410_2012_WITH_3411_2012_512: (
"GOST R 34.10-2012 with GOST R 34.11-2012 (512 bit)"
),
HashAlgorithmOID.SHA1: "sha1",
HashAlgorithmOID.SHA224: "sha224",
HashAlgorithmOID.SHA256: "sha256",
HashAlgorithmOID.SHA384: "sha384",
HashAlgorithmOID.SHA512: "sha512",
HashAlgorithmOID.SHA3_224: "sha3_224",
HashAlgorithmOID.SHA3_256: "sha3_256",
HashAlgorithmOID.SHA3_384: "sha3_384",
HashAlgorithmOID.SHA3_512: "sha3_512",
HashAlgorithmOID.SHA3_224_NIST: "sha3_224",
HashAlgorithmOID.SHA3_256_NIST: "sha3_256",
HashAlgorithmOID.SHA3_384_NIST: "sha3_384",
HashAlgorithmOID.SHA3_512_NIST: "sha3_512",
PublicKeyAlgorithmOID.DSA: "dsaEncryption",
PublicKeyAlgorithmOID.EC_PUBLIC_KEY: "id-ecPublicKey",
PublicKeyAlgorithmOID.RSAES_PKCS1_v1_5: "rsaEncryption",
PublicKeyAlgorithmOID.X25519: "X25519",
PublicKeyAlgorithmOID.X448: "X448",
ExtendedKeyUsageOID.SERVER_AUTH: "serverAuth",
ExtendedKeyUsageOID.CLIENT_AUTH: "clientAuth",
ExtendedKeyUsageOID.CODE_SIGNING: "codeSigning",
@@ -259,6 +314,7 @@ _OID_NAMES = {
ExtensionOID.SUBJECT_DIRECTORY_ATTRIBUTES: "subjectDirectoryAttributes",
ExtensionOID.SUBJECT_KEY_IDENTIFIER: "subjectKeyIdentifier",
ExtensionOID.KEY_USAGE: "keyUsage",
ExtensionOID.PRIVATE_KEY_USAGE_PERIOD: "privateKeyUsagePeriod",
ExtensionOID.SUBJECT_ALTERNATIVE_NAME: "subjectAltName",
ExtensionOID.ISSUER_ALTERNATIVE_NAME: "issuerAltName",
ExtensionOID.BASIC_CONSTRAINTS: "basicConstraints",
@@ -270,6 +326,7 @@ _OID_NAMES = {
),
ExtensionOID.PRECERT_POISON: "ctPoison",
ExtensionOID.MS_CERTIFICATE_TEMPLATE: "msCertificateTemplate",
ExtensionOID.ADMISSIONS: "Admissions",
CRLEntryExtensionOID.CRL_REASON: "cRLReason",
CRLEntryExtensionOID.INVALIDITY_DATE: "invalidityDate",
CRLEntryExtensionOID.CERTIFICATE_ISSUER: "certificateIssuer",
@@ -282,7 +339,7 @@ _OID_NAMES = {
ExtensionOID.EXTENDED_KEY_USAGE: "extendedKeyUsage",
ExtensionOID.FRESHEST_CRL: "freshestCRL",
ExtensionOID.INHIBIT_ANY_POLICY: "inhibitAnyPolicy",
ExtensionOID.ISSUING_DISTRIBUTION_POINT: ("issuingDistributionPoint"),
ExtensionOID.ISSUING_DISTRIBUTION_POINT: "issuingDistributionPoint",
ExtensionOID.AUTHORITY_INFORMATION_ACCESS: "authorityInfoAccess",
ExtensionOID.SUBJECT_INFORMATION_ACCESS: "subjectInfoAccess",
ExtensionOID.OCSP_NO_CHECK: "OCSPNoCheck",

View File

@@ -0,0 +1,10 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from cryptography.hazmat.asn1.asn1 import encode_der, sequence
__all__ = [
"encode_der",
"sequence",
]

View File

@@ -0,0 +1,116 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import annotations
import dataclasses
import sys
import typing
if sys.version_info < (3, 11):
import typing_extensions
# We use the `include_extras` parameter of `get_type_hints`, which was
# added in Python 3.9. This can be replaced by the `typing` version
# once the min version is >= 3.9
if sys.version_info < (3, 9):
get_type_hints = typing_extensions.get_type_hints
else:
get_type_hints = typing.get_type_hints
else:
get_type_hints = typing.get_type_hints
from cryptography.hazmat.bindings._rust import declarative_asn1
T = typing.TypeVar("T", covariant=True)
U = typing.TypeVar("U")
encode_der = declarative_asn1.encode_der
def _normalize_field_type(
field_type: typing.Any, field_name: str
) -> declarative_asn1.AnnotatedType:
annotation = declarative_asn1.Annotation()
if hasattr(field_type, "__asn1_root__"):
annotated_root = field_type.__asn1_root__
if not isinstance(annotated_root, declarative_asn1.AnnotatedType):
raise TypeError(f"unsupported root type: {annotated_root}")
return annotated_root
else:
rust_field_type = declarative_asn1.non_root_python_to_rust(field_type)
return declarative_asn1.AnnotatedType(rust_field_type, annotation)
def _annotate_fields(
raw_fields: dict[str, type],
) -> dict[str, declarative_asn1.AnnotatedType]:
fields = {}
for field_name, field_type in raw_fields.items():
# Recursively normalize the field type into something that the
# Rust code can understand.
annotated_field_type = _normalize_field_type(field_type, field_name)
fields[field_name] = annotated_field_type
return fields
def _register_asn1_sequence(cls: type[U]) -> None:
raw_fields = get_type_hints(cls, include_extras=True)
root = declarative_asn1.AnnotatedType(
declarative_asn1.Type.Sequence(cls, _annotate_fields(raw_fields)),
declarative_asn1.Annotation(),
)
setattr(cls, "__asn1_root__", root)
# Due to https://github.com/python/mypy/issues/19731, we can't define an alias
# for `dataclass_transform` that conditionally points to `typing` or
# `typing_extensions` depending on the Python version (like we do for
# `get_type_hints`).
# We work around it by making the whole decorated class conditional on the
# Python version.
if sys.version_info < (3, 11):
@typing_extensions.dataclass_transform(kw_only_default=True)
def sequence(cls: type[U]) -> type[U]:
# We use `dataclasses.dataclass` to add an __init__ method
# to the class with keyword-only parameters.
if sys.version_info >= (3, 10):
dataclass_cls = dataclasses.dataclass(
repr=False,
eq=False,
# `match_args` was added in Python 3.10 and defaults
# to True
match_args=False,
# `kw_only` was added in Python 3.10 and defaults to
# False
kw_only=True,
)(cls)
else:
dataclass_cls = dataclasses.dataclass(
repr=False,
eq=False,
)(cls)
_register_asn1_sequence(dataclass_cls)
return dataclass_cls
else:
@typing.dataclass_transform(kw_only_default=True)
def sequence(cls: type[U]) -> type[U]:
# Only add an __init__ method, with keyword-only
# parameters.
dataclass_cls = dataclasses.dataclass(
repr=False,
eq=False,
match_args=False,
kw_only=True,
)(cls)
_register_asn1_sequence(dataclass_cls)
return dataclass_cls

View File

@@ -1,527 +0,0 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import annotations
import typing
from cryptography.exceptions import InvalidTag
if typing.TYPE_CHECKING:
from cryptography.hazmat.backends.openssl.backend import Backend
from cryptography.hazmat.primitives.ciphers.aead import (
AESCCM,
AESGCM,
AESOCB3,
AESSIV,
ChaCha20Poly1305,
)
_AEADTypes = typing.Union[
AESCCM, AESGCM, AESOCB3, AESSIV, ChaCha20Poly1305
]
def _is_evp_aead_supported_cipher(
    backend: Backend, cipher: _AEADTypes
) -> bool:
    """
    Return whether *cipher* should be driven through the EVP_AEAD API
    rather than the regular OpenSSL EVP_CIPHER interface.
    """
    from cryptography.hazmat.primitives.ciphers.aead import ChaCha20Poly1305

    # Only ChaCha20-Poly1305 is routed through EVP_AEAD, and only when the
    # linked library actually exposes that API.
    has_evp_aead = backend._lib.Cryptography_HAS_EVP_AEAD
    return has_evp_aead and isinstance(cipher, ChaCha20Poly1305)
def _aead_cipher_supported(backend: Backend, cipher: _AEADTypes) -> bool:
    """Report whether *cipher* is usable with this OpenSSL build/config."""
    # Anything served by the EVP_AEAD path is supported unconditionally.
    if _is_evp_aead_supported_cipher(backend, cipher):
        return True

    cipher_name = _evp_cipher_cipher_name(cipher)
    if backend._fips_enabled and cipher_name not in backend._fips_aead:
        return False
    # SIV isn't loaded through get_cipherbyname but instead a new fetch API
    # only available in 3.0+. But if we know we're on 3.0+ then we know
    # it's supported.
    if cipher_name.endswith(b"-siv"):
        return backend._lib.CRYPTOGRAPHY_OPENSSL_300_OR_GREATER == 1
    return backend._lib.EVP_get_cipherbyname(cipher_name) != backend._ffi.NULL
def _aead_create_ctx(
    backend: Backend,
    cipher: _AEADTypes,
    key: bytes,
):
    """Create a reusable AEAD context for *cipher*, picking the right API."""
    factory = (
        _evp_aead_create_ctx
        if _is_evp_aead_supported_cipher(backend, cipher)
        else _evp_cipher_create_ctx
    )
    return factory(backend, cipher, key)
def _encrypt(
    backend: Backend,
    cipher: _AEADTypes,
    nonce: bytes,
    data: bytes,
    associated_data: typing.List[bytes],
    tag_length: int,
    ctx: typing.Any = None,
) -> bytes:
    """Encrypt *data*, dispatching to the EVP_AEAD or EVP_CIPHER backend."""
    impl = (
        _evp_aead_encrypt
        if _is_evp_aead_supported_cipher(backend, cipher)
        else _evp_cipher_encrypt
    )
    return impl(backend, cipher, nonce, data, associated_data, tag_length, ctx)
def _decrypt(
    backend: Backend,
    cipher: _AEADTypes,
    nonce: bytes,
    data: bytes,
    associated_data: typing.List[bytes],
    tag_length: int,
    ctx: typing.Any = None,
) -> bytes:
    """Decrypt *data*, dispatching to the EVP_AEAD or EVP_CIPHER backend."""
    impl = (
        _evp_aead_decrypt
        if _is_evp_aead_supported_cipher(backend, cipher)
        else _evp_cipher_decrypt
    )
    return impl(backend, cipher, nonce, data, associated_data, tag_length, ctx)
def _evp_aead_create_ctx(
    backend: Backend,
    cipher: _AEADTypes,
    key: bytes,
    tag_len: typing.Optional[int] = None,
):
    """Create a garbage-collected EVP_AEAD_CTX for *cipher* keyed with *key*.

    When *tag_len* is None the library's default tag length is used.
    """
    aead_cipher = _evp_aead_get_cipher(backend, cipher)
    assert aead_cipher is not None
    key_ptr = backend._ffi.from_buffer(key)
    tag_len = (
        backend._lib.EVP_AEAD_DEFAULT_TAG_LENGTH
        if tag_len is None
        else tag_len
    )
    ctx = backend._lib.Cryptography_EVP_AEAD_CTX_new(
        aead_cipher, key_ptr, len(key), tag_len
    )
    backend.openssl_assert(ctx != backend._ffi.NULL)
    # Tie the native context's lifetime to the Python handle so it is
    # freed automatically when the handle is collected.
    ctx = backend._ffi.gc(ctx, backend._lib.EVP_AEAD_CTX_free)
    return ctx
def _evp_aead_get_cipher(backend: Backend, cipher: _AEADTypes):
    """Return the native EVP_AEAD cipher object corresponding to *cipher*.

    ChaCha20-Poly1305 is the only algorithm routed through this API, so
    any other cipher type reaching here is a programming error.
    """
    from cryptography.hazmat.primitives.ciphers.aead import ChaCha20Poly1305

    # Currently only ChaCha20-Poly1305 is supported using this API
    assert isinstance(cipher, ChaCha20Poly1305)
    return backend._lib.EVP_aead_chacha20_poly1305()
def _evp_aead_encrypt(
    backend: Backend,
    cipher: _AEADTypes,
    nonce: bytes,
    data: bytes,
    associated_data: typing.List[bytes],
    tag_length: int,
    ctx: typing.Any,
) -> bytes:
    """Seal (encrypt + authenticate) *data* via the EVP_AEAD API.

    Returns ciphertext with the tag appended by the library.
    NOTE(review): tag_length is not used on this path — the tag size was
    fixed when the EVP_AEAD_CTX was created; the parameter exists for
    signature parity with _evp_cipher_encrypt.
    """
    assert ctx is not None
    aead_cipher = _evp_aead_get_cipher(backend, cipher)
    assert aead_cipher is not None
    out_len = backend._ffi.new("size_t *")
    # max_out_len should be in_len plus the result of
    # EVP_AEAD_max_overhead.
    max_out_len = len(data) + backend._lib.EVP_AEAD_max_overhead(aead_cipher)
    out_buf = backend._ffi.new("uint8_t[]", max_out_len)
    data_ptr = backend._ffi.from_buffer(data)
    nonce_ptr = backend._ffi.from_buffer(nonce)
    # The EVP_AEAD API takes the AAD as one contiguous buffer.
    aad = b"".join(associated_data)
    aad_ptr = backend._ffi.from_buffer(aad)
    res = backend._lib.EVP_AEAD_CTX_seal(
        ctx,
        out_buf,
        out_len,
        max_out_len,
        nonce_ptr,
        len(nonce),
        data_ptr,
        len(data),
        aad_ptr,
        len(aad),
    )
    backend.openssl_assert(res == 1)
    encrypted_data = backend._ffi.buffer(out_buf, out_len[0])[:]
    return encrypted_data
def _evp_aead_decrypt(
    backend: Backend,
    cipher: _AEADTypes,
    nonce: bytes,
    data: bytes,
    associated_data: typing.List[bytes],
    tag_length: int,
    ctx: typing.Any,
) -> bytes:
    """Open (decrypt + verify) *data* via the EVP_AEAD API.

    Raises InvalidTag when the input is shorter than the tag or fails
    authentication.
    """
    if len(data) < tag_length:
        raise InvalidTag
    assert ctx is not None
    out_len = backend._ffi.new("size_t *")
    # max_out_len should at least in_len
    max_out_len = len(data)
    out_buf = backend._ffi.new("uint8_t[]", max_out_len)
    data_ptr = backend._ffi.from_buffer(data)
    nonce_ptr = backend._ffi.from_buffer(nonce)
    # The EVP_AEAD API takes the AAD as one contiguous buffer.
    aad = b"".join(associated_data)
    aad_ptr = backend._ffi.from_buffer(aad)
    res = backend._lib.EVP_AEAD_CTX_open(
        ctx,
        out_buf,
        out_len,
        max_out_len,
        nonce_ptr,
        len(nonce),
        data_ptr,
        len(data),
        aad_ptr,
        len(aad),
    )
    if res == 0:
        # Drain the OpenSSL error queue before reporting failure.
        backend._consume_errors()
        raise InvalidTag
    decrypted_data = backend._ffi.buffer(out_buf, out_len[0])[:]
    return decrypted_data
# Values for the "enc" argument of EVP_CipherInit_ex: 1 selects
# encryption, 0 selects decryption.
_ENCRYPT = 1
_DECRYPT = 0
def _evp_cipher_cipher_name(cipher: _AEADTypes) -> bytes:
    """Return the OpenSSL cipher-name bytestring for *cipher*.

    AES names embed the key size in bits; AES-SIV names use half the key
    length, since the SIV key is split into two subkeys.
    """
    from cryptography.hazmat.primitives.ciphers.aead import (
        AESCCM,
        AESGCM,
        AESOCB3,
        AESSIV,
        ChaCha20Poly1305,
    )

    if isinstance(cipher, ChaCha20Poly1305):
        return b"chacha20-poly1305"

    key_bits = len(cipher._key) * 8
    if isinstance(cipher, AESCCM):
        return f"aes-{key_bits}-ccm".encode("ascii")
    if isinstance(cipher, AESOCB3):
        return f"aes-{key_bits}-ocb".encode("ascii")
    if isinstance(cipher, AESSIV):
        return f"aes-{key_bits // 2}-siv".encode("ascii")
    assert isinstance(cipher, AESGCM)
    return f"aes-{key_bits}-gcm".encode("ascii")
def _evp_cipher(cipher_name: bytes, backend: Backend):
    """Look up the EVP_CIPHER named *cipher_name*.

    SIV ciphers are not available through EVP_get_cipherbyname and must be
    fetched instead; fetched ciphers are owned by the caller, so the
    handle is wrapped with a GC callback that frees it.
    """
    if cipher_name.endswith(b"-siv"):
        evp_cipher = backend._lib.EVP_CIPHER_fetch(
            backend._ffi.NULL,
            cipher_name,
            backend._ffi.NULL,
        )
        backend.openssl_assert(evp_cipher != backend._ffi.NULL)
        evp_cipher = backend._ffi.gc(evp_cipher, backend._lib.EVP_CIPHER_free)
    else:
        # Static ciphers from get_cipherbyname must not be freed.
        evp_cipher = backend._lib.EVP_get_cipherbyname(cipher_name)
        backend.openssl_assert(evp_cipher != backend._ffi.NULL)
    return evp_cipher
def _evp_cipher_create_ctx(
    backend: Backend,
    cipher: _AEADTypes,
    key: bytes,
):
    """Create a reusable, GC-managed EVP_CIPHER_CTX keyed for *cipher*.

    No nonce or direction is configured here (the final init argument is
    0 and the IV pointer is NULL); callers set those per operation via
    _evp_cipher_set_nonce_operation.
    """
    ctx = backend._lib.EVP_CIPHER_CTX_new()
    backend.openssl_assert(ctx != backend._ffi.NULL)
    ctx = backend._ffi.gc(ctx, backend._lib.EVP_CIPHER_CTX_free)
    cipher_name = _evp_cipher_cipher_name(cipher)
    evp_cipher = _evp_cipher(cipher_name, backend)
    key_ptr = backend._ffi.from_buffer(key)
    res = backend._lib.EVP_CipherInit_ex(
        ctx,
        evp_cipher,
        backend._ffi.NULL,
        key_ptr,
        backend._ffi.NULL,
        0,
    )
    backend.openssl_assert(res != 0)
    return ctx
def _evp_cipher_aead_setup(
    backend: Backend,
    cipher_name: bytes,
    key: bytes,
    nonce: bytes,
    tag: typing.Optional[bytes],
    tag_len: int,
    operation: int,
):
    """Create and fully initialize an EVP_CIPHER_CTX for one AEAD operation.

    *operation* is _ENCRYPT or _DECRYPT. *tag* must be provided when
    decrypting and may be None when encrypting. The two-pass
    EVP_CipherInit_ex sequence is deliberate: the cipher and direction
    are selected first, AEAD parameters (IV length, tag) are configured,
    and only then are the key and nonce supplied.
    """
    evp_cipher = _evp_cipher(cipher_name, backend)
    ctx = backend._lib.EVP_CIPHER_CTX_new()
    ctx = backend._ffi.gc(ctx, backend._lib.EVP_CIPHER_CTX_free)
    # Pass 1: select cipher and direction only (key/IV pointers NULL).
    res = backend._lib.EVP_CipherInit_ex(
        ctx,
        evp_cipher,
        backend._ffi.NULL,
        backend._ffi.NULL,
        backend._ffi.NULL,
        int(operation == _ENCRYPT),
    )
    backend.openssl_assert(res != 0)
    # CCM requires the IVLEN to be set before calling SET_TAG on decrypt
    res = backend._lib.EVP_CIPHER_CTX_ctrl(
        ctx,
        backend._lib.EVP_CTRL_AEAD_SET_IVLEN,
        len(nonce),
        backend._ffi.NULL,
    )
    backend.openssl_assert(res != 0)
    if operation == _DECRYPT:
        assert tag is not None
        _evp_cipher_set_tag(backend, ctx, tag)
    elif cipher_name.endswith(b"-ccm"):
        # CCM encryption needs the tag length declared up front.
        res = backend._lib.EVP_CIPHER_CTX_ctrl(
            ctx,
            backend._lib.EVP_CTRL_AEAD_SET_TAG,
            tag_len,
            backend._ffi.NULL,
        )
        backend.openssl_assert(res != 0)

    nonce_ptr = backend._ffi.from_buffer(nonce)
    key_ptr = backend._ffi.from_buffer(key)
    # Pass 2: supply key and nonce now that AEAD parameters are set.
    res = backend._lib.EVP_CipherInit_ex(
        ctx,
        backend._ffi.NULL,
        backend._ffi.NULL,
        key_ptr,
        nonce_ptr,
        int(operation == _ENCRYPT),
    )
    backend.openssl_assert(res != 0)
    return ctx
def _evp_cipher_set_tag(backend, ctx, tag: bytes) -> None:
    """Install the expected authentication tag on *ctx* for decryption."""
    tag_ptr = backend._ffi.from_buffer(tag)
    res = backend._lib.EVP_CIPHER_CTX_ctrl(
        ctx, backend._lib.EVP_CTRL_AEAD_SET_TAG, len(tag), tag_ptr
    )
    backend.openssl_assert(res != 0)
def _evp_cipher_set_nonce_operation(
    backend, ctx, nonce: bytes, operation: int
) -> None:
    """Reconfigure an existing *ctx* with a new nonce and direction.

    The cipher and key already on the context are preserved (their
    arguments are NULL); only the IV and the encrypt/decrypt flag change.
    """
    nonce_ptr = backend._ffi.from_buffer(nonce)
    res = backend._lib.EVP_CipherInit_ex(
        ctx,
        backend._ffi.NULL,
        backend._ffi.NULL,
        backend._ffi.NULL,
        nonce_ptr,
        int(operation == _ENCRYPT),
    )
    backend.openssl_assert(res != 0)
def _evp_cipher_set_length(backend: Backend, ctx, data_len: int) -> None:
    """Declare the total message length to *ctx*.

    Done with an EVP_CipherUpdate call whose data and output pointers are
    NULL. Callers invoke this only for CCM, which requires the length
    before any data is processed.
    """
    intptr = backend._ffi.new("int *")
    res = backend._lib.EVP_CipherUpdate(
        ctx, backend._ffi.NULL, intptr, backend._ffi.NULL, data_len
    )
    backend.openssl_assert(res != 0)
def _evp_cipher_process_aad(
    backend: Backend, ctx, associated_data: bytes
) -> None:
    """Feed associated data into *ctx*.

    A NULL output buffer tells EVP_CipherUpdate this is AAD, not message
    data.
    """
    outlen = backend._ffi.new("int *")
    a_data_ptr = backend._ffi.from_buffer(associated_data)
    res = backend._lib.EVP_CipherUpdate(
        ctx, backend._ffi.NULL, outlen, a_data_ptr, len(associated_data)
    )
    backend.openssl_assert(res != 0)
def _evp_cipher_process_data(backend: Backend, ctx, data: bytes) -> bytes:
    """Run *data* through the cipher on *ctx* and return the output bytes.

    Raises InvalidTag when the update fails, which AES-SIV uses to signal
    invalid input during decryption.
    """
    outlen = backend._ffi.new("int *")
    buf = backend._ffi.new("unsigned char[]", len(data))
    data_ptr = backend._ffi.from_buffer(data)
    res = backend._lib.EVP_CipherUpdate(ctx, buf, outlen, data_ptr, len(data))
    if res == 0:
        # AES SIV can error here if the data is invalid on decrypt
        backend._consume_errors()
        raise InvalidTag
    return backend._ffi.buffer(buf, outlen[0])[:]
def _evp_cipher_encrypt(
    backend: Backend,
    cipher: _AEADTypes,
    nonce: bytes,
    data: bytes,
    associated_data: typing.List[bytes],
    tag_length: int,
    ctx: typing.Any = None,
) -> bytes:
    """Encrypt via the EVP_CIPHER API.

    Returns ciphertext || tag, except for AES-SIV which returns
    tag || ciphertext (see the RFC 5297 note below). When *ctx* is None a
    one-shot context is created; otherwise the provided context is reused
    with a fresh nonce.
    """
    from cryptography.hazmat.primitives.ciphers.aead import AESCCM, AESSIV

    if ctx is None:
        cipher_name = _evp_cipher_cipher_name(cipher)
        ctx = _evp_cipher_aead_setup(
            backend,
            cipher_name,
            cipher._key,
            nonce,
            None,
            tag_length,
            _ENCRYPT,
        )
    else:
        # Reusing a context: only the nonce/direction need refreshing.
        _evp_cipher_set_nonce_operation(backend, ctx, nonce, _ENCRYPT)

    # CCM requires us to pass the length of the data before processing
    # anything.
    # However calling this with any other AEAD results in an error
    if isinstance(cipher, AESCCM):
        _evp_cipher_set_length(backend, ctx, len(data))

    for ad in associated_data:
        _evp_cipher_process_aad(backend, ctx, ad)
    processed_data = _evp_cipher_process_data(backend, ctx, data)
    outlen = backend._ffi.new("int *")
    # All AEADs we support besides OCB are streaming so they return nothing
    # in finalization. OCB can return up to (16 byte block - 1) bytes so
    # we need a buffer here too.
    buf = backend._ffi.new("unsigned char[]", 16)
    res = backend._lib.EVP_CipherFinal_ex(ctx, buf, outlen)
    backend.openssl_assert(res != 0)
    processed_data += backend._ffi.buffer(buf, outlen[0])[:]
    tag_buf = backend._ffi.new("unsigned char[]", tag_length)
    res = backend._lib.EVP_CIPHER_CTX_ctrl(
        ctx, backend._lib.EVP_CTRL_AEAD_GET_TAG, tag_length, tag_buf
    )
    backend.openssl_assert(res != 0)
    tag = backend._ffi.buffer(tag_buf)[:]

    if isinstance(cipher, AESSIV):
        # RFC 5297 defines the output as IV || C, where the tag we generate
        # is the "IV" and C is the ciphertext. This is the opposite of our
        # other AEADs, which are Ciphertext || Tag
        backend.openssl_assert(len(tag) == 16)
        return tag + processed_data
    else:
        return processed_data + tag
def _evp_cipher_decrypt(
    backend: Backend,
    cipher: _AEADTypes,
    nonce: bytes,
    data: bytes,
    associated_data: typing.List[bytes],
    tag_length: int,
    ctx: typing.Any = None,
) -> bytes:
    """Decrypt and verify via the EVP_CIPHER API.

    *data* carries the tag inline: leading for AES-SIV, trailing for all
    other AEADs. Raises InvalidTag when the input is too short or
    authentication fails.
    """
    from cryptography.hazmat.primitives.ciphers.aead import AESCCM, AESSIV

    if len(data) < tag_length:
        raise InvalidTag

    if isinstance(cipher, AESSIV):
        # RFC 5297 defines the output as IV || C, where the tag we generate
        # is the "IV" and C is the ciphertext. This is the opposite of our
        # other AEADs, which are Ciphertext || Tag
        tag = data[:tag_length]
        data = data[tag_length:]
    else:
        tag = data[-tag_length:]
        data = data[:-tag_length]
    if ctx is None:
        cipher_name = _evp_cipher_cipher_name(cipher)
        ctx = _evp_cipher_aead_setup(
            backend,
            cipher_name,
            cipher._key,
            nonce,
            tag,
            tag_length,
            _DECRYPT,
        )
    else:
        # Reusing a context: refresh nonce/direction and the expected tag.
        _evp_cipher_set_nonce_operation(backend, ctx, nonce, _DECRYPT)
        _evp_cipher_set_tag(backend, ctx, tag)

    # CCM requires us to pass the length of the data before processing
    # anything.
    # However calling this with any other AEAD results in an error
    if isinstance(cipher, AESCCM):
        _evp_cipher_set_length(backend, ctx, len(data))

    for ad in associated_data:
        _evp_cipher_process_aad(backend, ctx, ad)

    # CCM has a different error path if the tag doesn't match. Errors are
    # raised in Update and Final is irrelevant.
    if isinstance(cipher, AESCCM):
        outlen = backend._ffi.new("int *")
        buf = backend._ffi.new("unsigned char[]", len(data))
        d_ptr = backend._ffi.from_buffer(data)
        res = backend._lib.EVP_CipherUpdate(ctx, buf, outlen, d_ptr, len(data))
        if res != 1:
            backend._consume_errors()
            raise InvalidTag

        processed_data = backend._ffi.buffer(buf, outlen[0])[:]
    else:
        processed_data = _evp_cipher_process_data(backend, ctx, data)
        outlen = backend._ffi.new("int *")
        # OCB can return up to 15 bytes (16 byte block - 1) in finalization
        buf = backend._ffi.new("unsigned char[]", 16)
        res = backend._lib.EVP_CipherFinal_ex(ctx, buf, outlen)
        processed_data += backend._ffi.buffer(buf, outlen[0])[:]
        if res == 0:
            backend._consume_errors()
            raise InvalidTag

    return processed_data

Some files were not shown because too many files have changed in this diff Show More