diff --git a/Backend/.env.example b/Backend/.env.example index 5aa5b0cd..a6c859cc 100644 --- a/Backend/.env.example +++ b/Backend/.env.example @@ -35,6 +35,14 @@ JWT_ALGORITHM=HS256 JWT_ACCESS_TOKEN_EXPIRE_MINUTES=30 JWT_REFRESH_TOKEN_EXPIRE_DAYS=7 +# ============================================ +# Encryption Configuration +# ============================================ +# Base64-encoded encryption key for data encryption at rest +# Generate a new key using: python3 -c "from cryptography.fernet import Fernet; print(Fernet.generate_key().decode())" +# If not set, a temporary key will be generated (not recommended for production) +ENCRYPTION_KEY= + # ============================================ # CORS & Client Configuration # ============================================ diff --git a/Backend/alembic/versions/__pycache__/add_group_booking_tables.cpython-312.pyc b/Backend/alembic/versions/__pycache__/add_group_booking_tables.cpython-312.pyc new file mode 100644 index 00000000..2403c8ae Binary files /dev/null and b/Backend/alembic/versions/__pycache__/add_group_booking_tables.cpython-312.pyc differ diff --git a/Backend/alembic/versions/__pycache__/add_guest_profile_crm_tables.cpython-312.pyc b/Backend/alembic/versions/__pycache__/add_guest_profile_crm_tables.cpython-312.pyc index 12987963..4e3be4d6 100644 Binary files a/Backend/alembic/versions/__pycache__/add_guest_profile_crm_tables.cpython-312.pyc and b/Backend/alembic/versions/__pycache__/add_guest_profile_crm_tables.cpython-312.pyc differ diff --git a/Backend/alembic/versions/__pycache__/add_rate_plan_id_to_bookings.cpython-312.pyc b/Backend/alembic/versions/__pycache__/add_rate_plan_id_to_bookings.cpython-312.pyc new file mode 100644 index 00000000..e6927cd9 Binary files /dev/null and b/Backend/alembic/versions/__pycache__/add_rate_plan_id_to_bookings.cpython-312.pyc differ diff --git a/Backend/alembic/versions/add_borica_payment_method.py b/Backend/alembic/versions/add_borica_payment_method.py new file mode 100644 index 00000000..8c5c9ade --- /dev/null +++ b/Backend/alembic/versions/add_borica_payment_method.py @@ -0,0 +1,20 @@ +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import mysql +revision = 'add_borica_payment_method' +down_revision = 'd9aff6c5f0d4' +branch_labels = None +depends_on = None + +def upgrade() -> None: + bind = op.get_bind() + if bind.dialect.name == 'mysql': + op.execute("ALTER TABLE payments MODIFY COLUMN payment_method ENUM('cash', 'credit_card', 'debit_card', 'bank_transfer', 'e_wallet', 'stripe', 'paypal', 'borica') NOT NULL") + else: + pass + +def downgrade() -> None: + bind = op.get_bind() + if bind.dialect.name == 'mysql': + op.execute("ALTER TABLE payments MODIFY COLUMN payment_method ENUM('cash', 'credit_card', 'debit_card', 'bank_transfer', 'e_wallet', 'stripe', 'paypal') NOT NULL") + diff --git a/Backend/alembic/versions/add_group_booking_tables.py b/Backend/alembic/versions/add_group_booking_tables.py new file mode 100644 index 00000000..dd2c0f72 --- /dev/null +++ b/Backend/alembic/versions/add_group_booking_tables.py @@ -0,0 +1,193 @@ +"""add group booking tables + +Revision ID: add_group_booking_001 +Revises: add_guest_profile_crm +Create Date: 2024-01-15 00:00:00.000000 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import mysql + +# revision identifiers, used by Alembic. +revision = 'add_group_booking_001' +down_revision = 'add_guest_profile_crm' +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # Create group_bookings table + op.create_table( + 'group_bookings', + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('group_booking_number', sa.String(length=50), nullable=False), + sa.Column('coordinator_id', sa.Integer(), nullable=False), + sa.Column('coordinator_name', sa.String(length=100), nullable=False), + sa.Column('coordinator_email', sa.String(length=100), nullable=False), + sa.Column('coordinator_phone', sa.String(length=20), nullable=True), + sa.Column('group_name', sa.String(length=200), nullable=True), + sa.Column('group_type', sa.String(length=50), nullable=True), + sa.Column('total_rooms', sa.Integer(), nullable=False, server_default='0'), + sa.Column('total_guests', sa.Integer(), nullable=False, server_default='0'), + sa.Column('check_in_date', sa.DateTime(), nullable=False), + sa.Column('check_out_date', sa.DateTime(), nullable=False), + sa.Column('base_rate_per_room', sa.Numeric(precision=10, scale=2), nullable=False), + sa.Column('group_discount_percentage', sa.Numeric(precision=5, scale=2), nullable=True, server_default='0'), + sa.Column('group_discount_amount', sa.Numeric(precision=10, scale=2), nullable=True, server_default='0'), + sa.Column('original_total_price', sa.Numeric(precision=10, scale=2), nullable=False), + sa.Column('discount_amount', sa.Numeric(precision=10, scale=2), nullable=True, server_default='0'), + sa.Column('total_price', sa.Numeric(precision=10, scale=2), nullable=False), + sa.Column('payment_option', sa.Enum('coordinator_pays_all', 'individual_payments', 'split_payment', name='paymentoption'), nullable=False, server_default='coordinator_pays_all'), + sa.Column('deposit_required', sa.Boolean(), nullable=False, server_default='0'), + sa.Column('deposit_percentage', sa.Integer(), nullable=True), + sa.Column('deposit_amount', sa.Numeric(precision=10, scale=2), nullable=True), + sa.Column('amount_paid', sa.Numeric(precision=10, scale=2), nullable=False, server_default='0'), + sa.Column('balance_due', sa.Numeric(precision=10, scale=2), nullable=False), + sa.Column('status', sa.Enum('draft', 'pending', 'confirmed', 'partially_confirmed', 'checked_in', 'checked_out', 'cancelled', name='groupbookingstatus'), nullable=False, server_default='draft'), + sa.Column('cancellation_policy', sa.Text(), nullable=True), + sa.Column('cancellation_deadline', sa.DateTime(), nullable=True), + sa.Column('cancellation_penalty_percentage', sa.Numeric(precision=5, scale=2), nullable=True, server_default='0'), + sa.Column('special_requests', sa.Text(), nullable=True), + sa.Column('notes', sa.Text(), nullable=True), + sa.Column('contract_terms', sa.Text(), nullable=True), + sa.Column('created_at', sa.DateTime(), nullable=False), + sa.Column('updated_at', sa.DateTime(), nullable=False), + sa.Column('confirmed_at', sa.DateTime(), nullable=True), + sa.Column('cancelled_at', sa.DateTime(), nullable=True), + sa.ForeignKeyConstraint(['coordinator_id'], ['users.id'], ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('group_booking_number') + ) + op.create_index(op.f('ix_group_bookings_id'), 'group_bookings', ['id'], unique=False) + op.create_index(op.f('ix_group_bookings_group_booking_number'), 'group_bookings', ['group_booking_number'], unique=True) + op.create_index(op.f('ix_group_bookings_coordinator_id'), 'group_bookings', ['coordinator_id'], unique=False) + op.create_index(op.f('ix_group_bookings_status'), 'group_bookings', ['status'], unique=False) + op.create_index(op.f('ix_group_bookings_check_in_date'), 'group_bookings', ['check_in_date'], unique=False) + op.create_index(op.f('ix_group_bookings_check_out_date'), 'group_bookings', ['check_out_date'], unique=False) + + # Create group_room_blocks table + op.create_table( + 'group_room_blocks', + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('group_booking_id', sa.Integer(), nullable=False), + sa.Column('room_type_id', sa.Integer(), nullable=False), + sa.Column('rooms_blocked', sa.Integer(), nullable=False, server_default='0'), + sa.Column('rooms_confirmed', sa.Integer(), nullable=False, server_default='0'), + sa.Column('rooms_available', sa.Integer(), nullable=False, server_default='0'), + sa.Column('rate_per_room', sa.Numeric(precision=10, scale=2), nullable=False), + sa.Column('total_block_price', sa.Numeric(precision=10, scale=2), nullable=False), + sa.Column('is_active', sa.Boolean(), nullable=False, server_default='1'), + sa.Column('block_released_at', sa.DateTime(), nullable=True), + sa.Column('created_at', sa.DateTime(), nullable=False), + sa.Column('updated_at', sa.DateTime(), nullable=False), + sa.ForeignKeyConstraint(['group_booking_id'], ['group_bookings.id'], ondelete='CASCADE'), + sa.ForeignKeyConstraint(['room_type_id'], ['room_types.id'], ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id') + ) + op.create_index(op.f('ix_group_room_blocks_id'), 'group_room_blocks', ['id'], unique=False) + op.create_index(op.f('ix_group_room_blocks_group_booking_id'), 'group_room_blocks', ['group_booking_id'], unique=False) + op.create_index(op.f('ix_group_room_blocks_room_type_id'), 'group_room_blocks', ['room_type_id'], unique=False) + + # Create group_booking_members table + op.create_table( + 'group_booking_members', + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('group_booking_id', sa.Integer(), nullable=False), + sa.Column('full_name', sa.String(length=100), nullable=False), + sa.Column('email', sa.String(length=100), nullable=True), + sa.Column('phone', sa.String(length=20), nullable=True), + sa.Column('user_id', sa.Integer(), nullable=True), + sa.Column('room_block_id', sa.Integer(), nullable=True), + sa.Column('assigned_room_id', sa.Integer(), nullable=True), + sa.Column('individual_booking_id', sa.Integer(), nullable=True), + sa.Column('special_requests', sa.Text(), nullable=True), + sa.Column('preferences', sa.JSON(), nullable=True), + sa.Column('individual_amount', sa.Numeric(precision=10, scale=2), nullable=True), + sa.Column('individual_paid', sa.Numeric(precision=10, scale=2), nullable=True, server_default='0'), + sa.Column('individual_balance', sa.Numeric(precision=10, scale=2), nullable=True, server_default='0'), + sa.Column('is_checked_in', sa.Boolean(), nullable=False, server_default='0'), + sa.Column('checked_in_at', sa.DateTime(), nullable=True), + sa.Column('is_checked_out', sa.Boolean(), nullable=False, server_default='0'), + sa.Column('checked_out_at', sa.DateTime(), nullable=True), + sa.Column('created_at', sa.DateTime(), nullable=False), + sa.Column('updated_at', sa.DateTime(), nullable=False), + sa.ForeignKeyConstraint(['group_booking_id'], ['group_bookings.id'], ondelete='CASCADE'), + sa.ForeignKeyConstraint(['user_id'], ['users.id'], ondelete='SET NULL'), + sa.ForeignKeyConstraint(['room_block_id'], ['group_room_blocks.id'], ondelete='SET NULL'), + sa.ForeignKeyConstraint(['assigned_room_id'], ['rooms.id'], ondelete='SET NULL'), + sa.ForeignKeyConstraint(['individual_booking_id'], ['bookings.id'], ondelete='SET NULL'), + sa.PrimaryKeyConstraint('id') + ) + op.create_index(op.f('ix_group_booking_members_id'), 'group_booking_members', ['id'], unique=False) + op.create_index(op.f('ix_group_booking_members_group_booking_id'), 'group_booking_members', ['group_booking_id'], unique=False) + op.create_index(op.f('ix_group_booking_members_user_id'), 'group_booking_members', ['user_id'], unique=False) + + # Create group_payments table + op.create_table( + 'group_payments', + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('group_booking_id', sa.Integer(), nullable=False), + sa.Column('amount', sa.Numeric(precision=10, scale=2), nullable=False), + sa.Column('payment_method', sa.String(length=50), nullable=False), + sa.Column('payment_type', sa.String(length=50), nullable=False, server_default='deposit'), + sa.Column('payment_status', sa.String(length=50), nullable=False, server_default='pending'), + sa.Column('transaction_id', sa.String(length=100), nullable=True), + sa.Column('payment_date', sa.DateTime(), nullable=True), + sa.Column('notes', sa.Text(), nullable=True), + sa.Column('paid_by_member_id', sa.Integer(), nullable=True), + sa.Column('paid_by_user_id', sa.Integer(), nullable=True), + sa.Column('created_at', sa.DateTime(), nullable=False), + sa.Column('updated_at', sa.DateTime(), nullable=False), + sa.ForeignKeyConstraint(['group_booking_id'], ['group_bookings.id'], ondelete='CASCADE'), + sa.ForeignKeyConstraint(['paid_by_member_id'], ['group_booking_members.id'], ondelete='SET NULL'), + sa.ForeignKeyConstraint(['paid_by_user_id'], ['users.id'], ondelete='SET NULL'), + sa.PrimaryKeyConstraint('id') + ) + op.create_index(op.f('ix_group_payments_id'), 'group_payments', ['id'], unique=False) + op.create_index(op.f('ix_group_payments_group_booking_id'), 'group_payments', ['group_booking_id'], unique=False) + op.create_index(op.f('ix_group_payments_payment_status'), 'group_payments', ['payment_status'], unique=False) + + # Add group_booking_id to bookings table + op.add_column('bookings', sa.Column('group_booking_id', sa.Integer(), nullable=True)) + op.create_foreign_key('fk_bookings_group_booking', 'bookings', 'group_bookings', ['group_booking_id'], ['id'], ondelete='SET NULL') + op.create_index(op.f('ix_bookings_group_booking_id'), 'bookings', ['group_booking_id'], unique=False) + + +def downgrade() -> None: + # Drop foreign key and column from bookings table + op.drop_index(op.f('ix_bookings_group_booking_id'), table_name='bookings') + op.drop_constraint('fk_bookings_group_booking', 'bookings', type_='foreignkey') + op.drop_column('bookings', 'group_booking_id') + + # Drop group_payments table + op.drop_index(op.f('ix_group_payments_payment_status'), table_name='group_payments') + op.drop_index(op.f('ix_group_payments_group_booking_id'), table_name='group_payments') + op.drop_index(op.f('ix_group_payments_id'), table_name='group_payments') + op.drop_table('group_payments') + + # Drop group_booking_members table + op.drop_index(op.f('ix_group_booking_members_user_id'), table_name='group_booking_members') + op.drop_index(op.f('ix_group_booking_members_group_booking_id'), table_name='group_booking_members') + op.drop_index(op.f('ix_group_booking_members_id'), table_name='group_booking_members') + op.drop_table('group_booking_members') + + # Drop group_room_blocks table + op.drop_index(op.f('ix_group_room_blocks_room_type_id'), table_name='group_room_blocks') + op.drop_index(op.f('ix_group_room_blocks_group_booking_id'), table_name='group_room_blocks') + op.drop_index(op.f('ix_group_room_blocks_id'), table_name='group_room_blocks') + op.drop_table('group_room_blocks') + + # Drop group_bookings table + op.drop_index(op.f('ix_group_bookings_check_out_date'), table_name='group_bookings') + op.drop_index(op.f('ix_group_bookings_check_in_date'), table_name='group_bookings') + op.drop_index(op.f('ix_group_bookings_status'), table_name='group_bookings') + op.drop_index(op.f('ix_group_bookings_coordinator_id'), table_name='group_bookings') + op.drop_index(op.f('ix_group_bookings_group_booking_number'), table_name='group_bookings') + op.drop_index(op.f('ix_group_bookings_id'), table_name='group_bookings') + op.drop_table('group_bookings') + + # Drop enums + op.execute("DROP TYPE IF EXISTS paymentoption") + op.execute("DROP TYPE IF EXISTS groupbookingstatus") + diff --git a/Backend/alembic/versions/add_rate_plan_id_to_bookings.py b/Backend/alembic/versions/add_rate_plan_id_to_bookings.py new file mode 100644 index 00000000..a416498b --- /dev/null +++ b/Backend/alembic/versions/add_rate_plan_id_to_bookings.py @@ -0,0 +1,30 @@ +"""add rate_plan_id to bookings + +Revision ID: add_rate_plan_id_001 +Revises: add_group_booking_001 +Create Date: 2024-01-20 00:00:00.000000 + +""" +from alembic import op +import sqlalchemy as sa + +# revision identifiers, used by Alembic. +revision = 'add_rate_plan_id_001' +down_revision = ('add_group_booking_001', 'add_loyalty_tables_001') +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # Add rate_plan_id column to bookings table + op.add_column('bookings', sa.Column('rate_plan_id', sa.Integer(), nullable=True)) + op.create_foreign_key('fk_bookings_rate_plan', 'bookings', 'rate_plans', ['rate_plan_id'], ['id'], ondelete='SET NULL') + op.create_index(op.f('ix_bookings_rate_plan_id'), 'bookings', ['rate_plan_id'], unique=False) + + +def downgrade() -> None: + # Drop foreign key, index, and column from bookings table + op.drop_index(op.f('ix_bookings_rate_plan_id'), table_name='bookings') + op.drop_constraint('fk_bookings_rate_plan', 'bookings', type_='foreignkey') + op.drop_column('bookings', 'rate_plan_id') + diff --git a/Backend/pytest.ini b/Backend/pytest.ini new file mode 100644 index 00000000..741d3416 --- /dev/null +++ b/Backend/pytest.ini @@ -0,0 +1,18 @@ +[pytest] +# Pytest configuration file +testpaths = src/tests +python_files = test_*.py +python_classes = Test* +python_functions = test_* +addopts = + -v + --strict-markers + --tb=short + --disable-warnings + --color=yes +markers = + slow: marks tests as slow (deselect with '-m "not slow"') + integration: marks tests as integration tests + unit: marks tests as unit tests +asyncio_mode = auto + diff --git a/Backend/requirements.txt b/Backend/requirements.txt index 7e719c73..fd03ba5a 100644 --- a/Backend/requirements.txt +++ b/Backend/requirements.txt @@ -21,6 +21,13 @@ paypal-checkout-serversdk>=1.0.3 pyotp==2.9.0 qrcode[pil]==7.4.2 httpx==0.25.2 +cryptography>=41.0.7 + +# Testing dependencies +pytest==7.4.3 +pytest-asyncio==0.21.1 +pytest-cov==4.1.0 +pytest-mock==3.12.0 # Enterprise features (optional but recommended) # redis==5.0.1 # Uncomment if using Redis caching diff --git a/Backend/run.py b/Backend/run.py index 7413afa3..210c489d 100644 --- a/Backend/run.py +++ b/Backend/run.py @@ -10,4 +10,4 @@ if __name__ == '__main__': base_dir = Path(__file__).parent src_dir = str(base_dir / 'src') use_reload = False - uvicorn.run('src.main:app', host=settings.HOST, port=8000, reload=use_reload, log_level=settings.LOG_LEVEL.lower(), reload_dirs=[src_dir] if use_reload else None, reload_excludes=['*.log', '*.pyc', '*.pyo', '*.pyd', '__pycache__', '**/__pycache__/**', '*.db', '*.sqlite', '*.sqlite3'], reload_delay=1.0) \ No newline at end of file + uvicorn.run('src.main:app', host=settings.HOST, port=settings.PORT, reload=use_reload, log_level=settings.LOG_LEVEL.lower(), reload_dirs=[src_dir] if use_reload else None, reload_excludes=['*.log', '*.pyc', '*.pyo', '*.pyd', '__pycache__', '**/__pycache__/**', '*.db', '*.sqlite', '*.sqlite3'], reload_delay=1.0) \ No newline at end of file diff --git a/Backend/src/__pycache__/main.cpython-312.pyc b/Backend/src/__pycache__/main.cpython-312.pyc index 10b06b58..cbbe5682 100644 Binary files a/Backend/src/__pycache__/main.cpython-312.pyc and b/Backend/src/__pycache__/main.cpython-312.pyc differ diff --git a/Backend/src/config/__pycache__/database.cpython-312.pyc b/Backend/src/config/__pycache__/database.cpython-312.pyc index 8bcd92ef..54f5dd6d 100644 Binary files a/Backend/src/config/__pycache__/database.cpython-312.pyc and b/Backend/src/config/__pycache__/database.cpython-312.pyc differ diff --git a/Backend/src/config/__pycache__/settings.cpython-312.pyc b/Backend/src/config/__pycache__/settings.cpython-312.pyc index 1a0c186b..3fb66bc8 100644 Binary files a/Backend/src/config/__pycache__/settings.cpython-312.pyc and b/Backend/src/config/__pycache__/settings.cpython-312.pyc differ diff --git a/Backend/src/config/settings.py b/Backend/src/config/settings.py index a5e00ed4..2f05da3b 100644 --- a/Backend/src/config/settings.py +++ b/Backend/src/config/settings.py @@ -21,6 +21,7 @@ class Settings(BaseSettings): JWT_ALGORITHM: str = Field(default='HS256', description='JWT algorithm') JWT_ACCESS_TOKEN_EXPIRE_MINUTES: int = Field(default=30, description='JWT access token expiration in minutes') JWT_REFRESH_TOKEN_EXPIRE_DAYS: int = Field(default=7, description='JWT refresh token expiration in days') + ENCRYPTION_KEY: str = Field(default='', description='Base64-encoded encryption key for data encryption at rest') CLIENT_URL: str = Field(default='http://localhost:5173', description='Frontend client URL') CORS_ORIGINS: List[str] = Field(default_factory=lambda: ['http://localhost:5173', 'http://localhost:3000', 'http://127.0.0.1:5173'], description='Allowed CORS origins') RATE_LIMIT_ENABLED: bool = Field(default=True, description='Enable rate limiting') @@ -51,6 +52,12 @@ class Settings(BaseSettings): PAYPAL_CLIENT_ID: str = Field(default='', description='PayPal client ID') PAYPAL_CLIENT_SECRET: str = Field(default='', description='PayPal client secret') PAYPAL_MODE: str = Field(default='sandbox', description='PayPal mode: sandbox or live') + BORICA_TERMINAL_ID: str = Field(default='', description='Borica Terminal ID') + BORICA_MERCHANT_ID: str = Field(default='', description='Borica Merchant ID') + BORICA_PRIVATE_KEY_PATH: str = Field(default='', description='Borica private key file path') + BORICA_CERTIFICATE_PATH: str = Field(default='', description='Borica certificate file path') + BORICA_GATEWAY_URL: str = Field(default='https://3dsgate-dev.borica.bg/cgi-bin/cgi_link', description='Borica gateway URL (test or production)') + BORICA_MODE: str = Field(default='test', description='Borica mode: test or production') @property def database_url(self) -> str: diff --git a/Backend/src/main.py b/Backend/src/main.py index ad038dff..920396c4 100644 --- a/Backend/src/main.py +++ b/Backend/src/main.py @@ -95,9 +95,10 @@ async def metrics(): return {'status': 'success', 'service': settings.APP_NAME, 'version': settings.APP_VERSION, 'environment': settings.ENVIRONMENT, 'timestamp': datetime.utcnow().isoformat()} app.include_router(auth_routes.router, prefix='/api') app.include_router(auth_routes.router, prefix=settings.API_V1_PREFIX) -from .routes import room_routes, booking_routes, payment_routes, invoice_routes, banner_routes, favorite_routes, service_routes, service_booking_routes, promotion_routes, report_routes, review_routes, user_routes, audit_routes, admin_privacy_routes, system_settings_routes, contact_routes, page_content_routes, home_routes, about_routes, contact_content_routes, footer_routes, chat_routes, privacy_routes, terms_routes, refunds_routes, cancellation_routes, accessibility_routes, faq_routes, loyalty_routes, guest_profile_routes +from .routes import room_routes, booking_routes, payment_routes, invoice_routes, banner_routes, favorite_routes, service_routes, service_booking_routes, promotion_routes, report_routes, review_routes, user_routes, audit_routes, admin_privacy_routes, system_settings_routes, contact_routes, page_content_routes, home_routes, about_routes, contact_content_routes, footer_routes, chat_routes, privacy_routes, terms_routes, refunds_routes, cancellation_routes, accessibility_routes, faq_routes, loyalty_routes, guest_profile_routes, analytics_routes, workflow_routes, task_routes, notification_routes, group_booking_routes, advanced_room_routes, rate_plan_routes, package_routes, security_routes, email_campaign_routes app.include_router(room_routes.router, prefix='/api') app.include_router(booking_routes.router, prefix='/api') +app.include_router(group_booking_routes.router, prefix='/api') app.include_router(payment_routes.router, prefix='/api') app.include_router(invoice_routes.router, prefix='/api') app.include_router(banner_routes.router, prefix='/api') @@ -125,6 +126,15 @@ app.include_router(faq_routes.router, prefix='/api') app.include_router(chat_routes.router, prefix='/api') app.include_router(loyalty_routes.router, prefix='/api') app.include_router(guest_profile_routes.router, prefix='/api') +app.include_router(analytics_routes.router, prefix='/api') +app.include_router(workflow_routes.router, prefix='/api') +app.include_router(task_routes.router, prefix='/api') +app.include_router(notification_routes.router, prefix='/api') +app.include_router(advanced_room_routes.router, prefix='/api') +app.include_router(rate_plan_routes.router, prefix='/api') +app.include_router(package_routes.router, prefix='/api') +app.include_router(security_routes.router, prefix='/api') +app.include_router(email_campaign_routes.router, prefix='/api') app.include_router(room_routes.router, prefix=settings.API_V1_PREFIX) app.include_router(booking_routes.router, prefix=settings.API_V1_PREFIX) app.include_router(payment_routes.router, prefix=settings.API_V1_PREFIX) @@ -154,6 +164,13 @@ app.include_router(faq_routes.router, prefix=settings.API_V1_PREFIX) app.include_router(chat_routes.router, prefix=settings.API_V1_PREFIX) app.include_router(loyalty_routes.router, prefix=settings.API_V1_PREFIX) app.include_router(guest_profile_routes.router, prefix=settings.API_V1_PREFIX) +app.include_router(analytics_routes.router, prefix=settings.API_V1_PREFIX) +app.include_router(workflow_routes.router, prefix=settings.API_V1_PREFIX) +app.include_router(task_routes.router, prefix=settings.API_V1_PREFIX) +app.include_router(notification_routes.router, prefix=settings.API_V1_PREFIX) +app.include_router(advanced_room_routes.router, prefix=settings.API_V1_PREFIX) +app.include_router(rate_plan_routes.router, prefix=settings.API_V1_PREFIX) +app.include_router(package_routes.router, prefix=settings.API_V1_PREFIX) app.include_router(page_content_routes.router, prefix='/api') app.include_router(page_content_routes.router, prefix=settings.API_V1_PREFIX) logger.info('All routes registered successfully') diff --git a/Backend/src/middleware/ip_whitelist.py b/Backend/src/middleware/ip_whitelist.py new file mode 100644 index 00000000..1f17cfd2 --- /dev/null +++ b/Backend/src/middleware/ip_whitelist.py @@ -0,0 +1,195 @@ +from fastapi import Request, HTTPException, status +from starlette.middleware.base import BaseHTTPMiddleware +from starlette.responses import JSONResponse +from sqlalchemy.orm import Session +from typing import List +from ..config.database import get_db +from ..config.logging_config import get_logger +from ..config.settings import settings +from ..models.security_event import IPWhitelist, IPBlacklist, SecurityEvent, SecurityEventType, SecurityEventSeverity +from datetime import datetime +import ipaddress + +logger = get_logger(__name__) + +class IPWhitelistMiddleware(BaseHTTPMiddleware): + """Middleware to enforce IP whitelisting and blacklisting""" + + def __init__(self, app, enabled: bool = True, whitelist_only: bool = False): + super().__init__(app) + self.enabled = enabled + self.whitelist_only = whitelist_only # If True, only whitelisted IPs allowed + + async def dispatch(self, request: Request, call_next): + if not self.enabled: + return await call_next(request) + + # Skip IP check for health checks and public endpoints + if request.url.path in ['/health', '/api/health', '/metrics']: + return await call_next(request) + + client_ip = self._get_client_ip(request) + + if not client_ip: + logger.warning("Could not determine client IP address") + return await call_next(request) + + # Check blacklist first + if await self._is_blacklisted(client_ip): + await self._log_security_event( + request, + SecurityEventType.ip_blocked, + SecurityEventSeverity.high, + f"Blocked request from blacklisted IP: {client_ip}" + ) + return JSONResponse( + status_code=status.HTTP_403_FORBIDDEN, + content={"status": "error", "message": "Access denied"} + ) + + # Check whitelist if whitelist_only mode is enabled + if self.whitelist_only: + if not await self._is_whitelisted(client_ip): + await self._log_security_event( + request, + SecurityEventType.permission_denied, + SecurityEventSeverity.medium, + f"Blocked request from non-whitelisted IP: {client_ip}" + ) + return JSONResponse( + status_code=status.HTTP_403_FORBIDDEN, + content={"status": "error", "message": "Access denied. IP not whitelisted."} + ) + + return await call_next(request) + + def _get_client_ip(self, request: Request) -> str: + """Extract client IP address from request""" + # Check for forwarded IP (when behind proxy/load balancer) + forwarded_for = request.headers.get("X-Forwarded-For") + if forwarded_for: + # X-Forwarded-For can contain multiple IPs, take the first one + return forwarded_for.split(",")[0].strip() + + real_ip = request.headers.get("X-Real-IP") + if real_ip: + return real_ip.strip() + + # Fallback to direct client IP + if request.client: + return request.client.host + + return None + + async def _is_blacklisted(self, ip_address: str) -> bool: + """Check if IP address is blacklisted""" + try: + db_gen = get_db() + db = next(db_gen) + try: + # Check exact match + blacklist_entry = db.query(IPBlacklist).filter( + IPBlacklist.ip_address == ip_address, + IPBlacklist.is_active == True + ).first() + + if blacklist_entry: + # Check if temporary block has expired + if blacklist_entry.blocked_until and blacklist_entry.blocked_until < datetime.utcnow(): + # Block expired, deactivate it + blacklist_entry.is_active = False + db.commit() + return False + return True + + # Check CIDR ranges (if needed) + # This is a simplified version - you might want to cache this + all_blacklist = db.query(IPBlacklist).filter( + IPBlacklist.is_active == True + ).all() + + for entry in all_blacklist: + try: + if '/' in entry.ip_address: # CIDR notation + network = ipaddress.ip_network(entry.ip_address, strict=False) + if ipaddress.ip_address(ip_address) in network: + return True + except (ValueError, ipaddress.AddressValueError): + continue + + return False + finally: + db.close() + except Exception as e: + logger.error(f"Error checking IP blacklist: {str(e)}") + return False + + async def _is_whitelisted(self, ip_address: str) -> bool: + """Check if IP address is whitelisted""" + try: + db_gen = get_db() + db = next(db_gen) + try: + # Check exact match + whitelist_entry = db.query(IPWhitelist).filter( + IPWhitelist.ip_address == ip_address, + IPWhitelist.is_active == True + ).first() + + if whitelist_entry: + return True + + # Check CIDR ranges + all_whitelist = db.query(IPWhitelist).filter( + IPWhitelist.is_active == True + ).all() + + for entry in all_whitelist: + try: + if '/' in entry.ip_address: # CIDR notation + network = ipaddress.ip_network(entry.ip_address, strict=False) + if ipaddress.ip_address(ip_address) in network: + return True + except (ValueError, ipaddress.AddressValueError): + continue + + return False + finally: + db.close() + except Exception as e: + logger.error(f"Error checking IP whitelist: {str(e)}") + return False + + async def _log_security_event( + self, + request: Request, + event_type: SecurityEventType, + severity: SecurityEventSeverity, + description: str + ): + """Log security event""" + try: + db_gen = get_db() + db = next(db_gen) + try: + client_ip = self._get_client_ip(request) + event = SecurityEvent( + event_type=event_type, + severity=severity, + ip_address=client_ip, + user_agent=request.headers.get("User-Agent"), + request_path=str(request.url.path), + request_method=request.method, + description=description, + details={ + "url": str(request.url), + "headers": dict(request.headers) + } + ) + db.add(event) + db.commit() + finally: + db.close() + except Exception as e: + logger.error(f"Error logging security event: {str(e)}") + diff --git a/Backend/src/models/__init__.py b/Backend/src/models/__init__.py index 50280bc0..426064f5 100644 --- a/Backend/src/models/__init__.py +++ b/Backend/src/models/__init__.py @@ -32,4 +32,16 @@ from .guest_note import GuestNote from .guest_tag import GuestTag, guest_tag_association from .guest_communication import GuestCommunication, CommunicationType, CommunicationDirection from .guest_segment import GuestSegment, guest_segment_association -__all__ = ['Role', 'User', 'RefreshToken', 'PasswordResetToken', 'RoomType', 'Room', 'Booking', 'Payment', 'Service', 'ServiceUsage', 'ServiceBooking', 'ServiceBookingItem', 'ServicePayment', 'ServiceBookingStatus', 'ServicePaymentStatus', 'ServicePaymentMethod', 'Promotion', 'CheckInCheckOut', 'Banner', 'Review', 'Favorite', 'AuditLog', 'CookiePolicy', 'CookieIntegrationConfig', 'SystemSettings', 'Invoice', 'InvoiceItem', 'PageContent', 'PageType', 'Chat', 'ChatMessage', 'ChatStatus', 'LoyaltyTier', 'TierLevel', 'UserLoyalty', 'LoyaltyPointTransaction', 'TransactionType', 'TransactionSource', 'LoyaltyReward', 'RewardType', 'RewardStatus', 'RewardRedemption', 'RedemptionStatus', 'Referral', 'ReferralStatus', 'GuestPreference', 'GuestNote', 'GuestTag', 'guest_tag_association', 'GuestCommunication', 'CommunicationType', 'CommunicationDirection', 'GuestSegment', 'guest_segment_association'] \ No newline at end of file +from .workflow import Workflow, WorkflowInstance, Task, TaskComment, WorkflowType, WorkflowStatus, WorkflowTrigger, TaskStatus, TaskPriority +from .notification import Notification, NotificationTemplate, NotificationPreference, NotificationDeliveryLog, NotificationChannel, NotificationStatus, NotificationType +from .group_booking import GroupBooking, GroupBookingMember, GroupRoomBlock, GroupPayment, GroupBookingStatus, PaymentOption +from .room_maintenance import RoomMaintenance, MaintenanceType, MaintenanceStatus +from .housekeeping_task import HousekeepingTask, HousekeepingStatus, HousekeepingType +from .room_inspection import RoomInspection, InspectionType, InspectionStatus +from .room_attribute import RoomAttribute +from .rate_plan import RatePlan, RatePlanRule, RatePlanType, RatePlanStatus +from .package import Package, PackageItem, PackageStatus, PackageItemType +from .security_event import SecurityEvent, SecurityEventType, SecurityEventSeverity, IPWhitelist, IPBlacklist, OAuthProvider, OAuthToken +from .gdpr_compliance import DataSubjectRequest, DataSubjectRequestType, DataSubjectRequestStatus, DataRetentionPolicy, ConsentRecord +from .email_campaign import Campaign, CampaignStatus, CampaignType, CampaignSegment, EmailTemplate, CampaignEmail, EmailStatus, EmailClick, DripSequence, DripSequenceStep, DripSequenceEnrollment, Unsubscribe +__all__ = ['Role', 'User', 'RefreshToken', 'PasswordResetToken', 'RoomType', 'Room', 'Booking', 'Payment', 'Service', 'ServiceUsage', 'ServiceBooking', 'ServiceBookingItem', 'ServicePayment', 'ServiceBookingStatus', 'ServicePaymentStatus', 'ServicePaymentMethod', 'Promotion', 'CheckInCheckOut', 'Banner', 'Review', 'Favorite', 'AuditLog', 'CookiePolicy', 'CookieIntegrationConfig', 'SystemSettings', 'Invoice', 'InvoiceItem', 'PageContent', 'PageType', 'Chat', 'ChatMessage', 'ChatStatus', 'LoyaltyTier', 'TierLevel', 'UserLoyalty', 'LoyaltyPointTransaction', 'TransactionType', 'TransactionSource', 'LoyaltyReward', 'RewardType', 'RewardStatus', 'RewardRedemption', 'RedemptionStatus', 'Referral', 'ReferralStatus', 'GuestPreference', 'GuestNote', 'GuestTag', 'guest_tag_association', 'GuestCommunication', 'CommunicationType', 'CommunicationDirection', 'GuestSegment', 'guest_segment_association', 'Workflow', 'WorkflowInstance', 'Task', 'TaskComment', 'WorkflowType', 'WorkflowStatus', 'WorkflowTrigger', 'TaskStatus', 'TaskPriority', 'Notification', 'NotificationTemplate', 'NotificationPreference', 'NotificationDeliveryLog', 'NotificationChannel', 'NotificationStatus', 'NotificationType', 'GroupBooking', 'GroupBookingMember', 'GroupRoomBlock', 'GroupPayment', 'GroupBookingStatus', 'PaymentOption', 'RoomMaintenance', 'MaintenanceType', 'MaintenanceStatus', 'HousekeepingTask', 'HousekeepingStatus', 'HousekeepingType', 'RoomInspection', 'InspectionType', 'InspectionStatus', 'RoomAttribute', 'RatePlan', 'RatePlanRule', 'RatePlanType', 'RatePlanStatus', 'Package', 'PackageItem', 'PackageStatus', 'PackageItemType', 'SecurityEvent', 'SecurityEventType', 'SecurityEventSeverity', 'IPWhitelist', 'IPBlacklist', 'OAuthProvider', 'OAuthToken', 'DataSubjectRequest', 'DataSubjectRequestType', 'DataSubjectRequestStatus', 'DataRetentionPolicy', 'ConsentRecord', 'Campaign', 'CampaignStatus', 'CampaignType', 'CampaignSegment', 'EmailTemplate', 'CampaignEmail', 'EmailStatus', 'EmailClick', 'DripSequence', 'DripSequenceStep', 'DripSequenceEnrollment', 'Unsubscribe'] \ No newline at end of file diff --git a/Backend/src/models/__pycache__/__init__.cpython-312.pyc b/Backend/src/models/__pycache__/__init__.cpython-312.pyc index 0f1848e2..597ac016 100644 Binary files a/Backend/src/models/__pycache__/__init__.cpython-312.pyc and b/Backend/src/models/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/src/models/__pycache__/booking.cpython-312.pyc b/Backend/src/models/__pycache__/booking.cpython-312.pyc index bb456b05..4ef81650 100644 Binary files a/Backend/src/models/__pycache__/booking.cpython-312.pyc and b/Backend/src/models/__pycache__/booking.cpython-312.pyc differ diff --git a/Backend/src/models/__pycache__/email_campaign.cpython-312.pyc b/Backend/src/models/__pycache__/email_campaign.cpython-312.pyc new file mode 100644 index 00000000..0bf935ee Binary files /dev/null and b/Backend/src/models/__pycache__/email_campaign.cpython-312.pyc differ diff --git a/Backend/src/models/__pycache__/gdpr_compliance.cpython-312.pyc b/Backend/src/models/__pycache__/gdpr_compliance.cpython-312.pyc new file mode 100644 index 00000000..4ea80806 Binary files /dev/null and b/Backend/src/models/__pycache__/gdpr_compliance.cpython-312.pyc differ diff --git a/Backend/src/models/__pycache__/group_booking.cpython-312.pyc b/Backend/src/models/__pycache__/group_booking.cpython-312.pyc new file mode 100644 index 00000000..c54dfbee Binary files /dev/null and b/Backend/src/models/__pycache__/group_booking.cpython-312.pyc differ diff --git a/Backend/src/models/__pycache__/housekeeping_task.cpython-312.pyc b/Backend/src/models/__pycache__/housekeeping_task.cpython-312.pyc new file mode 100644 index 00000000..07ceb05a Binary files /dev/null and b/Backend/src/models/__pycache__/housekeeping_task.cpython-312.pyc differ diff --git a/Backend/src/models/__pycache__/notification.cpython-312.pyc b/Backend/src/models/__pycache__/notification.cpython-312.pyc new file mode 100644 index 00000000..81a63fea Binary files /dev/null and b/Backend/src/models/__pycache__/notification.cpython-312.pyc differ diff --git a/Backend/src/models/__pycache__/package.cpython-312.pyc b/Backend/src/models/__pycache__/package.cpython-312.pyc new file mode 100644 index 00000000..066d8e78 Binary files /dev/null and b/Backend/src/models/__pycache__/package.cpython-312.pyc differ diff --git a/Backend/src/models/__pycache__/payment.cpython-312.pyc b/Backend/src/models/__pycache__/payment.cpython-312.pyc index fddd5b55..f30adc21 100644 Binary files a/Backend/src/models/__pycache__/payment.cpython-312.pyc and b/Backend/src/models/__pycache__/payment.cpython-312.pyc differ diff --git a/Backend/src/models/__pycache__/rate_plan.cpython-312.pyc b/Backend/src/models/__pycache__/rate_plan.cpython-312.pyc new file mode 100644 index 00000000..42d44f9c Binary files /dev/null and b/Backend/src/models/__pycache__/rate_plan.cpython-312.pyc differ diff --git a/Backend/src/models/__pycache__/room.cpython-312.pyc b/Backend/src/models/__pycache__/room.cpython-312.pyc index bbcffa21..b69d4ed6 100644 Binary files a/Backend/src/models/__pycache__/room.cpython-312.pyc and b/Backend/src/models/__pycache__/room.cpython-312.pyc differ diff --git a/Backend/src/models/__pycache__/room_attribute.cpython-312.pyc b/Backend/src/models/__pycache__/room_attribute.cpython-312.pyc new file mode 100644 index 00000000..3f77515c Binary files /dev/null and b/Backend/src/models/__pycache__/room_attribute.cpython-312.pyc differ diff --git a/Backend/src/models/__pycache__/room_inspection.cpython-312.pyc b/Backend/src/models/__pycache__/room_inspection.cpython-312.pyc new file mode 100644 index 00000000..8e564bc9 Binary files /dev/null and b/Backend/src/models/__pycache__/room_inspection.cpython-312.pyc differ diff --git a/Backend/src/models/__pycache__/room_maintenance.cpython-312.pyc b/Backend/src/models/__pycache__/room_maintenance.cpython-312.pyc new file mode 100644 index 00000000..16eda3bc Binary files /dev/null and b/Backend/src/models/__pycache__/room_maintenance.cpython-312.pyc differ diff --git a/Backend/src/models/__pycache__/room_type.cpython-312.pyc b/Backend/src/models/__pycache__/room_type.cpython-312.pyc index 40fcfa38..2b07212d 100644 Binary files a/Backend/src/models/__pycache__/room_type.cpython-312.pyc and b/Backend/src/models/__pycache__/room_type.cpython-312.pyc differ diff --git a/Backend/src/models/__pycache__/security_event.cpython-312.pyc b/Backend/src/models/__pycache__/security_event.cpython-312.pyc new file mode 100644 index 00000000..1881fb7f Binary files /dev/null and b/Backend/src/models/__pycache__/security_event.cpython-312.pyc differ diff --git a/Backend/src/models/__pycache__/workflow.cpython-312.pyc b/Backend/src/models/__pycache__/workflow.cpython-312.pyc new file mode 100644 index 00000000..93bd0ee9 Binary files /dev/null and b/Backend/src/models/__pycache__/workflow.cpython-312.pyc differ diff --git a/Backend/src/models/booking.py b/Backend/src/models/booking.py index e36d1127..ba60fa6d 100644 --- a/Backend/src/models/booking.py +++ b/Backend/src/models/booking.py @@ -35,4 +35,8 @@ class Booking(Base): payments = relationship('Payment', back_populates='booking', cascade='all, delete-orphan') invoices = relationship('Invoice', back_populates='booking', cascade='all, delete-orphan') service_usages = relationship('ServiceUsage', back_populates='booking', cascade='all, delete-orphan') - checkin_checkout = relationship('CheckInCheckOut', back_populates='booking', uselist=False) \ No newline at end of file + checkin_checkout = relationship('CheckInCheckOut', back_populates='booking', uselist=False) + group_booking_id = Column(Integer, ForeignKey('group_bookings.id'), nullable=True) + group_booking = relationship('GroupBooking', back_populates='individual_bookings') + rate_plan_id = Column(Integer, ForeignKey('rate_plans.id'), nullable=True) + rate_plan = relationship('RatePlan', back_populates='bookings') \ No newline at end of file diff --git a/Backend/src/models/email_campaign.py b/Backend/src/models/email_campaign.py new file mode 100644 index 00000000..f0367ea6 --- /dev/null +++ b/Backend/src/models/email_campaign.py @@ -0,0 +1,285 @@ +from sqlalchemy import Column, Integer, String, Text, DateTime, ForeignKey, JSON, Enum, Boolean, Numeric +from sqlalchemy.orm import relationship +from datetime import datetime +import enum +from ..config.database import Base + +class CampaignStatus(str, enum.Enum): + draft = 'draft' + scheduled = 'scheduled' + sending = 'sending' + sent = 'sent' + paused = 'paused' + cancelled = 'cancelled' + +class CampaignType(str, enum.Enum): + newsletter = 'newsletter' + promotional = 'promotional' + transactional = 'transactional' + abandoned_booking = 'abandoned_booking' + welcome = 'welcome' + drip = 'drip' + custom = 'custom' + +class EmailStatus(str, enum.Enum): + pending = 'pending' + sent = 'sent' + delivered = 'delivered' + opened = 'opened' + clicked = 'clicked' + bounced = 'bounced' + failed = 'failed' + unsubscribed = 'unsubscribed' + +class Campaign(Base): + __tablename__ = 'email_campaigns' + + id = Column(Integer, primary_key=True, index=True, autoincrement=True) + name = Column(String(200), nullable=False, index=True) + subject = Column(String(500), nullable=False) + campaign_type = Column(Enum(CampaignType), nullable=False, default=CampaignType.newsletter) + status = Column(Enum(CampaignStatus), nullable=False, default=CampaignStatus.draft, index=True) + + # Content + html_content = Column(Text, nullable=True) + text_content = Column(Text, nullable=True) + template_id = Column(Integer, ForeignKey('email_templates.id'), nullable=True) + + # Scheduling + scheduled_at = Column(DateTime, nullable=True, index=True) + sent_at = Column(DateTime, nullable=True) + + # Segmentation + segment_id = Column(Integer, ForeignKey('campaign_segments.id'), nullable=True) + segment_criteria = Column(JSON, nullable=True) # Store segment criteria as JSON + + # A/B Testing + is_ab_test = Column(Boolean, nullable=False, default=False) + ab_test_variant_a_id = Column(Integer, ForeignKey('email_campaigns.id'), nullable=True) + ab_test_variant_b_id = Column(Integer, ForeignKey('email_campaigns.id'), nullable=True) + ab_test_split_percentage = Column(Integer, nullable=True, default=50) # Percentage for variant A + ab_test_winner = Column(String(1), nullable=True) # 'A' or 'B' + + # Drip Campaign + is_drip = Column(Boolean, nullable=False, default=False) + drip_sequence_id = Column(Integer, ForeignKey('drip_sequences.id'), nullable=True) + drip_delay_days = Column(Integer, nullable=True) # Days to wait before sending + + # Analytics + total_recipients = Column(Integer, nullable=False, default=0) + total_sent = Column(Integer, nullable=False, default=0) + total_delivered = Column(Integer, nullable=False, default=0) + total_opened = Column(Integer, nullable=False, default=0) + total_clicked = Column(Integer, nullable=False, default=0) + total_bounced = Column(Integer, nullable=False, default=0) + total_unsubscribed = Column(Integer, nullable=False, default=0) + + # Metrics (calculated) + open_rate = Column(Numeric(5, 2), nullable=True) # Percentage + click_rate = Column(Numeric(5, 2), nullable=True) # Percentage + bounce_rate = Column(Numeric(5, 2), nullable=True) # Percentage + + # Settings + from_name = Column(String(200), nullable=True) + from_email = Column(String(255), nullable=True) + reply_to_email = Column(String(255), nullable=True) + track_opens = Column(Boolean, nullable=False, default=True) + track_clicks = Column(Boolean, nullable=False, default=True) + + # Metadata + created_by = Column(Integer, ForeignKey('users.id'), nullable=True) + created_at = Column(DateTime, default=datetime.utcnow, nullable=False, index=True) + updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False) + + # Relationships + template = relationship('EmailTemplate', foreign_keys=[template_id]) + segment = relationship('CampaignSegment', foreign_keys=[segment_id]) + variant_a = relationship('Campaign', foreign_keys=[ab_test_variant_a_id], remote_side=[id]) + variant_b = relationship('Campaign', foreign_keys=[ab_test_variant_b_id], remote_side=[id]) + creator = relationship('User', foreign_keys=[created_by]) + emails = relationship('CampaignEmail', back_populates='campaign', cascade='all, delete-orphan') + drip_sequence = relationship('DripSequence', foreign_keys=[drip_sequence_id]) + +class CampaignSegment(Base): + __tablename__ = 'campaign_segments' + + id = Column(Integer, primary_key=True, index=True, autoincrement=True) + name = Column(String(200), nullable=False, index=True) + description = Column(Text, nullable=True) + + # Segment criteria (stored as JSON for flexibility) + criteria = Column(JSON, nullable=False) # e.g., {"role": "customer", "last_booking_days": 30} + + # Estimated count + estimated_count = Column(Integer, nullable=True) + last_calculated_at = Column(DateTime, nullable=True) + + is_active = Column(Boolean, nullable=False, default=True) + created_by = Column(Integer, ForeignKey('users.id'), nullable=True) + created_at = Column(DateTime, default=datetime.utcnow, nullable=False) + updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False) + + # Relationships + creator = relationship('User', foreign_keys=[created_by]) + +class EmailTemplate(Base): + __tablename__ = 'email_templates' + + id = Column(Integer, primary_key=True, index=True, autoincrement=True) + name = Column(String(200), nullable=False, index=True) + subject = Column(String(500), nullable=False) + html_content = Column(Text, nullable=False) + text_content = Column(Text, nullable=True) + + # Template variables (e.g., {{name}}, {{booking_number}}) + variables = Column(JSON, nullable=True) # List of available variables + + # Category + category = Column(String(100), nullable=True, index=True) # 'newsletter', 'transactional', etc. + + is_active = Column(Boolean, nullable=False, default=True) + is_system = Column(Boolean, nullable=False, default=False) # System templates can't be deleted + + created_by = Column(Integer, ForeignKey('users.id'), nullable=True) + created_at = Column(DateTime, default=datetime.utcnow, nullable=False) + updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False) + + # Relationships + creator = relationship('User', foreign_keys=[created_by]) + +class CampaignEmail(Base): + __tablename__ = 'campaign_emails' + + id = Column(Integer, primary_key=True, index=True, autoincrement=True) + campaign_id = Column(Integer, ForeignKey('email_campaigns.id'), nullable=False, index=True) + user_id = Column(Integer, ForeignKey('users.id'), nullable=True, index=True) + email = Column(String(255), nullable=False, index=True) + + # Status tracking + status = Column(Enum(EmailStatus), nullable=False, default=EmailStatus.pending, index=True) + sent_at = Column(DateTime, nullable=True) + delivered_at = Column(DateTime, nullable=True) + opened_at = Column(DateTime, nullable=True) + clicked_at = Column(DateTime, nullable=True) + bounced_at = Column(DateTime, nullable=True) + unsubscribed_at = Column(DateTime, nullable=True) + + # Tracking + open_count = Column(Integer, nullable=False, default=0) + click_count = Column(Integer, nullable=False, default=0) + last_opened_at = Column(DateTime, nullable=True) + last_clicked_at = Column(DateTime, nullable=True) + + # A/B Test tracking + ab_test_variant = Column(String(1), nullable=True) # 'A' or 'B' + + # Error tracking + error_message = Column(Text, nullable=True) + + created_at = Column(DateTime, default=datetime.utcnow, nullable=False, index=True) + + # Relationships + campaign = relationship('Campaign', back_populates='emails') + user = relationship('User') + +class EmailClick(Base): + __tablename__ = 'email_clicks' + + id = Column(Integer, primary_key=True, index=True, autoincrement=True) + campaign_email_id = Column(Integer, ForeignKey('campaign_emails.id'), nullable=False, index=True) + url = Column(String(1000), nullable=False) + clicked_at = Column(DateTime, default=datetime.utcnow, nullable=False, index=True) + ip_address = Column(String(45), nullable=True) + user_agent = Column(String(500), nullable=True) + + # Relationships + campaign_email = relationship('CampaignEmail') + +class DripSequence(Base): + __tablename__ = 'drip_sequences' + + id = Column(Integer, primary_key=True, index=True, autoincrement=True) + name = Column(String(200), nullable=False, index=True) + description = Column(Text, nullable=True) + trigger_event = Column(String(100), nullable=True) # 'booking_created', 'checkout_abandoned', etc. + + is_active = Column(Boolean, nullable=False, default=True) + created_by = Column(Integer, ForeignKey('users.id'), nullable=True) + created_at = Column(DateTime, default=datetime.utcnow, nullable=False) + updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False) + + # Relationships + creator = relationship('User', foreign_keys=[created_by]) + steps = relationship('DripSequenceStep', back_populates='sequence', cascade='all, delete-orphan', order_by='DripSequenceStep.step_order') + campaigns = relationship('Campaign', foreign_keys=[Campaign.drip_sequence_id], overlaps="drip_sequence") + +class DripSequenceStep(Base): + __tablename__ = 'drip_sequence_steps' + + id = Column(Integer, primary_key=True, index=True, autoincrement=True) + sequence_id = Column(Integer, ForeignKey('drip_sequences.id'), nullable=False, index=True) + step_order = Column(Integer, nullable=False) # Order in the sequence + + # Email content + subject = Column(String(500), nullable=False) + html_content = Column(Text, nullable=False) + text_content = Column(Text, nullable=True) + template_id = Column(Integer, ForeignKey('email_templates.id'), nullable=True) + + # Timing + delay_days = Column(Integer, nullable=False, default=0) # Days to wait after previous step + delay_hours = Column(Integer, nullable=False, default=0) # Additional hours + + # Conditions (optional - skip this step if conditions not met) + conditions = Column(JSON, nullable=True) + + is_active = Column(Boolean, nullable=False, default=True) + created_at = Column(DateTime, default=datetime.utcnow, nullable=False) + updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False) + + # Relationships + sequence = relationship('DripSequence', back_populates='steps') + template = relationship('EmailTemplate', foreign_keys=[template_id]) + +class DripSequenceEnrollment(Base): + __tablename__ = 'drip_sequence_enrollments' + + id = Column(Integer, primary_key=True, index=True, autoincrement=True) + sequence_id = Column(Integer, ForeignKey('drip_sequences.id'), nullable=False, index=True) + user_id = Column(Integer, ForeignKey('users.id'), nullable=False, index=True) + + # Progress tracking + current_step = Column(Integer, nullable=False, default=0) + next_send_at = Column(DateTime, nullable=True, index=True) + completed = Column(Boolean, nullable=False, default=False) + completed_at = Column(DateTime, nullable=True) + + # Trigger context + trigger_data = Column(JSON, nullable=True) # Store context about what triggered enrollment + + enrolled_at = Column(DateTime, default=datetime.utcnow, nullable=False, index=True) + updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False) + + # Relationships + sequence = relationship('DripSequence') + user = relationship('User') + +class Unsubscribe(Base): + __tablename__ = 'email_unsubscribes' + + id = Column(Integer, primary_key=True, index=True, autoincrement=True) + email = Column(String(255), nullable=False, index=True) + user_id = Column(Integer, ForeignKey('users.id'), nullable=True, index=True) + campaign_id = Column(Integer, ForeignKey('email_campaigns.id'), nullable=True) + + # Unsubscribe type + unsubscribe_all = Column(Boolean, nullable=False, default=False) # True = all emails, False = specific campaign + unsubscribe_type = Column(String(50), nullable=True) # 'newsletter', 'promotional', etc. + + reason = Column(Text, nullable=True) + unsubscribed_at = Column(DateTime, default=datetime.utcnow, nullable=False, index=True) + + # Relationships + user = relationship('User') + campaign = relationship('Campaign') + diff --git a/Backend/src/models/gdpr_compliance.py b/Backend/src/models/gdpr_compliance.py new file mode 100644 index 00000000..ccac1314 --- /dev/null +++ b/Backend/src/models/gdpr_compliance.py @@ -0,0 +1,87 @@ +from sqlalchemy import Column, Integer, String, Text, DateTime, ForeignKey, JSON, Enum, Boolean +from sqlalchemy.orm import relationship +from datetime import datetime +import enum +from ..config.database import Base + +class DataSubjectRequestType(str, enum.Enum): + access = 'access' # Right to access + rectification = 'rectification' # Right to rectification + erasure = 'erasure' # Right to erasure (right to be forgotten) + portability = 'portability' # Right to data portability + restriction = 'restriction' # Right to restriction of processing + objection = 'objection' # Right to object + +class DataSubjectRequestStatus(str, enum.Enum): + pending = 'pending' + in_progress = 'in_progress' + completed = 'completed' + rejected = 'rejected' + cancelled = 'cancelled' + +class DataSubjectRequest(Base): + __tablename__ = 'data_subject_requests' + + id = Column(Integer, primary_key=True, index=True, autoincrement=True) + user_id = Column(Integer, ForeignKey('users.id'), nullable=True, index=True) + email = Column(String(255), nullable=False, index=True) + request_type = Column(Enum(DataSubjectRequestType), nullable=False, index=True) + status = Column(Enum(DataSubjectRequestStatus), nullable=False, default=DataSubjectRequestStatus.pending, index=True) + + # Request details + description = Column(Text, nullable=True) + verification_token = Column(String(100), nullable=True, unique=True, index=True) + verified = Column(Boolean, nullable=False, default=False) + verified_at = Column(DateTime, nullable=True) + + # Processing + assigned_to = Column(Integer, ForeignKey('users.id'), nullable=True) + notes = Column(Text, nullable=True) + response_data = Column(JSON, nullable=True) # For access requests, store the data + + # Completion + completed_at = Column(DateTime, nullable=True) + completed_by = Column(Integer, ForeignKey('users.id'), nullable=True) + + # Metadata + ip_address = Column(String(45), nullable=True) + user_agent = Column(String(500), nullable=True) + + created_at = Column(DateTime, default=datetime.utcnow, nullable=False, index=True) + updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False) + + # Relationships + user = Column(Integer, ForeignKey('users.id'), nullable=True) + assignee = relationship('User', foreign_keys=[assigned_to]) + completer = relationship('User', foreign_keys=[completed_by]) + +class DataRetentionPolicy(Base): + __tablename__ = 'data_retention_policies' + + id = Column(Integer, primary_key=True, index=True, autoincrement=True) + data_type = Column(String(100), nullable=False, unique=True) # e.g., 'user_data', 'booking_data', 'payment_data' + retention_days = Column(Integer, nullable=False) # Days to retain data + auto_delete = Column(Boolean, nullable=False, default=False) + description = Column(Text, nullable=True) + is_active = Column(Boolean, nullable=False, default=True) + created_at = Column(DateTime, default=datetime.utcnow, nullable=False) + updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False) + +class ConsentRecord(Base): + __tablename__ = 'consent_records' + + id = Column(Integer, primary_key=True, index=True, autoincrement=True) + user_id = Column(Integer, ForeignKey('users.id'), nullable=False, index=True) + consent_type = Column(String(100), nullable=False, index=True) # 'marketing', 'analytics', 'cookies', etc. + granted = Column(Boolean, nullable=False, default=False) + granted_at = Column(DateTime, nullable=True) + revoked_at = Column(DateTime, nullable=True) + ip_address = Column(String(45), nullable=True) + user_agent = Column(String(500), nullable=True) + version = Column(String(50), nullable=True) # Policy version when consent was given + created_at = Column(DateTime, default=datetime.utcnow, nullable=False) + updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False) + + # Relationships + user = relationship('User') + diff --git a/Backend/src/models/group_booking.py b/Backend/src/models/group_booking.py new file mode 100644 index 00000000..fd8fa6e0 --- /dev/null +++ b/Backend/src/models/group_booking.py @@ -0,0 +1,183 @@ +from sqlalchemy import Column, Integer, String, DateTime, Numeric, Boolean, Text, Enum, ForeignKey, JSON +from sqlalchemy.orm import relationship +from datetime import datetime +import enum +from ..config.database import Base + +class GroupBookingStatus(str, enum.Enum): + draft = 'draft' + pending = 'pending' + confirmed = 'confirmed' + partially_confirmed = 'partially_confirmed' + checked_in = 'checked_in' + checked_out = 'checked_out' + cancelled = 'cancelled' + +class PaymentOption(str, enum.Enum): + coordinator_pays_all = 'coordinator_pays_all' + individual_payments = 'individual_payments' + split_payment = 'split_payment' + +class GroupBooking(Base): + __tablename__ = 'group_bookings' + + id = Column(Integer, primary_key=True, index=True, autoincrement=True) + group_booking_number = Column(String(50), unique=True, nullable=False, index=True) + + # Coordinator information + coordinator_id = Column(Integer, ForeignKey('users.id'), nullable=False) + coordinator_name = Column(String(100), nullable=False) + coordinator_email = Column(String(100), nullable=False) + coordinator_phone = Column(String(20), nullable=True) + + # Group details + group_name = Column(String(200), nullable=True) + group_type = Column(String(50), nullable=True) # corporate, wedding, conference, etc. + total_rooms = Column(Integer, nullable=False, default=0) + total_guests = Column(Integer, nullable=False, default=0) + + # Dates + check_in_date = Column(DateTime, nullable=False) + check_out_date = Column(DateTime, nullable=False) + + # Pricing + base_rate_per_room = Column(Numeric(10, 2), nullable=False) + group_discount_percentage = Column(Numeric(5, 2), nullable=True, default=0) + group_discount_amount = Column(Numeric(10, 2), nullable=True, default=0) + original_total_price = Column(Numeric(10, 2), nullable=False) + discount_amount = Column(Numeric(10, 2), nullable=True, default=0) + total_price = Column(Numeric(10, 2), nullable=False) + + # Payment + payment_option = Column(Enum(PaymentOption), nullable=False, default=PaymentOption.coordinator_pays_all) + deposit_required = Column(Boolean, nullable=False, default=False) + deposit_percentage = Column(Integer, nullable=True) + deposit_amount = Column(Numeric(10, 2), nullable=True) + amount_paid = Column(Numeric(10, 2), nullable=False, default=0) + balance_due = Column(Numeric(10, 2), nullable=False) + + # Status and policies + status = Column(Enum(GroupBookingStatus), nullable=False, default=GroupBookingStatus.draft) + cancellation_policy = Column(Text, nullable=True) + cancellation_deadline = Column(DateTime, nullable=True) + cancellation_penalty_percentage = Column(Numeric(5, 2), nullable=True, default=0) + + # Additional information + special_requests = Column(Text, nullable=True) + notes = Column(Text, nullable=True) + contract_terms = Column(Text, nullable=True) + + # Metadata + created_at = Column(DateTime, default=datetime.utcnow, nullable=False) + updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False) + confirmed_at = Column(DateTime, nullable=True) + cancelled_at = Column(DateTime, nullable=True) + + # Relationships + coordinator = relationship('User', foreign_keys=[coordinator_id]) + room_blocks = relationship('GroupRoomBlock', back_populates='group_booking', cascade='all, delete-orphan') + members = relationship('GroupBookingMember', back_populates='group_booking', cascade='all, delete-orphan') + individual_bookings = relationship('Booking', back_populates='group_booking', cascade='all, delete-orphan') + payments = relationship('GroupPayment', back_populates='group_booking', cascade='all, delete-orphan') + +class GroupRoomBlock(Base): + __tablename__ = 'group_room_blocks' + + id = Column(Integer, primary_key=True, index=True, autoincrement=True) + group_booking_id = Column(Integer, ForeignKey('group_bookings.id'), nullable=False) + room_type_id = Column(Integer, ForeignKey('room_types.id'), nullable=False) + + # Blocking details + rooms_blocked = Column(Integer, nullable=False, default=0) + rooms_confirmed = Column(Integer, nullable=False, default=0) + rooms_available = Column(Integer, nullable=False, default=0) + + # Pricing + rate_per_room = Column(Numeric(10, 2), nullable=False) + total_block_price = Column(Numeric(10, 2), nullable=False) + + # Status + is_active = Column(Boolean, nullable=False, default=True) + block_released_at = Column(DateTime, nullable=True) + + # Metadata + created_at = Column(DateTime, default=datetime.utcnow, nullable=False) + updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False) + + # Relationships + group_booking = relationship('GroupBooking', back_populates='room_blocks') + room_type = relationship('RoomType') + +class GroupBookingMember(Base): + __tablename__ = 'group_booking_members' + + id = Column(Integer, primary_key=True, index=True, autoincrement=True) + group_booking_id = Column(Integer, ForeignKey('group_bookings.id'), nullable=False) + + # Guest information + full_name = Column(String(100), nullable=False) + email = Column(String(100), nullable=True) + phone = Column(String(20), nullable=True) + user_id = Column(Integer, ForeignKey('users.id'), nullable=True) # If member is a registered user + + # Room assignment + room_block_id = Column(Integer, ForeignKey('group_room_blocks.id'), nullable=True) + assigned_room_id = Column(Integer, ForeignKey('rooms.id'), nullable=True) + individual_booking_id = Column(Integer, ForeignKey('bookings.id'), nullable=True) + + # Guest preferences + special_requests = Column(Text, nullable=True) + preferences = Column(JSON, nullable=True) # Store preferences as JSON + + # Payment (if individual payment option) + individual_amount = Column(Numeric(10, 2), nullable=True) + individual_paid = Column(Numeric(10, 2), nullable=True, default=0) + individual_balance = Column(Numeric(10, 2), nullable=True, default=0) + + # Status + is_checked_in = Column(Boolean, nullable=False, default=False) + checked_in_at = Column(DateTime, nullable=True) + is_checked_out = Column(Boolean, nullable=False, default=False) + checked_out_at = Column(DateTime, nullable=True) + + # Metadata + created_at = Column(DateTime, default=datetime.utcnow, nullable=False) + updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False) + + # Relationships + group_booking = relationship('GroupBooking', back_populates='members') + user = relationship('User', foreign_keys=[user_id]) + room_block = relationship('GroupRoomBlock') + assigned_room = relationship('Room', foreign_keys=[assigned_room_id]) + individual_booking = relationship('Booking', foreign_keys=[individual_booking_id]) + +class GroupPayment(Base): + __tablename__ = 'group_payments' + + id = Column(Integer, primary_key=True, index=True, autoincrement=True) + group_booking_id = Column(Integer, ForeignKey('group_bookings.id'), nullable=False) + + # Payment details + amount = Column(Numeric(10, 2), nullable=False) + payment_method = Column(String(50), nullable=False) # Using same PaymentMethod enum values + payment_type = Column(String(50), nullable=False, default='deposit') # deposit, full, remaining + payment_status = Column(String(50), nullable=False, default='pending') # pending, completed, failed, refunded + + # Transaction details + transaction_id = Column(String(100), nullable=True) + payment_date = Column(DateTime, nullable=True) + notes = Column(Text, nullable=True) + + # Payer information (if individual payment) + paid_by_member_id = Column(Integer, ForeignKey('group_booking_members.id'), nullable=True) + paid_by_user_id = Column(Integer, ForeignKey('users.id'), nullable=True) + + # Metadata + created_at = Column(DateTime, default=datetime.utcnow, nullable=False) + updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False) + + # Relationships + group_booking = relationship('GroupBooking', back_populates='payments') + paid_by_member = relationship('GroupBookingMember', foreign_keys=[paid_by_member_id]) + paid_by_user = relationship('User', foreign_keys=[paid_by_user_id]) + diff --git a/Backend/src/models/housekeeping_task.py b/Backend/src/models/housekeeping_task.py new file mode 100644 index 00000000..5b823e25 --- /dev/null +++ b/Backend/src/models/housekeeping_task.py @@ -0,0 +1,64 @@ +from sqlalchemy import Column, Integer, String, Text, Enum, ForeignKey, DateTime, Boolean, JSON +from sqlalchemy.orm import relationship +from datetime import datetime +import enum +from ..config.database import Base + +class HousekeepingStatus(str, enum.Enum): + pending = 'pending' + in_progress = 'in_progress' + completed = 'completed' + skipped = 'skipped' + cancelled = 'cancelled' + +class HousekeepingType(str, enum.Enum): + checkout = 'checkout' # Deep cleaning after checkout + stayover = 'stayover' # Daily cleaning for occupied rooms + vacant = 'vacant' # Cleaning for vacant rooms + inspection = 'inspection' # Pre-check-in inspection + turndown = 'turndown' # Evening turndown service + +class HousekeepingTask(Base): + __tablename__ = 'housekeeping_tasks' + + id = Column(Integer, primary_key=True, index=True, autoincrement=True) + room_id = Column(Integer, ForeignKey('rooms.id'), nullable=False, index=True) + booking_id = Column(Integer, ForeignKey('bookings.id'), nullable=True, index=True) + + task_type = Column(Enum(HousekeepingType), nullable=False) + status = Column(Enum(HousekeepingStatus), nullable=False, default=HousekeepingStatus.pending) + + # Scheduling + scheduled_time = Column(DateTime, nullable=False, index=True) + started_at = Column(DateTime, nullable=True) + completed_at = Column(DateTime, nullable=True) + + # Assignment + assigned_to = Column(Integer, ForeignKey('users.id'), nullable=True) + created_by = Column(Integer, ForeignKey('users.id'), nullable=True) + + # Task details + checklist_items = Column(JSON, nullable=True) # Array of {item: string, completed: bool, notes: string} + notes = Column(Text, nullable=True) + issues_found = Column(Text, nullable=True) + + # Quality control + inspected_by = Column(Integer, ForeignKey('users.id'), nullable=True) + inspected_at = Column(DateTime, nullable=True) + inspection_notes = Column(Text, nullable=True) + quality_score = Column(Integer, nullable=True) # 1-5 rating + + # Duration tracking + estimated_duration_minutes = Column(Integer, nullable=True) + actual_duration_minutes = Column(Integer, nullable=True) + + created_at = Column(DateTime, default=datetime.utcnow, nullable=False) + updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False) + + # Relationships + room = relationship('Room', back_populates='housekeeping_tasks') + booking = relationship('Booking') + assigned_staff = relationship('User', foreign_keys=[assigned_to]) + creator = relationship('User', foreign_keys=[created_by]) + inspector = relationship('User', foreign_keys=[inspected_by]) + diff --git a/Backend/src/models/notification.py b/Backend/src/models/notification.py new file mode 100644 index 00000000..81c5854d --- /dev/null +++ b/Backend/src/models/notification.py @@ -0,0 +1,125 @@ +from sqlalchemy import Column, Integer, String, DateTime, Boolean, Text, Enum, ForeignKey, JSON +from sqlalchemy.orm import relationship +from datetime import datetime +import enum +from ..config.database import Base + +class NotificationChannel(str, enum.Enum): + email = 'email' + sms = 'sms' + push = 'push' + whatsapp = 'whatsapp' + in_app = 'in_app' + +class NotificationStatus(str, enum.Enum): + pending = 'pending' + sent = 'sent' + delivered = 'delivered' + failed = 'failed' + read = 'read' + +class NotificationType(str, enum.Enum): + booking_confirmation = 'booking_confirmation' + payment_receipt = 'payment_receipt' + pre_arrival_reminder = 'pre_arrival_reminder' + check_in_reminder = 'check_in_reminder' + check_out_reminder = 'check_out_reminder' + marketing_campaign = 'marketing_campaign' + loyalty_update = 'loyalty_update' + system_alert = 'system_alert' + custom = 'custom' + +class Notification(Base): + __tablename__ = 'notifications' + + id = Column(Integer, primary_key=True, index=True, autoincrement=True) + user_id = Column(Integer, ForeignKey('users.id'), nullable=True) # Nullable for system-wide notifications + notification_type = Column(Enum(NotificationType), nullable=False) + channel = Column(Enum(NotificationChannel), nullable=False) + subject = Column(String(255), nullable=True) # For email/push + content = Column(Text, nullable=False) + template_id = Column(Integer, ForeignKey('notification_templates.id'), nullable=True) + status = Column(Enum(NotificationStatus), nullable=False, default=NotificationStatus.pending) + priority = Column(String(20), nullable=False, default='normal') # low, normal, high, urgent + scheduled_at = Column(DateTime, nullable=True) # For scheduled notifications + sent_at = Column(DateTime, nullable=True) + delivered_at = Column(DateTime, nullable=True) + read_at = Column(DateTime, nullable=True) + error_message = Column(Text, nullable=True) + external_id = Column(String(255), nullable=True) # ID from external service (e.g., Twilio, SendGrid) + meta_data = Column(JSON, nullable=True) # Additional data (recipient info, attachments, etc.) + booking_id = Column(Integer, ForeignKey('bookings.id'), nullable=True) + payment_id = Column(Integer, ForeignKey('payments.id'), nullable=True) + created_at = Column(DateTime, default=datetime.utcnow, nullable=False) + updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False) + + user = relationship('User') + template = relationship('NotificationTemplate') + booking = relationship('Booking') + payment = relationship('Payment') + delivery_logs = relationship('NotificationDeliveryLog', back_populates='notification', cascade='all, delete-orphan') + +class NotificationTemplate(Base): + __tablename__ = 'notification_templates' + + id = Column(Integer, primary_key=True, index=True, autoincrement=True) + name = Column(String(255), nullable=False) + notification_type = Column(Enum(NotificationType), nullable=False) + channel = Column(Enum(NotificationChannel), nullable=False) + subject = Column(String(255), nullable=True) + content = Column(Text, nullable=False) + variables = Column(JSON, nullable=True) # Available template variables + is_active = Column(Boolean, nullable=False, default=True) + created_by = Column(Integer, ForeignKey('users.id'), nullable=False) + created_at = Column(DateTime, default=datetime.utcnow, nullable=False) + updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False) + + creator = relationship('User', foreign_keys=[created_by]) + notifications = relationship('Notification', back_populates='template') + +class NotificationPreference(Base): + __tablename__ = 'notification_preferences' + + id = Column(Integer, primary_key=True, index=True, autoincrement=True) + user_id = Column(Integer, ForeignKey('users.id'), nullable=False, unique=True) + email_enabled = Column(Boolean, nullable=False, default=True) + sms_enabled = Column(Boolean, nullable=False, default=True) + push_enabled = Column(Boolean, nullable=False, default=True) + whatsapp_enabled = Column(Boolean, nullable=False, default=False) + in_app_enabled = Column(Boolean, nullable=False, default=True) + # Per-type preferences + booking_confirmation_email = Column(Boolean, nullable=False, default=True) + booking_confirmation_sms = Column(Boolean, nullable=False, default=False) + payment_receipt_email = Column(Boolean, nullable=False, default=True) + payment_receipt_sms = Column(Boolean, nullable=False, default=False) + pre_arrival_reminder_email = Column(Boolean, nullable=False, default=True) + pre_arrival_reminder_sms = Column(Boolean, nullable=False, default=True) + check_in_reminder_email = Column(Boolean, nullable=False, default=True) + check_in_reminder_sms = Column(Boolean, nullable=False, default=True) + check_out_reminder_email = Column(Boolean, nullable=False, default=True) + check_out_reminder_sms = Column(Boolean, nullable=False, default=True) + marketing_campaign_email = Column(Boolean, nullable=False, default=True) + marketing_campaign_sms = Column(Boolean, nullable=False, default=False) + loyalty_update_email = Column(Boolean, nullable=False, default=True) + loyalty_update_sms = Column(Boolean, nullable=False, default=False) + system_alert_email = Column(Boolean, nullable=False, default=True) + system_alert_push = Column(Boolean, nullable=False, default=True) + created_at = Column(DateTime, default=datetime.utcnow, nullable=False) + updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False) + + user = relationship('User') + +class NotificationDeliveryLog(Base): + __tablename__ = 'notification_delivery_logs' + + id = Column(Integer, primary_key=True, index=True, autoincrement=True) + notification_id = Column(Integer, ForeignKey('notifications.id'), nullable=False) + channel = Column(Enum(NotificationChannel), nullable=False) + status = Column(Enum(NotificationStatus), nullable=False) + external_id = Column(String(255), nullable=True) + error_message = Column(Text, nullable=True) + response_data = Column(JSON, nullable=True) + created_at = Column(DateTime, default=datetime.utcnow, nullable=False) + + notification = relationship('Notification', back_populates='delivery_logs') + diff --git a/Backend/src/models/package.py b/Backend/src/models/package.py new file mode 100644 index 00000000..ebe7a638 --- /dev/null +++ b/Backend/src/models/package.py @@ -0,0 +1,90 @@ +from sqlalchemy import Column, Integer, String, Numeric, Boolean, Text, JSON, Enum, ForeignKey, DateTime, Date +from sqlalchemy.orm import relationship +from datetime import datetime +import enum +from ..config.database import Base + +class PackageStatus(str, enum.Enum): + active = 'active' + inactive = 'inactive' + scheduled = 'scheduled' + expired = 'expired' + +class PackageItemType(str, enum.Enum): + room = 'room' + service = 'service' + breakfast = 'breakfast' + activity = 'activity' + amenity = 'amenity' + discount = 'discount' + +class Package(Base): + __tablename__ = 'packages' + + id = Column(Integer, primary_key=True, index=True, autoincrement=True) + name = Column(String(100), nullable=False, index=True) + code = Column(String(50), unique=True, nullable=False, index=True) + description = Column(Text, nullable=True) + status = Column(Enum(PackageStatus), nullable=False, default=PackageStatus.active) + + # Pricing + base_price = Column(Numeric(10, 2), nullable=True) # Fixed package price (if set, overrides item prices) + price_modifier = Column(Numeric(5, 2), nullable=False, default=1.0) # Multiplier for total package price + discount_percentage = Column(Numeric(5, 2), nullable=True, default=0) + + # Applicability + room_type_id = Column(Integer, ForeignKey('room_types.id'), nullable=True) # None = all room types + min_nights = Column(Integer, nullable=True) + max_nights = Column(Integer, nullable=True) + + # Date range + valid_from = Column(Date, nullable=True) + valid_to = Column(Date, nullable=True) + + # Package details + image_url = Column(String(500), nullable=True) + highlights = Column(JSON, nullable=True) # Array of highlight strings + terms_conditions = Column(Text, nullable=True) + + # Additional data (metadata is reserved by SQLAlchemy) + extra_data = Column(JSON, nullable=True) + created_at = Column(DateTime, default=datetime.utcnow, nullable=False) + updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False) + + # Relationships + room_type = relationship('RoomType', back_populates='packages') + items = relationship('PackageItem', back_populates='package', cascade='all, delete-orphan') + rate_plans = relationship('RatePlan', back_populates='package') + +class PackageItem(Base): + __tablename__ = 'package_items' + + id = Column(Integer, primary_key=True, index=True, autoincrement=True) + package_id = Column(Integer, ForeignKey('packages.id'), nullable=False, index=True) + + # Item details + item_type = Column(Enum(PackageItemType), nullable=False) + item_id = Column(Integer, nullable=True) # ID of room_type, service, etc. + item_name = Column(String(200), nullable=False) # Name for display + item_description = Column(Text, nullable=True) + + # Quantity + quantity = Column(Integer, nullable=False, default=1) + unit = Column(String(50), nullable=True) # 'per_night', 'per_stay', 'per_person', etc. + + # Pricing + price = Column(Numeric(10, 2), nullable=True) # Item price (if set) + included = Column(Boolean, nullable=False, default=True) # If True, included in package price + price_modifier = Column(Numeric(5, 2), nullable=True, default=1.0) # Price multiplier for this item + + # Display order + display_order = Column(Integer, nullable=False, default=0) + + # Additional data + extra_data = Column(JSON, nullable=True) + created_at = Column(DateTime, default=datetime.utcnow, nullable=False) + updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False) + + # Relationships + package = relationship('Package', back_populates='items') + diff --git a/Backend/src/models/payment.py b/Backend/src/models/payment.py index f057b30a..35cd26b5 100644 --- a/Backend/src/models/payment.py +++ b/Backend/src/models/payment.py @@ -12,6 +12,7 @@ class PaymentMethod(str, enum.Enum): e_wallet = 'e_wallet' stripe = 'stripe' paypal = 'paypal' + borica = 'borica' class PaymentType(str, enum.Enum): full = 'full' diff --git a/Backend/src/models/rate_plan.py b/Backend/src/models/rate_plan.py new file mode 100644 index 00000000..5cfd95fa --- /dev/null +++ b/Backend/src/models/rate_plan.py @@ -0,0 +1,107 @@ +from sqlalchemy import Column, Integer, String, Numeric, Boolean, Text, JSON, Enum, ForeignKey, DateTime, Date +from sqlalchemy.orm import relationship +from datetime import datetime, date +import enum +from ..config.database import Base + +class RatePlanType(str, enum.Enum): + BAR = 'BAR' # Best Available Rate + non_refundable = 'non_refundable' + advance_purchase = 'advance_purchase' + corporate = 'corporate' + government = 'government' + military = 'military' + long_stay = 'long_stay' + package = 'package' + +class RatePlanStatus(str, enum.Enum): + active = 'active' + inactive = 'inactive' + scheduled = 'scheduled' + expired = 'expired' + +class RatePlan(Base): + __tablename__ = 'rate_plans' + + id = Column(Integer, primary_key=True, index=True, autoincrement=True) + name = Column(String(100), nullable=False, index=True) + code = Column(String(50), unique=True, nullable=False, index=True) + description = Column(Text, nullable=True) + plan_type = Column(Enum(RatePlanType), nullable=False, default=RatePlanType.BAR) + status = Column(Enum(RatePlanStatus), nullable=False, default=RatePlanStatus.active) + + # Pricing + base_price_modifier = Column(Numeric(5, 2), nullable=False, default=1.0) # Multiplier (1.0 = 100%, 0.9 = 90%) + discount_percentage = Column(Numeric(5, 2), nullable=True, default=0) # Percentage discount + fixed_discount = Column(Numeric(10, 2), nullable=True, default=0) # Fixed amount discount + + # Applicability + room_type_id = Column(Integer, ForeignKey('room_types.id'), nullable=True) # None = all room types + min_nights = Column(Integer, nullable=True) # Minimum nights required + max_nights = Column(Integer, nullable=True) # Maximum nights allowed + advance_days_required = Column(Integer, nullable=True) # Days in advance required for booking + + # Date range + valid_from = Column(Date, nullable=True) + valid_to = Column(Date, nullable=True) + + # Restrictions + is_refundable = Column(Boolean, nullable=False, default=True) + requires_deposit = Column(Boolean, nullable=False, default=False) + deposit_percentage = Column(Numeric(5, 2), nullable=True, default=0) + cancellation_hours = Column(Integer, nullable=True) # Hours before check-in for free cancellation + + # Corporate/Government specific + corporate_code = Column(String(50), nullable=True, index=True) + requires_verification = Column(Boolean, nullable=False, default=False) + verification_type = Column(String(50), nullable=True) # 'corporate_id', 'government_id', 'military_id' + + # Long-stay specific + long_stay_nights = Column(Integer, nullable=True) # Nights required for long-stay discount + + # Package specific + is_package = Column(Boolean, nullable=False, default=False) + package_id = Column(Integer, ForeignKey('packages.id'), nullable=True) + + # Priority (lower number = higher priority) + priority = Column(Integer, nullable=False, default=100) + + # Additional data (metadata is reserved by SQLAlchemy) + extra_data = Column(JSON, nullable=True) # Additional flexible data + created_at = Column(DateTime, default=datetime.utcnow, nullable=False) + updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False) + + # Relationships + room_type = relationship('RoomType', back_populates='rate_plans') + package = relationship('Package', back_populates='rate_plans') + rules = relationship('RatePlanRule', back_populates='rate_plan', cascade='all, delete-orphan') + bookings = relationship('Booking', back_populates='rate_plan') + +class RatePlanRule(Base): + __tablename__ = 'rate_plan_rules' + + id = Column(Integer, primary_key=True, index=True, autoincrement=True) + rate_plan_id = Column(Integer, ForeignKey('rate_plans.id'), nullable=False, index=True) + + # Rule type + rule_type = Column(String(50), nullable=False) # 'day_of_week', 'season', 'occupancy', 'date_range', etc. + rule_key = Column(String(100), nullable=False) # e.g., 'monday', 'summer', '2_guests', '2024-12-01_to_2024-12-31' + rule_value = Column(JSON, nullable=True) # Flexible value storage + + # Price adjustment + price_modifier = Column(Numeric(5, 2), nullable=True, default=1.0) + discount_percentage = Column(Numeric(5, 2), nullable=True, default=0) + fixed_adjustment = Column(Numeric(10, 2), nullable=True, default=0) + + # Priority within the rate plan + priority = Column(Integer, nullable=False, default=100) + + # Additional data + extra_data = Column(JSON, nullable=True) + + created_at = Column(DateTime, default=datetime.utcnow, nullable=False) + updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False) + + # Relationships + rate_plan = relationship('RatePlan', back_populates='rules') + diff --git a/Backend/src/models/room.py b/Backend/src/models/room.py index 4bc4e0e4..1497841e 100644 --- a/Backend/src/models/room.py +++ b/Backend/src/models/room.py @@ -30,4 +30,8 @@ class Room(Base): room_type = relationship('RoomType', back_populates='rooms') bookings = relationship('Booking', back_populates='room') reviews = relationship('Review', back_populates='room') - favorites = relationship('Favorite', back_populates='room', cascade='all, delete-orphan') \ No newline at end of file + favorites = relationship('Favorite', back_populates='room', cascade='all, delete-orphan') + maintenance_records = relationship('RoomMaintenance', back_populates='room', cascade='all, delete-orphan') + housekeeping_tasks = relationship('HousekeepingTask', back_populates='room', cascade='all, delete-orphan') + inspections = relationship('RoomInspection', back_populates='room', cascade='all, delete-orphan') + attributes = relationship('RoomAttribute', back_populates='room', cascade='all, delete-orphan') \ No newline at end of file diff --git a/Backend/src/models/room_attribute.py b/Backend/src/models/room_attribute.py new file mode 100644 index 00000000..9a67acd2 --- /dev/null +++ b/Backend/src/models/room_attribute.py @@ -0,0 +1,30 @@ +from sqlalchemy import Column, Integer, String, Text, ForeignKey, DateTime, Boolean, JSON +from sqlalchemy.orm import relationship +from datetime import datetime +from ..config.database import Base + +class RoomAttribute(Base): + __tablename__ = 'room_attributes' + + id = Column(Integer, primary_key=True, index=True, autoincrement=True) + room_id = Column(Integer, ForeignKey('rooms.id'), nullable=False, index=True) + + # Attribute details + attribute_name = Column(String(100), nullable=False) # e.g., 'view_quality', 'noise_level', 'accessibility' + attribute_value = Column(String(255), nullable=True) # e.g., 'ocean_view', 'quiet', 'wheelchair_accessible' + attribute_data = Column(JSON, nullable=True) # Additional structured data + + # Tracking + last_updated = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False) + updated_by = Column(Integer, ForeignKey('users.id'), nullable=True) + notes = Column(Text, nullable=True) + + # Status + is_active = Column(Boolean, nullable=False, default=True) + + created_at = Column(DateTime, default=datetime.utcnow, nullable=False) + + # Relationships + room = relationship('Room', back_populates='attributes') + updater = relationship('User') + diff --git a/Backend/src/models/room_inspection.py b/Backend/src/models/room_inspection.py new file mode 100644 index 00000000..734bdb2f --- /dev/null +++ b/Backend/src/models/room_inspection.py @@ -0,0 +1,68 @@ +from sqlalchemy import Column, Integer, String, Text, Enum, ForeignKey, DateTime, Boolean, JSON, Numeric +from sqlalchemy.orm import relationship +from datetime import datetime +import enum +from ..config.database import Base + +class InspectionType(str, enum.Enum): + pre_checkin = 'pre_checkin' + post_checkout = 'post_checkout' + routine = 'routine' + maintenance = 'maintenance' + damage = 'damage' + +class InspectionStatus(str, enum.Enum): + pending = 'pending' + in_progress = 'in_progress' + completed = 'completed' + failed = 'failed' + cancelled = 'cancelled' + +class RoomInspection(Base): + __tablename__ = 'room_inspections' + + id = Column(Integer, primary_key=True, index=True, autoincrement=True) + room_id = Column(Integer, ForeignKey('rooms.id'), nullable=False, index=True) + booking_id = Column(Integer, ForeignKey('bookings.id'), nullable=True, index=True) + + inspection_type = Column(Enum(InspectionType), nullable=False) + status = Column(Enum(InspectionStatus), nullable=False, default=InspectionStatus.pending) + + # Scheduling + scheduled_at = Column(DateTime, nullable=False, index=True) + started_at = Column(DateTime, nullable=True) + completed_at = Column(DateTime, nullable=True) + + # Assignment + inspected_by = Column(Integer, ForeignKey('users.id'), nullable=True) + created_by = Column(Integer, ForeignKey('users.id'), nullable=True) + + # Checklist + checklist_template_id = Column(Integer, nullable=True) # Reference to checklist template + checklist_items = Column(JSON, nullable=False) # Array of {category: string, item: string, status: string, notes: string, photos: string[]} + # status can be: 'pass', 'fail', 'needs_attention', 'not_applicable' + + # Overall assessment + overall_score = Column(Numeric(3, 2), nullable=True) # 0-5 rating + overall_notes = Column(Text, nullable=True) + issues_found = Column(JSON, nullable=True) # Array of {severity: string, description: string, photo: string} + # severity can be: 'critical', 'major', 'minor', 'cosmetic' + + # Photos + photos = Column(JSON, nullable=True) # Array of photo URLs + + # Follow-up + requires_followup = Column(Boolean, nullable=False, default=False) + followup_notes = Column(Text, nullable=True) + maintenance_request_id = Column(Integer, ForeignKey('room_maintenance.id'), nullable=True) + + created_at = Column(DateTime, default=datetime.utcnow, nullable=False) + updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False) + + # Relationships + room = relationship('Room', back_populates='inspections') + booking = relationship('Booking') + inspector = relationship('User', foreign_keys=[inspected_by]) + creator = relationship('User', foreign_keys=[created_by]) + maintenance_request = relationship('RoomMaintenance', foreign_keys=[maintenance_request_id]) + diff --git a/Backend/src/models/room_maintenance.py b/Backend/src/models/room_maintenance.py new file mode 100644 index 00000000..94de36be --- /dev/null +++ b/Backend/src/models/room_maintenance.py @@ -0,0 +1,62 @@ +from sqlalchemy import Column, Integer, String, Text, Enum, ForeignKey, DateTime, Boolean, Numeric +from sqlalchemy.orm import relationship +from datetime import datetime +import enum +from ..config.database import Base + +class MaintenanceType(str, enum.Enum): + preventive = 'preventive' + corrective = 'corrective' + emergency = 'emergency' + upgrade = 'upgrade' + inspection = 'inspection' + +class MaintenanceStatus(str, enum.Enum): + scheduled = 'scheduled' + in_progress = 'in_progress' + completed = 'completed' + cancelled = 'cancelled' + on_hold = 'on_hold' + +class RoomMaintenance(Base): + __tablename__ = 'room_maintenance' + + id = Column(Integer, primary_key=True, index=True, autoincrement=True) + room_id = Column(Integer, ForeignKey('rooms.id'), nullable=False, index=True) + maintenance_type = Column(Enum(MaintenanceType), nullable=False) + status = Column(Enum(MaintenanceStatus), nullable=False, default=MaintenanceStatus.scheduled) + title = Column(String(255), nullable=False) + description = Column(Text, nullable=True) + + # Scheduling + scheduled_start = Column(DateTime, nullable=False) + scheduled_end = Column(DateTime, nullable=True) + actual_start = Column(DateTime, nullable=True) + actual_end = Column(DateTime, nullable=True) + + # Assignment + assigned_to = Column(Integer, ForeignKey('users.id'), nullable=True) + reported_by = Column(Integer, ForeignKey('users.id'), nullable=True) + + # Cost tracking + estimated_cost = Column(Numeric(10, 2), nullable=True) + actual_cost = Column(Numeric(10, 2), nullable=True) + + # Room blocking + blocks_room = Column(Boolean, nullable=False, default=True) + block_start = Column(DateTime, nullable=True) + block_end = Column(DateTime, nullable=True) + + # Additional info + priority = Column(String(20), nullable=False, default='medium') # low, medium, high, urgent + notes = Column(Text, nullable=True) + completion_notes = Column(Text, nullable=True) + + created_at = Column(DateTime, default=datetime.utcnow, nullable=False) + updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False) + + # Relationships + room = relationship('Room', back_populates='maintenance_records') + assigned_staff = relationship('User', foreign_keys=[assigned_to]) + reporter = relationship('User', foreign_keys=[reported_by]) + diff --git a/Backend/src/models/room_type.py b/Backend/src/models/room_type.py index a9efa2b7..ea77b20a 100644 --- a/Backend/src/models/room_type.py +++ b/Backend/src/models/room_type.py @@ -13,4 +13,6 @@ class RoomType(Base): amenities = Column(JSON, nullable=True) created_at = Column(DateTime, default=datetime.utcnow, nullable=False) updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False) - rooms = relationship('Room', back_populates='room_type') \ No newline at end of file + rooms = relationship('Room', back_populates='room_type') + rate_plans = relationship('RatePlan', back_populates='room_type') + packages = relationship('Package', back_populates='room_type') \ No newline at end of file diff --git a/Backend/src/models/security_event.py b/Backend/src/models/security_event.py new file mode 100644 index 00000000..2c77b30c --- /dev/null +++ b/Backend/src/models/security_event.py @@ -0,0 +1,135 @@ +from sqlalchemy import Column, Integer, String, Text, DateTime, ForeignKey, JSON, Enum, Boolean +from sqlalchemy.orm import relationship +from datetime import datetime +import enum +from ..config.database import Base + +class SecurityEventType(str, enum.Enum): + login_attempt = 'login_attempt' + login_success = 'login_success' + login_failure = 'login_failure' + logout = 'logout' + password_change = 'password_change' + password_reset = 'password_reset' + account_locked = 'account_locked' + account_unlocked = 'account_unlocked' + permission_denied = 'permission_denied' + suspicious_activity = 'suspicious_activity' + data_access = 'data_access' + data_modification = 'data_modification' + data_deletion = 'data_deletion' + api_access = 'api_access' + ip_blocked = 'ip_blocked' + rate_limit_exceeded = 'rate_limit_exceeded' + oauth_login = 'oauth_login' + sso_login = 'sso_login' + +class SecurityEventSeverity(str, enum.Enum): + low = 'low' + medium = 'medium' + high = 'high' + critical = 'critical' + +class SecurityEvent(Base): + __tablename__ = 'security_events' + + id = Column(Integer, primary_key=True, index=True, autoincrement=True) + user_id = Column(Integer, ForeignKey('users.id'), nullable=True, index=True) + event_type = Column(Enum(SecurityEventType), nullable=False, index=True) + severity = Column(Enum(SecurityEventSeverity), nullable=False, default=SecurityEventSeverity.medium, index=True) + + # Request details + ip_address = Column(String(45), nullable=True, index=True) + user_agent = Column(String(500), nullable=True) + request_path = Column(String(500), nullable=True) + request_method = Column(String(10), nullable=True) + request_id = Column(String(36), nullable=True, index=True) + + # Event details + description = Column(Text, nullable=True) + details = Column(JSON, nullable=True) + extra_data = Column(JSON, nullable=True) # Additional metadata (metadata is reserved by SQLAlchemy) + + # Status + resolved = Column(Boolean, nullable=False, default=False) + resolved_at = Column(DateTime, nullable=True) + resolved_by = Column(Integer, ForeignKey('users.id'), nullable=True) + resolution_notes = Column(Text, nullable=True) + + # Location (if available) + country = Column(String(100), nullable=True) + city = Column(String(100), nullable=True) + latitude = Column(String(20), nullable=True) + longitude = Column(String(20), nullable=True) + + created_at = Column(DateTime, default=datetime.utcnow, nullable=False, index=True) + + # Relationships + user = relationship('User', foreign_keys=[user_id]) + resolver = relationship('User', foreign_keys=[resolved_by]) + +class IPWhitelist(Base): + __tablename__ = 'ip_whitelist' + + id = Column(Integer, primary_key=True, index=True, autoincrement=True) + ip_address = Column(String(45), nullable=False, unique=True, index=True) + description = Column(String(255), nullable=True) + is_active = Column(Boolean, nullable=False, default=True) + created_by = Column(Integer, ForeignKey('users.id'), nullable=True) + created_at = Column(DateTime, default=datetime.utcnow, nullable=False) + updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False) + + # Relationships + creator = relationship('User', foreign_keys=[created_by]) + +class IPBlacklist(Base): + __tablename__ = 'ip_blacklist' + + id = Column(Integer, primary_key=True, index=True, autoincrement=True) + ip_address = Column(String(45), nullable=False, unique=True, index=True) + reason = Column(Text, nullable=True) + is_active = Column(Boolean, nullable=False, default=True) + blocked_until = Column(DateTime, nullable=True) # Temporary block + created_by = Column(Integer, ForeignKey('users.id'), nullable=True) + created_at = Column(DateTime, default=datetime.utcnow, nullable=False) + updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False) + + # Relationships + creator = relationship('User', foreign_keys=[created_by]) + +class OAuthProvider(Base): + __tablename__ = 'oauth_providers' + + id = Column(Integer, primary_key=True, index=True, autoincrement=True) + name = Column(String(50), nullable=False, unique=True) # google, microsoft, github, etc. + display_name = Column(String(100), nullable=False) + client_id = Column(String(500), nullable=False) + client_secret = Column(String(500), nullable=False) # Should be encrypted + authorization_url = Column(String(500), nullable=False) + token_url = Column(String(500), nullable=False) + userinfo_url = Column(String(500), nullable=False) + scopes = Column(String(500), nullable=True) # space-separated scopes + is_active = Column(Boolean, nullable=False, default=True) + is_sso_enabled = Column(Boolean, nullable=False, default=False) + created_at = Column(DateTime, default=datetime.utcnow, nullable=False) + updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False) + +class OAuthToken(Base): + __tablename__ = 'oauth_tokens' + + id = Column(Integer, primary_key=True, index=True, autoincrement=True) + user_id = Column(Integer, ForeignKey('users.id'), nullable=False, index=True) + provider_id = Column(Integer, ForeignKey('oauth_providers.id'), nullable=False) + provider_user_id = Column(String(255), nullable=False) # User ID from OAuth provider + access_token = Column(Text, nullable=False) # Should be encrypted + refresh_token = Column(Text, nullable=True) # Should be encrypted + token_type = Column(String(50), nullable=True, default='Bearer') + expires_at = Column(DateTime, nullable=True) + scopes = Column(String(500), nullable=True) + created_at = Column(DateTime, default=datetime.utcnow, nullable=False) + updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False) + + # Relationships + user = relationship('User') + provider = relationship('OAuthProvider') + diff --git a/Backend/src/models/workflow.py b/Backend/src/models/workflow.py new file mode 100644 index 00000000..9ac4ea7e --- /dev/null +++ b/Backend/src/models/workflow.py @@ -0,0 +1,127 @@ +from sqlalchemy import Column, Integer, String, DateTime, Boolean, Text, Enum, ForeignKey, JSON +from sqlalchemy.orm import relationship +from datetime import datetime +import enum +from ..config.database import Base + +class WorkflowType(str, enum.Enum): + pre_arrival = 'pre_arrival' + room_preparation = 'room_preparation' + maintenance = 'maintenance' + guest_communication = 'guest_communication' + follow_up = 'follow_up' + custom = 'custom' + +class WorkflowStatus(str, enum.Enum): + active = 'active' + inactive = 'inactive' + archived = 'archived' + +class WorkflowTrigger(str, enum.Enum): + booking_created = 'booking_created' + booking_confirmed = 'booking_confirmed' + check_in = 'check_in' + check_out = 'check_out' + maintenance_request = 'maintenance_request' + guest_message = 'guest_message' + manual = 'manual' + scheduled = 'scheduled' + +class Workflow(Base): + __tablename__ = 'workflows' + + id = Column(Integer, primary_key=True, index=True, autoincrement=True) + name = Column(String(255), nullable=False) + description = Column(Text, nullable=True) + workflow_type = Column(Enum(WorkflowType), nullable=False) + status = Column(Enum(WorkflowStatus), nullable=False, default=WorkflowStatus.active) + trigger = Column(Enum(WorkflowTrigger), nullable=False) + trigger_config = Column(JSON, nullable=True) # Configuration for trigger (e.g., time before check-in) + steps = Column(JSON, nullable=False) # Array of workflow steps + sla_hours = Column(Integer, nullable=True) # SLA in hours for workflow completion + is_active = Column(Boolean, nullable=False, default=True) + created_by = Column(Integer, ForeignKey('users.id'), nullable=False) + created_at = Column(DateTime, default=datetime.utcnow, nullable=False) + updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False) + + creator = relationship('User', foreign_keys=[created_by]) + workflow_instances = relationship('WorkflowInstance', back_populates='workflow', cascade='all, delete-orphan') + +class WorkflowInstance(Base): + __tablename__ = 'workflow_instances' + + id = Column(Integer, primary_key=True, index=True, autoincrement=True) + workflow_id = Column(Integer, ForeignKey('workflows.id'), nullable=False) + booking_id = Column(Integer, ForeignKey('bookings.id'), nullable=True) + room_id = Column(Integer, ForeignKey('rooms.id'), nullable=True) + user_id = Column(Integer, ForeignKey('users.id'), nullable=True) # Guest user + status = Column(String(50), nullable=False, default='pending') # pending, in_progress, completed, cancelled + started_at = Column(DateTime, default=datetime.utcnow, nullable=False) + completed_at = Column(DateTime, nullable=True) + due_date = Column(DateTime, nullable=True) + meta_data = Column(JSON, nullable=True) # Additional context data + created_at = Column(DateTime, default=datetime.utcnow, nullable=False) + updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False) + + workflow = relationship('Workflow', back_populates='workflow_instances') + booking = relationship('Booking') + room = relationship('Room') + user = relationship('User', foreign_keys=[user_id]) + tasks = relationship('Task', back_populates='workflow_instance', cascade='all, delete-orphan') + +class TaskStatus(str, enum.Enum): + pending = 'pending' + assigned = 'assigned' + in_progress = 'in_progress' + completed = 'completed' + cancelled = 'cancelled' + overdue = 'overdue' + +class TaskPriority(str, enum.Enum): + low = 'low' + medium = 'medium' + high = 'high' + urgent = 'urgent' + +class Task(Base): + __tablename__ = 'tasks' + + id = Column(Integer, primary_key=True, index=True, autoincrement=True) + title = Column(String(255), nullable=False) + description = Column(Text, nullable=True) + task_type = Column(String(100), nullable=False) # e.g., 'room_cleaning', 'maintenance', 'guest_communication' + status = Column(Enum(TaskStatus), nullable=False, default=TaskStatus.pending) + priority = Column(Enum(TaskPriority), nullable=False, default=TaskPriority.medium) + workflow_instance_id = Column(Integer, ForeignKey('workflow_instances.id'), nullable=True) + booking_id = Column(Integer, ForeignKey('bookings.id'), nullable=True) + room_id = Column(Integer, ForeignKey('rooms.id'), nullable=True) + assigned_to = Column(Integer, ForeignKey('users.id'), nullable=True) + created_by = Column(Integer, ForeignKey('users.id'), nullable=False) + due_date = Column(DateTime, nullable=True) + completed_at = Column(DateTime, nullable=True) + estimated_duration_minutes = Column(Integer, nullable=True) + actual_duration_minutes = Column(Integer, nullable=True) + notes = Column(Text, nullable=True) + meta_data = Column(JSON, nullable=True) # Additional task-specific data + created_at = Column(DateTime, default=datetime.utcnow, nullable=False) + updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False) + + workflow_instance = relationship('WorkflowInstance', back_populates='tasks') + booking = relationship('Booking') + room = relationship('Room') + assignee = relationship('User', foreign_keys=[assigned_to]) + creator_user = relationship('User', foreign_keys=[created_by]) + task_comments = relationship('TaskComment', back_populates='task', cascade='all, delete-orphan') + +class TaskComment(Base): + __tablename__ = 'task_comments' + + id = Column(Integer, primary_key=True, index=True, autoincrement=True) + task_id = Column(Integer, ForeignKey('tasks.id'), nullable=False) + user_id = Column(Integer, ForeignKey('users.id'), nullable=False) + comment = Column(Text, nullable=False) + created_at = Column(DateTime, default=datetime.utcnow, nullable=False) + + task = relationship('Task', back_populates='task_comments') + user = relationship('User') + diff --git a/Backend/src/routes/__pycache__/advanced_room_routes.cpython-312.pyc b/Backend/src/routes/__pycache__/advanced_room_routes.cpython-312.pyc new file mode 100644 index 00000000..1adfb833 Binary files /dev/null and b/Backend/src/routes/__pycache__/advanced_room_routes.cpython-312.pyc differ diff --git a/Backend/src/routes/__pycache__/analytics_routes.cpython-312.pyc b/Backend/src/routes/__pycache__/analytics_routes.cpython-312.pyc new file mode 100644 index 00000000..81f2e9f5 Binary files /dev/null and b/Backend/src/routes/__pycache__/analytics_routes.cpython-312.pyc differ diff --git a/Backend/src/routes/__pycache__/booking_routes.cpython-312.pyc b/Backend/src/routes/__pycache__/booking_routes.cpython-312.pyc index d195e172..418b1b2b 100644 Binary files a/Backend/src/routes/__pycache__/booking_routes.cpython-312.pyc and b/Backend/src/routes/__pycache__/booking_routes.cpython-312.pyc differ diff --git a/Backend/src/routes/__pycache__/email_campaign_routes.cpython-312.pyc b/Backend/src/routes/__pycache__/email_campaign_routes.cpython-312.pyc new file mode 100644 index 00000000..18787e16 Binary files /dev/null and b/Backend/src/routes/__pycache__/email_campaign_routes.cpython-312.pyc differ diff --git a/Backend/src/routes/__pycache__/group_booking_routes.cpython-312.pyc b/Backend/src/routes/__pycache__/group_booking_routes.cpython-312.pyc new file mode 100644 index 00000000..274d10f1 Binary files /dev/null and b/Backend/src/routes/__pycache__/group_booking_routes.cpython-312.pyc differ diff --git a/Backend/src/routes/__pycache__/guest_profile_routes.cpython-312.pyc b/Backend/src/routes/__pycache__/guest_profile_routes.cpython-312.pyc index 994b24cf..866c79f6 100644 Binary files a/Backend/src/routes/__pycache__/guest_profile_routes.cpython-312.pyc and b/Backend/src/routes/__pycache__/guest_profile_routes.cpython-312.pyc differ diff --git a/Backend/src/routes/__pycache__/invoice_routes.cpython-312.pyc b/Backend/src/routes/__pycache__/invoice_routes.cpython-312.pyc index 8ee3dbc2..009ca470 100644 Binary files a/Backend/src/routes/__pycache__/invoice_routes.cpython-312.pyc and b/Backend/src/routes/__pycache__/invoice_routes.cpython-312.pyc differ diff --git a/Backend/src/routes/__pycache__/notification_routes.cpython-312.pyc b/Backend/src/routes/__pycache__/notification_routes.cpython-312.pyc new file mode 100644 index 00000000..4e02ae84 Binary files /dev/null and b/Backend/src/routes/__pycache__/notification_routes.cpython-312.pyc differ diff --git a/Backend/src/routes/__pycache__/package_routes.cpython-312.pyc b/Backend/src/routes/__pycache__/package_routes.cpython-312.pyc new file mode 100644 index 00000000..98c2d51a Binary files /dev/null and b/Backend/src/routes/__pycache__/package_routes.cpython-312.pyc differ diff --git a/Backend/src/routes/__pycache__/payment_routes.cpython-312.pyc b/Backend/src/routes/__pycache__/payment_routes.cpython-312.pyc index f6c44d4b..6943c264 100644 Binary files a/Backend/src/routes/__pycache__/payment_routes.cpython-312.pyc and b/Backend/src/routes/__pycache__/payment_routes.cpython-312.pyc differ diff --git a/Backend/src/routes/__pycache__/rate_plan_routes.cpython-312.pyc b/Backend/src/routes/__pycache__/rate_plan_routes.cpython-312.pyc new file mode 100644 index 00000000..68a1255a Binary files /dev/null and b/Backend/src/routes/__pycache__/rate_plan_routes.cpython-312.pyc differ diff --git a/Backend/src/routes/__pycache__/report_routes.cpython-312.pyc b/Backend/src/routes/__pycache__/report_routes.cpython-312.pyc index 4234b0a1..a36b695e 100644 Binary files a/Backend/src/routes/__pycache__/report_routes.cpython-312.pyc and b/Backend/src/routes/__pycache__/report_routes.cpython-312.pyc differ diff --git a/Backend/src/routes/__pycache__/room_routes.cpython-312.pyc b/Backend/src/routes/__pycache__/room_routes.cpython-312.pyc index 1d307996..3b3b020e 100644 Binary files a/Backend/src/routes/__pycache__/room_routes.cpython-312.pyc and b/Backend/src/routes/__pycache__/room_routes.cpython-312.pyc differ diff --git a/Backend/src/routes/__pycache__/security_routes.cpython-312.pyc b/Backend/src/routes/__pycache__/security_routes.cpython-312.pyc new file mode 100644 index 00000000..f19111e3 Binary files /dev/null and b/Backend/src/routes/__pycache__/security_routes.cpython-312.pyc differ diff --git a/Backend/src/routes/__pycache__/service_booking_routes.cpython-312.pyc b/Backend/src/routes/__pycache__/service_booking_routes.cpython-312.pyc index 5446b4cd..8f06fd71 100644 Binary files a/Backend/src/routes/__pycache__/service_booking_routes.cpython-312.pyc and b/Backend/src/routes/__pycache__/service_booking_routes.cpython-312.pyc differ diff --git a/Backend/src/routes/__pycache__/system_settings_routes.cpython-312.pyc b/Backend/src/routes/__pycache__/system_settings_routes.cpython-312.pyc index e78b85b6..40ef948a 100644 Binary files a/Backend/src/routes/__pycache__/system_settings_routes.cpython-312.pyc and b/Backend/src/routes/__pycache__/system_settings_routes.cpython-312.pyc differ diff --git a/Backend/src/routes/__pycache__/task_routes.cpython-312.pyc b/Backend/src/routes/__pycache__/task_routes.cpython-312.pyc new file mode 100644 index 00000000..bae90efc Binary files /dev/null and b/Backend/src/routes/__pycache__/task_routes.cpython-312.pyc differ diff --git a/Backend/src/routes/__pycache__/user_routes.cpython-312.pyc b/Backend/src/routes/__pycache__/user_routes.cpython-312.pyc index 8d12fe3a..2921dc4f 100644 Binary files a/Backend/src/routes/__pycache__/user_routes.cpython-312.pyc and b/Backend/src/routes/__pycache__/user_routes.cpython-312.pyc differ diff --git a/Backend/src/routes/__pycache__/workflow_routes.cpython-312.pyc b/Backend/src/routes/__pycache__/workflow_routes.cpython-312.pyc new file mode 100644 index 00000000..81f14339 Binary files /dev/null and b/Backend/src/routes/__pycache__/workflow_routes.cpython-312.pyc differ diff --git a/Backend/src/routes/advanced_room_routes.py b/Backend/src/routes/advanced_room_routes.py new file mode 100644 index 00000000..eca434f6 --- /dev/null +++ b/Backend/src/routes/advanced_room_routes.py @@ -0,0 +1,859 @@ +from fastapi import APIRouter, Depends, HTTPException, status, Query, Request +from sqlalchemy.orm import Session, joinedload, load_only +from sqlalchemy import and_, or_, func, desc +from typing import List, Optional +from datetime import datetime, timedelta +from ..config.database import get_db +from ..middleware.auth import get_current_user, authorize_roles +from ..models.user import User +from ..models.role import Role +from ..models.room import Room, RoomStatus +from ..models.booking import Booking, BookingStatus +from ..models.room_maintenance import RoomMaintenance, MaintenanceType, MaintenanceStatus +from ..models.housekeeping_task import HousekeepingTask, HousekeepingStatus, HousekeepingType +from ..models.room_inspection import RoomInspection, InspectionType, InspectionStatus +from ..models.room_attribute import RoomAttribute +from ..services.room_assignment_service import RoomAssignmentService +from pydantic import BaseModel +from typing import Dict, Any + +router = APIRouter(prefix='/advanced-rooms', tags=['advanced-room-management']) + + +# ==================== Room Assignment Optimization ==================== + +@router.post('/assign-optimal-room') +async def assign_optimal_room( + request_data: dict, + current_user: User = Depends(authorize_roles('admin', 'staff')), + db: Session = Depends(get_db) +): + """Find the best available room for a booking based on preferences""" + try: + room_type_id = request_data.get('room_type_id') + check_in_str = request_data.get('check_in') + check_out_str = request_data.get('check_out') + num_guests = request_data.get('num_guests', 1) + guest_preferences = request_data.get('guest_preferences', {}) + exclude_room_ids = request_data.get('exclude_room_ids', []) + + if not room_type_id or not check_in_str or not check_out_str: + raise HTTPException(status_code=400, detail='Missing required fields') + + check_in = datetime.fromisoformat(check_in_str.replace('Z', '+00:00')) + check_out = datetime.fromisoformat(check_out_str.replace('Z', '+00:00')) + + best_room = RoomAssignmentService.find_best_room( + db=db, + room_type_id=room_type_id, + check_in=check_in, + check_out=check_out, + num_guests=num_guests, + guest_preferences=guest_preferences, + exclude_room_ids=exclude_room_ids + ) + + if not best_room: + return { + 'status': 'success', + 'data': {'room': None, 'message': 'No suitable room available'} + } + + return { + 'status': 'success', + 'data': { + 'room': { + 'id': best_room.id, + 'room_number': best_room.room_number, + 'floor': best_room.floor, + 'view': best_room.view, + 'status': best_room.status.value + } + } + } + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + + +@router.get('/{room_id}/availability-calendar') +async def get_room_availability_calendar( + room_id: int, + start_date: str = Query(...), + end_date: str = Query(...), + current_user: User = Depends(authorize_roles('admin', 'staff')), + db: Session = Depends(get_db) +): + """Get detailed availability calendar for a room""" + try: + start = datetime.fromisoformat(start_date.replace('Z', '+00:00')) + end = datetime.fromisoformat(end_date.replace('Z', '+00:00')) + + calendar = RoomAssignmentService.get_room_availability_calendar( + db=db, + room_id=room_id, + start_date=start, + end_date=end + ) + + if not calendar: + raise HTTPException(status_code=404, detail='Room not found') + + return {'status': 'success', 'data': calendar} + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + + +# ==================== Room Maintenance ==================== + +@router.get('/maintenance') +async def get_maintenance_records( + room_id: Optional[int] = Query(None), + status: Optional[str] = Query(None), + maintenance_type: Optional[str] = Query(None), + page: int = Query(1, ge=1), + limit: int = Query(20, ge=1, le=100), + current_user: User = Depends(authorize_roles('admin', 'staff')), + db: Session = Depends(get_db) +): + """Get maintenance records with filtering""" + try: + # Check if user is staff (not admin) - staff should only see their assigned records + role = db.query(Role).filter(Role.id == current_user.role_id).first() + is_staff = role and role.name == 'staff' + + query = db.query(RoomMaintenance) + + # Filter by assigned_to for staff users + if is_staff: + query = query.filter(RoomMaintenance.assigned_to == current_user.id) + + if room_id: + query = query.filter(RoomMaintenance.room_id == room_id) + if status: + query = query.filter(RoomMaintenance.status == MaintenanceStatus(status)) + if maintenance_type: + query = query.filter(RoomMaintenance.maintenance_type == MaintenanceType(maintenance_type)) + + total = query.count() + query = query.order_by(desc(RoomMaintenance.scheduled_start)) + + offset = (page - 1) * limit + records = query.offset(offset).limit(limit).all() + + result = [] + for record in records: + result.append({ + 'id': record.id, + 'room_id': record.room_id, + 'room_number': record.room.room_number if record.room else None, + 'maintenance_type': record.maintenance_type.value, + 'status': record.status.value, + 'title': record.title, + 'description': record.description, + 'scheduled_start': record.scheduled_start.isoformat() if record.scheduled_start else None, + 'scheduled_end': record.scheduled_end.isoformat() if record.scheduled_end else None, + 'actual_start': record.actual_start.isoformat() if record.actual_start else None, + 'actual_end': record.actual_end.isoformat() if record.actual_end else None, + 'assigned_to': record.assigned_to, + 'assigned_staff_name': record.assigned_staff.full_name if record.assigned_staff else None, + 'priority': record.priority, + 'blocks_room': record.blocks_room, + 'estimated_cost': float(record.estimated_cost) if record.estimated_cost else None, + 'actual_cost': float(record.actual_cost) if record.actual_cost else None, + 'created_at': record.created_at.isoformat() if record.created_at else None + }) + + return { + 'status': 'success', + 'data': { + 'maintenance_records': result, + 'pagination': { + 'total': total, + 'page': page, + 'limit': limit, + 'total_pages': (total + limit - 1) // limit + } + } + } + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + + +@router.post('/maintenance') +async def create_maintenance_record( + maintenance_data: dict, + current_user: User = Depends(authorize_roles('admin', 'staff')), + db: Session = Depends(get_db) +): + """Create a new maintenance record""" + try: + room = db.query(Room).filter(Room.id == maintenance_data.get('room_id')).first() + if not room: + raise HTTPException(status_code=404, detail='Room not found') + + scheduled_start = datetime.fromisoformat(maintenance_data['scheduled_start'].replace('Z', '+00:00')) + scheduled_end = None + if maintenance_data.get('scheduled_end'): + scheduled_end = datetime.fromisoformat(maintenance_data['scheduled_end'].replace('Z', '+00:00')) + + block_start = None + block_end = None + if maintenance_data.get('block_start'): + block_start = datetime.fromisoformat(maintenance_data['block_start'].replace('Z', '+00:00')) + if maintenance_data.get('block_end'): + block_end = datetime.fromisoformat(maintenance_data['block_end'].replace('Z', '+00:00')) + + maintenance = RoomMaintenance( + room_id=maintenance_data['room_id'], + maintenance_type=MaintenanceType(maintenance_data.get('maintenance_type', 'preventive')), + status=MaintenanceStatus(maintenance_data.get('status', 'scheduled')), + title=maintenance_data.get('title', 'Maintenance'), + description=maintenance_data.get('description'), + scheduled_start=scheduled_start, + scheduled_end=scheduled_end, + assigned_to=maintenance_data.get('assigned_to'), + reported_by=current_user.id, + estimated_cost=maintenance_data.get('estimated_cost'), + blocks_room=maintenance_data.get('blocks_room', True), + block_start=block_start, + block_end=block_end, + priority=maintenance_data.get('priority', 'medium'), + notes=maintenance_data.get('notes') + ) + + # Update room status if blocking and maintenance is active + if maintenance.blocks_room and maintenance.status in [MaintenanceStatus.scheduled, MaintenanceStatus.in_progress]: + # Only update if room is currently available + if room.status == RoomStatus.available: + room.status = RoomStatus.maintenance + + db.add(maintenance) + db.commit() + db.refresh(maintenance) + + return { + 'status': 'success', + 'message': 'Maintenance record created successfully', + 'data': {'maintenance_id': maintenance.id} + } + except Exception as e: + db.rollback() + raise HTTPException(status_code=500, detail=str(e)) + + +@router.put('/maintenance/{maintenance_id}') +async def update_maintenance_record( + maintenance_id: int, + maintenance_data: dict, + current_user: User = Depends(authorize_roles('admin', 'staff')), + db: Session = Depends(get_db) +): + """Update a maintenance record""" + try: + maintenance = db.query(RoomMaintenance).filter(RoomMaintenance.id == maintenance_id).first() + if not maintenance: + raise HTTPException(status_code=404, detail='Maintenance record not found') + + # Check if user is staff (not admin) - staff can only update their own assigned records + role = db.query(Role).filter(Role.id == current_user.role_id).first() + is_staff = role and role.name == 'staff' + + if is_staff: + # Staff can only update records assigned to them + if maintenance.assigned_to != current_user.id: + raise HTTPException(status_code=403, detail='You can only update maintenance assigned to you') + # Staff can only update status and completion fields + allowed_fields = {'status', 'actual_start', 'actual_end', 'completion_notes', 'actual_cost'} + if any(key not in allowed_fields for key in maintenance_data.keys()): + raise HTTPException(status_code=403, detail='You can only update status and completion information') + + # Update fields + if 'status' in maintenance_data: + new_status = MaintenanceStatus(maintenance_data['status']) + + # Only the assigned user can mark the maintenance as completed + if new_status == MaintenanceStatus.completed: + if not maintenance.assigned_to: + raise HTTPException(status_code=400, detail='Maintenance must be assigned before it can be marked as completed') + if maintenance.assigned_to != current_user.id: + raise HTTPException(status_code=403, detail='Only the assigned staff member can mark this maintenance as completed') + + old_status = maintenance.status + maintenance.status = new_status + + # Update room status based on maintenance status + if maintenance.status == MaintenanceStatus.completed and maintenance.blocks_room: + # Check if room has other active maintenance + other_maintenance = db.query(RoomMaintenance).filter( + and_( + RoomMaintenance.room_id == maintenance.room_id, + RoomMaintenance.id != maintenance_id, + RoomMaintenance.blocks_room == True, + RoomMaintenance.status.in_([MaintenanceStatus.scheduled, MaintenanceStatus.in_progress]) + ) + ).first() + + if not other_maintenance: + # Check if room has active bookings + from datetime import datetime + active_booking = db.query(Booking).filter( + and_( + Booking.room_id == maintenance.room_id, + Booking.status.in_([BookingStatus.confirmed, BookingStatus.checked_in]), + Booking.check_in_date <= datetime.utcnow(), + Booking.check_out_date > datetime.utcnow() + ) + ).first() + + if active_booking: + maintenance.room.status = RoomStatus.occupied + else: + maintenance.room.status = RoomStatus.available + elif maintenance.status in [MaintenanceStatus.scheduled, MaintenanceStatus.in_progress] and maintenance.blocks_room: + # Set room to maintenance if it's not occupied + if maintenance.room.status == RoomStatus.available: + maintenance.room.status = RoomStatus.maintenance + + if 'actual_start' in maintenance_data: + maintenance.actual_start = datetime.fromisoformat(maintenance_data['actual_start'].replace('Z', '+00:00')) + if 'actual_end' in maintenance_data: + maintenance.actual_end = datetime.fromisoformat(maintenance_data['actual_end'].replace('Z', '+00:00')) + if 'completion_notes' in maintenance_data: + maintenance.completion_notes = maintenance_data['completion_notes'] + if 'actual_cost' in maintenance_data: + maintenance.actual_cost = maintenance_data['actual_cost'] + + db.commit() + + return { + 'status': 'success', + 'message': 'Maintenance record updated successfully' + } + except Exception as e: + db.rollback() + raise HTTPException(status_code=500, detail=str(e)) + + +# ==================== Housekeeping Tasks ==================== + +@router.get('/housekeeping') +async def get_housekeeping_tasks( + room_id: Optional[int] = Query(None), + status: Optional[str] = Query(None), + task_type: Optional[str] = Query(None), + date: Optional[str] = Query(None), + page: int = Query(1, ge=1), + limit: int = Query(20, ge=1, le=100), + current_user: User = Depends(authorize_roles('admin', 'staff')), + db: Session = Depends(get_db) +): + """Get housekeeping tasks with filtering""" + try: + # Check if user is staff (not admin) - staff should only see their assigned tasks + role = db.query(Role).filter(Role.id == current_user.role_id).first() + is_staff = role and role.name == 'staff' + + query = db.query(HousekeepingTask) + + # Filter by assigned_to for staff users + if is_staff: + query = query.filter(HousekeepingTask.assigned_to == current_user.id) + + if room_id: + query = query.filter(HousekeepingTask.room_id == room_id) + if status: + query = query.filter(HousekeepingTask.status == HousekeepingStatus(status)) + if task_type: + query = query.filter(HousekeepingTask.task_type == HousekeepingType(task_type)) + if date: + date_obj = datetime.fromisoformat(date.replace('Z', '+00:00')).date() + query = query.filter(func.date(HousekeepingTask.scheduled_time) == date_obj) + + total = query.count() + query = query.order_by(HousekeepingTask.scheduled_time) + + offset = (page - 1) * limit + tasks = query.offset(offset).limit(limit).all() + + result = [] + for task in tasks: + result.append({ + 'id': task.id, + 'room_id': task.room_id, + 'room_number': task.room.room_number if task.room else None, + 'booking_id': task.booking_id, + 'task_type': task.task_type.value, + 'status': task.status.value, + 'scheduled_time': task.scheduled_time.isoformat() if task.scheduled_time else None, + 'started_at': task.started_at.isoformat() if task.started_at else None, + 'completed_at': task.completed_at.isoformat() if task.completed_at else None, + 'assigned_to': task.assigned_to, + 'assigned_staff_name': task.assigned_staff.full_name if task.assigned_staff else None, + 'checklist_items': task.checklist_items, + 'notes': task.notes, + 'quality_score': task.quality_score, + 'estimated_duration_minutes': task.estimated_duration_minutes, + 'actual_duration_minutes': task.actual_duration_minutes + }) + + return { + 'status': 'success', + 'data': { + 'tasks': result, + 'pagination': { + 'total': total, + 'page': page, + 'limit': limit, + 'total_pages': (total + limit - 1) // limit + } + } + } + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + + +@router.post('/housekeeping') +async def create_housekeeping_task( + task_data: dict, + current_user: User = Depends(authorize_roles('admin', 'staff')), + db: Session = Depends(get_db) +): + """Create a new housekeeping task""" + try: + room = db.query(Room).filter(Room.id == task_data.get('room_id')).first() + if not room: + raise HTTPException(status_code=404, detail='Room not found') + + scheduled_time = datetime.fromisoformat(task_data['scheduled_time'].replace('Z', '+00:00')) + assigned_to = task_data.get('assigned_to') + + task = HousekeepingTask( + room_id=task_data['room_id'], + booking_id=task_data.get('booking_id'), + task_type=HousekeepingType(task_data.get('task_type', 'vacant')), + status=HousekeepingStatus(task_data.get('status', 'pending')), + scheduled_time=scheduled_time, + assigned_to=assigned_to, + created_by=current_user.id, + checklist_items=task_data.get('checklist_items', []), + notes=task_data.get('notes'), + estimated_duration_minutes=task_data.get('estimated_duration_minutes') + ) + + db.add(task) + db.commit() + db.refresh(task) + + # Send notification to assigned staff member if task is assigned + if assigned_to: + try: + from ..routes.chat_routes import manager + assigned_staff = db.query(User).filter(User.id == assigned_to).first() + task_data_notification = { + 'id': task.id, + 'room_id': task.room_id, + 'room_number': room.room_number, + 'task_type': task.task_type.value, + 'status': task.status.value, + 'scheduled_time': task.scheduled_time.isoformat() if task.scheduled_time else None, + 'assigned_to': task.assigned_to, + 'created_at': task.created_at.isoformat() if task.created_at else None + } + notification_data = { + 'type': 'housekeeping_task_assigned', + 'data': task_data_notification + } + # Send notification to the specific staff member + if assigned_to in manager.staff_connections: + try: + await manager.staff_connections[assigned_to].send_json(notification_data) + except Exception as e: + print(f'Error sending housekeeping task notification to staff {assigned_to}: {e}') + except Exception as e: + print(f'Error setting up housekeeping task notification: {e}') + + return { + 'status': 'success', + 'message': 'Housekeeping task created successfully', + 'data': {'task_id': task.id} + } + except Exception as e: + db.rollback() + raise HTTPException(status_code=500, detail=str(e)) + + +@router.put('/housekeeping/{task_id}') +async def update_housekeeping_task( + task_id: int, + task_data: dict, + current_user: User = Depends(authorize_roles('admin', 'staff')), + db: Session = Depends(get_db) +): + """Update a housekeeping task""" + try: + task = db.query(HousekeepingTask).filter(HousekeepingTask.id == task_id).first() + if not task: + raise HTTPException(status_code=404, detail='Housekeeping task not found') + + # Check if user is staff (not admin) - staff can only update their own assigned tasks + role = db.query(Role).filter(Role.id == current_user.role_id).first() + is_staff = role and role.name == 'staff' + + if is_staff: + # Staff can only update tasks assigned to them + if task.assigned_to != current_user.id: + raise HTTPException(status_code=403, detail='You can only update tasks assigned to you') + # Staff cannot change assignment + if 'assigned_to' in task_data and task_data.get('assigned_to') != task.assigned_to: + raise HTTPException(status_code=403, detail='You cannot change task assignment') + + old_assigned_to = task.assigned_to + assigned_to_changed = False + + if 'assigned_to' in task_data and not is_staff: + new_assigned_to = task_data.get('assigned_to') + if new_assigned_to != old_assigned_to: + task.assigned_to = new_assigned_to + assigned_to_changed = True + + if 'status' in task_data: + new_status = HousekeepingStatus(task_data['status']) + + # Only the assigned user can mark the task as completed + if new_status == HousekeepingStatus.completed: + if not task.assigned_to: + raise HTTPException(status_code=400, detail='Task must be assigned before it can be marked as completed') + if task.assigned_to != current_user.id: + raise HTTPException(status_code=403, detail='Only the assigned staff member can mark this task as completed') + + task.status = new_status + + if new_status == HousekeepingStatus.in_progress and not task.started_at: + task.started_at = datetime.utcnow() + elif new_status == HousekeepingStatus.completed and not task.completed_at: + task.completed_at = datetime.utcnow() + if task.started_at: + duration = (task.completed_at - task.started_at).total_seconds() / 60 + task.actual_duration_minutes = int(duration) + + if 'checklist_items' in task_data: + task.checklist_items = task_data['checklist_items'] + if 'notes' in task_data: + task.notes = task_data['notes'] + if 'issues_found' in task_data: + task.issues_found = task_data['issues_found'] + if 'quality_score' in task_data: + task.quality_score = task_data['quality_score'] + if 'inspected_by' in task_data: + task.inspected_by = task_data['inspected_by'] + task.inspected_at = datetime.utcnow() + if 'inspection_notes' in task_data: + task.inspection_notes = task_data['inspection_notes'] + + db.commit() + db.refresh(task) + + # Send notification if assignment changed + if assigned_to_changed and task.assigned_to: + try: + from ..routes.chat_routes import manager + room = db.query(Room).filter(Room.id == task.room_id).first() + task_data_notification = { + 'id': task.id, + 'room_id': task.room_id, + 'room_number': room.room_number if room else None, + 'task_type': task.task_type.value, + 'status': task.status.value, + 'scheduled_time': task.scheduled_time.isoformat() if task.scheduled_time else None, + 'assigned_to': task.assigned_to, + 'updated_at': task.updated_at.isoformat() if task.updated_at else None + } + notification_data = { + 'type': 'housekeeping_task_assigned', + 'data': task_data_notification + } + # Send notification to the newly assigned staff member + if task.assigned_to in manager.staff_connections: + try: + await manager.staff_connections[task.assigned_to].send_json(notification_data) + except Exception as e: + print(f'Error sending housekeeping task notification to staff {task.assigned_to}: {e}') + except Exception as e: + print(f'Error setting up housekeeping task notification: {e}') + + return { + 'status': 'success', + 'message': 'Housekeeping task updated successfully' + } + except Exception as e: + db.rollback() + raise HTTPException(status_code=500, detail=str(e)) + + +# ==================== Room Inspections ==================== + +@router.get('/inspections') +async def get_room_inspections( + room_id: Optional[int] = Query(None), + inspection_type: Optional[str] = Query(None), + status: Optional[str] = Query(None), + page: int = Query(1, ge=1), + limit: int = Query(20, ge=1, le=100), + current_user: User = Depends(authorize_roles('admin', 'staff')), + db: Session = Depends(get_db) +): + """Get room inspections with filtering""" + try: + # Check if user is staff (not admin) - staff should only see their assigned inspections + role = db.query(Role).filter(Role.id == current_user.role_id).first() + is_staff = role and role.name == 'staff' + + query = db.query(RoomInspection) + + # Filter by inspected_by for staff users + if is_staff: + query = query.filter(RoomInspection.inspected_by == current_user.id) + + if room_id: + query = query.filter(RoomInspection.room_id == room_id) + if inspection_type: + query = query.filter(RoomInspection.inspection_type == InspectionType(inspection_type)) + if status: + query = query.filter(RoomInspection.status == InspectionStatus(status)) + + total = query.count() + query = query.order_by(desc(RoomInspection.scheduled_at)) + + offset = (page - 1) * limit + inspections = query.offset(offset).limit(limit).all() + + result = [] + for inspection in inspections: + result.append({ + 'id': inspection.id, + 'room_id': inspection.room_id, + 'room_number': inspection.room.room_number if inspection.room else None, + 'booking_id': inspection.booking_id, + 'inspection_type': inspection.inspection_type.value, + 'status': inspection.status.value, + 'scheduled_at': inspection.scheduled_at.isoformat() if inspection.scheduled_at else None, + 'started_at': inspection.started_at.isoformat() if inspection.started_at else None, + 'completed_at': inspection.completed_at.isoformat() if inspection.completed_at else None, + 'inspected_by': inspection.inspected_by, + 'inspector_name': inspection.inspector.full_name if inspection.inspector else None, + 'checklist_items': inspection.checklist_items, + 'overall_score': float(inspection.overall_score) if inspection.overall_score else None, + 'overall_notes': inspection.overall_notes, + 'issues_found': inspection.issues_found, + 'requires_followup': inspection.requires_followup, + 'created_at': inspection.created_at.isoformat() if inspection.created_at else None + }) + + return { + 'status': 'success', + 'data': { + 'inspections': result, + 'pagination': { + 'total': total, + 'page': page, + 'limit': limit, + 'total_pages': (total + limit - 1) // limit + } + } + } + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + + +@router.post('/inspections') +async def create_room_inspection( + inspection_data: dict, + current_user: User = Depends(authorize_roles('admin', 'staff')), + db: Session = Depends(get_db) +): + """Create a new room inspection""" + try: + room = db.query(Room).filter(Room.id == inspection_data.get('room_id')).first() + if not room: + raise HTTPException(status_code=404, detail='Room not found') + + scheduled_at = datetime.fromisoformat(inspection_data['scheduled_at'].replace('Z', '+00:00')) + + inspection = RoomInspection( + room_id=inspection_data['room_id'], + booking_id=inspection_data.get('booking_id'), + inspection_type=InspectionType(inspection_data.get('inspection_type', 'routine')), + status=InspectionStatus(inspection_data.get('status', 'pending')), + scheduled_at=scheduled_at, + inspected_by=inspection_data.get('inspected_by'), + created_by=current_user.id, + checklist_items=inspection_data.get('checklist_items', []), + checklist_template_id=inspection_data.get('checklist_template_id') + ) + + db.add(inspection) + db.commit() + db.refresh(inspection) + + return { + 'status': 'success', + 'message': 'Room inspection created successfully', + 'data': {'inspection_id': inspection.id} + } + except Exception as e: + db.rollback() + raise HTTPException(status_code=500, detail=str(e)) + + +@router.put('/inspections/{inspection_id}') +async def update_room_inspection( + inspection_id: int, + inspection_data: dict, + current_user: User = Depends(authorize_roles('admin', 'staff')), + db: Session = Depends(get_db) +): + """Update a room inspection""" + try: + inspection = db.query(RoomInspection).filter(RoomInspection.id == inspection_id).first() + if not inspection: + raise HTTPException(status_code=404, detail='Room inspection not found') + + # Check if user is staff (not admin) - staff can only update their own assigned inspections + role = db.query(Role).filter(Role.id == current_user.role_id).first() + is_staff = role and role.name == 'staff' + + if is_staff: + # Staff can only update inspections assigned to them + if inspection.inspected_by != current_user.id: + raise HTTPException(status_code=403, detail='You can only update inspections assigned to you') + # Staff can only update status and inspection results + allowed_fields = {'status', 'checklist_items', 'overall_score', 'overall_notes', 'issues_found', 'requires_followup', 'followup_notes'} + if any(key not in allowed_fields for key in inspection_data.keys()): + raise HTTPException(status_code=403, detail='You can only update status and inspection results') + + if 'status' in inspection_data: + new_status = InspectionStatus(inspection_data['status']) + + # Only the assigned user can mark the inspection as completed + if new_status == InspectionStatus.completed: + if not inspection.inspected_by: + raise HTTPException(status_code=400, detail='Inspection must be assigned before it can be marked as completed') + if inspection.inspected_by != current_user.id: + raise HTTPException(status_code=403, detail='Only the assigned inspector can mark this inspection as completed') + + inspection.status = new_status + + if new_status == InspectionStatus.in_progress and not inspection.started_at: + inspection.started_at = datetime.utcnow() + elif new_status == InspectionStatus.completed and not inspection.completed_at: + inspection.completed_at = datetime.utcnow() + + if 'checklist_items' in inspection_data: + inspection.checklist_items = inspection_data['checklist_items'] + if 'overall_score' in inspection_data: + inspection.overall_score = inspection_data['overall_score'] + if 'overall_notes' in inspection_data: + inspection.overall_notes = inspection_data['overall_notes'] + if 'issues_found' in inspection_data: + inspection.issues_found = inspection_data['issues_found'] + if 'photos' in inspection_data: + inspection.photos = inspection_data['photos'] + if 'requires_followup' in inspection_data: + inspection.requires_followup = inspection_data['requires_followup'] + if 'followup_notes' in inspection_data: + inspection.followup_notes = inspection_data['followup_notes'] + if 'maintenance_request_id' in inspection_data: + inspection.maintenance_request_id = inspection_data['maintenance_request_id'] + + db.commit() + + return { + 'status': 'success', + 'message': 'Room inspection updated successfully' + } + except Exception as e: + db.rollback() + raise HTTPException(status_code=500, detail=str(e)) + + +# ==================== Room Status Board ==================== + +@router.get('/status-board') +async def get_room_status_board( + floor: Optional[int] = Query(None), + current_user: User = Depends(authorize_roles('admin', 'staff')), + db: Session = Depends(get_db) +): + """Get visual room status board with all rooms and their current status""" + try: + query = db.query(Room).options(joinedload(Room.room_type)) + if floor: + query = query.filter(Room.floor == floor) + + rooms = query.order_by(Room.floor, Room.room_number).all() + + result = [] + for room in rooms: + # Get current booking if any + # Use load_only to avoid querying columns that don't exist in the database (rate_plan_id, group_booking_id) + current_booking = db.query(Booking).options( + joinedload(Booking.user), + load_only(Booking.id, Booking.user_id, Booking.room_id, Booking.check_in_date, Booking.check_out_date, Booking.status) + ).filter( + and_( + Booking.room_id == room.id, + Booking.status.in_([BookingStatus.confirmed, BookingStatus.checked_in]), + Booking.check_in_date <= datetime.utcnow(), + Booking.check_out_date > datetime.utcnow() + ) + ).first() + + # Get active maintenance + active_maintenance = db.query(RoomMaintenance).filter( + and_( + RoomMaintenance.room_id == room.id, + RoomMaintenance.blocks_room == True, + RoomMaintenance.status.in_([MaintenanceStatus.scheduled, MaintenanceStatus.in_progress]) + ) + ).first() + + # Get pending housekeeping tasks + pending_housekeeping = db.query(HousekeepingTask).filter( + and_( + HousekeepingTask.room_id == room.id, + HousekeepingTask.status == HousekeepingStatus.pending, + func.date(HousekeepingTask.scheduled_time) == datetime.utcnow().date() + ) + ).count() + + result.append({ + 'id': room.id, + 'room_number': room.room_number, + 'floor': room.floor, + 'status': room.status.value, + 'room_type': room.room_type.name if room.room_type else None, + 'current_booking': { + 'id': current_booking.id, + 'guest_name': current_booking.user.full_name if current_booking.user else 'Unknown', + 'check_out': current_booking.check_out_date.isoformat() + } if current_booking else None, + 'active_maintenance': { + 'id': active_maintenance.id, + 'title': active_maintenance.title, + 'type': active_maintenance.maintenance_type.value + } if active_maintenance else None, + 'pending_housekeeping_count': pending_housekeeping + }) + + return { + 'status': 'success', + 'data': {'rooms': result} + } + except Exception as e: + import logging + import traceback + logger = logging.getLogger(__name__) + logger.error(f'Error in get_room_status_board: {str(e)}') + logger.error(f'Traceback: {traceback.format_exc()}') + raise HTTPException(status_code=500, detail=str(e)) + diff --git a/Backend/src/routes/analytics_routes.py b/Backend/src/routes/analytics_routes.py new file mode 100644 index 00000000..bce4abd2 --- /dev/null +++ b/Backend/src/routes/analytics_routes.py @@ -0,0 +1,301 @@ +from fastapi import APIRouter, Depends, HTTPException, Query +from sqlalchemy.orm import Session +from typing import Optional +from ..config.database import get_db +from ..middleware.auth import authorize_roles, get_current_user +from ..models.user import User +from ..services.analytics_service import AnalyticsService + +router = APIRouter(prefix='/analytics', tags=['analytics']) + +# ==================== REVENUE ANALYTICS ==================== + +@router.get('/revenue/revpar') +async def get_revpar( + start_date: Optional[str] = Query(None, alias='from'), + end_date: Optional[str] = Query(None, alias='to'), + current_user: User = Depends(authorize_roles('admin', 'staff', 'accountant')), + db: Session = Depends(get_db) +): + """Get RevPAR (Revenue Per Available Room)""" + try: + start, end = AnalyticsService.parse_date_range(start_date, end_date) + result = AnalyticsService.get_revpar(db, start, end) + return {'status': 'success', 'data': result} + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + +@router.get('/revenue/adr') +async def get_adr( + start_date: Optional[str] = Query(None, alias='from'), + end_date: Optional[str] = Query(None, alias='to'), + current_user: User = Depends(authorize_roles('admin', 'staff', 'accountant')), + db: Session = Depends(get_db) +): + """Get ADR (Average Daily Rate)""" + try: + start, end = AnalyticsService.parse_date_range(start_date, end_date) + result = AnalyticsService.get_adr(db, start, end) + return {'status': 'success', 'data': result} + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + +@router.get('/revenue/occupancy') +async def get_occupancy_rate( + start_date: Optional[str] = Query(None, alias='from'), + end_date: Optional[str] = Query(None, alias='to'), + current_user: User = Depends(authorize_roles('admin', 'staff', 'accountant')), + db: Session = Depends(get_db) +): + """Get Occupancy Rate""" + try: + start, end = AnalyticsService.parse_date_range(start_date, end_date) + result = AnalyticsService.get_occupancy_rate(db, start, end) + return {'status': 'success', 'data': result} + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + +@router.get('/revenue/forecast') +async def get_revenue_forecast( + days: int = Query(30, ge=1, le=365), + current_user: User = Depends(authorize_roles('admin', 'staff', 'accountant')), + db: Session = Depends(get_db) +): + """Get Revenue Forecast""" + try: + result = AnalyticsService.get_revenue_forecast(db, days) + return {'status': 'success', 'data': result} + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + +@router.get('/revenue/market-penetration') +async def get_market_penetration( + start_date: Optional[str] = Query(None, alias='from'), + end_date: Optional[str] = Query(None, alias='to'), + current_user: User = Depends(authorize_roles('admin', 'staff', 'accountant')), + db: Session = Depends(get_db) +): + """Get Market Penetration Analysis""" + try: + start, end = AnalyticsService.parse_date_range(start_date, end_date) + result = AnalyticsService.get_market_penetration(db, start, end) + return {'status': 'success', 'data': result} + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + +# ==================== OPERATIONAL ANALYTICS ==================== + +@router.get('/operational/staff-performance') +async def get_staff_performance( + start_date: Optional[str] = Query(None, alias='from'), + end_date: Optional[str] = Query(None, alias='to'), + current_user: User = Depends(authorize_roles('admin', 'staff')), + db: Session = Depends(get_db) +): + """Get Staff Performance Metrics""" + try: + start, end = AnalyticsService.parse_date_range(start_date, end_date) + result = AnalyticsService.get_staff_performance(db, start, end) + return {'status': 'success', 'data': result} + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + +@router.get('/operational/service-usage') +async def get_service_usage_analytics( + start_date: Optional[str] = Query(None, alias='from'), + end_date: Optional[str] = Query(None, alias='to'), + current_user: User = Depends(authorize_roles('admin', 'staff', 'accountant')), + db: Session = Depends(get_db) +): + """Get Service Usage Analytics""" + try: + start, end = AnalyticsService.parse_date_range(start_date, end_date) + result = AnalyticsService.get_service_usage_analytics(db, start, end) + return {'status': 'success', 'data': result} + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + +@router.get('/operational/efficiency') +async def get_operational_efficiency( + start_date: Optional[str] = Query(None, alias='from'), + end_date: Optional[str] = Query(None, alias='to'), + current_user: User = Depends(authorize_roles('admin', 'staff', 'accountant')), + db: Session = Depends(get_db) +): + """Get Operational Efficiency Metrics""" + try: + start, end = AnalyticsService.parse_date_range(start_date, end_date) + result = AnalyticsService.get_operational_efficiency(db, start, end) + return {'status': 'success', 'data': result} + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + +# ==================== GUEST ANALYTICS ==================== + +@router.get('/guest/lifetime-value') +async def get_guest_lifetime_value( + start_date: Optional[str] = Query(None, alias='from'), + end_date: Optional[str] = Query(None, alias='to'), + current_user: User = Depends(authorize_roles('admin', 'staff', 'accountant')), + db: Session = Depends(get_db) +): + """Get Guest Lifetime Value Analysis""" + try: + start, end = AnalyticsService.parse_date_range(start_date, end_date) + result = AnalyticsService.get_guest_lifetime_value(db, start, end) + return {'status': 'success', 'data': result} + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + +@router.get('/guest/acquisition-cost') +async def get_customer_acquisition_cost( + start_date: Optional[str] = Query(None, alias='from'), + end_date: Optional[str] = Query(None, alias='to'), + current_user: User = Depends(authorize_roles('admin', 'staff', 'accountant')), + db: Session = Depends(get_db) +): + """Get Customer Acquisition Cost Analysis""" + try: + start, end = AnalyticsService.parse_date_range(start_date, end_date) + result = AnalyticsService.get_customer_acquisition_cost(db, start, end) + return {'status': 'success', 'data': result} + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + +@router.get('/guest/repeat-rate') +async def get_repeat_guest_rate( + start_date: Optional[str] = Query(None, alias='from'), + end_date: Optional[str] = Query(None, alias='to'), + current_user: User = Depends(authorize_roles('admin', 'staff', 'accountant')), + db: Session = Depends(get_db) +): + """Get Repeat Guest Rate""" + try: + start, end = AnalyticsService.parse_date_range(start_date, end_date) + result = AnalyticsService.get_repeat_guest_rate(db, start, end) + return {'status': 'success', 'data': result} + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + +@router.get('/guest/satisfaction-trends') +async def get_guest_satisfaction_trends( + start_date: Optional[str] = Query(None, alias='from'), + end_date: Optional[str] = Query(None, alias='to'), + current_user: User = Depends(authorize_roles('admin', 'staff', 'accountant')), + db: Session = Depends(get_db) +): + """Get Guest Satisfaction Trends""" + try: + start, end = AnalyticsService.parse_date_range(start_date, end_date) + result = AnalyticsService.get_guest_satisfaction_trends(db, start, end) + return {'status': 'success', 'data': result} + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + +# ==================== FINANCIAL ANALYTICS ==================== + +@router.get('/financial/profit-loss') +async def get_profit_loss( + start_date: Optional[str] = Query(None, alias='from'), + end_date: Optional[str] = Query(None, alias='to'), + current_user: User = Depends(authorize_roles('admin', 'accountant')), + db: Session = Depends(get_db) +): + """Get Profit & Loss Report""" + try: + start, end = AnalyticsService.parse_date_range(start_date, end_date) + result = AnalyticsService.get_profit_loss(db, start, end) + return {'status': 'success', 'data': result} + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + +@router.get('/financial/payment-methods') +async def get_payment_method_analytics( + start_date: Optional[str] = Query(None, alias='from'), + end_date: Optional[str] = Query(None, alias='to'), + current_user: User = Depends(authorize_roles('admin', 'staff', 'accountant')), + db: Session = Depends(get_db) +): + """Get Payment Method Analytics""" + try: + start, end = AnalyticsService.parse_date_range(start_date, end_date) + result = AnalyticsService.get_payment_method_analytics(db, start, end) + return {'status': 'success', 'data': result} + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + +@router.get('/financial/refunds') +async def get_refund_analysis( + start_date: Optional[str] = Query(None, alias='from'), + end_date: Optional[str] = Query(None, alias='to'), + current_user: User = Depends(authorize_roles('admin', 'accountant')), + db: Session = Depends(get_db) +): + """Get Refund Analysis""" + try: + start, end = AnalyticsService.parse_date_range(start_date, end_date) + result = AnalyticsService.get_refund_analysis(db, start, end) + return {'status': 'success', 'data': result} + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + +# ==================== COMPREHENSIVE ANALYTICS ==================== + +@router.get('/comprehensive') +async def get_comprehensive_analytics( + start_date: Optional[str] = Query(None, alias='from'), + end_date: Optional[str] = Query(None, alias='to'), + include_revenue: bool = Query(True), + include_operational: bool = Query(True), + include_guest: bool = Query(True), + include_financial: bool = Query(True), + current_user: User = Depends(authorize_roles('admin', 'staff', 'accountant')), + db: Session = Depends(get_db) +): + """Get Comprehensive Analytics across all categories""" + try: + result = AnalyticsService.get_comprehensive_analytics( + db, + start_date, + end_date, + include_revenue, + include_operational, + include_guest, + include_financial + ) + return {'status': 'success', 'data': result} + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + diff --git a/Backend/src/routes/booking_routes.py b/Backend/src/routes/booking_routes.py index dc39c343..32593540 100644 --- a/Backend/src/routes/booking_routes.py +++ b/Backend/src/routes/booking_routes.py @@ -1,6 +1,6 @@ from fastapi import APIRouter, Depends, HTTPException, status, Query -from sqlalchemy.orm import Session, joinedload, selectinload -from sqlalchemy import and_, or_ +from sqlalchemy.orm import Session, joinedload, selectinload, load_only +from sqlalchemy import and_, or_, func from typing import Optional from datetime import datetime import random @@ -22,6 +22,8 @@ from fastapi import Request from ..utils.mailer import send_email from ..utils.email_templates import booking_confirmation_email_template, booking_status_changed_email_template from ..services.loyalty_service import LoyaltyService +from ..utils.currency_helpers import get_currency_symbol +from ..utils.response_helpers import success_response router = APIRouter(prefix='/bookings', tags=['bookings']) def _generate_invoice_email_html(invoice: dict, is_proforma: bool=False) -> str: @@ -46,7 +48,20 @@ def calculate_booking_payment_balance(booking: Booking) -> dict: @router.get('/') async def get_all_bookings(search: Optional[str]=Query(None), status_filter: Optional[str]=Query(None, alias='status'), startDate: Optional[str]=Query(None), endDate: Optional[str]=Query(None), page: int=Query(1, ge=1), limit: int=Query(10, ge=1, le=100), current_user: User=Depends(authorize_roles('admin', 'staff')), db: Session=Depends(get_db)): try: - query = db.query(Booking).options(selectinload(Booking.payments), joinedload(Booking.user), joinedload(Booking.room).joinedload(Room.room_type)) + # Use load_only to exclude non-existent columns (rate_plan_id, group_booking_id) + query = db.query(Booking).options( + load_only( + Booking.id, Booking.booking_number, Booking.user_id, Booking.room_id, + Booking.check_in_date, Booking.check_out_date, Booking.num_guests, + Booking.total_price, Booking.original_price, Booking.discount_amount, + Booking.promotion_code, Booking.status, Booking.deposit_paid, + Booking.requires_deposit, Booking.special_requests, + Booking.created_at, Booking.updated_at + ), + selectinload(Booking.payments), + joinedload(Booking.user), + joinedload(Booking.room).joinedload(Room.room_type) + ) if search: query = query.filter(Booking.booking_number.like(f'%{search}%')) if status_filter: @@ -60,7 +75,8 @@ async def get_all_bookings(search: Optional[str]=Query(None), status_filter: Opt if endDate: end = datetime.fromisoformat(endDate.replace('Z', '+00:00')) query = query.filter(Booking.check_in_date <= end) - total = query.count() + # Use func.count() to avoid loading all columns (including non-existent rate_plan_id) + total = query.with_entities(func.count(Booking.id)).scalar() offset = (page - 1) * limit bookings = query.order_by(Booking.created_at.desc()).offset(offset).limit(limit).all() result = [] @@ -96,7 +112,9 @@ async def get_all_bookings(search: Optional[str]=Query(None), status_filter: Opt else: booking_dict['payments'] = [] result.append(booking_dict) - return {'status': 'success', 'data': {'bookings': result, 'pagination': {'total': total, 'page': page, 'limit': limit, 'totalPages': (total + limit - 1) // limit}}} + return success_response( + data={'bookings': result, 'pagination': {'total': total, 'page': page, 'limit': limit, 'totalPages': (total + limit - 1) // limit}} + ) except Exception as e: import logging import traceback @@ -140,7 +158,7 @@ async def get_my_bookings(request: Request, current_user: User=Depends(get_curre else: booking_dict['payments'] = [] result.append(booking_dict) - return {'success': True, 'data': {'bookings': result}} + return success_response(data={'bookings': result}) except Exception as e: raise HTTPException(status_code=500, detail=str(e)) @@ -190,9 +208,36 @@ async def create_booking(booking_data: dict, current_user: User=Depends(get_curr check_out = datetime.fromisoformat(check_out_date.replace('Z', '+00:00')) else: check_out = datetime.strptime(check_out_date, '%Y-%m-%d') + if check_in >= check_out: + raise HTTPException(status_code=400, detail='Check-out date must be after check-in date') overlapping = db.query(Booking).filter(and_(Booking.room_id == room_id, Booking.status != BookingStatus.cancelled, Booking.check_in_date < check_out, Booking.check_out_date > check_in)).first() if overlapping: raise HTTPException(status_code=409, detail='Room already booked for the selected dates') + + # Check for maintenance blocks + from ..models.room_maintenance import RoomMaintenance, MaintenanceStatus + maintenance_block = db.query(RoomMaintenance).filter( + and_( + RoomMaintenance.room_id == room_id, + RoomMaintenance.blocks_room == True, + RoomMaintenance.status.in_([MaintenanceStatus.scheduled, MaintenanceStatus.in_progress]), + or_( + and_( + RoomMaintenance.block_start.isnot(None), + RoomMaintenance.block_end.isnot(None), + RoomMaintenance.block_start < check_out, + RoomMaintenance.block_end > check_in + ), + and_( + RoomMaintenance.scheduled_start < check_out, + RoomMaintenance.scheduled_end.isnot(None), + RoomMaintenance.scheduled_end > check_in + ) + ) + ) + ).first() + if maintenance_block: + raise HTTPException(status_code=409, detail=f'Room is blocked for maintenance: {maintenance_block.title}') booking_number = generate_booking_number() # Calculate room price @@ -330,6 +375,17 @@ async def create_booking(booking_data: dict, current_user: User=Depends(get_curr db.add(service_usage) db.commit() db.refresh(booking) + + # Send booking confirmation notification + try: + from ..services.notification_service import NotificationService + if booking.status == BookingStatus.confirmed: + NotificationService.send_booking_confirmation(db, booking) + except Exception as e: + import logging + logger = logging.getLogger(__name__) + logger.warning(f'Failed to send booking confirmation notification: {e}') + try: from ..services.invoice_service import InvoiceService from ..utils.mailer import send_email @@ -430,7 +486,8 @@ async def create_booking(booking_data: dict, current_user: User=Depends(get_curr booking_dict['room'] = {'id': booking.room.id, 'room_number': booking.room.room_number, 'floor': booking.room.floor} if booking.room.room_type: booking_dict['room']['room_type'] = {'id': booking.room.room_type.id, 'name': booking.room.room_type.name, 'base_price': float(booking.room.room_type.base_price) if booking.room.room_type.base_price else 0.0, 'capacity': booking.room.room_type.capacity} - return {'success': True, 'data': {'booking': booking_dict}, 'message': f'Booking created. Please pay {deposit_percentage}% deposit to confirm.' if requires_deposit else 'Booking created successfully'} + message = f'Booking created. Please pay {deposit_percentage}% deposit to confirm.' if requires_deposit else 'Booking created successfully' + return success_response(data={'booking': booking_dict}, message=message) except HTTPException: raise except Exception as e: @@ -449,7 +506,8 @@ async def get_booking_by_id(id: int, request: Request, current_user: User=Depend booking = db.query(Booking).options(selectinload(Booking.payments), selectinload(Booking.service_usages).selectinload(ServiceUsage.service), joinedload(Booking.user), joinedload(Booking.room).joinedload(Room.room_type)).filter(Booking.id == id).first() if not booking: raise HTTPException(status_code=404, detail='Booking not found') - if current_user.role_id != 1 and booking.user_id != current_user.id: + from ..utils.role_helpers import is_admin + if not is_admin(current_user, db) and booking.user_id != current_user.id: raise HTTPException(status_code=403, detail='Forbidden') import logging logger = logging.getLogger(__name__) @@ -497,7 +555,7 @@ async def get_booking_by_id(id: int, request: Request, current_user: User=Depend else: logger.info(f'Get booking {id} - No service_usages found, initializing empty array') booking_dict['service_usages'] = [] - return {'success': True, 'data': {'booking': booking_dict}} + return success_response(data={'booking': booking_dict}) except HTTPException: raise except Exception as e: @@ -513,10 +571,10 @@ async def cancel_booking(id: int, current_user: User=Depends(get_current_user), raise HTTPException(status_code=403, detail='Forbidden') if booking.status == BookingStatus.cancelled: raise HTTPException(status_code=400, detail='Booking already cancelled') - if booking.status == BookingStatus.confirmed: - raise HTTPException(status_code=400, detail='Cannot cancel a confirmed booking. Please contact support for assistance.') + # Customers can only cancel pending bookings + # Admin/Staff can cancel any booking via update_booking endpoint if booking.status != BookingStatus.pending: - raise HTTPException(status_code=400, detail=f'Cannot cancel booking with status: {booking.status.value}. Only pending bookings can be cancelled.') + raise HTTPException(status_code=400, detail=f'Cannot cancel booking with status: {booking.status.value}. Only pending bookings can be cancelled. Please contact support for assistance.') booking = db.query(Booking).options(selectinload(Booking.payments)).filter(Booking.id == id).first() payments_updated = False if booking.payments: @@ -536,7 +594,37 @@ async def cancel_booking(id: int, current_user: User=Depends(get_current_user), payment.notes = existing_notes + cancellation_note if existing_notes else cancellation_note.strip() payments_updated = True booking.status = BookingStatus.cancelled - if payments_updated > 0: + + # Update room status when booking is cancelled + if booking.room: + # Check if room has other active bookings + active_booking = db.query(Booking).filter( + and_( + Booking.room_id == booking.room_id, + Booking.id != booking.id, + Booking.status.in_([BookingStatus.confirmed, BookingStatus.checked_in]), + Booking.check_in_date <= datetime.utcnow(), + Booking.check_out_date > datetime.utcnow() + ) + ).first() + + if not active_booking: + # Check for maintenance + from ..models.room_maintenance import RoomMaintenance, MaintenanceStatus + active_maintenance = db.query(RoomMaintenance).filter( + and_( + RoomMaintenance.room_id == booking.room_id, + RoomMaintenance.blocks_room == True, + RoomMaintenance.status.in_([MaintenanceStatus.scheduled, MaintenanceStatus.in_progress]) + ) + ).first() + + if active_maintenance: + booking.room.status = RoomStatus.maintenance + else: + booking.room.status = RoomStatus.available + + if payments_updated: db.flush() db.commit() try: @@ -549,14 +637,14 @@ async def cancel_booking(id: int, current_user: User=Depends(get_current_user), import logging logger = logging.getLogger(__name__) logger.error(f'Failed to send cancellation email: {e}') - return {'success': True, 'data': {'booking': booking}} + return success_response(data={'booking': booking}) except HTTPException: raise except Exception as e: db.rollback() raise HTTPException(status_code=500, detail=str(e)) -@router.put('/{id}', dependencies=[Depends(authorize_roles('admin'))]) +@router.put('/{id}', dependencies=[Depends(authorize_roles('admin', 'staff'))]) async def update_booking(id: int, booking_data: dict, current_user: User=Depends(get_current_user), db: Session=Depends(get_db)): try: booking = db.query(Booking).options( @@ -567,11 +655,36 @@ async def update_booking(id: int, booking_data: dict, current_user: User=Depends raise HTTPException(status_code=404, detail='Booking not found') old_status = booking.status status_value = booking_data.get('status') + room = booking.room + new_status = None if status_value: try: new_status = BookingStatus(status_value) booking.status = new_status - if new_status == BookingStatus.cancelled: + + # Update room status based on booking status + if new_status == BookingStatus.checked_in: + # Set room to occupied when checked in + if room and room.status != RoomStatus.maintenance: + room.status = RoomStatus.occupied + elif new_status == BookingStatus.checked_out: + # Set room to cleaning when checked out (housekeeping needed) + if room: + # Check if there's active maintenance + from ..models.room_maintenance import RoomMaintenance, MaintenanceStatus + active_maintenance = db.query(RoomMaintenance).filter( + and_( + RoomMaintenance.room_id == room.id, + RoomMaintenance.blocks_room == True, + RoomMaintenance.status.in_([MaintenanceStatus.scheduled, MaintenanceStatus.in_progress]) + ) + ).first() + if active_maintenance: + room.status = RoomStatus.maintenance + else: + room.status = RoomStatus.cleaning + elif new_status == BookingStatus.cancelled: + # Update room status when booking is cancelled if booking.payments: for payment in booking.payments: if payment.payment_status == PaymentStatus.pending: @@ -580,10 +693,48 @@ async def update_booking(id: int, booking_data: dict, current_user: User=Depends cancellation_note = f'\nPayment cancelled due to booking cancellation on {datetime.utcnow().isoformat()}' payment.notes = existing_notes + cancellation_note if existing_notes else cancellation_note.strip() db.flush() + + # Check if room has other active bookings + if room: + active_booking = db.query(Booking).filter( + and_( + Booking.room_id == room.id, + Booking.id != booking.id, + Booking.status.in_([BookingStatus.confirmed, BookingStatus.checked_in]), + Booking.check_in_date <= datetime.utcnow(), + Booking.check_out_date > datetime.utcnow() + ) + ).first() + + if not active_booking: + # Check for maintenance + from ..models.room_maintenance import RoomMaintenance, MaintenanceStatus + active_maintenance = db.query(RoomMaintenance).filter( + and_( + RoomMaintenance.room_id == room.id, + RoomMaintenance.blocks_room == True, + RoomMaintenance.status.in_([MaintenanceStatus.scheduled, MaintenanceStatus.in_progress]) + ) + ).first() + + if active_maintenance: + room.status = RoomStatus.maintenance + else: + room.status = RoomStatus.available except ValueError: raise HTTPException(status_code=400, detail='Invalid status') db.commit() + # Send booking confirmation notification if status changed to confirmed + if new_status == BookingStatus.confirmed: + try: + from ..services.notification_service import NotificationService + NotificationService.send_booking_confirmation(db, booking) + except Exception as e: + import logging + logger = logging.getLogger(__name__) + logger.warning(f'Failed to send booking confirmation notification: {e}') + # Reload booking with all relationships after commit booking = db.query(Booking).options( selectinload(Booking.payments), @@ -610,8 +761,7 @@ async def update_booking(id: int, booking_data: dict, current_user: User=Depends room_type_name = room.room_type.name if room and room.room_type else 'Room' currency_setting = db.query(SystemSettings).filter(SystemSettings.key == 'platform_currency').first() currency = currency_setting.value if currency_setting and currency_setting.value else 'USD' - currency_symbols = {'USD': '$', 'EUR': '€', 'GBP': '£', 'JPY': '¥', 'CNY': '¥', 'KRW': '₩', 'SGD': 'S$', 'THB': '฿', 'AUD': 'A$', 'CAD': 'C$', 'VND': '₫', 'INR': '₹', 'CHF': 'CHF', 'NZD': 'NZ$'} - currency_symbol = currency_symbols.get(currency, currency) + currency_symbol = get_currency_symbol(currency) guest_name = booking.user.full_name if booking.user else 'Guest' guest_email = booking.user.email if booking.user else None email_html = booking_confirmation_email_template(booking_number=booking.booking_number, guest_name=guest_name, room_number=room.room_number if room else 'N/A', room_type=room_type_name, check_in=booking.check_in_date.strftime('%B %d, %Y') if booking.check_in_date else 'N/A', check_out=booking.check_out_date.strftime('%B %d, %Y') if booking.check_out_date else 'N/A', num_guests=booking.num_guests, total_price=float(booking.total_price), requires_deposit=booking.requires_deposit, deposit_amount=float(booking.total_price) * 0.2 if booking.requires_deposit else None, original_price=float(booking.original_price) if booking.original_price else None, discount_amount=float(booking.discount_amount) if booking.discount_amount else None, promotion_code=booking.promotion_code, client_url=client_url, currency_symbol=currency_symbol) @@ -673,10 +823,10 @@ async def update_booking(id: int, booking_data: dict, current_user: User=Depends 'status': booking.status.value if isinstance(booking.status, BookingStatus) else booking.status, } - response_data = {'status': 'success', 'message': 'Booking updated successfully', 'data': {'booking': booking_dict}} + message = 'Booking updated successfully. ⚠️ Payment reminder: Guest has remaining balance.' if payment_warning else 'Booking updated successfully' + response_data = success_response(data={'booking': booking_dict}, message=message) if payment_warning: response_data['warning'] = payment_warning - response_data['message'] = 'Booking updated successfully. ⚠️ Payment reminder: Guest has remaining balance.' return response_data except HTTPException: raise @@ -722,7 +872,7 @@ async def check_booking_by_number(booking_number: str, db: Session=Depends(get_d booking_dict['payments'] = [] payment_balance = calculate_booking_payment_balance(booking) booking_dict['payment_balance'] = {'total_paid': payment_balance['total_paid'], 'total_price': payment_balance['total_price'], 'remaining_balance': payment_balance['remaining_balance'], 'is_fully_paid': payment_balance['is_fully_paid'], 'payment_percentage': payment_balance['payment_percentage']} - response_data = {'status': 'success', 'data': {'booking': booking_dict}} + response_data = success_response(data={'booking': booking_dict}) if payment_balance['remaining_balance'] > 0.01: response_data['warning'] = {'message': f'Guest has not fully paid. Remaining balance: {payment_balance['remaining_balance']:.2f}', 'remaining_balance': payment_balance['remaining_balance'], 'payment_percentage': payment_balance['payment_percentage']} return response_data @@ -758,6 +908,8 @@ async def admin_create_booking(booking_data: dict, current_user: User=Depends(au check_out_date = booking_data.get('check_out_date') total_price = booking_data.get('total_price') guest_count = booking_data.get('guest_count', 1) + if guest_count < 1 or guest_count > 20: + raise HTTPException(status_code=400, detail='Guest count must be between 1 and 20') notes = booking_data.get('notes') payment_method = booking_data.get('payment_method', 'cash') payment_status = booking_data.get('payment_status', 'unpaid') # 'full', 'deposit', or 'unpaid' @@ -793,6 +945,10 @@ async def admin_create_booking(booking_data: dict, current_user: User=Depends(au else: check_out = datetime.strptime(check_out_date, '%Y-%m-%d') + # Validate dates + if check_in >= check_out: + raise HTTPException(status_code=400, detail='Check-out date must be after check-in date') + # Check for overlapping bookings overlapping = db.query(Booking).filter( and_( @@ -1118,11 +1274,10 @@ async def admin_create_booking(booking_data: dict, current_user: User=Depends(au 'capacity': booking.room.room_type.capacity } - return { - 'success': True, - 'data': {'booking': booking_dict}, - 'message': f'Booking created successfully by {current_user.full_name}' - } + return success_response( + data={'booking': booking_dict}, + message=f'Booking created successfully by {current_user.full_name}' + ) except HTTPException: raise except Exception as e: diff --git a/Backend/src/routes/email_campaign_routes.py b/Backend/src/routes/email_campaign_routes.py new file mode 100644 index 00000000..4110029e --- /dev/null +++ b/Backend/src/routes/email_campaign_routes.py @@ -0,0 +1,584 @@ +from fastapi import APIRouter, Depends, HTTPException, status, Query, Request +from sqlalchemy.orm import Session, selectinload +from typing import Optional, List, Union +from datetime import datetime +from pydantic import BaseModel, EmailStr, field_validator + +from ..config.database import get_db +from ..middleware.auth import get_current_user, authorize_roles +from ..models.user import User +from ..models.email_campaign import ( + Campaign, CampaignStatus, CampaignType, + CampaignSegment, EmailTemplate, CampaignEmail, EmailStatus, + DripSequence, DripSequenceStep, Unsubscribe +) +from ..services.email_campaign_service import email_campaign_service + +router = APIRouter(prefix="/email-campaigns", tags=["Email Campaigns"]) + +# Pydantic Models +class CampaignCreate(BaseModel): + name: str + subject: str + html_content: str + text_content: Optional[str] = None + campaign_type: str = "newsletter" + segment_id: Optional[Union[int, str]] = None + scheduled_at: Optional[datetime] = None + template_id: Optional[Union[int, str]] = None + from_name: Optional[str] = None + from_email: Optional[str] = None + reply_to_email: Optional[str] = None + track_opens: bool = True + track_clicks: bool = True + + @field_validator('segment_id', 'template_id', mode='before') + @classmethod + def parse_int_or_none(cls, v): + if v is None or v == '' or v == 'undefined' or (isinstance(v, str) and v.strip() == ''): + return None + if isinstance(v, str): + try: + return int(v) + except (ValueError, TypeError): + return None + if isinstance(v, int): + return v + return None + +class CampaignUpdate(BaseModel): + name: Optional[str] = None + subject: Optional[str] = None + html_content: Optional[str] = None + text_content: Optional[str] = None + segment_id: Optional[Union[int, str]] = None + scheduled_at: Optional[datetime] = None + status: Optional[str] = None + + @field_validator('segment_id', mode='before') + @classmethod + def parse_int_or_none(cls, v): + if v is None or v == '' or v == 'undefined' or (isinstance(v, str) and v.strip() == ''): + return None + if isinstance(v, str): + try: + return int(v) + except (ValueError, TypeError): + return None + if isinstance(v, int): + return v + return None + +class SegmentCreate(BaseModel): + name: str + description: Optional[str] = None + criteria: dict + +class TemplateCreate(BaseModel): + name: str + subject: str + html_content: str + text_content: Optional[str] = None + category: Optional[str] = None + variables: Optional[List[str]] = None + +class DripSequenceCreate(BaseModel): + name: str + description: Optional[str] = None + trigger_event: Optional[str] = None + +class DripStepCreate(BaseModel): + subject: str + html_content: str + text_content: Optional[str] = None + delay_days: int = 0 + delay_hours: int = 0 + template_id: Optional[Union[int, str]] = None + + @field_validator('template_id', mode='before') + @classmethod + def parse_int_or_none(cls, v): + if v is None or v == '' or v == 'undefined' or (isinstance(v, str) and v.strip() == ''): + return None + if isinstance(v, str): + try: + return int(v) + except (ValueError, TypeError): + return None + if isinstance(v, int): + return v + return None + +# Campaign Routes +@router.get("") +async def get_campaigns( + status_filter: Optional[str] = Query(None, alias='status'), + campaign_type: Optional[str] = Query(None), + limit: int = Query(50, ge=1, le=100), + offset: int = Query(0, ge=0), + current_user: User = Depends(authorize_roles("admin")), + db: Session = Depends(get_db) +): + """Get all email campaigns""" + query = db.query(Campaign) + + if status_filter: + try: + status_enum = CampaignStatus(status_filter) + query = query.filter(Campaign.status == status_enum) + except ValueError: + pass + + if campaign_type: + try: + type_enum = CampaignType(campaign_type) + query = query.filter(Campaign.campaign_type == type_enum) + except ValueError: + pass + + campaigns = query.order_by(Campaign.created_at.desc()).offset(offset).limit(limit).all() + + return [{ + "id": c.id, + "name": c.name, + "subject": c.subject, + "campaign_type": c.campaign_type.value, + "status": c.status.value, + "total_recipients": c.total_recipients, + "total_sent": c.total_sent, + "total_opened": c.total_opened, + "total_clicked": c.total_clicked, + "open_rate": float(c.open_rate) if c.open_rate else None, + "click_rate": float(c.click_rate) if c.click_rate else None, + "scheduled_at": c.scheduled_at.isoformat() if c.scheduled_at else None, + "sent_at": c.sent_at.isoformat() if c.sent_at else None, + "created_at": c.created_at.isoformat() if c.created_at else None + } for c in campaigns] + +# Segment Routes (must be before /{campaign_id} to avoid route conflicts) +@router.get("/segments") +async def get_segments( + current_user: User = Depends(authorize_roles("admin")), + db: Session = Depends(get_db) +): + """Get all campaign segments""" + try: + segments = db.query(CampaignSegment).filter(CampaignSegment.is_active == True).all() + return [{ + "id": s.id, + "name": s.name, + "description": s.description, + "criteria": s.criteria, + "estimated_count": s.estimated_count, + "created_at": s.created_at.isoformat() if s.created_at else None + } for s in segments] + except HTTPException: + raise + except Exception as e: + from ..config.logging_config import get_logger + logger = get_logger(__name__) + logger.error(f"Error fetching segments: {str(e)}", exc_info=True) + raise HTTPException(status_code=500, detail=f"Failed to fetch segments: {str(e)}") + +@router.post("/segments") +async def create_segment( + data: SegmentCreate, + current_user: User = Depends(authorize_roles("admin")), + db: Session = Depends(get_db) +): + """Create a new campaign segment""" + try: + segment = email_campaign_service.create_segment( + db=db, + name=data.name, + criteria=data.criteria, + description=data.description, + created_by=current_user.id + ) + return {"status": "success", "segment_id": segment.id, "estimated_count": segment.estimated_count} + except HTTPException: + raise + except Exception as e: + from ..config.logging_config import get_logger + logger = get_logger(__name__) + logger.error(f"Error creating segment: {str(e)}", exc_info=True) + raise HTTPException(status_code=500, detail=f"Failed to create segment: {str(e)}") + +# Template Routes (must be before /{campaign_id} to avoid route conflicts) +@router.get("/templates") +async def get_templates( + category: Optional[str] = Query(None, description="Filter by template category"), + current_user: User = Depends(authorize_roles("admin")), + db: Session = Depends(get_db) +): + """Get all email templates""" + try: + query = db.query(EmailTemplate).filter(EmailTemplate.is_active == True) + if category: + query = query.filter(EmailTemplate.category == category) + + templates = query.all() + result = [{ + "id": t.id, + "name": t.name, + "subject": t.subject, + "category": t.category, + "variables": t.variables, + "created_at": t.created_at.isoformat() if t.created_at else None + } for t in templates] + return result + except HTTPException: + raise + except Exception as e: + from ..config.logging_config import get_logger + logger = get_logger(__name__) + logger.error(f"Error fetching templates: {str(e)}", exc_info=True) + raise HTTPException(status_code=500, detail=f"Failed to fetch templates: {str(e)}") + +@router.post("/templates") +async def create_template( + data: TemplateCreate, + current_user: User = Depends(authorize_roles("admin")), + db: Session = Depends(get_db) +): + """Create a new email template""" + try: + template = EmailTemplate( + name=data.name, + subject=data.subject, + html_content=data.html_content, + text_content=data.text_content, + category=data.category, + variables=data.variables, + created_by=current_user.id + ) + db.add(template) + db.commit() + db.refresh(template) + return {"status": "success", "template_id": template.id} + except HTTPException: + raise + except Exception as e: + from ..config.logging_config import get_logger + logger = get_logger(__name__) + logger.error(f"Error creating template: {str(e)}", exc_info=True) + db.rollback() + raise HTTPException(status_code=500, detail=f"Failed to create template: {str(e)}") + +# Drip Sequence Routes (must be before /{campaign_id} to avoid route conflicts) +@router.get("/drip-sequences") +async def get_drip_sequences( + current_user: User = Depends(authorize_roles("admin")), + db: Session = Depends(get_db) +): + """Get all drip sequences""" + try: + # Use eager loading to avoid lazy loading issues + sequences = db.query(DripSequence).options( + selectinload(DripSequence.steps) + ).filter(DripSequence.is_active == True).all() + + return [{ + "id": s.id, + "name": s.name, + "description": s.description, + "trigger_event": s.trigger_event, + "step_count": len(s.steps) if s.steps else 0, + "created_at": s.created_at.isoformat() if s.created_at else None + } for s in sequences] + except HTTPException: + raise + except Exception as e: + from ..config.logging_config import get_logger + logger = get_logger(__name__) + logger.error(f"Error fetching drip sequences: {str(e)}", exc_info=True) + raise HTTPException(status_code=500, detail=f"Failed to fetch drip sequences: {str(e)}") + +@router.post("/drip-sequences") +async def create_drip_sequence( + data: DripSequenceCreate, + current_user: User = Depends(authorize_roles("admin")), + db: Session = Depends(get_db) +): + """Create a new drip sequence""" + try: + sequence = email_campaign_service.create_drip_sequence( + db=db, + name=data.name, + description=data.description, + trigger_event=data.trigger_event, + created_by=current_user.id + ) + return {"status": "success", "sequence_id": sequence.id} + except HTTPException: + raise + except Exception as e: + from ..config.logging_config import get_logger + logger = get_logger(__name__) + logger.error(f"Error creating drip sequence: {str(e)}", exc_info=True) + raise HTTPException(status_code=500, detail=f"Failed to create drip sequence: {str(e)}") + +@router.post("/drip-sequences/{sequence_id}/steps") +async def add_drip_step( + sequence_id: int, + data: DripStepCreate, + current_user: User = Depends(authorize_roles("admin")), + db: Session = Depends(get_db) +): + """Add a step to a drip sequence""" + try: + # Ensure template_id is integer or None + template_id = int(data.template_id) if data.template_id is not None else None + + step = email_campaign_service.add_drip_step( + db=db, + sequence_id=sequence_id, + subject=data.subject, + html_content=data.html_content, + text_content=data.text_content, + delay_days=data.delay_days, + delay_hours=data.delay_hours, + template_id=template_id + ) + return {"status": "success", "step_id": step.id} + except HTTPException: + raise + except Exception as e: + from ..config.logging_config import get_logger + logger = get_logger(__name__) + logger.error(f"Error adding drip step: {str(e)}", exc_info=True) + raise HTTPException(status_code=500, detail=f"Failed to add drip step: {str(e)}") + +@router.get("/{campaign_id}") +async def get_campaign( + campaign_id: int, + current_user: User = Depends(authorize_roles("admin")), + db: Session = Depends(get_db) +): + """Get a specific campaign""" + campaign = db.query(Campaign).filter(Campaign.id == campaign_id).first() + if not campaign: + raise HTTPException(status_code=404, detail="Campaign not found") + + return { + "id": campaign.id, + "name": campaign.name, + "subject": campaign.subject, + "html_content": campaign.html_content, + "text_content": campaign.text_content, + "campaign_type": campaign.campaign_type.value, + "status": campaign.status.value, + "segment_id": campaign.segment_id, + "scheduled_at": campaign.scheduled_at.isoformat() if campaign.scheduled_at else None, + "total_recipients": campaign.total_recipients, + "total_sent": campaign.total_sent, + "total_delivered": campaign.total_delivered, + "total_opened": campaign.total_opened, + "total_clicked": campaign.total_clicked, + "total_bounced": campaign.total_bounced, + "open_rate": float(campaign.open_rate) if campaign.open_rate else None, + "click_rate": float(campaign.click_rate) if campaign.click_rate else None, + "created_at": campaign.created_at.isoformat() if campaign.created_at else None + } + +@router.post("") +async def create_campaign( + data: CampaignCreate, + current_user: User = Depends(authorize_roles("admin")), + db: Session = Depends(get_db) +): + """Create a new email campaign""" + try: + campaign_type = CampaignType(data.campaign_type) + except ValueError: + raise HTTPException(status_code=400, detail="Invalid campaign type") + + campaign = email_campaign_service.create_campaign( + db=db, + name=data.name, + subject=data.subject, + html_content=data.html_content, + text_content=data.text_content, + campaign_type=campaign_type, + segment_id=data.segment_id, + scheduled_at=data.scheduled_at, + template_id=data.template_id, + created_by=current_user.id, + from_name=data.from_name, + from_email=data.from_email, + reply_to_email=data.reply_to_email, + track_opens=data.track_opens, + track_clicks=data.track_clicks + ) + + return {"status": "success", "campaign_id": campaign.id} + +@router.put("/{campaign_id}") +async def update_campaign( + campaign_id: int, + data: CampaignUpdate, + current_user: User = Depends(authorize_roles("admin")), + db: Session = Depends(get_db) +): + """Update a campaign""" + campaign = db.query(Campaign).filter(Campaign.id == campaign_id).first() + if not campaign: + raise HTTPException(status_code=404, detail="Campaign not found") + + if data.name: + campaign.name = data.name + if data.subject: + campaign.subject = data.subject + if data.html_content: + campaign.html_content = data.html_content + if data.text_content is not None: + campaign.text_content = data.text_content + if data.segment_id is not None: + campaign.segment_id = int(data.segment_id) if isinstance(data.segment_id, str) else data.segment_id + if data.scheduled_at is not None: + campaign.scheduled_at = data.scheduled_at + if data.status: + try: + campaign.status = CampaignStatus(data.status) + except ValueError: + raise HTTPException(status_code=400, detail="Invalid status") + + db.commit() + db.refresh(campaign) + + return {"status": "success", "message": "Campaign updated"} + +@router.post("/{campaign_id}/send") +async def send_campaign( + campaign_id: int, + db: Session = Depends(get_db), + current_user: User = Depends(authorize_roles("admin")) +): + """Send an email campaign""" + try: + result = email_campaign_service.send_campaign(db=db, campaign_id=campaign_id) + return {"status": "success", "result": result} + except ValueError as e: + raise HTTPException(status_code=400, detail=str(e)) + except Exception as e: + raise HTTPException(status_code=500, detail=f"Failed to send campaign: {str(e)}") + +@router.get("/{campaign_id}/analytics") +async def get_campaign_analytics( + campaign_id: int, + current_user: User = Depends(authorize_roles("admin")), + db: Session = Depends(get_db) +): + """Get campaign analytics""" + campaign = db.query(Campaign).filter(Campaign.id == campaign_id).first() + if not campaign: + raise HTTPException(status_code=404, detail="Campaign not found") + + # Get email status breakdown + emails = db.query(CampaignEmail).filter(CampaignEmail.campaign_id == campaign_id).all() + + status_breakdown = {} + for status in EmailStatus: + status_breakdown[status.value] = len([e for e in emails if e.status == status]) + + return { + "campaign_id": campaign.id, + "total_recipients": campaign.total_recipients, + "total_sent": campaign.total_sent, + "total_delivered": campaign.total_delivered, + "total_opened": campaign.total_opened, + "total_clicked": campaign.total_clicked, + "total_bounced": campaign.total_bounced, + "total_unsubscribed": campaign.total_unsubscribed, + "open_rate": float(campaign.open_rate) if campaign.open_rate else 0, + "click_rate": float(campaign.click_rate) if campaign.click_rate else 0, + "bounce_rate": float(campaign.bounce_rate) if campaign.bounce_rate else 0, + "status_breakdown": status_breakdown + } + +# Tracking Routes (public endpoints for email tracking) +@router.get("/track/open/{campaign_email_id}") +async def track_email_open( + campaign_email_id: int, + db: Session = Depends(get_db) +): + """Track email open (called by tracking pixel)""" + email_campaign_service.track_email_open(db=db, campaign_email_id=campaign_email_id) + # Return 1x1 transparent pixel (GIF) + from fastapi.responses import Response + # 1x1 transparent GIF + pixel = b'\x47\x49\x46\x38\x39\x61\x01\x00\x01\x00\x80\x00\x00\xff\xff\xff\x00\x00\x00\x21\xf9\x04\x01\x00\x00\x00\x00\x2c\x00\x00\x00\x00\x01\x00\x01\x00\x00\x02\x02\x04\x01\x00\x3b' + return Response(content=pixel, media_type="image/gif") + +@router.get("/track/click/{campaign_email_id}") +async def track_email_click( + campaign_email_id: int, + url: str = Query(...), + request: Request = None, + db: Session = Depends(get_db) +): + """Track email click""" + ip_address = request.client.host if request and request.client else None + user_agent = request.headers.get("User-Agent") if request else None + + email_campaign_service.track_email_click( + db=db, + campaign_email_id=campaign_email_id, + url=url, + ip_address=ip_address, + user_agent=user_agent + ) + + # Redirect to the actual URL + from fastapi.responses import RedirectResponse + return RedirectResponse(url=url) + +# Unsubscribe Routes +@router.post("/unsubscribe") +async def unsubscribe( + email: EmailStr = Query(...), + campaign_id: Optional[Union[int, str]] = Query(None), + unsubscribe_all: bool = Query(False), + reason: Optional[str] = None, + db: Session = Depends(get_db) +): + """Unsubscribe from email campaigns""" + # Parse campaign_id if it's a string + parsed_campaign_id = None + if campaign_id is not None and campaign_id != '' and campaign_id != 'undefined': + try: + parsed_campaign_id = int(campaign_id) if isinstance(campaign_id, str) else campaign_id + except (ValueError, TypeError): + parsed_campaign_id = None + + user = db.query(User).filter(User.email == email).first() + + unsubscribe_record = Unsubscribe( + email=email, + user_id=user.id if user else None, + campaign_id=parsed_campaign_id, + unsubscribe_all=unsubscribe_all, + reason=reason + ) + db.add(unsubscribe_record) + db.commit() + + return {"status": "success", "message": "Successfully unsubscribed"} + +@router.post("/drip-sequences/process") +async def process_drip_sequences( + current_user: User = Depends(authorize_roles("admin")), + db: Session = Depends(get_db) +): + """Manually trigger drip sequence processing""" + try: + email_campaign_service.process_drip_sequences(db=db) + return {"status": "success", "message": "Drip sequences processed"} + except HTTPException: + raise + except Exception as e: + from ..config.logging_config import get_logger + logger = get_logger(__name__) + logger.error(f"Error processing drip sequences: {str(e)}", exc_info=True) + raise HTTPException(status_code=500, detail=f"Failed to process drip sequences: {str(e)}") + diff --git a/Backend/src/routes/group_booking_routes.py b/Backend/src/routes/group_booking_routes.py new file mode 100644 index 00000000..28bc6dfb --- /dev/null +++ b/Backend/src/routes/group_booking_routes.py @@ -0,0 +1,575 @@ +from fastapi import APIRouter, Depends, HTTPException, Query +from sqlalchemy.orm import Session, joinedload, selectinload +from typing import Optional, List +from datetime import datetime +from decimal import Decimal + +from ..config.database import get_db +from ..middleware.auth import get_current_user, authorize_roles +from ..models.user import User +from ..models.role import Role +from ..models.group_booking import ( + GroupBooking, GroupBookingMember, GroupRoomBlock, GroupPayment, + GroupBookingStatus, PaymentOption +) +from ..models.room import Room +from ..models.room_type import RoomType +from ..services.group_booking_service import GroupBookingService +from ..services.room_service import get_base_url +from fastapi import Request + +router = APIRouter(prefix='/group-bookings', tags=['group-bookings']) + + +@router.post('/') +async def create_group_booking( + booking_data: dict, + current_user: User = Depends(get_current_user), + db: Session = Depends(get_db) +): + """Create a new group booking""" + try: + # Extract data + coordinator_name = booking_data.get('coordinator_name') or current_user.full_name + coordinator_email = booking_data.get('coordinator_email') or current_user.email + coordinator_phone = booking_data.get('coordinator_phone') or current_user.phone + + check_in_date = datetime.fromisoformat(booking_data['check_in_date'].replace('Z', '+00:00')) + check_out_date = datetime.fromisoformat(booking_data['check_out_date'].replace('Z', '+00:00')) + + room_blocks = booking_data.get('room_blocks', []) + if not room_blocks: + raise HTTPException(status_code=400, detail="At least one room block is required") + + payment_option = PaymentOption(booking_data.get('payment_option', 'coordinator_pays_all')) + deposit_required = booking_data.get('deposit_required', False) + deposit_percentage = booking_data.get('deposit_percentage') + + group_booking = GroupBookingService.create_group_booking( + db=db, + coordinator_id=current_user.id, + coordinator_name=coordinator_name, + coordinator_email=coordinator_email, + coordinator_phone=coordinator_phone, + check_in_date=check_in_date, + check_out_date=check_out_date, + room_blocks=room_blocks, + group_name=booking_data.get('group_name'), + group_type=booking_data.get('group_type'), + payment_option=payment_option, + deposit_required=deposit_required, + deposit_percentage=deposit_percentage, + special_requests=booking_data.get('special_requests'), + notes=booking_data.get('notes'), + cancellation_policy=booking_data.get('cancellation_policy'), + cancellation_deadline=datetime.fromisoformat(booking_data['cancellation_deadline'].replace('Z', '+00:00')) if booking_data.get('cancellation_deadline') else None, + cancellation_penalty_percentage=booking_data.get('cancellation_penalty_percentage'), + group_discount_percentage=booking_data.get('group_discount_percentage') + ) + + # Load relationships + db.refresh(group_booking) + group_booking = db.query(GroupBooking).options( + selectinload(GroupBooking.room_blocks).joinedload(GroupRoomBlock.room_type), + selectinload(GroupBooking.members), + selectinload(GroupBooking.coordinator) + ).filter(GroupBooking.id == group_booking.id).first() + + return { + 'status': 'success', + 'message': 'Group booking created successfully', + 'data': { + 'group_booking': _serialize_group_booking(group_booking) + } + } + except ValueError as e: + raise HTTPException(status_code=400, detail=str(e)) + except Exception as e: + import logging + import traceback + logger = logging.getLogger(__name__) + logger.error(f'Error creating group booking: {str(e)}') + logger.error(traceback.format_exc()) + raise HTTPException(status_code=500, detail=str(e)) + + +@router.get('/') +async def get_group_bookings( + search: Optional[str] = Query(None), + status_filter: Optional[str] = Query(None, alias='status'), + page: int = Query(1, ge=1), + limit: int = Query(10, ge=1, le=100), + current_user: User = Depends(authorize_roles('admin', 'staff')), + db: Session = Depends(get_db) +): + """Get all group bookings (admin/staff only)""" + try: + query = db.query(GroupBooking).options( + selectinload(GroupBooking.room_blocks).joinedload(GroupRoomBlock.room_type), + selectinload(GroupBooking.members), + selectinload(GroupBooking.coordinator), + selectinload(GroupBooking.payments) + ) + + if search: + query = query.filter( + GroupBooking.group_booking_number.like(f'%{search}%') | + GroupBooking.group_name.like(f'%{search}%') | + GroupBooking.coordinator_name.like(f'%{search}%') + ) + + if status_filter: + try: + query = query.filter(GroupBooking.status == GroupBookingStatus(status_filter)) + except ValueError: + pass + + total = query.count() + offset = (page - 1) * limit + group_bookings = query.order_by(GroupBooking.created_at.desc()).offset(offset).limit(limit).all() + + return { + 'status': 'success', + 'data': { + 'group_bookings': [_serialize_group_booking(gb) for gb in group_bookings], + 'pagination': { + 'total': total, + 'page': page, + 'limit': limit, + 'totalPages': (total + limit - 1) // limit + } + } + } + except Exception as e: + import logging + logger = logging.getLogger(__name__) + logger.error(f'Error getting group bookings: {str(e)}') + raise HTTPException(status_code=500, detail=str(e)) + + +@router.get('/me') +async def get_my_group_bookings( + current_user: User = Depends(get_current_user), + db: Session = Depends(get_db) +): + """Get group bookings for current user (as coordinator)""" + try: + group_bookings = db.query(GroupBooking).options( + selectinload(GroupBooking.room_blocks).joinedload(GroupRoomBlock.room_type), + selectinload(GroupBooking.members), + selectinload(GroupBooking.payments) + ).filter(GroupBooking.coordinator_id == current_user.id).order_by(GroupBooking.created_at.desc()).all() + + return { + 'status': 'success', + 'data': { + 'group_bookings': [_serialize_group_booking(gb) for gb in group_bookings] + } + } + except Exception as e: + import logging + logger = logging.getLogger(__name__) + logger.error(f'Error getting my group bookings: {str(e)}') + raise HTTPException(status_code=500, detail=str(e)) + + +@router.get('/{group_booking_id}') +async def get_group_booking( + group_booking_id: int, + current_user: User = Depends(get_current_user), + db: Session = Depends(get_db) +): + """Get a specific group booking""" + try: + group_booking = db.query(GroupBooking).options( + selectinload(GroupBooking.room_blocks).joinedload(GroupRoomBlock.room_type), + selectinload(GroupBooking.members), + selectinload(GroupBooking.coordinator), + selectinload(GroupBooking.payments), + selectinload(GroupBooking.individual_bookings) + ).filter(GroupBooking.id == group_booking_id).first() + + if not group_booking: + raise HTTPException(status_code=404, detail="Group booking not found") + + # Check authorization + role = db.query(Role).filter(Role.id == current_user.role_id).first() + if role and role.name not in ['admin', 'staff']: + if group_booking.coordinator_id != current_user.id: + raise HTTPException(status_code=403, detail="Not authorized to view this group booking") + + return { + 'status': 'success', + 'data': { + 'group_booking': _serialize_group_booking(group_booking, detailed=True) + } + } + except HTTPException: + raise + except Exception as e: + import logging + logger = logging.getLogger(__name__) + logger.error(f'Error getting group booking: {str(e)}') + raise HTTPException(status_code=500, detail=str(e)) + + +@router.post('/{group_booking_id}/members') +async def add_member( + group_booking_id: int, + member_data: dict, + current_user: User = Depends(get_current_user), + db: Session = Depends(get_db) +): + """Add a member to a group booking""" + try: + # Check authorization + group_booking = db.query(GroupBooking).filter(GroupBooking.id == group_booking_id).first() + if not group_booking: + raise HTTPException(status_code=404, detail="Group booking not found") + + role = db.query(Role).filter(Role.id == current_user.role_id).first() + if role and role.name not in ['admin', 'staff']: + if group_booking.coordinator_id != current_user.id: + raise HTTPException(status_code=403, detail="Not authorized to add members") + + member = GroupBookingService.add_member_to_group( + db=db, + group_booking_id=group_booking_id, + full_name=member_data.get('full_name'), + email=member_data.get('email'), + phone=member_data.get('phone'), + user_id=member_data.get('user_id'), + room_block_id=member_data.get('room_block_id'), + special_requests=member_data.get('special_requests'), + preferences=member_data.get('preferences') + ) + + db.refresh(member) + member = db.query(GroupBookingMember).options( + joinedload(GroupBookingMember.user), + joinedload(GroupBookingMember.room_block), + joinedload(GroupBookingMember.assigned_room) + ).filter(GroupBookingMember.id == member.id).first() + + return { + 'status': 'success', + 'message': 'Member added successfully', + 'data': { + 'member': _serialize_member(member) + } + } + except HTTPException: + raise + except ValueError as e: + raise HTTPException(status_code=400, detail=str(e)) + except Exception as e: + import logging + logger = logging.getLogger(__name__) + logger.error(f'Error adding member: {str(e)}') + raise HTTPException(status_code=500, detail=str(e)) + + +@router.post('/{group_booking_id}/confirm') +async def confirm_group_booking( + group_booking_id: int, + current_user: User = Depends(authorize_roles('admin', 'staff')), + db: Session = Depends(get_db) +): + """Confirm a group booking""" + try: + group_booking = GroupBookingService.confirm_group_booking(db, group_booking_id) + + return { + 'status': 'success', + 'message': 'Group booking confirmed successfully', + 'data': { + 'group_booking': _serialize_group_booking(group_booking) + } + } + except ValueError as e: + raise HTTPException(status_code=400, detail=str(e)) + except Exception as e: + import logging + logger = logging.getLogger(__name__) + logger.error(f'Error confirming group booking: {str(e)}') + raise HTTPException(status_code=500, detail=str(e)) + + +@router.post('/{group_booking_id}/members/{member_id}/assign-room') +async def assign_room_to_member( + group_booking_id: int, + member_id: int, + assignment_data: dict, + current_user: User = Depends(authorize_roles('admin', 'staff')), + db: Session = Depends(get_db) +): + """Assign a room to a group member and create individual booking""" + try: + room_id = assignment_data.get('room_id') + if not room_id: + raise HTTPException(status_code=400, detail="room_id is required") + + booking = GroupBookingService.create_individual_booking_from_member( + db=db, + member_id=member_id, + room_id=room_id + ) + + return { + 'status': 'success', + 'message': 'Room assigned and booking created successfully', + 'data': { + 'booking': { + 'id': booking.id, + 'booking_number': booking.booking_number, + 'room_id': booking.room_id, + 'status': booking.status.value if hasattr(booking.status, 'value') else str(booking.status) + } + } + } + except ValueError as e: + raise HTTPException(status_code=400, detail=str(e)) + except Exception as e: + import logging + logger = logging.getLogger(__name__) + logger.error(f'Error assigning room: {str(e)}') + raise HTTPException(status_code=500, detail=str(e)) + + +@router.post('/{group_booking_id}/payments') +async def add_payment( + group_booking_id: int, + payment_data: dict, + current_user: User = Depends(get_current_user), + db: Session = Depends(get_db) +): + """Add a payment to a group booking""" + try: + # Check authorization + group_booking = db.query(GroupBooking).filter(GroupBooking.id == group_booking_id).first() + if not group_booking: + raise HTTPException(status_code=404, detail="Group booking not found") + + role = db.query(Role).filter(Role.id == current_user.role_id).first() + if role and role.name not in ['admin', 'staff']: + if group_booking.coordinator_id != current_user.id: + raise HTTPException(status_code=403, detail="Not authorized to add payments") + + payment = GroupBookingService.add_group_payment( + db=db, + group_booking_id=group_booking_id, + amount=Decimal(str(payment_data.get('amount'))), + payment_method=payment_data.get('payment_method'), + payment_type=payment_data.get('payment_type', 'deposit'), + transaction_id=payment_data.get('transaction_id'), + paid_by_member_id=payment_data.get('paid_by_member_id'), + paid_by_user_id=payment_data.get('paid_by_user_id', current_user.id), + notes=payment_data.get('notes') + ) + + return { + 'status': 'success', + 'message': 'Payment added successfully', + 'data': { + 'payment': _serialize_payment(payment) + } + } + except HTTPException: + raise + except ValueError as e: + raise HTTPException(status_code=400, detail=str(e)) + except Exception as e: + import logging + logger = logging.getLogger(__name__) + logger.error(f'Error adding payment: {str(e)}') + raise HTTPException(status_code=500, detail=str(e)) + + +@router.post('/{group_booking_id}/cancel') +async def cancel_group_booking( + group_booking_id: int, + cancellation_data: dict, + current_user: User = Depends(authorize_roles('admin', 'staff')), + db: Session = Depends(get_db) +): + """Cancel a group booking""" + try: + group_booking = GroupBookingService.cancel_group_booking( + db=db, + group_booking_id=group_booking_id, + cancellation_reason=cancellation_data.get('reason') + ) + + return { + 'status': 'success', + 'message': 'Group booking cancelled successfully', + 'data': { + 'group_booking': _serialize_group_booking(group_booking) + } + } + except ValueError as e: + raise HTTPException(status_code=400, detail=str(e)) + except Exception as e: + import logging + logger = logging.getLogger(__name__) + logger.error(f'Error cancelling group booking: {str(e)}') + raise HTTPException(status_code=500, detail=str(e)) + + +@router.get('/{group_booking_id}/availability') +async def check_availability( + group_booking_id: int, + current_user: User = Depends(get_current_user), + db: Session = Depends(get_db) +): + """Check room availability for a group booking""" + try: + group_booking = db.query(GroupBooking).filter(GroupBooking.id == group_booking_id).first() + if not group_booking: + raise HTTPException(status_code=404, detail="Group booking not found") + + availability_results = [] + room_blocks = db.query(GroupRoomBlock).filter( + GroupRoomBlock.group_booking_id == group_booking_id + ).all() + + for room_block in room_blocks: + availability = GroupBookingService.check_room_availability( + db=db, + room_type_id=room_block.room_type_id, + check_in=group_booking.check_in_date, + check_out=group_booking.check_out_date, + num_rooms=room_block.rooms_blocked + ) + + availability_results.append({ + 'room_block_id': room_block.id, + 'room_type_id': room_block.room_type_id, + 'rooms_blocked': room_block.rooms_blocked, + 'availability': availability + }) + + return { + 'status': 'success', + 'data': { + 'availability': availability_results + } + } + except HTTPException: + raise + except Exception as e: + import logging + logger = logging.getLogger(__name__) + logger.error(f'Error checking availability: {str(e)}') + raise HTTPException(status_code=500, detail=str(e)) + + +# Helper functions for serialization +def _serialize_group_booking(group_booking: GroupBooking, detailed: bool = False) -> dict: + """Serialize group booking to dict""" + data = { + 'id': group_booking.id, + 'group_booking_number': group_booking.group_booking_number, + 'coordinator': { + 'id': group_booking.coordinator_id, + 'name': group_booking.coordinator_name, + 'email': group_booking.coordinator_email, + 'phone': group_booking.coordinator_phone + }, + 'group_name': group_booking.group_name, + 'group_type': group_booking.group_type, + 'total_rooms': group_booking.total_rooms, + 'total_guests': group_booking.total_guests, + 'check_in_date': group_booking.check_in_date.isoformat() if group_booking.check_in_date else None, + 'check_out_date': group_booking.check_out_date.isoformat() if group_booking.check_out_date else None, + 'base_rate_per_room': float(group_booking.base_rate_per_room) if group_booking.base_rate_per_room else 0.0, + 'group_discount_percentage': float(group_booking.group_discount_percentage) if group_booking.group_discount_percentage else 0.0, + 'group_discount_amount': float(group_booking.group_discount_amount) if group_booking.group_discount_amount else 0.0, + 'original_total_price': float(group_booking.original_total_price) if group_booking.original_total_price else 0.0, + 'discount_amount': float(group_booking.discount_amount) if group_booking.discount_amount else 0.0, + 'total_price': float(group_booking.total_price) if group_booking.total_price else 0.0, + 'payment_option': group_booking.payment_option.value if hasattr(group_booking.payment_option, 'value') else str(group_booking.payment_option), + 'deposit_required': group_booking.deposit_required, + 'deposit_percentage': group_booking.deposit_percentage, + 'deposit_amount': float(group_booking.deposit_amount) if group_booking.deposit_amount else None, + 'amount_paid': float(group_booking.amount_paid) if group_booking.amount_paid else 0.0, + 'balance_due': float(group_booking.balance_due) if group_booking.balance_due else 0.0, + 'status': group_booking.status.value if hasattr(group_booking.status, 'value') else str(group_booking.status), + 'special_requests': group_booking.special_requests, + 'notes': group_booking.notes, + 'created_at': group_booking.created_at.isoformat() if group_booking.created_at else None, + 'updated_at': group_booking.updated_at.isoformat() if group_booking.updated_at else None + } + + if detailed: + data['room_blocks'] = [_serialize_room_block(rb) for rb in group_booking.room_blocks] if group_booking.room_blocks else [] + data['members'] = [_serialize_member(m) for m in group_booking.members] if group_booking.members else [] + data['payments'] = [_serialize_payment(p) for p in group_booking.payments] if group_booking.payments else [] + data['cancellation_policy'] = group_booking.cancellation_policy + data['cancellation_deadline'] = group_booking.cancellation_deadline.isoformat() if group_booking.cancellation_deadline else None + data['cancellation_penalty_percentage'] = float(group_booking.cancellation_penalty_percentage) if group_booking.cancellation_penalty_percentage else None + data['confirmed_at'] = group_booking.confirmed_at.isoformat() if group_booking.confirmed_at else None + data['cancelled_at'] = group_booking.cancelled_at.isoformat() if group_booking.cancelled_at else None + + return data + + +def _serialize_room_block(room_block: GroupRoomBlock) -> dict: + """Serialize room block to dict""" + return { + 'id': room_block.id, + 'room_type_id': room_block.room_type_id, + 'room_type': { + 'id': room_block.room_type.id, + 'name': room_block.room_type.name, + 'base_price': float(room_block.room_type.base_price) if room_block.room_type.base_price else 0.0 + } if room_block.room_type else None, + 'rooms_blocked': room_block.rooms_blocked, + 'rooms_confirmed': room_block.rooms_confirmed, + 'rooms_available': room_block.rooms_available, + 'rate_per_room': float(room_block.rate_per_room) if room_block.rate_per_room else 0.0, + 'total_block_price': float(room_block.total_block_price) if room_block.total_block_price else 0.0, + 'is_active': room_block.is_active, + 'block_released_at': room_block.block_released_at.isoformat() if room_block.block_released_at else None + } + + +def _serialize_member(member: GroupBookingMember) -> dict: + """Serialize group member to dict""" + return { + 'id': member.id, + 'full_name': member.full_name, + 'email': member.email, + 'phone': member.phone, + 'user_id': member.user_id, + 'room_block_id': member.room_block_id, + 'assigned_room_id': member.assigned_room_id, + 'individual_booking_id': member.individual_booking_id, + 'special_requests': member.special_requests, + 'preferences': member.preferences, + 'individual_amount': float(member.individual_amount) if member.individual_amount else None, + 'individual_paid': float(member.individual_paid) if member.individual_paid else 0.0, + 'individual_balance': float(member.individual_balance) if member.individual_balance else 0.0, + 'is_checked_in': member.is_checked_in, + 'checked_in_at': member.checked_in_at.isoformat() if member.checked_in_at else None, + 'is_checked_out': member.is_checked_out, + 'checked_out_at': member.checked_out_at.isoformat() if member.checked_out_at else None + } + + +def _serialize_payment(payment: GroupPayment) -> dict: + """Serialize group payment to dict""" + return { + 'id': payment.id, + 'amount': float(payment.amount) if payment.amount else 0.0, + 'payment_method': payment.payment_method, + 'payment_type': payment.payment_type, + 'payment_status': payment.payment_status, + 'transaction_id': payment.transaction_id, + 'payment_date': payment.payment_date.isoformat() if payment.payment_date else None, + 'notes': payment.notes, + 'paid_by_member_id': payment.paid_by_member_id, + 'paid_by_user_id': payment.paid_by_user_id, + 'created_at': payment.created_at.isoformat() if payment.created_at else None + } + diff --git a/Backend/src/routes/guest_profile_routes.py b/Backend/src/routes/guest_profile_routes.py index 1e016352..56d68bee 100644 --- a/Backend/src/routes/guest_profile_routes.py +++ b/Backend/src/routes/guest_profile_routes.py @@ -10,6 +10,7 @@ from ..models.guest_tag import GuestTag from ..models.guest_communication import GuestCommunication, CommunicationType, CommunicationDirection from ..models.guest_segment import GuestSegment from ..services.guest_profile_service import GuestProfileService +from ..utils.role_helpers import is_customer import json router = APIRouter(prefix='/guest-profiles', tags=['guest-profiles']) @@ -88,8 +89,9 @@ async def get_guest_profile( if not user: raise HTTPException(status_code=404, detail=f'User with ID {user_id} not found') - # Check if user is a customer (role_id == 3) - if user.role_id != 3: + # Check if user is a customer + from ..utils.role_helpers import is_customer + if not is_customer(user, db): raise HTTPException(status_code=404, detail=f'User with ID {user_id} is not a guest (customer)') # Get analytics @@ -189,8 +191,8 @@ async def update_guest_preferences( ): """Update guest preferences""" try: - user = db.query(User).filter(User.id == user_id, User.role_id == 3).first() - if not user: + user = db.query(User).filter(User.id == user_id).first() + if not user or not is_customer(user, db): raise HTTPException(status_code=404, detail='Guest not found') preferences = db.query(GuestPreference).filter(GuestPreference.user_id == user_id).first() @@ -240,8 +242,8 @@ async def create_guest_note( ): """Create a note for a guest""" try: - user = db.query(User).filter(User.id == user_id, User.role_id == 3).first() - if not user: + user = db.query(User).filter(User.id == user_id).first() + if not user or not is_customer(user, db): raise HTTPException(status_code=404, detail='Guest not found') note = GuestNote( @@ -302,8 +304,8 @@ async def toggle_vip_status( ): """Toggle VIP status for a guest""" try: - user = db.query(User).filter(User.id == user_id, User.role_id == 3).first() - if not user: + user = db.query(User).filter(User.id == user_id).first() + if not user or not is_customer(user, db): raise HTTPException(status_code=404, detail='Guest not found') user.is_vip = vip_data.get('is_vip', False) @@ -327,8 +329,8 @@ async def add_tag_to_guest( ): """Add a tag to a guest""" try: - user = db.query(User).filter(User.id == user_id, User.role_id == 3).first() - if not user: + user = db.query(User).filter(User.id == user_id).first() + if not user or not is_customer(user, db): raise HTTPException(status_code=404, detail='Guest not found') tag_id = tag_data.get('tag_id') @@ -357,8 +359,8 @@ async def remove_tag_from_guest( ): """Remove a tag from a guest""" try: - user = db.query(User).filter(User.id == user_id, User.role_id == 3).first() - if not user: + user = db.query(User).filter(User.id == user_id).first() + if not user or not is_customer(user, db): raise HTTPException(status_code=404, detail='Guest not found') tag = db.query(GuestTag).filter(GuestTag.id == tag_id).first() @@ -386,8 +388,8 @@ async def create_communication( ): """Create a communication record""" try: - user = db.query(User).filter(User.id == user_id, User.role_id == 3).first() - if not user: + user = db.query(User).filter(User.id == user_id).first() + if not user or not is_customer(user, db): raise HTTPException(status_code=404, detail='Guest not found') comm = GuestCommunication( @@ -420,8 +422,8 @@ async def get_guest_analytics( ): """Get guest analytics""" try: - user = db.query(User).filter(User.id == user_id, User.role_id == 3).first() - if not user: + user = db.query(User).filter(User.id == user_id).first() + if not user or not is_customer(user, db): raise HTTPException(status_code=404, detail='Guest not found') analytics = GuestProfileService.get_guest_analytics(user_id, db) @@ -441,8 +443,8 @@ async def update_guest_metrics( ): """Update guest metrics (lifetime value, satisfaction score, etc.)""" try: - user = db.query(User).filter(User.id == user_id, User.role_id == 3).first() - if not user: + user = db.query(User).filter(User.id == user_id).first() + if not user or not is_customer(user, db): raise HTTPException(status_code=404, detail='Guest not found') metrics = GuestProfileService.update_guest_metrics(user_id, db) diff --git a/Backend/src/routes/invoice_routes.py b/Backend/src/routes/invoice_routes.py index baafc62a..5e108db7 100644 --- a/Backend/src/routes/invoice_routes.py +++ b/Backend/src/routes/invoice_routes.py @@ -8,14 +8,16 @@ from ..models.user import User from ..models.invoice import Invoice, InvoiceStatus from ..models.booking import Booking from ..services.invoice_service import InvoiceService +from ..utils.role_helpers import can_access_all_invoices, can_create_invoices +from ..utils.response_helpers import success_response router = APIRouter(prefix='/invoices', tags=['invoices']) @router.get('/') async def get_invoices(booking_id: Optional[int]=Query(None), status_filter: Optional[str]=Query(None, alias='status'), page: int=Query(1, ge=1), limit: int=Query(10, ge=1, le=100), current_user: User=Depends(get_current_user), db: Session=Depends(get_db)): try: - user_id = None if current_user.role_id in [1, 4] else current_user.id # admin and accountant can see all invoices + user_id = None if can_access_all_invoices(current_user, db) else current_user.id result = InvoiceService.get_invoices(db=db, user_id=user_id, booking_id=booking_id, status=status_filter, page=page, limit=limit) - return {'status': 'success', 'data': result} + return success_response(data=result) except Exception as e: raise HTTPException(status_code=500, detail=str(e)) @@ -25,9 +27,9 @@ async def get_invoice_by_id(id: int, current_user: User=Depends(get_current_user invoice = InvoiceService.get_invoice(id, db) if not invoice: raise HTTPException(status_code=404, detail='Invoice not found') - if current_user.role_id not in [1, 4] and invoice['user_id'] != current_user.id: # admin and accountant can see all invoices + if not can_access_all_invoices(current_user, db) and invoice['user_id'] != current_user.id: raise HTTPException(status_code=403, detail='Forbidden') - return {'status': 'success', 'data': {'invoice': invoice}} + return success_response(data={'invoice': invoice}) except HTTPException: raise except Exception as e: @@ -36,7 +38,7 @@ async def get_invoice_by_id(id: int, current_user: User=Depends(get_current_user @router.post('/') async def create_invoice(invoice_data: dict, current_user: User=Depends(get_current_user), db: Session=Depends(get_db)): try: - if current_user.role_id not in [1, 2, 4]: # admin, staff, and accountant can create invoices + if not can_create_invoices(current_user, db): raise HTTPException(status_code=403, detail='Forbidden') booking_id = invoice_data.get('booking_id') if not booking_id: @@ -55,7 +57,7 @@ async def create_invoice(invoice_data: dict, current_user: User=Depends(get_curr invoice_notes = f'{promotion_note}\n{invoice_notes}'.strip() if invoice_notes else promotion_note invoice_kwargs['notes'] = invoice_notes invoice = InvoiceService.create_invoice_from_booking(booking_id=booking_id, db=db, created_by_id=current_user.id, tax_rate=invoice_data.get('tax_rate', 0.0), discount_amount=invoice_data.get('discount_amount', 0.0), due_days=invoice_data.get('due_days', 30), **invoice_kwargs) - return {'status': 'success', 'message': 'Invoice created successfully', 'data': {'invoice': invoice}} + return success_response(data={'invoice': invoice}, message='Invoice created successfully') except HTTPException: raise except ValueError as e: @@ -70,7 +72,7 @@ async def update_invoice(id: int, invoice_data: dict, current_user: User=Depends if not invoice: raise HTTPException(status_code=404, detail='Invoice not found') updated_invoice = InvoiceService.update_invoice(invoice_id=id, db=db, updated_by_id=current_user.id, **invoice_data) - return {'status': 'success', 'message': 'Invoice updated successfully', 'data': {'invoice': updated_invoice}} + return success_response(data={'invoice': updated_invoice}, message='Invoice updated successfully') except HTTPException: raise except ValueError as e: @@ -83,7 +85,7 @@ async def mark_invoice_as_paid(id: int, payment_data: dict, current_user: User=D try: amount = payment_data.get('amount') updated_invoice = InvoiceService.mark_invoice_as_paid(invoice_id=id, db=db, amount=amount, updated_by_id=current_user.id) - return {'status': 'success', 'message': 'Invoice marked as paid successfully', 'data': {'invoice': updated_invoice}} + return success_response(data={'invoice': updated_invoice}, message='Invoice marked as paid successfully') except HTTPException: raise except ValueError as e: @@ -99,7 +101,7 @@ async def delete_invoice(id: int, current_user: User=Depends(authorize_roles('ad raise HTTPException(status_code=404, detail='Invoice not found') db.delete(invoice) db.commit() - return {'status': 'success', 'message': 'Invoice deleted successfully'} + return success_response(message='Invoice deleted successfully') except HTTPException: raise except Exception as e: @@ -112,10 +114,10 @@ async def get_invoices_by_booking(booking_id: int, current_user: User=Depends(ge booking = db.query(Booking).filter(Booking.id == booking_id).first() if not booking: raise HTTPException(status_code=404, detail='Booking not found') - if current_user.role_id not in [1, 4] and booking.user_id != current_user.id: # admin and accountant can see all invoices + if not can_access_all_invoices(current_user, db) and booking.user_id != current_user.id: raise HTTPException(status_code=403, detail='Forbidden') result = InvoiceService.get_invoices(db=db, booking_id=booking_id) - return {'status': 'success', 'data': result} + return success_response(data=result) except HTTPException: raise except Exception as e: diff --git a/Backend/src/routes/notification_routes.py b/Backend/src/routes/notification_routes.py new file mode 100644 index 00000000..b560e5c6 --- /dev/null +++ b/Backend/src/routes/notification_routes.py @@ -0,0 +1,306 @@ +from fastapi import APIRouter, Depends, HTTPException, Query, Body +from sqlalchemy.orm import Session +from typing import Optional, List, Dict, Any +from ..config.database import get_db +from ..middleware.auth import authorize_roles, get_current_user +from ..models.user import User +from ..models.notification import NotificationChannel, NotificationStatus, NotificationType +from ..services.notification_service import NotificationService +from pydantic import BaseModel +from datetime import datetime + +router = APIRouter(prefix='/notifications', tags=['notifications']) + +# Request/Response Models +class NotificationSendRequest(BaseModel): + user_id: Optional[int] = None + notification_type: str + channel: str + content: str + subject: Optional[str] = None + template_id: Optional[int] = None + priority: Optional[str] = 'normal' + scheduled_at: Optional[str] = None + booking_id: Optional[int] = None + payment_id: Optional[int] = None + meta_data: Optional[Dict[str, Any]] = None + +class TemplateCreateRequest(BaseModel): + name: str + notification_type: str + channel: str + content: str + subject: Optional[str] = None + variables: Optional[List[str]] = None + +class PreferencesUpdateRequest(BaseModel): + email_enabled: Optional[bool] = None + sms_enabled: Optional[bool] = None + push_enabled: Optional[bool] = None + whatsapp_enabled: Optional[bool] = None + in_app_enabled: Optional[bool] = None + booking_confirmation_email: Optional[bool] = None + booking_confirmation_sms: Optional[bool] = None + payment_receipt_email: Optional[bool] = None + payment_receipt_sms: Optional[bool] = None + pre_arrival_reminder_email: Optional[bool] = None + pre_arrival_reminder_sms: Optional[bool] = None + check_in_reminder_email: Optional[bool] = None + check_in_reminder_sms: Optional[bool] = None + check_out_reminder_email: Optional[bool] = None + check_out_reminder_sms: Optional[bool] = None + marketing_campaign_email: Optional[bool] = None + marketing_campaign_sms: Optional[bool] = None + loyalty_update_email: Optional[bool] = None + loyalty_update_sms: Optional[bool] = None + system_alert_email: Optional[bool] = None + system_alert_push: Optional[bool] = None + +# Notifications +@router.get('/') +async def get_notifications( + user_id: Optional[int] = Query(None), + notification_type: Optional[str] = Query(None), + channel: Optional[str] = Query(None), + status: Optional[str] = Query(None), + skip: int = Query(0, ge=0), + limit: int = Query(100, ge=1, le=1000), + current_user: User = Depends(authorize_roles('admin', 'staff')), + db: Session = Depends(get_db) +): + """Get notifications""" + try: + notifications = NotificationService.get_notifications( + db=db, + user_id=user_id, + notification_type=NotificationType(notification_type) if notification_type else None, + channel=NotificationChannel(channel) if channel else None, + status=NotificationStatus(status) if status else None, + skip=skip, + limit=limit + ) + return {'status': 'success', 'data': [{ + 'id': n.id, + 'user_id': n.user_id, + 'notification_type': n.notification_type.value, + 'channel': n.channel.value, + 'subject': n.subject, + 'content': n.content, + 'status': n.status.value, + 'priority': n.priority, + 'sent_at': n.sent_at.isoformat() if n.sent_at else None, + 'delivered_at': n.delivered_at.isoformat() if n.delivered_at else None, + 'read_at': n.read_at.isoformat() if n.read_at else None, + 'created_at': n.created_at.isoformat(), + } for n in notifications]} + except ValueError as e: + raise HTTPException(status_code=400, detail=str(e)) + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + +@router.get('/my-notifications') +async def get_my_notifications( + status: Optional[str] = Query(None), + skip: int = Query(0, ge=0), + limit: int = Query(50, ge=1, le=100), + current_user: User = Depends(get_current_user), + db: Session = Depends(get_db) +): + """Get current user's notifications""" + try: + notifications = NotificationService.get_notifications( + db=db, + user_id=current_user.id, + status=NotificationStatus(status) if status else None, + skip=skip, + limit=limit + ) + return {'status': 'success', 'data': [{ + 'id': n.id, + 'notification_type': n.notification_type.value, + 'channel': n.channel.value, + 'subject': n.subject, + 'content': n.content, + 'status': n.status.value, + 'read_at': n.read_at.isoformat() if n.read_at else None, + 'created_at': n.created_at.isoformat(), + } for n in notifications]} + except ValueError as e: + raise HTTPException(status_code=400, detail=str(e)) + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + +@router.post('/send') +async def send_notification( + notification_data: NotificationSendRequest, + current_user: User = Depends(authorize_roles('admin', 'staff')), + db: Session = Depends(get_db) +): + """Send notification""" + try: + scheduled_at = None + if notification_data.scheduled_at: + scheduled_at = datetime.fromisoformat(notification_data.scheduled_at.replace('Z', '+00:00')) + + notification = NotificationService.send_notification( + db=db, + user_id=notification_data.user_id, + notification_type=NotificationType(notification_data.notification_type), + channel=NotificationChannel(notification_data.channel), + content=notification_data.content, + subject=notification_data.subject, + template_id=notification_data.template_id, + priority=notification_data.priority or 'normal', + scheduled_at=scheduled_at, + booking_id=notification_data.booking_id, + payment_id=notification_data.payment_id, + meta_data=notification_data.meta_data + ) + return {'status': 'success', 'data': { + 'id': notification.id, + 'status': notification.status.value, + 'created_at': notification.created_at.isoformat() + }} + except ValueError as e: + raise HTTPException(status_code=400, detail=str(e)) + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + +@router.post('/{notification_id}/read') +async def mark_notification_read( + notification_id: int, + current_user: User = Depends(get_current_user), + db: Session = Depends(get_db) +): + """Mark notification as read""" + try: + notification = NotificationService.mark_as_read(db, notification_id, current_user.id) + if not notification: + raise HTTPException(status_code=404, detail='Notification not found') + + return {'status': 'success', 'data': { + 'id': notification.id, + 'status': notification.status.value, + 'read_at': notification.read_at.isoformat() if notification.read_at else None + }} + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + +# Templates +@router.post('/templates') +async def create_template( + template_data: TemplateCreateRequest, + current_user: User = Depends(authorize_roles('admin')), + db: Session = Depends(get_db) +): + """Create notification template""" + try: + template = NotificationService.create_template( + db=db, + name=template_data.name, + notification_type=NotificationType(template_data.notification_type), + channel=NotificationChannel(template_data.channel), + content=template_data.content, + created_by=current_user.id, + subject=template_data.subject, + variables=template_data.variables + ) + return {'status': 'success', 'data': { + 'id': template.id, + 'name': template.name, + 'created_at': template.created_at.isoformat() + }} + except ValueError as e: + raise HTTPException(status_code=400, detail=str(e)) + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + +@router.get('/templates') +async def get_templates( + notification_type: Optional[str] = Query(None), + channel: Optional[str] = Query(None), + current_user: User = Depends(authorize_roles('admin', 'staff')), + db: Session = Depends(get_db) +): + """Get notification templates""" + try: + from ..models.notification import NotificationTemplate + query = db.query(NotificationTemplate) + + if notification_type: + query = query.filter(NotificationTemplate.notification_type == NotificationType(notification_type)) + if channel: + query = query.filter(NotificationTemplate.channel == NotificationChannel(channel)) + + templates = query.filter(NotificationTemplate.is_active == True).all() + return {'status': 'success', 'data': [{ + 'id': t.id, + 'name': t.name, + 'notification_type': t.notification_type.value, + 'channel': t.channel.value, + 'subject': t.subject, + 'content': t.content, + 'variables': t.variables, + } for t in templates]} + except ValueError as e: + raise HTTPException(status_code=400, detail=str(e)) + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + +# Preferences +@router.get('/preferences') +async def get_preferences( + current_user: User = Depends(get_current_user), + db: Session = Depends(get_db) +): + """Get user notification preferences""" + try: + preferences = NotificationService.get_user_preferences(db, current_user.id) + return {'status': 'success', 'data': { + 'email_enabled': preferences.email_enabled, + 'sms_enabled': preferences.sms_enabled, + 'push_enabled': preferences.push_enabled, + 'whatsapp_enabled': preferences.whatsapp_enabled, + 'in_app_enabled': preferences.in_app_enabled, + 'booking_confirmation_email': preferences.booking_confirmation_email, + 'booking_confirmation_sms': preferences.booking_confirmation_sms, + 'payment_receipt_email': preferences.payment_receipt_email, + 'payment_receipt_sms': preferences.payment_receipt_sms, + 'pre_arrival_reminder_email': preferences.pre_arrival_reminder_email, + 'pre_arrival_reminder_sms': preferences.pre_arrival_reminder_sms, + 'check_in_reminder_email': preferences.check_in_reminder_email, + 'check_in_reminder_sms': preferences.check_in_reminder_sms, + 'check_out_reminder_email': preferences.check_out_reminder_email, + 'check_out_reminder_sms': preferences.check_out_reminder_sms, + 'marketing_campaign_email': preferences.marketing_campaign_email, + 'marketing_campaign_sms': preferences.marketing_campaign_sms, + 'loyalty_update_email': preferences.loyalty_update_email, + 'loyalty_update_sms': preferences.loyalty_update_sms, + 'system_alert_email': preferences.system_alert_email, + 'system_alert_push': preferences.system_alert_push, + }} + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + +@router.put('/preferences') +async def update_preferences( + preferences_data: PreferencesUpdateRequest, + current_user: User = Depends(get_current_user), + db: Session = Depends(get_db) +): + """Update user notification preferences""" + try: + prefs_dict = preferences_data.dict(exclude_unset=True) + preferences = NotificationService.update_user_preferences(db, current_user.id, prefs_dict) + return {'status': 'success', 'data': { + 'email_enabled': preferences.email_enabled, + 'sms_enabled': preferences.sms_enabled, + 'push_enabled': preferences.push_enabled, + 'whatsapp_enabled': preferences.whatsapp_enabled, + 'in_app_enabled': preferences.in_app_enabled, + }} + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + diff --git a/Backend/src/routes/package_routes.py b/Backend/src/routes/package_routes.py new file mode 100644 index 00000000..4b8fcb48 --- /dev/null +++ b/Backend/src/routes/package_routes.py @@ -0,0 +1,435 @@ +from fastapi import APIRouter, Depends, HTTPException, status, Query +from sqlalchemy.orm import Session +from sqlalchemy import or_, and_ +from typing import Optional, List +from datetime import datetime, date +from decimal import Decimal +from ..config.database import get_db +from ..middleware.auth import get_current_user, authorize_roles +from ..models.user import User +from ..models.package import Package, PackageItem, PackageStatus, PackageItemType +from ..models.rate_plan import RatePlan +from ..models.room_type import RoomType +from pydantic import BaseModel + +router = APIRouter(prefix='/packages', tags=['packages']) + +# Pydantic models +class PackageItemCreate(BaseModel): + item_type: str + item_id: Optional[int] = None + item_name: str + item_description: Optional[str] = None + quantity: int = 1 + unit: Optional[str] = None + price: Optional[float] = None + included: bool = True + price_modifier: Optional[float] = None + display_order: int = 0 + extra_data: Optional[dict] = None + +class PackageCreate(BaseModel): + name: str + code: str + description: Optional[str] = None + status: str = 'active' + base_price: Optional[float] = None + price_modifier: float = 1.0 + discount_percentage: Optional[float] = None + room_type_id: Optional[int] = None + min_nights: Optional[int] = None + max_nights: Optional[int] = None + valid_from: Optional[str] = None + valid_to: Optional[str] = None + image_url: Optional[str] = None + highlights: Optional[List[str]] = None + terms_conditions: Optional[str] = None + extra_data: Optional[dict] = None + items: Optional[List[PackageItemCreate]] = [] + +class PackageUpdate(BaseModel): + name: Optional[str] = None + description: Optional[str] = None + status: Optional[str] = None + base_price: Optional[float] = None + price_modifier: Optional[float] = None + discount_percentage: Optional[float] = None + room_type_id: Optional[int] = None + min_nights: Optional[int] = None + max_nights: Optional[int] = None + valid_from: Optional[str] = None + valid_to: Optional[str] = None + image_url: Optional[str] = None + highlights: Optional[List[str]] = None + terms_conditions: Optional[str] = None + extra_data: Optional[dict] = None + +@router.get('/') +async def get_packages( + search: Optional[str] = Query(None), + status_filter: Optional[str] = Query(None, alias='status'), + room_type_id: Optional[int] = Query(None), + page: int = Query(1, ge=1), + limit: int = Query(10, ge=1, le=100), + db: Session = Depends(get_db) +): + try: + query = db.query(Package) + + if search: + query = query.filter( + or_( + Package.name.like(f'%{search}%'), + Package.code.like(f'%{search}%'), + Package.description.like(f'%{search}%') + ) + ) + + if status_filter: + try: + query = query.filter(Package.status == PackageStatus(status_filter)) + except ValueError: + pass + + if room_type_id: + query = query.filter( + or_( + Package.room_type_id == room_type_id, + Package.room_type_id.is_(None) + ) + ) + + total = query.count() + offset = (page - 1) * limit + packages = query.order_by(Package.created_at.desc()).offset(offset).limit(limit).all() + + result = [] + for pkg in packages: + pkg_dict = { + 'id': pkg.id, + 'name': pkg.name, + 'code': pkg.code, + 'description': pkg.description, + 'status': pkg.status.value if isinstance(pkg.status, PackageStatus) else pkg.status, + 'base_price': float(pkg.base_price) if pkg.base_price else None, + 'price_modifier': float(pkg.price_modifier) if pkg.price_modifier else 1.0, + 'discount_percentage': float(pkg.discount_percentage) if pkg.discount_percentage else None, + 'room_type_id': pkg.room_type_id, + 'room_type_name': pkg.room_type.name if pkg.room_type else None, + 'min_nights': pkg.min_nights, + 'max_nights': pkg.max_nights, + 'valid_from': pkg.valid_from.isoformat() if pkg.valid_from else None, + 'valid_to': pkg.valid_to.isoformat() if pkg.valid_to else None, + 'image_url': pkg.image_url, + 'highlights': pkg.highlights, + 'terms_conditions': pkg.terms_conditions, + 'extra_data': pkg.extra_data, + 'created_at': pkg.created_at.isoformat() if pkg.created_at else None, + 'updated_at': pkg.updated_at.isoformat() if pkg.updated_at else None, + } + result.append(pkg_dict) + + return { + 'status': 'success', + 'data': { + 'packages': result, + 'pagination': { + 'total': total, + 'page': page, + 'limit': limit, + 'totalPages': (total + limit - 1) // limit + } + } + } + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + +@router.get('/{id}') +async def get_package(id: int, db: Session = Depends(get_db)): + try: + pkg = db.query(Package).filter(Package.id == id).first() + if not pkg: + raise HTTPException(status_code=404, detail='Package not found') + + items = db.query(PackageItem).filter(PackageItem.package_id == id).order_by(PackageItem.display_order.asc()).all() + + pkg_dict = { + 'id': pkg.id, + 'name': pkg.name, + 'code': pkg.code, + 'description': pkg.description, + 'status': pkg.status.value if isinstance(pkg.status, PackageStatus) else pkg.status, + 'base_price': float(pkg.base_price) if pkg.base_price else None, + 'price_modifier': float(pkg.price_modifier) if pkg.price_modifier else 1.0, + 'discount_percentage': float(pkg.discount_percentage) if pkg.discount_percentage else None, + 'room_type_id': pkg.room_type_id, + 'room_type_name': pkg.room_type.name if pkg.room_type else None, + 'min_nights': pkg.min_nights, + 'max_nights': pkg.max_nights, + 'valid_from': pkg.valid_from.isoformat() if pkg.valid_from else None, + 'valid_to': pkg.valid_to.isoformat() if pkg.valid_to else None, + 'image_url': pkg.image_url, + 'highlights': pkg.highlights, + 'terms_conditions': pkg.terms_conditions, + 'extra_data': pkg.extra_data, + 'items': [ + { + 'id': item.id, + 'item_type': item.item_type.value if isinstance(item.item_type, PackageItemType) else item.item_type, + 'item_id': item.item_id, + 'item_name': item.item_name, + 'item_description': item.item_description, + 'quantity': item.quantity, + 'unit': item.unit, + 'price': float(item.price) if item.price else None, + 'included': item.included, + 'price_modifier': float(item.price_modifier) if item.price_modifier else None, + 'display_order': item.display_order, + 'extra_data': item.extra_data, + } + for item in items + ], + 'created_at': pkg.created_at.isoformat() if pkg.created_at else None, + 'updated_at': pkg.updated_at.isoformat() if pkg.updated_at else None, + } + + return {'status': 'success', 'data': {'package': pkg_dict}} + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + +@router.post('/', dependencies=[Depends(authorize_roles('admin'))]) +async def create_package(package_data: PackageCreate, current_user: User = Depends(get_current_user), db: Session = Depends(get_db)): + try: + # Check if code already exists + existing = db.query(Package).filter(Package.code == package_data.code).first() + if existing: + raise HTTPException(status_code=400, detail='Package code already exists') + + # Validate room_type_id if provided + if package_data.room_type_id: + room_type = db.query(RoomType).filter(RoomType.id == package_data.room_type_id).first() + if not room_type: + raise HTTPException(status_code=404, detail='Room type not found') + + # Create package + pkg = Package( + name=package_data.name, + code=package_data.code, + description=package_data.description, + status=PackageStatus(package_data.status), + base_price=Decimal(str(package_data.base_price)) if package_data.base_price else None, + price_modifier=Decimal(str(package_data.price_modifier)), + discount_percentage=Decimal(str(package_data.discount_percentage)) if package_data.discount_percentage else None, + room_type_id=package_data.room_type_id, + min_nights=package_data.min_nights, + max_nights=package_data.max_nights, + valid_from=datetime.strptime(package_data.valid_from, '%Y-%m-%d').date() if package_data.valid_from else None, + valid_to=datetime.strptime(package_data.valid_to, '%Y-%m-%d').date() if package_data.valid_to else None, + image_url=package_data.image_url, + highlights=package_data.highlights, + terms_conditions=package_data.terms_conditions, + extra_data=package_data.extra_data, + ) + + db.add(pkg) + db.flush() + + # Create items + if package_data.items: + for item_data in package_data.items: + item = PackageItem( + package_id=pkg.id, + item_type=PackageItemType(item_data.item_type), + item_id=item_data.item_id, + item_name=item_data.item_name, + item_description=item_data.item_description, + quantity=item_data.quantity, + unit=item_data.unit, + price=Decimal(str(item_data.price)) if item_data.price else None, + included=item_data.included, + price_modifier=Decimal(str(item_data.price_modifier)) if item_data.price_modifier else None, + display_order=item_data.display_order, + extra_data=item_data.extra_data, + ) + db.add(item) + + db.commit() + db.refresh(pkg) + + return {'status': 'success', 'message': 'Package created successfully', 'data': {'package_id': pkg.id}} + except HTTPException: + raise + except ValueError as e: + raise HTTPException(status_code=400, detail=f'Invalid enum value: {str(e)}') + except Exception as e: + db.rollback() + raise HTTPException(status_code=500, detail=str(e)) + +@router.put('/{id}', dependencies=[Depends(authorize_roles('admin'))]) +async def update_package(id: int, package_data: PackageUpdate, current_user: User = Depends(get_current_user), db: Session = Depends(get_db)): + try: + pkg = db.query(Package).filter(Package.id == id).first() + if not pkg: + raise HTTPException(status_code=404, detail='Package not found') + + # Update fields + if package_data.name is not None: + pkg.name = package_data.name + if package_data.description is not None: + pkg.description = package_data.description + if package_data.status is not None: + pkg.status = PackageStatus(package_data.status) + if package_data.base_price is not None: + pkg.base_price = Decimal(str(package_data.base_price)) if package_data.base_price else None + if package_data.price_modifier is not None: + pkg.price_modifier = Decimal(str(package_data.price_modifier)) + if package_data.discount_percentage is not None: + pkg.discount_percentage = Decimal(str(package_data.discount_percentage)) if package_data.discount_percentage else None + if package_data.room_type_id is not None: + if package_data.room_type_id: + room_type = db.query(RoomType).filter(RoomType.id == package_data.room_type_id).first() + if not room_type: + raise HTTPException(status_code=404, detail='Room type not found') + pkg.room_type_id = package_data.room_type_id + if package_data.min_nights is not None: + pkg.min_nights = package_data.min_nights + if package_data.max_nights is not None: + pkg.max_nights = package_data.max_nights + if package_data.valid_from is not None: + pkg.valid_from = datetime.strptime(package_data.valid_from, '%Y-%m-%d').date() if package_data.valid_from else None + if package_data.valid_to is not None: + pkg.valid_to = datetime.strptime(package_data.valid_to, '%Y-%m-%d').date() if package_data.valid_to else None + if package_data.image_url is not None: + pkg.image_url = package_data.image_url + if package_data.highlights is not None: + pkg.highlights = package_data.highlights + if package_data.terms_conditions is not None: + pkg.terms_conditions = package_data.terms_conditions + if package_data.extra_data is not None: + pkg.extra_data = package_data.extra_data + + db.commit() + + return {'status': 'success', 'message': 'Package updated successfully'} + except HTTPException: + raise + except ValueError as e: + raise HTTPException(status_code=400, detail=f'Invalid enum value: {str(e)}') + except Exception as e: + db.rollback() + raise HTTPException(status_code=500, detail=str(e)) + +@router.delete('/{id}', dependencies=[Depends(authorize_roles('admin'))]) +async def delete_package(id: int, current_user: User = Depends(get_current_user), db: Session = Depends(get_db)): + try: + pkg = db.query(Package).filter(Package.id == id).first() + if not pkg: + raise HTTPException(status_code=404, detail='Package not found') + + # Check if package is used in rate plans + rate_plan_count = db.query(RatePlan).filter(RatePlan.package_id == id).count() + if rate_plan_count > 0: + raise HTTPException(status_code=400, detail=f'Cannot delete package. It is used in {rate_plan_count} rate plan(s)') + + # Delete items first + db.query(PackageItem).filter(PackageItem.package_id == id).delete() + + db.delete(pkg) + db.commit() + + return {'status': 'success', 'message': 'Package deleted successfully'} + except HTTPException: + raise + except Exception as e: + db.rollback() + raise HTTPException(status_code=500, detail=str(e)) + +@router.get('/available/{room_type_id}') +async def get_available_packages( + room_type_id: int, + check_in: str = Query(...), + check_out: str = Query(...), + num_nights: Optional[int] = Query(None), + db: Session = Depends(get_db) +): + """Get available packages for a room type and date range""" + try: + check_in_date = datetime.strptime(check_in, '%Y-%m-%d').date() + check_out_date = datetime.strptime(check_out, '%Y-%m-%d').date() + + if num_nights is None: + num_nights = (check_out_date - check_in_date).days + + # Query packages + query = db.query(Package).filter( + Package.status == PackageStatus.active, + or_( + Package.room_type_id == room_type_id, + Package.room_type_id.is_(None) + ) + ) + + # Filter by date range + query = query.filter( + or_( + Package.valid_from.is_(None), + Package.valid_from <= check_in_date + ), + or_( + Package.valid_to.is_(None), + Package.valid_to >= check_out_date + ) + ) + + # Filter by nights + query = query.filter( + or_( + Package.min_nights.is_(None), + Package.min_nights <= num_nights + ), + or_( + Package.max_nights.is_(None), + Package.max_nights >= num_nights + ) + ) + + packages = query.order_by(Package.created_at.desc()).all() + + result = [] + for pkg in packages: + items = db.query(PackageItem).filter(PackageItem.package_id == pkg.id).order_by(PackageItem.display_order.asc()).all() + + pkg_dict = { + 'id': pkg.id, + 'name': pkg.name, + 'code': pkg.code, + 'description': pkg.description, + 'base_price': float(pkg.base_price) if pkg.base_price else None, + 'price_modifier': float(pkg.price_modifier) if pkg.price_modifier else 1.0, + 'discount_percentage': float(pkg.discount_percentage) if pkg.discount_percentage else None, + 'image_url': pkg.image_url, + 'highlights': pkg.highlights, + 'items': [ + { + 'id': item.id, + 'item_type': item.item_type.value if isinstance(item.item_type, PackageItemType) else item.item_type, + 'item_name': item.item_name, + 'item_description': item.item_description, + 'quantity': item.quantity, + 'unit': item.unit, + 'price': float(item.price) if item.price else None, + 'included': item.included, + } + for item in items + ], + } + result.append(pkg_dict) + + return {'status': 'success', 'data': {'packages': result}} + except ValueError as e: + raise HTTPException(status_code=400, detail=f'Invalid date format: {str(e)}') + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + diff --git a/Backend/src/routes/payment_routes.py b/Backend/src/routes/payment_routes.py index 16334154..daa28189 100644 --- a/Backend/src/routes/payment_routes.py +++ b/Backend/src/routes/payment_routes.py @@ -1,5 +1,5 @@ from fastapi import APIRouter, Depends, HTTPException, status, Query, Request, Header -from sqlalchemy.orm import Session, joinedload, selectinload +from sqlalchemy.orm import Session, joinedload, selectinload, load_only from typing import Optional from datetime import datetime import os @@ -9,10 +9,14 @@ from ..middleware.auth import get_current_user, authorize_roles from ..models.user import User from ..models.payment import Payment, PaymentMethod, PaymentType, PaymentStatus from ..models.booking import Booking, BookingStatus +from ..utils.role_helpers import can_access_all_payments +from ..utils.currency_helpers import get_currency_symbol +from ..utils.response_helpers import success_response from ..utils.mailer import send_email from ..utils.email_templates import payment_confirmation_email_template, booking_status_changed_email_template from ..services.stripe_service import StripeService from ..services.paypal_service import PayPalService +from ..services.borica_service import BoricaService from ..services.loyalty_service import LoyaltyService router = APIRouter(prefix='/payments', tags=['payments']) @@ -20,7 +24,18 @@ async def cancel_booking_on_payment_failure(booking: Booking, db: Session, reaso if booking.status == BookingStatus.cancelled: return from sqlalchemy.orm import selectinload - booking = db.query(Booking).options(selectinload(Booking.payments)).filter(Booking.id == booking.id).first() + # Use load_only to exclude non-existent columns (rate_plan_id, group_booking_id) + booking = db.query(Booking).options( + load_only( + Booking.id, Booking.booking_number, Booking.user_id, Booking.room_id, + Booking.check_in_date, Booking.check_out_date, Booking.num_guests, + Booking.total_price, Booking.original_price, Booking.discount_amount, + Booking.promotion_code, Booking.status, Booking.deposit_paid, + Booking.requires_deposit, Booking.special_requests, + Booking.created_at, Booking.updated_at + ), + selectinload(Booking.payments) + ).filter(Booking.id == booking.id).first() if booking.payments: for payment in booking.payments: if payment.payment_status == PaymentStatus.pending: @@ -55,10 +70,23 @@ async def get_payments(booking_id: Optional[int]=Query(None), status_filter: Opt query = query.filter(Payment.payment_status == PaymentStatus(status_filter)) except ValueError: pass - if current_user.role_id not in [1, 4]: # admin and accountant can see all payments + if not can_access_all_payments(current_user, db): query = query.join(Booking).filter(Booking.user_id == current_user.id) total = query.count() - query = query.options(selectinload(Payment.booking).selectinload(Booking.user)) + # Use load_only to exclude non-existent columns (rate_plan_id, group_booking_id) + query = query.options( + selectinload(Payment.booking).options( + load_only( + Booking.id, Booking.booking_number, Booking.user_id, Booking.room_id, + Booking.check_in_date, Booking.check_out_date, Booking.num_guests, + Booking.total_price, Booking.original_price, Booking.discount_amount, + Booking.promotion_code, Booking.status, Booking.deposit_paid, + Booking.requires_deposit, Booking.special_requests, + Booking.created_at, Booking.updated_at + ), + joinedload(Booking.user) + ) + ) offset = (page - 1) * limit payments = query.order_by(Payment.created_at.desc()).offset(offset).limit(limit).all() result = [] @@ -69,7 +97,7 @@ async def get_payments(booking_id: Optional[int]=Query(None), status_filter: Opt if payment.booking.user: payment_dict['booking']['user'] = {'id': payment.booking.user.id, 'name': payment.booking.user.full_name, 'full_name': payment.booking.user.full_name, 'email': payment.booking.user.email} result.append(payment_dict) - return {'status': 'success', 'data': {'payments': result, 'pagination': {'total': total, 'page': page, 'limit': limit, 'totalPages': (total + limit - 1) // limit}}} + return success_response(data={'payments': result, 'pagination': {'total': total, 'page': page, 'limit': limit, 'totalPages': (total + limit - 1) // limit}}) except HTTPException: raise except Exception as e: @@ -81,12 +109,26 @@ async def get_payments(booking_id: Optional[int]=Query(None), status_filter: Opt @router.get('/booking/{booking_id}') async def get_payments_by_booking_id(booking_id: int, current_user: User=Depends(get_current_user), db: Session=Depends(get_db)): try: + from ..utils.role_helpers import is_admin booking = db.query(Booking).filter(Booking.id == booking_id).first() if not booking: raise HTTPException(status_code=404, detail='Booking not found') - if current_user.role_id != 1 and booking.user_id != current_user.id: + if not is_admin(current_user, db) and booking.user_id != current_user.id: raise HTTPException(status_code=403, detail='Forbidden') - payments = db.query(Payment).options(joinedload(Payment.booking).joinedload(Booking.user)).filter(Payment.booking_id == booking_id).order_by(Payment.created_at.desc()).all() + # Use load_only to exclude non-existent columns (rate_plan_id, group_booking_id) + payments = db.query(Payment).options( + joinedload(Payment.booking).options( + load_only( + Booking.id, Booking.booking_number, Booking.user_id, Booking.room_id, + Booking.check_in_date, Booking.check_out_date, Booking.num_guests, + Booking.total_price, Booking.original_price, Booking.discount_amount, + Booking.promotion_code, Booking.status, Booking.deposit_paid, + Booking.requires_deposit, Booking.special_requests, + Booking.created_at, Booking.updated_at + ), + joinedload(Booking.user) + ) + ).filter(Payment.booking_id == booking_id).order_by(Payment.created_at.desc()).all() result = [] for payment in payments: payment_dict = {'id': payment.id, 'booking_id': payment.booking_id, 'amount': float(payment.amount) if payment.amount else 0.0, 'payment_method': payment.payment_method.value if isinstance(payment.payment_method, PaymentMethod) else payment.payment_method, 'payment_type': payment.payment_type.value if isinstance(payment.payment_type, PaymentType) else payment.payment_type, 'deposit_percentage': payment.deposit_percentage, 'related_payment_id': payment.related_payment_id, 'payment_status': payment.payment_status.value if isinstance(payment.payment_status, PaymentStatus) else payment.payment_status, 'transaction_id': payment.transaction_id, 'payment_date': payment.payment_date.isoformat() if payment.payment_date else None, 'notes': payment.notes, 'created_at': payment.created_at.isoformat() if payment.created_at else None} @@ -95,7 +137,7 @@ async def get_payments_by_booking_id(booking_id: int, current_user: User=Depends if payment.booking.user: payment_dict['booking']['user'] = {'id': payment.booking.user.id, 'name': payment.booking.user.full_name, 'full_name': payment.booking.user.full_name, 'email': payment.booking.user.email} result.append(payment_dict) - return {'status': 'success', 'data': {'payments': result}} + return success_response(data={'payments': result}) except HTTPException: raise except Exception as e: @@ -107,13 +149,13 @@ async def get_payment_by_id(id: int, current_user: User=Depends(get_current_user payment = db.query(Payment).filter(Payment.id == id).first() if not payment: raise HTTPException(status_code=404, detail='Payment not found') - if current_user.role_id not in [1, 4]: # admin and accountant can see all payments + if not can_access_all_payments(current_user, db): if payment.booking and payment.booking.user_id != current_user.id: raise HTTPException(status_code=403, detail='Forbidden') payment_dict = {'id': payment.id, 'booking_id': payment.booking_id, 'amount': float(payment.amount) if payment.amount else 0.0, 'payment_method': payment.payment_method.value if isinstance(payment.payment_method, PaymentMethod) else payment.payment_method, 'payment_type': payment.payment_type.value if isinstance(payment.payment_type, PaymentType) else payment.payment_type, 'deposit_percentage': payment.deposit_percentage, 'related_payment_id': payment.related_payment_id, 'payment_status': payment.payment_status.value if isinstance(payment.payment_status, PaymentStatus) else payment.payment_status, 'transaction_id': payment.transaction_id, 'payment_date': payment.payment_date.isoformat() if payment.payment_date else None, 'notes': payment.notes, 'created_at': payment.created_at.isoformat() if payment.created_at else None} if payment.booking: payment_dict['booking'] = {'id': payment.booking.id, 'booking_number': payment.booking.booking_number} - return {'status': 'success', 'data': {'payment': payment_dict}} + return success_response(data={'payment': payment_dict}) except HTTPException: raise except Exception as e: @@ -129,7 +171,8 @@ async def create_payment(payment_data: dict, current_user: User=Depends(get_curr booking = db.query(Booking).filter(Booking.id == booking_id).first() if not booking: raise HTTPException(status_code=404, detail='Booking not found') - if current_user.role_id != 1 and booking.user_id != current_user.id: + from ..utils.role_helpers import is_admin + if not is_admin(current_user, db) and booking.user_id != current_user.id: raise HTTPException(status_code=403, detail='Forbidden') payment = Payment(booking_id=booking_id, amount=amount, payment_method=PaymentMethod(payment_method), payment_type=PaymentType(payment_type), payment_status=PaymentStatus.pending, payment_date=datetime.utcnow() if payment_data.get('mark_as_paid') else None, notes=payment_data.get('notes')) if payment_data.get('mark_as_paid'): @@ -139,6 +182,16 @@ async def create_payment(payment_data: dict, current_user: User=Depends(get_curr db.commit() db.refresh(payment) + # Send payment receipt notification + if payment.payment_status == PaymentStatus.completed: + try: + from ..services.notification_service import NotificationService + NotificationService.send_payment_receipt(db, payment) + except Exception as e: + import logging + logger = logging.getLogger(__name__) + logger.warning(f'Failed to send payment receipt notification: {e}') + # Award loyalty points if payment completed and booking is confirmed if payment.payment_status == PaymentStatus.completed and booking: try: @@ -168,15 +221,14 @@ async def create_payment(payment_data: dict, current_user: User=Depends(get_curr client_url = client_url_setting.value if client_url_setting and client_url_setting.value else settings.CLIENT_URL or os.getenv('CLIENT_URL', 'http://localhost:5173') currency_setting = db.query(SystemSettings).filter(SystemSettings.key == 'platform_currency').first() currency = currency_setting.value if currency_setting and currency_setting.value else 'USD' - currency_symbols = {'USD': '$', 'EUR': '€', 'GBP': '£', 'JPY': '¥', 'CNY': '¥', 'KRW': '₩', 'SGD': 'S$', 'THB': '฿', 'AUD': 'A$', 'CAD': 'C$', 'VND': '₫', 'INR': '₹', 'CHF': 'CHF', 'NZD': 'NZ$'} - currency_symbol = currency_symbols.get(currency, currency) + currency_symbol = get_currency_symbol(currency) email_html = payment_confirmation_email_template(booking_number=booking.booking_number, guest_name=booking.user.full_name, amount=float(payment.amount), payment_method=payment.payment_method.value if isinstance(payment.payment_method, PaymentMethod) else str(payment.payment_method), transaction_id=payment.transaction_id, payment_type=payment.payment_type.value if payment.payment_type else None, total_price=float(booking.total_price), client_url=client_url, currency_symbol=currency_symbol) await send_email(to=booking.user.email, subject=f'Payment Confirmed - {booking.booking_number}', html=email_html) except Exception as e: import logging logger = logging.getLogger(__name__) logger.error(f'Failed to send payment confirmation email: {e}') - return {'status': 'success', 'message': 'Payment created successfully', 'data': {'payment': payment}} + return success_response(data={'payment': payment}, message='Payment created successfully') except HTTPException: raise except Exception as e: @@ -195,10 +247,30 @@ async def update_payment_status(id: int, status_data: dict, current_user: User=D try: new_status = PaymentStatus(status_value) payment.payment_status = new_status + # Only cancel booking if it's a full refund or all payments are failed if new_status in [PaymentStatus.failed, PaymentStatus.refunded]: - booking = db.query(Booking).filter(Booking.id == payment.booking_id).first() + # Use load_only to exclude non-existent columns (rate_plan_id, group_booking_id) + booking = db.query(Booking).options( + load_only( + Booking.id, Booking.booking_number, Booking.user_id, Booking.room_id, + Booking.check_in_date, Booking.check_out_date, Booking.num_guests, + Booking.total_price, Booking.original_price, Booking.discount_amount, + Booking.promotion_code, Booking.status, Booking.deposit_paid, + Booking.requires_deposit, Booking.special_requests, + Booking.created_at, Booking.updated_at + ), + selectinload(Booking.payments) + ).filter(Booking.id == payment.booking_id).first() if booking and booking.status != BookingStatus.cancelled: - await cancel_booking_on_payment_failure(booking, db, reason=f'Payment {new_status.value}') + # Check if this is a full refund or if all payments are failed + total_paid = sum(float(p.amount) for p in booking.payments if p.payment_status == PaymentStatus.completed) + total_price = float(booking.total_price) if booking.total_price else 0.0 + all_payments_failed = all(p.payment_status in [PaymentStatus.failed, PaymentStatus.refunded] for p in booking.payments) + is_full_refund = new_status == PaymentStatus.refunded and float(payment.amount) >= total_price + + # Only cancel if it's a full refund or all payments failed + if is_full_refund or (new_status == PaymentStatus.failed and all_payments_failed): + await cancel_booking_on_payment_failure(booking, db, reason=f'Payment {new_status.value}') except ValueError: raise HTTPException(status_code=400, detail='Invalid payment status') if status_data.get('transaction_id'): @@ -209,22 +281,29 @@ async def update_payment_status(id: int, status_data: dict, current_user: User=D db.commit() db.refresh(payment) if payment.payment_status == PaymentStatus.completed and old_status != PaymentStatus.completed: + # Send payment receipt notification + try: + from ..services.notification_service import NotificationService + NotificationService.send_payment_receipt(db, payment) + except Exception as e: + import logging + logger = logging.getLogger(__name__) + logger.warning(f'Failed to send payment receipt notification: {e}') + try: from ..models.system_settings import SystemSettings client_url_setting = db.query(SystemSettings).filter(SystemSettings.key == 'client_url').first() client_url = client_url_setting.value if client_url_setting and client_url_setting.value else settings.CLIENT_URL or os.getenv('CLIENT_URL', 'http://localhost:5173') currency_setting = db.query(SystemSettings).filter(SystemSettings.key == 'platform_currency').first() currency = currency_setting.value if currency_setting and currency_setting.value else 'USD' - currency_symbols = {'USD': '$', 'EUR': '€', 'GBP': '£', 'JPY': '¥', 'CNY': '¥', 'KRW': '₩', 'SGD': 'S$', 'THB': '฿', 'AUD': 'A$', 'CAD': 'C$', 'VND': '₫', 'INR': '₹', 'CHF': 'CHF', 'NZD': 'NZ$'} - currency_symbol = currency_symbols.get(currency, currency) + currency_symbol = get_currency_symbol(currency) payment = db.query(Payment).filter(Payment.id == id).first() if payment.booking and payment.booking.user: client_url_setting = db.query(SystemSettings).filter(SystemSettings.key == 'client_url').first() client_url = client_url_setting.value if client_url_setting and client_url_setting.value else settings.CLIENT_URL or os.getenv('CLIENT_URL', 'http://localhost:5173') currency_setting = db.query(SystemSettings).filter(SystemSettings.key == 'platform_currency').first() currency = currency_setting.value if currency_setting and currency_setting.value else 'USD' - currency_symbols = {'USD': '$', 'EUR': '€', 'GBP': '£', 'JPY': '¥', 'CNY': '¥', 'KRW': '₩', 'SGD': 'S$', 'THB': '฿', 'AUD': 'A$', 'CAD': 'C$', 'VND': '₫', 'INR': '₹', 'CHF': 'CHF', 'NZD': 'NZ$'} - currency_symbol = currency_symbols.get(currency, currency) + currency_symbol = get_currency_symbol(currency) email_html = payment_confirmation_email_template(booking_number=payment.booking.booking_number, guest_name=payment.booking.user.full_name, amount=float(payment.amount), payment_method=payment.payment_method.value if isinstance(payment.payment_method, PaymentMethod) else str(payment.payment_method), transaction_id=payment.transaction_id, client_url=client_url, currency_symbol=currency_symbol) await send_email(to=payment.booking.user.email, subject=f'Payment Confirmed - {payment.booking.booking_number}', html=email_html) if payment.payment_type == PaymentType.deposit and payment.booking: @@ -240,7 +319,7 @@ async def update_payment_status(id: int, status_data: dict, current_user: User=D db.commit() except Exception as e: print(f'Failed to send payment confirmation email: {e}') - return {'status': 'success', 'message': 'Payment status updated successfully', 'data': {'payment': payment}} + return success_response(data={'payment': payment}, message='Payment status updated successfully') except HTTPException: raise except Exception as e: @@ -270,7 +349,8 @@ async def create_stripe_payment_intent(intent_data: dict, current_user: User=Dep booking = db.query(Booking).filter(Booking.id == booking_id).first() if not booking: raise HTTPException(status_code=404, detail='Booking not found') - if current_user.role_id != 1 and booking.user_id != current_user.id: + from ..utils.role_helpers import is_admin + if not is_admin(current_user, db) and booking.user_id != current_user.id: raise HTTPException(status_code=403, detail='Forbidden') if booking.requires_deposit and (not booking.deposit_paid): deposit_payment = db.query(Payment).filter(Payment.booking_id == booking_id, Payment.payment_type == PaymentType.deposit, Payment.payment_status == PaymentStatus.pending).order_by(Payment.created_at.desc()).first() @@ -294,7 +374,7 @@ async def create_stripe_payment_intent(intent_data: dict, current_user: User=Dep logger = logging.getLogger(__name__) logger.error('Payment intent created but client_secret is missing') raise HTTPException(status_code=500, detail='Failed to create payment intent. Client secret is missing.') - return {'status': 'success', 'message': 'Payment intent created successfully', 'data': {'client_secret': intent['client_secret'], 'payment_intent_id': intent['id'], 'publishable_key': publishable_key}} + return success_response(data={'client_secret': intent['client_secret'], 'payment_intent_id': intent['id'], 'publishable_key': publishable_key}, message='Payment intent created successfully') except HTTPException: raise except ValueError as e: @@ -330,15 +410,14 @@ async def confirm_stripe_payment(payment_data: dict, current_user: User=Depends( client_url = client_url_setting.value if client_url_setting and client_url_setting.value else settings.CLIENT_URL or os.getenv('CLIENT_URL', 'http://localhost:5173') currency_setting = db.query(SystemSettings).filter(SystemSettings.key == 'platform_currency').first() currency = currency_setting.value if currency_setting and currency_setting.value else 'USD' - currency_symbols = {'USD': '$', 'EUR': '€', 'GBP': '£', 'JPY': '¥', 'CNY': '¥', 'KRW': '₩', 'SGD': 'S$', 'THB': '฿', 'AUD': 'A$', 'CAD': 'C$', 'VND': '₫', 'INR': '₹', 'CHF': 'CHF', 'NZD': 'NZ$'} - currency_symbol = currency_symbols.get(currency, currency) + currency_symbol = get_currency_symbol(currency) email_html = payment_confirmation_email_template(booking_number=booking.booking_number, guest_name=booking.user.full_name, amount=payment['amount'], payment_method='stripe', transaction_id=payment['transaction_id'], payment_type=payment.get('payment_type'), total_price=float(booking.total_price), client_url=client_url, currency_symbol=currency_symbol) await send_email(to=booking.user.email, subject=f'Payment Confirmed - {booking.booking_number}', html=email_html) except Exception as e: import logging logger = logging.getLogger(__name__) logger.warning(f'Failed to send payment confirmation email: {e}') - return {'status': 'success', 'message': 'Payment confirmed successfully', 'data': {'payment': payment, 'booking': {'id': booking.id if booking else None, 'booking_number': booking.booking_number if booking else None, 'status': booking.status.value if booking else None}}} + return success_response(data={'payment': payment, 'booking': {'id': booking.id if booking else None, 'booking_number': booking.booking_number if booking else None, 'status': booking.status.value if booking else None}}, message='Payment confirmed successfully') except HTTPException: db.rollback() raise @@ -369,7 +448,7 @@ async def stripe_webhook(request: Request, db: Session=Depends(get_db)): if not signature: raise HTTPException(status_code=400, detail='Missing stripe-signature header') result = await StripeService.handle_webhook(payload=payload, signature=signature, db=db) - return {'status': 'success', 'data': result} + return success_response(data=result) except ValueError as e: raise HTTPException(status_code=400, detail=str(e)) except Exception as e: @@ -395,10 +474,11 @@ async def create_paypal_order(order_data: dict, current_user: User=Depends(get_c raise HTTPException(status_code=400, detail='booking_id and amount are required') if amount > 100000: raise HTTPException(status_code=400, detail=f"Amount ${amount:,.2f} exceeds PayPal's maximum of $100,000. Please contact support for large payments.") + from ..utils.role_helpers import is_admin booking = db.query(Booking).filter(Booking.id == booking_id).first() if not booking: raise HTTPException(status_code=404, detail='Booking not found') - if current_user.role_id != 1 and booking.user_id != current_user.id: + if not is_admin(current_user, db) and booking.user_id != current_user.id: raise HTTPException(status_code=403, detail='Forbidden') if booking.requires_deposit and (not booking.deposit_paid): deposit_payment = db.query(Payment).filter(Payment.booking_id == booking_id, Payment.payment_type == PaymentType.deposit, Payment.payment_status == PaymentStatus.pending).order_by(Payment.created_at.desc()).first() @@ -415,7 +495,7 @@ async def create_paypal_order(order_data: dict, current_user: User=Depends(get_c order = PayPalService.create_order(amount=amount, currency=currency, metadata={'booking_id': str(booking_id), 'booking_number': booking.booking_number, 'user_id': str(current_user.id), 'description': f'Hotel Booking Payment - {booking.booking_number}', 'return_url': return_url, 'cancel_url': cancel_url}, db=db) if not order.get('approval_url'): raise HTTPException(status_code=500, detail='Failed to create PayPal order. Approval URL is missing.') - return {'status': 'success', 'message': 'PayPal order created successfully', 'data': {'order_id': order['id'], 'approval_url': order['approval_url'], 'status': order['status']}} + return success_response(data={'order_id': order['id'], 'approval_url': order['approval_url'], 'status': order['status']}, message='PayPal order created successfully') except HTTPException: raise except ValueError as e: @@ -445,7 +525,7 @@ async def cancel_paypal_payment(payment_data: dict, current_user: User=Depends(g booking = db.query(Booking).filter(Booking.id == booking_id).first() if booking and booking.status != BookingStatus.cancelled: await cancel_booking_on_payment_failure(booking, db, reason='PayPal payment canceled by user') - return {'status': 'success', 'message': 'Payment canceled and booking cancelled'} + return success_response(message='Payment canceled and booking cancelled') except HTTPException: db.rollback() raise @@ -475,15 +555,14 @@ async def capture_paypal_payment(payment_data: dict, current_user: User=Depends( client_url = client_url_setting.value if client_url_setting and client_url_setting.value else settings.CLIENT_URL or os.getenv('CLIENT_URL', 'http://localhost:5173') currency_setting = db.query(SystemSettings).filter(SystemSettings.key == 'platform_currency').first() currency = currency_setting.value if currency_setting and currency_setting.value else 'USD' - currency_symbols = {'USD': '$', 'EUR': '€', 'GBP': '£', 'JPY': '¥', 'CNY': '¥', 'KRW': '₩', 'SGD': 'S$', 'THB': '฿', 'AUD': 'A$', 'CAD': 'C$', 'VND': '₫', 'INR': '₹', 'CHF': 'CHF', 'NZD': 'NZ$'} - currency_symbol = currency_symbols.get(currency, currency) + currency_symbol = get_currency_symbol(currency) email_html = payment_confirmation_email_template(booking_number=booking.booking_number, guest_name=booking.user.full_name, amount=payment['amount'], payment_method='paypal', transaction_id=payment['transaction_id'], payment_type=payment.get('payment_type'), total_price=float(booking.total_price), client_url=client_url, currency_symbol=currency_symbol) await send_email(to=booking.user.email, subject=f'Payment Confirmed - {booking.booking_number}', html=email_html) except Exception as e: import logging logger = logging.getLogger(__name__) logger.warning(f'Failed to send payment confirmation email: {e}') - return {'status': 'success', 'message': 'Payment confirmed successfully', 'data': {'payment': payment, 'booking': {'id': booking.id if booking else None, 'booking_number': booking.booking_number if booking else None, 'status': booking.status.value if booking else None}}} + return success_response(data={'payment': payment, 'booking': {'id': booking.id if booking else None, 'booking_number': booking.booking_number if booking else None, 'status': booking.status.value if booking else None}}, message='Payment confirmed successfully') except HTTPException: db.rollback() raise @@ -498,4 +577,174 @@ async def capture_paypal_payment(payment_data: dict, current_user: User=Depends( logger = logging.getLogger(__name__) logger.error(f'Unexpected error confirming PayPal payment: {str(e)}', exc_info=True) db.rollback() + raise HTTPException(status_code=500, detail=str(e)) + +@router.post('/borica/create-payment') +async def create_borica_payment(payment_data: dict, current_user: User=Depends(get_current_user), db: Session=Depends(get_db)): + try: + from ..services.borica_service import get_borica_terminal_id, get_borica_merchant_id + terminal_id = get_borica_terminal_id(db) + merchant_id = get_borica_merchant_id(db) + if not terminal_id or not merchant_id: + if not settings.BORICA_TERMINAL_ID or not settings.BORICA_MERCHANT_ID: + raise HTTPException(status_code=500, detail='Borica is not configured. Please configure Borica settings in Admin Panel or set BORICA_TERMINAL_ID and BORICA_MERCHANT_ID environment variables.') + booking_id = payment_data.get('booking_id') + amount = float(payment_data.get('amount', 0)) + currency = payment_data.get('currency', 'BGN') + if not booking_id or amount <= 0: + raise HTTPException(status_code=400, detail='booking_id and amount are required') + if amount > 100000: + raise HTTPException(status_code=400, detail=f"Amount {amount:,.2f} exceeds maximum of 100,000. Please contact support for large payments.") + from ..utils.role_helpers import is_admin + booking = db.query(Booking).filter(Booking.id == booking_id).first() + if not booking: + raise HTTPException(status_code=404, detail='Booking not found') + if not is_admin(current_user, db) and booking.user_id != current_user.id: + raise HTTPException(status_code=403, detail='Forbidden') + if booking.requires_deposit and (not booking.deposit_paid): + deposit_payment = db.query(Payment).filter(Payment.booking_id == booking_id, Payment.payment_type == PaymentType.deposit, Payment.payment_status == PaymentStatus.pending).order_by(Payment.created_at.desc()).first() + if deposit_payment: + expected_deposit_amount = float(deposit_payment.amount) + if abs(amount - expected_deposit_amount) > 0.01: + import logging + logger = logging.getLogger(__name__) + logger.warning(f'Amount mismatch for deposit payment: Requested {amount:,.2f}, Expected deposit {expected_deposit_amount:,.2f}, Booking total {float(booking.total_price):,.2f}') + raise HTTPException(status_code=400, detail=f'For pay-on-arrival bookings, only the deposit amount ({expected_deposit_amount:,.2f}) should be charged, not the full booking amount ({float(booking.total_price):,.2f}).') + transaction_id = BoricaService.generate_transaction_id(booking_id) + client_url = settings.CLIENT_URL or os.getenv('CLIENT_URL', 'http://localhost:5173') + return_url = payment_data.get('return_url', f'{client_url}/payment/borica/return') + description = f'Hotel Booking Payment - {booking.booking_number}' + payment_request = BoricaService.create_payment_request(amount=amount, currency=currency, order_id=transaction_id, description=description, return_url=return_url, db=db) + payment_type = PaymentType.full + if booking.requires_deposit and (not booking.deposit_paid): + payment_type = PaymentType.deposit + payment = Payment(booking_id=booking_id, amount=amount, payment_method=PaymentMethod.borica, payment_type=payment_type, payment_status=PaymentStatus.pending, transaction_id=transaction_id, notes=f'Borica payment initiated - Order: {transaction_id}') + db.add(payment) + db.commit() + db.refresh(payment) + return success_response(data={'payment_request': payment_request, 'payment_id': payment.id, 'transaction_id': transaction_id}, message='Borica payment request created successfully') + except HTTPException: + raise + except ValueError as e: + import logging + logger = logging.getLogger(__name__) + logger.error(f'Borica payment creation error: {str(e)}') + raise HTTPException(status_code=400, detail=str(e)) + except Exception as e: + import logging + logger = logging.getLogger(__name__) + logger.error(f'Unexpected error creating Borica payment: {str(e)}', exc_info=True) + db.rollback() + raise HTTPException(status_code=500, detail=str(e)) + +@router.post('/borica/callback') +async def borica_callback(request: Request, db: Session=Depends(get_db)): + """ + Handle Borica payment callback (POST from Borica gateway). + Borica sends POST data with payment response. + """ + try: + form_data = await request.form() + response_data = dict(form_data) + + # Also try to get from JSON if available + try: + json_data = await request.json() + response_data.update(json_data) + except: + pass + + payment = await BoricaService.confirm_payment(response_data=response_data, db=db) + try: + db.commit() + except Exception: + pass + + booking = db.query(Booking).filter(Booking.id == payment['booking_id']).first() + if booking: + db.refresh(booking) + + if booking and booking.user: + try: + from ..models.system_settings import SystemSettings + client_url_setting = db.query(SystemSettings).filter(SystemSettings.key == 'client_url').first() + client_url = client_url_setting.value if client_url_setting and client_url_setting.value else settings.CLIENT_URL or os.getenv('CLIENT_URL', 'http://localhost:5173') + currency_setting = db.query(SystemSettings).filter(SystemSettings.key == 'platform_currency').first() + currency = currency_setting.value if currency_setting and currency_setting.value else 'USD' + currency_symbol = get_currency_symbol(currency) + email_html = payment_confirmation_email_template(booking_number=booking.booking_number, guest_name=booking.user.full_name, amount=payment['amount'], payment_method='borica', transaction_id=payment['transaction_id'], payment_type=payment.get('payment_type'), total_price=float(booking.total_price), client_url=client_url, currency_symbol=currency_symbol) + await send_email(to=booking.user.email, subject=f'Payment Confirmed - {booking.booking_number}', html=email_html) + except Exception as e: + import logging + logger = logging.getLogger(__name__) + logger.warning(f'Failed to send payment confirmation email: {e}') + + # Redirect to return URL with success status + return_url = response_data.get('BACKREF', '') + if return_url: + from fastapi.responses import RedirectResponse + return RedirectResponse(url=f"{return_url}?status=success&order={response_data.get('ORDER', '')}&bookingId={payment['booking_id']}") + + return success_response(data={'payment': payment, 'booking': {'id': booking.id if booking else None, 'booking_number': booking.booking_number if booking else None, 'status': booking.status.value if booking else None}}, message='Payment confirmed successfully') + except HTTPException: + db.rollback() + raise + except ValueError as e: + import logging + logger = logging.getLogger(__name__) + logger.error(f'Borica payment callback error: {str(e)}') + db.rollback() + # Redirect to return URL with error status + return_url = dict(await request.form()).get('BACKREF', '') if hasattr(request, 'form') else '' + if return_url: + from fastapi.responses import RedirectResponse + return RedirectResponse(url=f"{return_url}?status=error&error={str(e)}") + raise HTTPException(status_code=400, detail=str(e)) + except Exception as e: + import logging + logger = logging.getLogger(__name__) + logger.error(f'Unexpected error in Borica callback: {str(e)}', exc_info=True) + db.rollback() + raise HTTPException(status_code=500, detail=str(e)) + +@router.post('/borica/confirm') +async def confirm_borica_payment(response_data: dict, db: Session=Depends(get_db)): + try: + payment = await BoricaService.confirm_payment(response_data=response_data, db=db) + try: + db.commit() + except Exception: + pass + booking = db.query(Booking).filter(Booking.id == payment['booking_id']).first() + if booking: + db.refresh(booking) + if booking and booking.user: + try: + from ..models.system_settings import SystemSettings + client_url_setting = db.query(SystemSettings).filter(SystemSettings.key == 'client_url').first() + client_url = client_url_setting.value if client_url_setting and client_url_setting.value else settings.CLIENT_URL or os.getenv('CLIENT_URL', 'http://localhost:5173') + currency_setting = db.query(SystemSettings).filter(SystemSettings.key == 'platform_currency').first() + currency = currency_setting.value if currency_setting and currency_setting.value else 'USD' + currency_symbol = get_currency_symbol(currency) + email_html = payment_confirmation_email_template(booking_number=booking.booking_number, guest_name=booking.user.full_name, amount=payment['amount'], payment_method='borica', transaction_id=payment['transaction_id'], payment_type=payment.get('payment_type'), total_price=float(booking.total_price), client_url=client_url, currency_symbol=currency_symbol) + await send_email(to=booking.user.email, subject=f'Payment Confirmed - {booking.booking_number}', html=email_html) + except Exception as e: + import logging + logger = logging.getLogger(__name__) + logger.warning(f'Failed to send payment confirmation email: {e}') + return success_response(data={'payment': payment, 'booking': {'id': booking.id if booking else None, 'booking_number': booking.booking_number if booking else None, 'status': booking.status.value if booking else None}}, message='Payment confirmed successfully') + except HTTPException: + db.rollback() + raise + except ValueError as e: + import logging + logger = logging.getLogger(__name__) + logger.error(f'Borica payment confirmation error: {str(e)}') + db.rollback() + raise HTTPException(status_code=400, detail=str(e)) + except Exception as e: + import logging + logger = logging.getLogger(__name__) + logger.error(f'Unexpected error confirming Borica payment: {str(e)}', exc_info=True) + db.rollback() raise HTTPException(status_code=500, detail=str(e)) \ No newline at end of file diff --git a/Backend/src/routes/rate_plan_routes.py b/Backend/src/routes/rate_plan_routes.py new file mode 100644 index 00000000..bf9f403a --- /dev/null +++ b/Backend/src/routes/rate_plan_routes.py @@ -0,0 +1,496 @@ +from fastapi import APIRouter, Depends, HTTPException, status, Query +from sqlalchemy.orm import Session +from sqlalchemy import or_, and_ +from typing import Optional, List +from datetime import datetime, date +from decimal import Decimal +from ..config.database import get_db +from ..middleware.auth import get_current_user, authorize_roles +from ..models.user import User +from ..models.rate_plan import RatePlan, RatePlanRule, RatePlanType, RatePlanStatus +from ..models.room_type import RoomType +from ..models.booking import Booking +from pydantic import BaseModel +from typing import Optional as Opt + +router = APIRouter(prefix='/rate-plans', tags=['rate-plans']) + +# Pydantic models for request/response +class RatePlanRuleCreate(BaseModel): + rule_type: str + rule_key: str + rule_value: Optional[dict] = None + price_modifier: Optional[float] = None + discount_percentage: Optional[float] = None + fixed_adjustment: Optional[float] = None + priority: int = 100 + +class RatePlanCreate(BaseModel): + name: str + code: str + description: Optional[str] = None + plan_type: str + status: str = 'active' + base_price_modifier: float = 1.0 + discount_percentage: Optional[float] = None + fixed_discount: Optional[float] = None + room_type_id: Optional[int] = None + min_nights: Optional[int] = None + max_nights: Optional[int] = None + advance_days_required: Optional[int] = None + valid_from: Optional[str] = None + valid_to: Optional[str] = None + is_refundable: bool = True + requires_deposit: bool = False + deposit_percentage: Optional[float] = None + cancellation_hours: Optional[int] = None + corporate_code: Optional[str] = None + requires_verification: bool = False + verification_type: Optional[str] = None + long_stay_nights: Optional[int] = None + is_package: bool = False + package_id: Optional[int] = None + priority: int = 100 + extra_data: Optional[dict] = None + rules: Optional[List[RatePlanRuleCreate]] = [] + +class RatePlanUpdate(BaseModel): + name: Optional[str] = None + description: Optional[str] = None + status: Optional[str] = None + base_price_modifier: Optional[float] = None + discount_percentage: Optional[float] = None + fixed_discount: Optional[float] = None + room_type_id: Optional[int] = None + min_nights: Optional[int] = None + max_nights: Optional[int] = None + advance_days_required: Optional[int] = None + valid_from: Optional[str] = None + valid_to: Optional[str] = None + is_refundable: Optional[bool] = None + requires_deposit: Optional[bool] = None + deposit_percentage: Optional[float] = None + cancellation_hours: Optional[int] = None + corporate_code: Optional[str] = None + requires_verification: Optional[bool] = None + verification_type: Optional[str] = None + long_stay_nights: Optional[int] = None + package_id: Optional[int] = None + priority: Optional[int] = None + extra_data: Optional[dict] = None + +@router.get('/') +async def get_rate_plans( + search: Optional[str] = Query(None), + status_filter: Optional[str] = Query(None, alias='status'), + plan_type: Optional[str] = Query(None), + room_type_id: Optional[int] = Query(None), + page: int = Query(1, ge=1), + limit: int = Query(10, ge=1, le=100), + db: Session = Depends(get_db) +): + try: + query = db.query(RatePlan) + + if search: + query = query.filter( + or_( + RatePlan.name.like(f'%{search}%'), + RatePlan.code.like(f'%{search}%'), + RatePlan.description.like(f'%{search}%') + ) + ) + + if status_filter: + try: + query = query.filter(RatePlan.status == RatePlanStatus(status_filter)) + except ValueError: + pass + + if plan_type: + try: + query = query.filter(RatePlan.plan_type == RatePlanType(plan_type)) + except ValueError: + pass + + if room_type_id: + query = query.filter( + or_( + RatePlan.room_type_id == room_type_id, + RatePlan.room_type_id.is_(None) + ) + ) + + total = query.count() + offset = (page - 1) * limit + rate_plans = query.order_by(RatePlan.priority.asc(), RatePlan.created_at.desc()).offset(offset).limit(limit).all() + + result = [] + for plan in rate_plans: + plan_dict = { + 'id': plan.id, + 'name': plan.name, + 'code': plan.code, + 'description': plan.description, + 'plan_type': plan.plan_type.value if isinstance(plan.plan_type, RatePlanType) else plan.plan_type, + 'status': plan.status.value if isinstance(plan.status, RatePlanStatus) else plan.status, + 'base_price_modifier': float(plan.base_price_modifier) if plan.base_price_modifier else 1.0, + 'discount_percentage': float(plan.discount_percentage) if plan.discount_percentage else None, + 'fixed_discount': float(plan.fixed_discount) if plan.fixed_discount else None, + 'room_type_id': plan.room_type_id, + 'room_type_name': plan.room_type.name if plan.room_type else None, + 'min_nights': plan.min_nights, + 'max_nights': plan.max_nights, + 'advance_days_required': plan.advance_days_required, + 'valid_from': plan.valid_from.isoformat() if plan.valid_from else None, + 'valid_to': plan.valid_to.isoformat() if plan.valid_to else None, + 'is_refundable': plan.is_refundable, + 'requires_deposit': plan.requires_deposit, + 'deposit_percentage': float(plan.deposit_percentage) if plan.deposit_percentage else None, + 'cancellation_hours': plan.cancellation_hours, + 'corporate_code': plan.corporate_code, + 'requires_verification': plan.requires_verification, + 'verification_type': plan.verification_type, + 'long_stay_nights': plan.long_stay_nights, + 'is_package': plan.is_package, + 'package_id': plan.package_id, + 'priority': plan.priority, + 'extra_data': plan.extra_data, + 'created_at': plan.created_at.isoformat() if plan.created_at else None, + 'updated_at': plan.updated_at.isoformat() if plan.updated_at else None, + } + result.append(plan_dict) + + return { + 'status': 'success', + 'data': { + 'rate_plans': result, + 'pagination': { + 'total': total, + 'page': page, + 'limit': limit, + 'totalPages': (total + limit - 1) // limit + } + } + } + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + +@router.get('/{id}') +async def get_rate_plan(id: int, db: Session = Depends(get_db)): + try: + plan = db.query(RatePlan).filter(RatePlan.id == id).first() + if not plan: + raise HTTPException(status_code=404, detail='Rate plan not found') + + rules = db.query(RatePlanRule).filter(RatePlanRule.rate_plan_id == id).order_by(RatePlanRule.priority.asc()).all() + + plan_dict = { + 'id': plan.id, + 'name': plan.name, + 'code': plan.code, + 'description': plan.description, + 'plan_type': plan.plan_type.value if isinstance(plan.plan_type, RatePlanType) else plan.plan_type, + 'status': plan.status.value if isinstance(plan.status, RatePlanStatus) else plan.status, + 'base_price_modifier': float(plan.base_price_modifier) if plan.base_price_modifier else 1.0, + 'discount_percentage': float(plan.discount_percentage) if plan.discount_percentage else None, + 'fixed_discount': float(plan.fixed_discount) if plan.fixed_discount else None, + 'room_type_id': plan.room_type_id, + 'room_type_name': plan.room_type.name if plan.room_type else None, + 'min_nights': plan.min_nights, + 'max_nights': plan.max_nights, + 'advance_days_required': plan.advance_days_required, + 'valid_from': plan.valid_from.isoformat() if plan.valid_from else None, + 'valid_to': plan.valid_to.isoformat() if plan.valid_to else None, + 'is_refundable': plan.is_refundable, + 'requires_deposit': plan.requires_deposit, + 'deposit_percentage': float(plan.deposit_percentage) if plan.deposit_percentage else None, + 'cancellation_hours': plan.cancellation_hours, + 'corporate_code': plan.corporate_code, + 'requires_verification': plan.requires_verification, + 'verification_type': plan.verification_type, + 'long_stay_nights': plan.long_stay_nights, + 'is_package': plan.is_package, + 'package_id': plan.package_id, + 'priority': plan.priority, + 'extra_data': plan.extra_data, + 'rules': [ + { + 'id': rule.id, + 'rule_type': rule.rule_type, + 'rule_key': rule.rule_key, + 'rule_value': rule.rule_value, + 'price_modifier': float(rule.price_modifier) if rule.price_modifier else None, + 'discount_percentage': float(rule.discount_percentage) if rule.discount_percentage else None, + 'fixed_adjustment': float(rule.fixed_adjustment) if rule.fixed_adjustment else None, + 'priority': rule.priority, + } + for rule in rules + ], + 'created_at': plan.created_at.isoformat() if plan.created_at else None, + 'updated_at': plan.updated_at.isoformat() if plan.updated_at else None, + } + + return {'status': 'success', 'data': {'rate_plan': plan_dict}} + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + +@router.post('/', dependencies=[Depends(authorize_roles('admin'))]) +async def create_rate_plan(plan_data: RatePlanCreate, current_user: User = Depends(get_current_user), db: Session = Depends(get_db)): + try: + # Check if code already exists + existing = db.query(RatePlan).filter(RatePlan.code == plan_data.code).first() + if existing: + raise HTTPException(status_code=400, detail='Rate plan code already exists') + + # Validate room_type_id if provided + if plan_data.room_type_id: + room_type = db.query(RoomType).filter(RoomType.id == plan_data.room_type_id).first() + if not room_type: + raise HTTPException(status_code=404, detail='Room type not found') + + # Create rate plan + rate_plan = RatePlan( + name=plan_data.name, + code=plan_data.code, + description=plan_data.description, + plan_type=RatePlanType(plan_data.plan_type), + status=RatePlanStatus(plan_data.status), + base_price_modifier=Decimal(str(plan_data.base_price_modifier)), + discount_percentage=Decimal(str(plan_data.discount_percentage)) if plan_data.discount_percentage else None, + fixed_discount=Decimal(str(plan_data.fixed_discount)) if plan_data.fixed_discount else None, + room_type_id=plan_data.room_type_id, + min_nights=plan_data.min_nights, + max_nights=plan_data.max_nights, + advance_days_required=plan_data.advance_days_required, + valid_from=datetime.strptime(plan_data.valid_from, '%Y-%m-%d').date() if plan_data.valid_from else None, + valid_to=datetime.strptime(plan_data.valid_to, '%Y-%m-%d').date() if plan_data.valid_to else None, + is_refundable=plan_data.is_refundable, + requires_deposit=plan_data.requires_deposit, + deposit_percentage=Decimal(str(plan_data.deposit_percentage)) if plan_data.deposit_percentage else None, + cancellation_hours=plan_data.cancellation_hours, + corporate_code=plan_data.corporate_code, + requires_verification=plan_data.requires_verification, + verification_type=plan_data.verification_type, + long_stay_nights=plan_data.long_stay_nights, + is_package=plan_data.is_package, + package_id=plan_data.package_id, + priority=plan_data.priority, + extra_data=plan_data.extra_data, + ) + + db.add(rate_plan) + db.flush() + + # Create rules + if plan_data.rules: + for rule_data in plan_data.rules: + rule = RatePlanRule( + rate_plan_id=rate_plan.id, + rule_type=rule_data.rule_type, + rule_key=rule_data.rule_key, + rule_value=rule_data.rule_value, + price_modifier=Decimal(str(rule_data.price_modifier)) if rule_data.price_modifier else None, + discount_percentage=Decimal(str(rule_data.discount_percentage)) if rule_data.discount_percentage else None, + fixed_adjustment=Decimal(str(rule_data.fixed_adjustment)) if rule_data.fixed_adjustment else None, + priority=rule_data.priority, + ) + db.add(rule) + + db.commit() + db.refresh(rate_plan) + + return {'status': 'success', 'message': 'Rate plan created successfully', 'data': {'rate_plan_id': rate_plan.id}} + except HTTPException: + raise + except ValueError as e: + raise HTTPException(status_code=400, detail=f'Invalid enum value: {str(e)}') + except Exception as e: + db.rollback() + raise HTTPException(status_code=500, detail=str(e)) + +@router.put('/{id}', dependencies=[Depends(authorize_roles('admin'))]) +async def update_rate_plan(id: int, plan_data: RatePlanUpdate, current_user: User = Depends(get_current_user), db: Session = Depends(get_db)): + try: + rate_plan = db.query(RatePlan).filter(RatePlan.id == id).first() + if not rate_plan: + raise HTTPException(status_code=404, detail='Rate plan not found') + + # Update fields + if plan_data.name is not None: + rate_plan.name = plan_data.name + if plan_data.description is not None: + rate_plan.description = plan_data.description + if plan_data.status is not None: + rate_plan.status = RatePlanStatus(plan_data.status) + if plan_data.base_price_modifier is not None: + rate_plan.base_price_modifier = Decimal(str(plan_data.base_price_modifier)) + if plan_data.discount_percentage is not None: + rate_plan.discount_percentage = Decimal(str(plan_data.discount_percentage)) + if plan_data.fixed_discount is not None: + rate_plan.fixed_discount = Decimal(str(plan_data.fixed_discount)) + if plan_data.room_type_id is not None: + if plan_data.room_type_id: + room_type = db.query(RoomType).filter(RoomType.id == plan_data.room_type_id).first() + if not room_type: + raise HTTPException(status_code=404, detail='Room type not found') + rate_plan.room_type_id = plan_data.room_type_id + if plan_data.min_nights is not None: + rate_plan.min_nights = plan_data.min_nights + if plan_data.max_nights is not None: + rate_plan.max_nights = plan_data.max_nights + if plan_data.advance_days_required is not None: + rate_plan.advance_days_required = plan_data.advance_days_required + if plan_data.valid_from is not None: + rate_plan.valid_from = datetime.strptime(plan_data.valid_from, '%Y-%m-%d').date() if plan_data.valid_from else None + if plan_data.valid_to is not None: + rate_plan.valid_to = datetime.strptime(plan_data.valid_to, '%Y-%m-%d').date() if plan_data.valid_to else None + if plan_data.is_refundable is not None: + rate_plan.is_refundable = plan_data.is_refundable + if plan_data.requires_deposit is not None: + rate_plan.requires_deposit = plan_data.requires_deposit + if plan_data.deposit_percentage is not None: + rate_plan.deposit_percentage = Decimal(str(plan_data.deposit_percentage)) if plan_data.deposit_percentage else None + if plan_data.cancellation_hours is not None: + rate_plan.cancellation_hours = plan_data.cancellation_hours + if plan_data.corporate_code is not None: + rate_plan.corporate_code = plan_data.corporate_code + if plan_data.requires_verification is not None: + rate_plan.requires_verification = plan_data.requires_verification + if plan_data.verification_type is not None: + rate_plan.verification_type = plan_data.verification_type + if plan_data.long_stay_nights is not None: + rate_plan.long_stay_nights = plan_data.long_stay_nights + if plan_data.package_id is not None: + rate_plan.package_id = plan_data.package_id + if plan_data.priority is not None: + rate_plan.priority = plan_data.priority + if plan_data.extra_data is not None: + rate_plan.extra_data = plan_data.extra_data + + db.commit() + + return {'status': 'success', 'message': 'Rate plan updated successfully'} + except HTTPException: + raise + except ValueError as e: + raise HTTPException(status_code=400, detail=f'Invalid enum value: {str(e)}') + except Exception as e: + db.rollback() + raise HTTPException(status_code=500, detail=str(e)) + +@router.delete('/{id}', dependencies=[Depends(authorize_roles('admin'))]) +async def delete_rate_plan(id: int, current_user: User = Depends(get_current_user), db: Session = Depends(get_db)): + try: + rate_plan = db.query(RatePlan).filter(RatePlan.id == id).first() + if not rate_plan: + raise HTTPException(status_code=404, detail='Rate plan not found') + + # Check if rate plan is used in bookings + booking_count = db.query(Booking).filter(Booking.rate_plan_id == id).count() + if booking_count > 0: + raise HTTPException(status_code=400, detail=f'Cannot delete rate plan. It is used in {booking_count} booking(s)') + + # Delete rules first + db.query(RatePlanRule).filter(RatePlanRule.rate_plan_id == id).delete() + + db.delete(rate_plan) + db.commit() + + return {'status': 'success', 'message': 'Rate plan deleted successfully'} + except HTTPException: + raise + except Exception as e: + db.rollback() + raise HTTPException(status_code=500, detail=str(e)) + +@router.get('/available/{room_type_id}') +async def get_available_rate_plans( + room_type_id: int, + check_in: str = Query(...), + check_out: str = Query(...), + num_nights: Optional[int] = Query(None), + db: Session = Depends(get_db) +): + """Get available rate plans for a room type and date range""" + try: + check_in_date = datetime.strptime(check_in, '%Y-%m-%d').date() + check_out_date = datetime.strptime(check_out, '%Y-%m-%d').date() + + if num_nights is None: + num_nights = (check_out_date - check_in_date).days + + today = date.today() + advance_days = (check_in_date - today).days + + # Query rate plans + query = db.query(RatePlan).filter( + RatePlan.status == RatePlanStatus.active, + or_( + RatePlan.room_type_id == room_type_id, + RatePlan.room_type_id.is_(None) + ) + ) + + # Filter by date range + query = query.filter( + or_( + RatePlan.valid_from.is_(None), + RatePlan.valid_from <= check_in_date + ), + or_( + RatePlan.valid_to.is_(None), + RatePlan.valid_to >= check_out_date + ) + ) + + # Filter by advance days + query = query.filter( + or_( + RatePlan.advance_days_required.is_(None), + RatePlan.advance_days_required <= advance_days + ) + ) + + # Filter by nights + query = query.filter( + or_( + RatePlan.min_nights.is_(None), + RatePlan.min_nights <= num_nights + ), + or_( + RatePlan.max_nights.is_(None), + RatePlan.max_nights >= num_nights + ) + ) + + rate_plans = query.order_by(RatePlan.priority.asc()).all() + + result = [] + for plan in rate_plans: + plan_dict = { + 'id': plan.id, + 'name': plan.name, + 'code': plan.code, + 'description': plan.description, + 'plan_type': plan.plan_type.value if isinstance(plan.plan_type, RatePlanType) else plan.plan_type, + 'base_price_modifier': float(plan.base_price_modifier) if plan.base_price_modifier else 1.0, + 'discount_percentage': float(plan.discount_percentage) if plan.discount_percentage else None, + 'fixed_discount': float(plan.fixed_discount) if plan.fixed_discount else None, + 'is_refundable': plan.is_refundable, + 'requires_deposit': plan.requires_deposit, + 'deposit_percentage': float(plan.deposit_percentage) if plan.deposit_percentage else None, + 'cancellation_hours': plan.cancellation_hours, + 'requires_verification': plan.requires_verification, + 'verification_type': plan.verification_type, + } + result.append(plan_dict) + + return {'status': 'success', 'data': {'rate_plans': result}} + except ValueError as e: + raise HTTPException(status_code=400, detail=f'Invalid date format: {str(e)}') + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + diff --git a/Backend/src/routes/report_routes.py b/Backend/src/routes/report_routes.py index 7e7d6df4..3b24f84f 100644 --- a/Backend/src/routes/report_routes.py +++ b/Backend/src/routes/report_routes.py @@ -1,5 +1,5 @@ from fastapi import APIRouter, Depends, HTTPException, status, Query -from sqlalchemy.orm import Session +from sqlalchemy.orm import Session, load_only, joinedload from sqlalchemy import func, and_ from typing import Optional from datetime import datetime, timedelta @@ -8,9 +8,10 @@ from ..middleware.auth import get_current_user, authorize_roles from ..models.user import User from ..models.booking import Booking, BookingStatus from ..models.payment import Payment, PaymentStatus -from ..models.room import Room +from ..models.room import Room, RoomStatus from ..models.service_usage import ServiceUsage from ..models.service import Service +from ..utils.response_helpers import success_response router = APIRouter(prefix='/reports', tags=['reports']) @router.get('') @@ -37,7 +38,8 @@ async def get_reports(from_date: Optional[str]=Query(None, alias='from'), to_dat if end_date: booking_query = booking_query.filter(Booking.created_at <= end_date) payment_query = payment_query.filter(Payment.payment_date <= end_date) - total_bookings = booking_query.count() + # Use func.count() to avoid loading all columns (including non-existent rate_plan_id) + total_bookings = booking_query.with_entities(func.count(Booking.id)).scalar() or 0 total_revenue = payment_query.with_entities(func.sum(Payment.amount)).scalar() or 0.0 total_customers = db.query(func.count(func.distinct(Booking.user_id))).scalar() or 0 if start_date or end_date: @@ -47,7 +49,7 @@ async def get_reports(from_date: Optional[str]=Query(None, alias='from'), to_dat if end_date: customer_query = customer_query.filter(Booking.created_at <= end_date) total_customers = customer_query.scalar() or 0 - available_rooms = db.query(Room).filter(Room.status == 'available').count() + available_rooms = db.query(Room).filter(Room.status == RoomStatus.available).count() occupied_rooms = db.query(func.count(func.distinct(Booking.room_id))).filter(Booking.status.in_([BookingStatus.confirmed, BookingStatus.checked_in])).scalar() or 0 revenue_by_date = [] if start_date and end_date: @@ -61,7 +63,8 @@ async def get_reports(from_date: Optional[str]=Query(None, alias='from'), to_dat revenue_by_date = [{'date': str(date), 'revenue': float(revenue or 0), 'bookings': int(bookings or 0)} for date, revenue, bookings in daily_data] bookings_by_status = {} for status in BookingStatus: - count = booking_query.filter(Booking.status == status).count() + # Use func.count() to avoid loading all columns (including non-existent rate_plan_id) + count = booking_query.filter(Booking.status == status).with_entities(func.count(Booking.id)).scalar() or 0 status_name = status.value if hasattr(status, 'value') else str(status) bookings_by_status[status_name] = count top_rooms_query = db.query(Room.id, Room.room_number, func.count(Booking.id).label('bookings'), func.sum(Payment.amount).label('revenue')).join(Booking, Room.id == Booking.room_id).join(Payment, Booking.id == Payment.booking_id).filter(Payment.payment_status == PaymentStatus.completed) @@ -78,24 +81,25 @@ async def get_reports(from_date: Optional[str]=Query(None, alias='from'), to_dat service_usage_query = service_usage_query.filter(ServiceUsage.usage_date <= end_date) service_usage_data = service_usage_query.group_by(Service.id, Service.name).order_by(func.sum(ServiceUsage.total_price).desc()).limit(10).all() service_usage = [{'service_id': service_id, 'service_name': service_name, 'usage_count': int(usage_count or 0), 'total_revenue': float(total_revenue or 0)} for service_id, service_name, usage_count, total_revenue in service_usage_data] - return {'status': 'success', 'success': True, 'data': {'total_bookings': total_bookings, 'total_revenue': float(total_revenue), 'total_customers': int(total_customers), 'available_rooms': available_rooms, 'occupied_rooms': occupied_rooms, 'revenue_by_date': revenue_by_date if revenue_by_date else None, 'bookings_by_status': bookings_by_status, 'top_rooms': top_rooms if top_rooms else None, 'service_usage': service_usage if service_usage else None}} + return success_response(data={'total_bookings': total_bookings, 'total_revenue': float(total_revenue), 'total_customers': int(total_customers), 'available_rooms': available_rooms, 'occupied_rooms': occupied_rooms, 'revenue_by_date': revenue_by_date if revenue_by_date else None, 'bookings_by_status': bookings_by_status, 'top_rooms': top_rooms if top_rooms else None, 'service_usage': service_usage if service_usage else None}) except Exception as e: raise HTTPException(status_code=500, detail=str(e)) @router.get('/dashboard') async def get_dashboard_stats(current_user: User=Depends(authorize_roles('admin', 'staff', 'accountant')), db: Session=Depends(get_db)): try: - total_bookings = db.query(Booking).count() - active_bookings = db.query(Booking).filter(Booking.status.in_([BookingStatus.pending, BookingStatus.confirmed, BookingStatus.checked_in])).count() + # Use func.count() to avoid loading all columns (including non-existent rate_plan_id) + total_bookings = db.query(Booking).with_entities(func.count(Booking.id)).scalar() or 0 + active_bookings = db.query(Booking).filter(Booking.status.in_([BookingStatus.pending, BookingStatus.confirmed, BookingStatus.checked_in])).with_entities(func.count(Booking.id)).scalar() or 0 total_revenue = db.query(func.sum(Payment.amount)).filter(Payment.payment_status == PaymentStatus.completed).scalar() or 0.0 today_start = datetime.utcnow().replace(hour=0, minute=0, second=0, microsecond=0) today_revenue = db.query(func.sum(Payment.amount)).filter(and_(Payment.payment_status == PaymentStatus.completed, Payment.payment_date >= today_start)).scalar() or 0.0 total_rooms = db.query(Room).count() - available_rooms = db.query(Room).filter(Room.status == 'available').count() + available_rooms = db.query(Room).filter(Room.status == RoomStatus.available).count() week_ago = datetime.utcnow() - timedelta(days=7) - recent_bookings = db.query(Booking).filter(Booking.created_at >= week_ago).count() + recent_bookings = db.query(Booking).filter(Booking.created_at >= week_ago).with_entities(func.count(Booking.id)).scalar() or 0 pending_payments = db.query(Payment).filter(Payment.payment_status == PaymentStatus.pending).count() - return {'status': 'success', 'data': {'total_bookings': total_bookings, 'active_bookings': active_bookings, 'total_revenue': float(total_revenue), 'today_revenue': float(today_revenue), 'total_rooms': total_rooms, 'available_rooms': available_rooms, 'recent_bookings': recent_bookings, 'pending_payments': pending_payments}} + return success_response(data={'total_bookings': total_bookings, 'active_bookings': active_bookings, 'total_revenue': float(total_revenue), 'today_revenue': float(today_revenue), 'total_rooms': total_rooms, 'available_rooms': available_rooms, 'recent_bookings': recent_bookings, 'pending_payments': pending_payments}) except Exception as e: raise HTTPException(status_code=500, detail=str(e)) @@ -103,19 +107,28 @@ async def get_dashboard_stats(current_user: User=Depends(authorize_roles('admin' async def get_customer_dashboard_stats(current_user: User=Depends(get_current_user), db: Session=Depends(get_db)): try: from datetime import datetime, timedelta - total_bookings = db.query(Booking).filter(Booking.user_id == current_user.id).count() + # Use func.count() to avoid loading all columns (including non-existent rate_plan_id) + total_bookings = db.query(Booking).filter(Booking.user_id == current_user.id).with_entities(func.count(Booking.id)).scalar() or 0 user_bookings = db.query(Booking.id).filter(Booking.user_id == current_user.id).subquery() total_spending = db.query(func.sum(Payment.amount)).filter(and_(Payment.booking_id.in_(db.query(user_bookings.c.id)), Payment.payment_status == PaymentStatus.completed)).scalar() or 0.0 now = datetime.utcnow() - currently_staying = db.query(Booking).filter(and_(Booking.user_id == current_user.id, Booking.status == BookingStatus.checked_in, Booking.check_in_date <= now, Booking.check_out_date >= now)).count() - upcoming_bookings_query = db.query(Booking).filter(and_(Booking.user_id == current_user.id, Booking.status.in_([BookingStatus.confirmed, BookingStatus.pending]), Booking.check_in_date > now)).order_by(Booking.check_in_date.asc()).limit(5).all() + currently_staying = db.query(Booking).filter(and_(Booking.user_id == current_user.id, Booking.status == BookingStatus.checked_in, Booking.check_in_date <= now, Booking.check_out_date >= now)).with_entities(func.count(Booking.id)).scalar() or 0 + # Use load_only to exclude non-existent columns and eagerly load room relationships + upcoming_bookings_query = db.query(Booking).options( + load_only(Booking.id, Booking.booking_number, Booking.check_in_date, Booking.check_out_date, Booking.status, Booking.total_price, Booking.user_id, Booking.room_id, Booking.created_at), + joinedload(Booking.room).joinedload(Room.room_type) + ).filter(and_(Booking.user_id == current_user.id, Booking.status.in_([BookingStatus.confirmed, BookingStatus.pending]), Booking.check_in_date > now)).order_by(Booking.check_in_date.asc()).limit(5).all() upcoming_bookings = [] for booking in upcoming_bookings_query: booking_dict = {'id': booking.id, 'booking_number': booking.booking_number, 'check_in_date': booking.check_in_date.isoformat() if booking.check_in_date else None, 'check_out_date': booking.check_out_date.isoformat() if booking.check_out_date else None, 'status': booking.status.value if isinstance(booking.status, BookingStatus) else booking.status, 'total_price': float(booking.total_price) if booking.total_price else 0.0} if booking.room: booking_dict['room'] = {'id': booking.room.id, 'room_number': booking.room.room_number, 'room_type': {'name': booking.room.room_type.name if booking.room.room_type else None}} upcoming_bookings.append(booking_dict) - recent_bookings_query = db.query(Booking).filter(Booking.user_id == current_user.id).order_by(Booking.created_at.desc()).limit(5).all() + # Use load_only to exclude non-existent columns and eagerly load room relationships + recent_bookings_query = db.query(Booking).options( + load_only(Booking.id, Booking.booking_number, Booking.status, Booking.user_id, Booking.room_id, Booking.created_at), + joinedload(Booking.room) + ).filter(Booking.user_id == current_user.id).order_by(Booking.created_at.desc()).limit(5).all() recent_activity = [] for booking in recent_bookings_query: activity_type = None @@ -135,8 +148,9 @@ async def get_customer_dashboard_stats(current_user: User=Depends(get_current_us recent_activity.append(activity_dict) last_month_start = (now - timedelta(days=30)).replace(day=1, hour=0, minute=0, second=0) last_month_end = now.replace(day=1, hour=0, minute=0, second=0) - timedelta(seconds=1) - last_month_bookings = db.query(Booking).filter(and_(Booking.user_id == current_user.id, Booking.created_at >= last_month_start, Booking.created_at <= last_month_end)).count() - this_month_bookings = db.query(Booking).filter(and_(Booking.user_id == current_user.id, Booking.created_at >= now.replace(day=1, hour=0, minute=0, second=0), Booking.created_at <= now)).count() + # Use func.count() to avoid loading all columns (including non-existent rate_plan_id) + last_month_bookings = db.query(Booking).filter(and_(Booking.user_id == current_user.id, Booking.created_at >= last_month_start, Booking.created_at <= last_month_end)).with_entities(func.count(Booking.id)).scalar() or 0 + this_month_bookings = db.query(Booking).filter(and_(Booking.user_id == current_user.id, Booking.created_at >= now.replace(day=1, hour=0, minute=0, second=0), Booking.created_at <= now)).with_entities(func.count(Booking.id)).scalar() or 0 booking_change_percentage = 0 if last_month_bookings > 0: booking_change_percentage = (this_month_bookings - last_month_bookings) / last_month_bookings * 100 @@ -145,7 +159,7 @@ async def get_customer_dashboard_stats(current_user: User=Depends(get_current_us spending_change_percentage = 0 if last_month_spending > 0: spending_change_percentage = (this_month_spending - last_month_spending) / last_month_spending * 100 - return {'status': 'success', 'success': True, 'data': {'total_bookings': total_bookings, 'total_spending': float(total_spending), 'currently_staying': currently_staying, 'upcoming_bookings': upcoming_bookings, 'recent_activity': recent_activity, 'booking_change_percentage': round(booking_change_percentage, 1), 'spending_change_percentage': round(spending_change_percentage, 1)}} + return success_response(data={'total_bookings': total_bookings, 'total_spending': float(total_spending), 'currently_staying': currently_staying, 'upcoming_bookings': upcoming_bookings, 'recent_activity': recent_activity, 'booking_change_percentage': round(booking_change_percentage, 1), 'spending_change_percentage': round(spending_change_percentage, 1)}) except Exception as e: raise HTTPException(status_code=500, detail=str(e)) @@ -167,6 +181,6 @@ async def get_revenue_report(start_date: Optional[str]=Query(None), end_date: Op method_breakdown[method_name] = float(total or 0) daily_revenue = db.query(func.date(Payment.payment_date).label('date'), func.sum(Payment.amount).label('total')).filter(Payment.payment_status == PaymentStatus.completed).group_by(func.date(Payment.payment_date)).order_by(func.date(Payment.payment_date).desc()).limit(30).all() daily_breakdown = [{'date': date.isoformat() if isinstance(date, datetime) else str(date), 'revenue': float(total or 0)} for date, total in daily_revenue] - return {'status': 'success', 'data': {'total_revenue': float(total_revenue), 'revenue_by_method': method_breakdown, 'daily_breakdown': daily_breakdown}} + return success_response(data={'total_revenue': float(total_revenue), 'revenue_by_method': method_breakdown, 'daily_breakdown': daily_breakdown}) except Exception as e: raise HTTPException(status_code=500, detail=str(e)) \ No newline at end of file diff --git a/Backend/src/routes/room_routes.py b/Backend/src/routes/room_routes.py index 8128ac6d..c891029f 100644 --- a/Backend/src/routes/room_routes.py +++ b/Backend/src/routes/room_routes.py @@ -17,7 +17,7 @@ from pathlib import Path router = APIRouter(prefix='/rooms', tags=['rooms']) @router.get('/') -async def get_rooms(request: Request, type: Optional[str]=Query(None), minPrice: Optional[float]=Query(None), maxPrice: Optional[float]=Query(None), capacity: Optional[int]=Query(None), page: int=Query(1, ge=1), limit: int=Query(10, ge=1, le=100), sort: Optional[str]=Query(None), featured: Optional[bool]=Query(None), db: Session=Depends(get_db)): +async def get_rooms(request: Request, type: Optional[str]=Query(None), minPrice: Optional[float]=Query(None), maxPrice: Optional[float]=Query(None), capacity: Optional[int]=Query(None), page: int=Query(1, ge=1), limit: int=Query(10, ge=1, le=1000), sort: Optional[str]=Query(None), featured: Optional[bool]=Query(None), db: Session=Depends(get_db)): try: where_clause = {} room_type_where = {} @@ -90,6 +90,32 @@ async def search_available_rooms(request: Request, from_date: str=Query(..., ali overlapping = db.query(Booking).filter(and_(Booking.room_id == roomId, Booking.status != BookingStatus.cancelled, Booking.check_in_date < check_out, Booking.check_out_date > check_in)).first() if overlapping: return {'status': 'success', 'data': {'available': False, 'message': 'Room is already booked for the selected dates', 'room_id': roomId}} + + # Check for maintenance blocks + from ..models.room_maintenance import RoomMaintenance, MaintenanceStatus + maintenance_block = db.query(RoomMaintenance).filter( + and_( + RoomMaintenance.room_id == roomId, + RoomMaintenance.blocks_room == True, + RoomMaintenance.status.in_([MaintenanceStatus.scheduled, MaintenanceStatus.in_progress]), + or_( + and_( + RoomMaintenance.block_start.isnot(None), + RoomMaintenance.block_end.isnot(None), + RoomMaintenance.block_start < check_out, + RoomMaintenance.block_end > check_in + ), + and_( + RoomMaintenance.scheduled_start < check_out, + RoomMaintenance.scheduled_end.isnot(None), + RoomMaintenance.scheduled_end > check_in + ) + ) + ) + ).first() + if maintenance_block: + return {'status': 'success', 'data': {'available': False, 'message': f'Room is blocked for maintenance: {maintenance_block.title}', 'room_id': roomId}} + return {'status': 'success', 'data': {'available': True, 'message': 'Room is available', 'room_id': roomId}} if check_in >= check_out: raise HTTPException(status_code=400, detail='Check-out date must be after check-in date') @@ -100,6 +126,29 @@ async def search_available_rooms(request: Request, from_date: str=Query(..., ali query = query.filter(RoomType.capacity >= capacity) overlapping_rooms = db.query(Booking.room_id).filter(and_(Booking.status != BookingStatus.cancelled, Booking.check_in_date < check_out, Booking.check_out_date > check_in)).subquery() query = query.filter(~Room.id.in_(db.query(overlapping_rooms.c.room_id))) + + # Exclude rooms blocked by maintenance + from ..models.room_maintenance import RoomMaintenance, MaintenanceStatus + blocked_rooms = db.query(RoomMaintenance.room_id).filter( + and_( + RoomMaintenance.blocks_room == True, + RoomMaintenance.status.in_([MaintenanceStatus.scheduled, MaintenanceStatus.in_progress]), + or_( + and_( + RoomMaintenance.block_start.isnot(None), + RoomMaintenance.block_end.isnot(None), + RoomMaintenance.block_start < check_out, + RoomMaintenance.block_end > check_in + ), + and_( + RoomMaintenance.scheduled_start < check_out, + RoomMaintenance.scheduled_end.isnot(None), + RoomMaintenance.scheduled_end > check_in + ) + ) + ) + ).subquery() + query = query.filter(~Room.id.in_(db.query(blocked_rooms.c.room_id))) total = query.count() query = query.order_by(Room.featured.desc(), Room.created_at.desc()) offset = (page - 1) * limit diff --git a/Backend/src/routes/security_routes.py b/Backend/src/routes/security_routes.py new file mode 100644 index 00000000..151f4555 --- /dev/null +++ b/Backend/src/routes/security_routes.py @@ -0,0 +1,744 @@ +from fastapi import APIRouter, Depends, HTTPException, status, Request, Query +from sqlalchemy.orm import Session +from typing import Optional, List +from datetime import datetime, timedelta +from pydantic import BaseModel, EmailStr +import logging +from ..config.logging_config import get_logger + +logger = get_logger(__name__) + +from ..config.database import get_db +from ..middleware.auth import get_current_user, authorize_roles +from ..models.user import User +from ..models.security_event import ( + SecurityEvent, + SecurityEventType, + SecurityEventSeverity, + IPWhitelist, + IPBlacklist +) +from ..services.security_monitoring_service import security_monitoring_service +from ..services.gdpr_service import gdpr_service +from ..services.encryption_service import encryption_service +from ..services.security_scan_service import security_scan_service + +# OAuth service is optional - only import if httpx is available +try: + from ..services.oauth_service import oauth_service + OAUTH_AVAILABLE = True +except ImportError: + OAUTH_AVAILABLE = False + oauth_service = None + +router = APIRouter(prefix="/security", tags=["Security"]) + +# Security Events +class SecurityEventResponse(BaseModel): + id: int + user_id: Optional[int] + event_type: str + severity: str + ip_address: Optional[str] + description: Optional[str] + created_at: datetime + + class Config: + from_attributes = True + +@router.get("/events", response_model=List[SecurityEventResponse]) +async def get_security_events( + user_id: Optional[int] = Query(None), + event_type: Optional[str] = Query(None), + severity: Optional[str] = Query(None), + ip_address: Optional[str] = Query(None), + resolved: Optional[bool] = Query(None), + days: int = Query(7, ge=1, le=90), + limit: int = Query(100, ge=1, le=1000), + offset: int = Query(0, ge=0), + db: Session = Depends(get_db), + current_user: User = Depends(authorize_roles("admin")) +): + """Get security events""" + + event_type_enum = None + if event_type: + try: + event_type_enum = SecurityEventType(event_type) + except ValueError: + raise HTTPException(status_code=400, detail="Invalid event type") + + severity_enum = None + if severity: + try: + severity_enum = SecurityEventSeverity(severity) + except ValueError: + raise HTTPException(status_code=400, detail="Invalid severity") + + start_date = datetime.utcnow() - timedelta(days=days) + + events = security_monitoring_service.get_security_events( + db=db, + user_id=user_id, + event_type=event_type_enum, + severity=severity_enum, + ip_address=ip_address, + resolved=resolved, + start_date=start_date, + limit=limit, + offset=offset + ) + + return events + +@router.get("/events/stats") +async def get_security_stats( + days: int = Query(7, ge=1, le=90), + db: Session = Depends(get_db), + current_user: User = Depends(authorize_roles("admin")) +): + """Get security statistics""" + + stats = security_monitoring_service.get_security_stats(db=db, days=days) + return stats + +@router.post("/events/{event_id}/resolve") +async def resolve_security_event( + event_id: int, + resolution_notes: Optional[str] = None, + db: Session = Depends(get_db), + current_user: User = Depends(authorize_roles("admin")) +): + """Mark a security event as resolved""" + + try: + event = security_monitoring_service.resolve_event( + db=db, + event_id=event_id, + resolved_by=current_user.id, + resolution_notes=resolution_notes + ) + return {"status": "success", "message": "Event resolved", "event_id": event.id} + except ValueError as e: + raise HTTPException(status_code=404, detail=str(e)) + +# IP Whitelist/Blacklist +class IPWhitelistCreate(BaseModel): + ip_address: str + description: Optional[str] = None + +class IPBlacklistCreate(BaseModel): + ip_address: str + reason: Optional[str] = None + blocked_until: Optional[datetime] = None + +@router.post("/ip/whitelist") +async def add_ip_to_whitelist( + data: IPWhitelistCreate, + db: Session = Depends(get_db), + current_user: User = Depends(authorize_roles("admin")) +): + """Add IP address to whitelist""" + + # Check if already exists + existing = db.query(IPWhitelist).filter( + IPWhitelist.ip_address == data.ip_address + ).first() + + if existing: + existing.is_active = True + existing.description = data.description + db.commit() + return {"status": "success", "message": "IP whitelist updated"} + + whitelist = IPWhitelist( + ip_address=data.ip_address, + description=data.description, + created_by=current_user.id + ) + db.add(whitelist) + db.commit() + + return {"status": "success", "message": "IP added to whitelist"} + +@router.delete("/ip/whitelist/{ip_address}") +async def remove_ip_from_whitelist( + ip_address: str, + db: Session = Depends(get_db), + current_user: User = Depends(authorize_roles("admin")) +): + """Remove IP address from whitelist""" + + whitelist = db.query(IPWhitelist).filter( + IPWhitelist.ip_address == ip_address + ).first() + + if not whitelist: + raise HTTPException(status_code=404, detail="IP not found in whitelist") + + whitelist.is_active = False + db.commit() + + return {"status": "success", "message": "IP removed from whitelist"} + +@router.get("/ip/whitelist") +async def get_whitelisted_ips( + db: Session = Depends(get_db), + current_user: User = Depends(authorize_roles("admin")) +): + """Get all whitelisted IPs""" + + whitelist = db.query(IPWhitelist).filter( + IPWhitelist.is_active == True + ).all() + + return [{"id": w.id, "ip_address": w.ip_address, "description": w.description} for w in whitelist] + +@router.post("/ip/blacklist") +async def add_ip_to_blacklist( + data: IPBlacklistCreate, + db: Session = Depends(get_db), + current_user: User = Depends(authorize_roles("admin")) +): + """Add IP address to blacklist""" + + existing = db.query(IPBlacklist).filter( + IPBlacklist.ip_address == data.ip_address + ).first() + + if existing: + existing.is_active = True + existing.reason = data.reason + existing.blocked_until = data.blocked_until + db.commit() + return {"status": "success", "message": "IP blacklist updated"} + + blacklist = IPBlacklist( + ip_address=data.ip_address, + reason=data.reason, + blocked_until=data.blocked_until, + created_by=current_user.id + ) + db.add(blacklist) + db.commit() + + return {"status": "success", "message": "IP added to blacklist"} + +@router.delete("/ip/blacklist/{ip_address}") +async def remove_ip_from_blacklist( + ip_address: str, + db: Session = Depends(get_db), + current_user: User = Depends(authorize_roles("admin")) +): + """Remove IP address from blacklist""" + + blacklist = db.query(IPBlacklist).filter( + IPBlacklist.ip_address == ip_address + ).first() + + if not blacklist: + raise HTTPException(status_code=404, detail="IP not found in blacklist") + + blacklist.is_active = False + db.commit() + + return {"status": "success", "message": "IP removed from blacklist"} + +@router.get("/ip/blacklist") +async def get_blacklisted_ips( + db: Session = Depends(get_db), + current_user: User = Depends(authorize_roles("admin")) +): + """Get all blacklisted IPs""" + + blacklist = db.query(IPBlacklist).filter( + IPBlacklist.is_active == True + ).all() + + return [{"id": b.id, "ip_address": b.ip_address, "reason": b.reason, "blocked_until": b.blocked_until} for b in blacklist] + +# OAuth Provider Management +class OAuthProviderCreate(BaseModel): + name: str + display_name: str + client_id: str + client_secret: str + authorization_url: str + token_url: str + userinfo_url: str + scopes: Optional[str] = None + is_active: bool = True + is_sso_enabled: bool = False + +class OAuthProviderUpdate(BaseModel): + display_name: Optional[str] = None + client_id: Optional[str] = None + client_secret: Optional[str] = None + authorization_url: Optional[str] = None + token_url: Optional[str] = None + userinfo_url: Optional[str] = None + scopes: Optional[str] = None + is_active: Optional[bool] = None + is_sso_enabled: Optional[bool] = None + +@router.get("/oauth/providers") +async def get_oauth_providers( + db: Session = Depends(get_db), + current_user: User = Depends(authorize_roles("admin")) +): + """Get all OAuth providers""" + from ..models.security_event import OAuthProvider + providers = db.query(OAuthProvider).all() + return [{ + "id": p.id, + "name": p.name, + "display_name": p.display_name, + "is_active": p.is_active, + "is_sso_enabled": p.is_sso_enabled, + "created_at": p.created_at.isoformat() if p.created_at else None + } for p in providers] + +@router.post("/oauth/providers") +async def create_oauth_provider( + data: OAuthProviderCreate, + db: Session = Depends(get_db), + current_user: User = Depends(authorize_roles("admin")) +): + """Create a new OAuth provider""" + from ..models.security_event import OAuthProvider + from ..services.encryption_service import encryption_service + + # Encrypt client secret + encrypted_secret = encryption_service.encrypt(data.client_secret) + + provider = OAuthProvider( + name=data.name, + display_name=data.display_name, + client_id=data.client_id, + client_secret=encrypted_secret, + authorization_url=data.authorization_url, + token_url=data.token_url, + userinfo_url=data.userinfo_url, + scopes=data.scopes, + is_active=data.is_active, + is_sso_enabled=data.is_sso_enabled + ) + db.add(provider) + db.commit() + db.refresh(provider) + + return { + "id": provider.id, + "name": provider.name, + "display_name": provider.display_name, + "is_active": provider.is_active, + "is_sso_enabled": provider.is_sso_enabled + } + +@router.put("/oauth/providers/{provider_id}") +async def update_oauth_provider( + provider_id: int, + data: OAuthProviderUpdate, + db: Session = Depends(get_db), + current_user: User = Depends(authorize_roles("admin")) +): + """Update an OAuth provider""" + from ..models.security_event import OAuthProvider + from ..services.encryption_service import encryption_service + + provider = db.query(OAuthProvider).filter(OAuthProvider.id == provider_id).first() + if not provider: + raise HTTPException(status_code=404, detail="OAuth provider not found") + + if data.display_name is not None: + provider.display_name = data.display_name + if data.client_id is not None: + provider.client_id = data.client_id + if data.client_secret is not None: + provider.client_secret = encryption_service.encrypt(data.client_secret) + if data.authorization_url is not None: + provider.authorization_url = data.authorization_url + if data.token_url is not None: + provider.token_url = data.token_url + if data.userinfo_url is not None: + provider.userinfo_url = data.userinfo_url + if data.scopes is not None: + provider.scopes = data.scopes + if data.is_active is not None: + provider.is_active = data.is_active + if data.is_sso_enabled is not None: + provider.is_sso_enabled = data.is_sso_enabled + + db.commit() + db.refresh(provider) + + return { + "id": provider.id, + "name": provider.name, + "display_name": provider.display_name, + "is_active": provider.is_active, + "is_sso_enabled": provider.is_sso_enabled + } + +@router.delete("/oauth/providers/{provider_id}") +async def delete_oauth_provider( + provider_id: int, + db: Session = Depends(get_db), + current_user: User = Depends(authorize_roles("admin")) +): + """Delete an OAuth provider""" + from ..models.security_event import OAuthProvider + + provider = db.query(OAuthProvider).filter(OAuthProvider.id == provider_id).first() + if not provider: + raise HTTPException(status_code=404, detail="OAuth provider not found") + + db.delete(provider) + db.commit() + + return {"status": "success", "message": "OAuth provider deleted"} + +# GDPR Request Management +@router.get("/gdpr/requests") +async def get_gdpr_requests( + status: Optional[str] = Query(None), + request_type: Optional[str] = Query(None), + limit: int = Query(50, ge=1, le=100), + offset: int = Query(0, ge=0), + db: Session = Depends(get_db), + current_user: User = Depends(authorize_roles("admin")) +): + """Get all GDPR requests""" + from ..models.gdpr_compliance import DataSubjectRequest, DataSubjectRequestStatus, DataSubjectRequestType + + query = db.query(DataSubjectRequest) + + if status: + try: + status_enum = DataSubjectRequestStatus(status) + query = query.filter(DataSubjectRequest.status == status_enum) + except ValueError: + pass + + if request_type: + try: + type_enum = DataSubjectRequestType(request_type) + query = query.filter(DataSubjectRequest.request_type == type_enum) + except ValueError: + pass + + requests = query.order_by(DataSubjectRequest.created_at.desc()).offset(offset).limit(limit).all() + + return [{ + "id": r.id, + "user_id": r.user_id, + "email": r.email, + "request_type": r.request_type.value, + "status": r.status.value, + "description": r.description, + "verified": r.verified, + "verified_at": r.verified_at.isoformat() if r.verified_at else None, + "assigned_to": r.assigned_to, + "completed_at": r.completed_at.isoformat() if r.completed_at else None, + "created_at": r.created_at.isoformat() if r.created_at else None + } for r in requests] + +@router.get("/gdpr/requests/{request_id}") +async def get_gdpr_request( + request_id: int, + db: Session = Depends(get_db), + current_user: User = Depends(authorize_roles("admin")) +): + """Get a specific GDPR request""" + from ..models.gdpr_compliance import DataSubjectRequest + + request = db.query(DataSubjectRequest).filter(DataSubjectRequest.id == request_id).first() + if not request: + raise HTTPException(status_code=404, detail="GDPR request not found") + + return { + "id": request.id, + "user_id": request.user_id, + "email": request.email, + "request_type": request.request_type.value, + "status": request.status.value, + "description": request.description, + "verified": request.verified, + "verified_at": request.verified_at.isoformat() if request.verified_at else None, + "assigned_to": request.assigned_to, + "notes": request.notes, + "response_data": request.response_data, + "completed_at": request.completed_at.isoformat() if request.completed_at else None, + "created_at": request.created_at.isoformat() if request.created_at else None + } + +@router.post("/gdpr/requests/{request_id}/assign") +async def assign_gdpr_request( + request_id: int, + db: Session = Depends(get_db), + current_user: User = Depends(authorize_roles("admin")) +): + """Assign a GDPR request to the current admin""" + from ..models.gdpr_compliance import DataSubjectRequest + + request = db.query(DataSubjectRequest).filter(DataSubjectRequest.id == request_id).first() + if not request: + raise HTTPException(status_code=404, detail="GDPR request not found") + + request.assigned_to = current_user.id + db.commit() + + return {"status": "success", "message": "Request assigned"} + +@router.post("/gdpr/requests/{request_id}/complete") +async def complete_gdpr_request( + request_id: int, + notes: Optional[str] = None, + db: Session = Depends(get_db), + current_user: User = Depends(authorize_roles("admin")) +): + """Mark a GDPR request as completed""" + from ..models.gdpr_compliance import DataSubjectRequest, DataSubjectRequestStatus + + request = db.query(DataSubjectRequest).filter(DataSubjectRequest.id == request_id).first() + if not request: + raise HTTPException(status_code=404, detail="GDPR request not found") + + request.status = DataSubjectRequestStatus.completed + request.completed_at = datetime.utcnow() + request.completed_by = current_user.id + if notes: + request.notes = notes + + db.commit() + + return {"status": "success", "message": "Request completed"} + +# OAuth Routes +@router.get("/oauth/{provider_name}/authorize") +async def oauth_authorize( + provider_name: str, + redirect_uri: str = Query(...), + state: Optional[str] = None, + db: Session = Depends(get_db) +): + """Get OAuth authorization URL""" + if not OAUTH_AVAILABLE: + raise HTTPException(status_code=503, detail="OAuth service is not available. Please install httpx: pip install httpx") + + try: + auth_url = oauth_service.get_authorization_url( + db=db, + provider_name=provider_name, + redirect_uri=redirect_uri, + state=state + ) + return {"authorization_url": auth_url} + except ValueError as e: + raise HTTPException(status_code=400, detail=str(e)) + +@router.post("/oauth/{provider_name}/callback") +async def oauth_callback( + provider_name: str, + code: str = Query(...), + redirect_uri: str = Query(...), + state: Optional[str] = None, + db: Session = Depends(get_db) +): + """Handle OAuth callback""" + if not OAUTH_AVAILABLE: + raise HTTPException(status_code=503, detail="OAuth service is not available. Please install httpx: pip install httpx") + + try: + # Exchange code for token + token_data = await oauth_service.exchange_code_for_token( + db=db, + provider_name=provider_name, + code=code, + redirect_uri=redirect_uri + ) + + # Get user info + user_info = await oauth_service.get_user_info( + db=db, + provider_name=provider_name, + access_token=token_data['access_token'] + ) + + # Find or create user + user = oauth_service.find_or_create_user_from_oauth( + db=db, + provider_name=provider_name, + user_info=user_info + ) + + # Save OAuth token + from ..models.security_event import OAuthProvider + provider = db.query(OAuthProvider).filter( + OAuthProvider.name == provider_name + ).first() + + oauth_service.save_oauth_token( + db=db, + user_id=user.id, + provider_id=provider.id, + provider_user_id=user_info.get('sub') or user_info.get('id'), + access_token=token_data['access_token'], + refresh_token=token_data.get('refresh_token'), + expires_in=token_data.get('expires_in'), + scopes=token_data.get('scope') + ) + + # Generate JWT tokens for the user + from ..services.auth_service import auth_service + tokens = auth_service.generate_tokens(user.id) + + return { + "status": "success", + "token": tokens["accessToken"], + "refreshToken": tokens["refreshToken"], + "user": { + "id": user.id, + "email": user.email, + "full_name": user.full_name + } + } + except Exception as e: + raise HTTPException(status_code=400, detail=str(e)) + +# GDPR Routes +class DataSubjectRequestCreate(BaseModel): + email: EmailStr + request_type: str + description: Optional[str] = None + +@router.post("/gdpr/request") +async def create_data_subject_request( + data: DataSubjectRequestCreate, + request: Request, + db: Session = Depends(get_db) +): + """Create a GDPR data subject request""" + + try: + request_type = DataSubjectRequestType(data.request_type) + except ValueError: + raise HTTPException(status_code=400, detail="Invalid request type") + + try: + gdpr_request = gdpr_service.create_data_subject_request( + db=db, + email=data.email, + request_type=request_type, + description=data.description, + ip_address=request.client.host if request.client else None, + user_agent=request.headers.get("User-Agent") + ) + + return { + "status": "success", + "message": "Request created. Please check your email for verification.", + "verification_token": gdpr_request.verification_token + } + except Exception as e: + raise HTTPException(status_code=400, detail=str(e)) + +@router.post("/gdpr/verify/{verification_token}") +async def verify_data_subject_request( + verification_token: str, + db: Session = Depends(get_db) +): + """Verify a data subject request""" + + verified = gdpr_service.verify_request(db=db, verification_token=verification_token) + + if not verified: + raise HTTPException(status_code=404, detail="Invalid verification token") + + return {"status": "success", "message": "Request verified"} + +@router.get("/gdpr/data/{user_id}") +async def get_user_data( + user_id: int, + db: Session = Depends(get_db), + current_user: User = Depends(get_current_user) +): + """Get user data (GDPR access request)""" + # Users can only access their own data, unless admin + if current_user.id != user_id: + # Check if user is admin + from ..models.role import Role + role = db.query(Role).filter(Role.id == current_user.role_id).first() + if not role or role.name != "admin": + raise HTTPException(status_code=403, detail="Access denied") + + try: + data = gdpr_service.get_user_data(db=db, user_id=user_id) + return {"status": "success", "data": data} + except ValueError as e: + raise HTTPException(status_code=404, detail=str(e)) + +@router.delete("/gdpr/data/{user_id}") +async def delete_user_data( + user_id: int, + db: Session = Depends(get_db), + current_user: User = Depends(authorize_roles("admin")) +): + """Delete user data (GDPR erasure request)""" + + success = gdpr_service.delete_user_data(db=db, user_id=user_id) + + if not success: + raise HTTPException(status_code=404, detail="User not found") + + return {"status": "success", "message": "User data deleted"} + +@router.get("/gdpr/export/{user_id}") +async def export_user_data( + user_id: int, + db: Session = Depends(get_db), + current_user: User = Depends(get_current_user) +): + """Export user data (GDPR portability request)""" + if current_user.id != user_id: + # Check if user is admin + from ..models.role import Role + role = db.query(Role).filter(Role.id == current_user.role_id).first() + if not role or role.name != "admin": + raise HTTPException(status_code=403, detail="Access denied") + + try: + data = gdpr_service.export_user_data(db=db, user_id=user_id) + return {"status": "success", "data": data} + except ValueError as e: + raise HTTPException(status_code=404, detail=str(e)) + +# Security Scanning +@router.post("/scan/run") +async def run_security_scan( + db: Session = Depends(get_db), + current_user: User = Depends(authorize_roles("admin")) +): + """Run a manual security scan""" + try: + results = security_scan_service.run_full_scan(db=db) + return {"status": "success", "results": results} + except Exception as e: + import traceback + error_details = traceback.format_exc() + logger.error(f"Security scan failed: {str(e)}\n{error_details}") + raise HTTPException(status_code=500, detail=f"Scan failed: {str(e)}") + +@router.post("/scan/schedule") +async def schedule_security_scan( + interval_hours: int = Query(24, ge=1, le=168), # 1 hour to 1 week + db: Session = Depends(get_db), + current_user: User = Depends(authorize_roles("admin")) +): + """Schedule automatic security scans""" + try: + schedule = security_scan_service.schedule_scan(db=db, interval_hours=interval_hours) + return {"status": "success", "schedule": schedule} + except Exception as e: + raise HTTPException(status_code=500, detail=f"Failed to schedule scan: {str(e)}") + diff --git a/Backend/src/routes/service_booking_routes.py b/Backend/src/routes/service_booking_routes.py index 5c9bfb33..937ddcf2 100644 --- a/Backend/src/routes/service_booking_routes.py +++ b/Backend/src/routes/service_booking_routes.py @@ -7,6 +7,7 @@ import random from ..config.database import get_db from ..middleware.auth import get_current_user from ..models.user import User +from ..utils.role_helpers import is_admin from ..models.service import Service from ..models.service_booking import ( ServiceBooking, @@ -212,8 +213,7 @@ async def get_service_booking_by_id( if not booking: raise HTTPException(status_code=404, detail="Service booking not found") - - if booking.user_id != current_user.id and current_user.role_id != 1: + if not is_admin(current_user, db) and booking.user_id != current_user.id: raise HTTPException(status_code=403, detail="Forbidden") booking_dict = { @@ -281,10 +281,9 @@ async def create_service_stripe_payment_intent( if not booking: raise HTTPException(status_code=404, detail="Service booking not found") - if booking.user_id != current_user.id and current_user.role_id != 1: + if not is_admin(current_user, db) and booking.user_id != current_user.id: raise HTTPException(status_code=403, detail="Forbidden") - if abs(float(booking.total_amount) - amount) > 0.01: raise HTTPException( status_code=400, @@ -341,10 +340,9 @@ async def confirm_service_stripe_payment( if not booking: raise HTTPException(status_code=404, detail="Service booking not found") - if booking.user_id != current_user.id and current_user.role_id != 1: + if not is_admin(current_user, db) and booking.user_id != current_user.id: raise HTTPException(status_code=403, detail="Forbidden") - intent_data = StripeService.retrieve_payment_intent(payment_intent_id, db) if intent_data["status"] != "succeeded": diff --git a/Backend/src/routes/system_settings_routes.py b/Backend/src/routes/system_settings_routes.py index 5e53ec59..5da042b7 100644 --- a/Backend/src/routes/system_settings_routes.py +++ b/Backend/src/routes/system_settings_routes.py @@ -1,4 +1,4 @@ -from fastapi import APIRouter, Depends, HTTPException, status, Request, UploadFile, File +from fastapi import APIRouter, Depends, HTTPException, status, Request, UploadFile, File, Form from sqlalchemy.orm import Session from typing import Optional from datetime import datetime @@ -462,6 +462,366 @@ async def update_paypal_settings( db.rollback() raise HTTPException(status_code=500, detail=str(e)) +@router.get("/borica") +async def get_borica_settings( + current_user: User = Depends(authorize_roles("admin")), + db: Session = Depends(get_db) +): + try: + terminal_id_setting = db.query(SystemSettings).filter( + SystemSettings.key == "borica_terminal_id" + ).first() + + merchant_id_setting = db.query(SystemSettings).filter( + SystemSettings.key == "borica_merchant_id" + ).first() + + private_key_path_setting = db.query(SystemSettings).filter( + SystemSettings.key == "borica_private_key_path" + ).first() + + certificate_path_setting = db.query(SystemSettings).filter( + SystemSettings.key == "borica_certificate_path" + ).first() + + gateway_url_setting = db.query(SystemSettings).filter( + SystemSettings.key == "borica_gateway_url" + ).first() + + mode_setting = db.query(SystemSettings).filter( + SystemSettings.key == "borica_mode" + ).first() + + def mask_key(key_value: str) -> str: + if not key_value or len(key_value) < 4: + return "" + return "*" * (len(key_value) - 4) + key_value[-4:] + + result = { + "borica_terminal_id": "", + "borica_merchant_id": "", + "borica_private_key_path": "", + "borica_certificate_path": "", + "borica_gateway_url": "", + "borica_mode": "test", + "borica_terminal_id_masked": "", + "borica_merchant_id_masked": "", + "has_terminal_id": False, + "has_merchant_id": False, + "has_private_key_path": False, + "has_certificate_path": False, + } + + if terminal_id_setting: + result["borica_terminal_id"] = terminal_id_setting.value + result["borica_terminal_id_masked"] = mask_key(terminal_id_setting.value) if terminal_id_setting.value else "" + result["has_terminal_id"] = bool(terminal_id_setting.value) + result["updated_at"] = terminal_id_setting.updated_at.isoformat() if terminal_id_setting.updated_at else None + result["updated_by"] = terminal_id_setting.updated_by.full_name if terminal_id_setting.updated_by else None + + if merchant_id_setting: + result["borica_merchant_id"] = merchant_id_setting.value + result["borica_merchant_id_masked"] = mask_key(merchant_id_setting.value) if merchant_id_setting.value else "" + result["has_merchant_id"] = bool(merchant_id_setting.value) + + if private_key_path_setting: + result["borica_private_key_path"] = private_key_path_setting.value + result["has_private_key_path"] = bool(private_key_path_setting.value) + + if certificate_path_setting: + result["borica_certificate_path"] = certificate_path_setting.value + result["has_certificate_path"] = bool(certificate_path_setting.value) + + if gateway_url_setting: + result["borica_gateway_url"] = gateway_url_setting.value or "" + + if mode_setting: + result["borica_mode"] = mode_setting.value or "test" + + return { + "status": "success", + "data": result + } + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + +@router.put("/borica") +async def update_borica_settings( + borica_data: dict, + current_user: User = Depends(authorize_roles("admin")), + db: Session = Depends(get_db) +): + try: + terminal_id = borica_data.get("borica_terminal_id", "").strip() + merchant_id = borica_data.get("borica_merchant_id", "").strip() + private_key_path = borica_data.get("borica_private_key_path", "").strip() + certificate_path = borica_data.get("borica_certificate_path", "").strip() + gateway_url = borica_data.get("borica_gateway_url", "").strip() + mode = borica_data.get("borica_mode", "test").strip().lower() + + if mode and mode not in ["test", "production"]: + raise HTTPException( + status_code=400, + detail="Invalid Borica mode. Must be 'test' or 'production'" + ) + + if terminal_id: + setting = db.query(SystemSettings).filter( + SystemSettings.key == "borica_terminal_id" + ).first() + + if setting: + setting.value = terminal_id + setting.updated_by_id = current_user.id + else: + setting = SystemSettings( + key="borica_terminal_id", + value=terminal_id, + description="Borica Terminal ID for processing payments", + updated_by_id=current_user.id + ) + db.add(setting) + + if merchant_id: + setting = db.query(SystemSettings).filter( + SystemSettings.key == "borica_merchant_id" + ).first() + + if setting: + setting.value = merchant_id + setting.updated_by_id = current_user.id + else: + setting = SystemSettings( + key="borica_merchant_id", + value=merchant_id, + description="Borica Merchant ID for processing payments", + updated_by_id=current_user.id + ) + db.add(setting) + + if private_key_path: + setting = db.query(SystemSettings).filter( + SystemSettings.key == "borica_private_key_path" + ).first() + + if setting: + setting.value = private_key_path + setting.updated_by_id = current_user.id + else: + setting = SystemSettings( + key="borica_private_key_path", + value=private_key_path, + description="Path to Borica private key file", + updated_by_id=current_user.id + ) + db.add(setting) + + if certificate_path: + setting = db.query(SystemSettings).filter( + SystemSettings.key == "borica_certificate_path" + ).first() + + if setting: + setting.value = certificate_path + setting.updated_by_id = current_user.id + else: + setting = SystemSettings( + key="borica_certificate_path", + value=certificate_path, + description="Path to Borica certificate file", + updated_by_id=current_user.id + ) + db.add(setting) + + if gateway_url: + setting = db.query(SystemSettings).filter( + SystemSettings.key == "borica_gateway_url" + ).first() + + if setting: + setting.value = gateway_url + setting.updated_by_id = current_user.id + else: + setting = SystemSettings( + key="borica_gateway_url", + value=gateway_url, + description="Borica gateway URL (test or production)", + updated_by_id=current_user.id + ) + db.add(setting) + + if mode: + setting = db.query(SystemSettings).filter( + SystemSettings.key == "borica_mode" + ).first() + + if setting: + setting.value = mode + setting.updated_by_id = current_user.id + else: + setting = SystemSettings( + key="borica_mode", + value=mode, + description="Borica mode: test or production", + updated_by_id=current_user.id + ) + db.add(setting) + + db.commit() + + def mask_key(key_value: str) -> str: + if not key_value or len(key_value) < 4: + return "" + return "*" * (len(key_value) - 4) + key_value[-4:] + + return { + "status": "success", + "message": "Borica settings updated successfully", + "data": { + "borica_terminal_id": terminal_id if terminal_id else "", + "borica_merchant_id": merchant_id if merchant_id else "", + "borica_private_key_path": private_key_path if private_key_path else "", + "borica_certificate_path": certificate_path if certificate_path else "", + "borica_gateway_url": gateway_url if gateway_url else "", + "borica_mode": mode, + "borica_terminal_id_masked": mask_key(terminal_id) if terminal_id else "", + "borica_merchant_id_masked": mask_key(merchant_id) if merchant_id else "", + "has_terminal_id": bool(terminal_id), + "has_merchant_id": bool(merchant_id), + "has_private_key_path": bool(private_key_path), + "has_certificate_path": bool(certificate_path), + } + } + except HTTPException: + raise + except Exception as e: + db.rollback() + raise HTTPException(status_code=500, detail=str(e)) + +@router.post("/borica/upload-certificate") +async def upload_borica_certificate( + file: UploadFile = File(...), + file_type: str = Form("private_key"), # "private_key" or "certificate" + current_user: User = Depends(authorize_roles("admin")), + db: Session = Depends(get_db) +): + """ + Upload Borica certificate or private key file. + file_type: "private_key" or "certificate" + """ + try: + if not file: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="No file provided" + ) + + if not file.filename: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="Filename is required" + ) + + # Validate file type + allowed_extensions = ['.pem', '.key', '.crt', '.cer', '.p12', '.pfx'] + file_ext = Path(file.filename).suffix.lower() + + if file_ext not in allowed_extensions: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=f"Invalid file type. Allowed extensions: {', '.join(allowed_extensions)}" + ) + + # Validate file_type parameter + if file_type not in ["private_key", "certificate"]: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="file_type must be 'private_key' or 'certificate'" + ) + + # Create upload directory + upload_dir = Path(__file__).parent.parent.parent / "uploads" / "certificates" / "borica" + upload_dir.mkdir(parents=True, exist_ok=True) + + # Generate unique filename + file_type_prefix = "private_key" if file_type == "private_key" else "certificate" + filename = f"borica_{file_type_prefix}_{uuid.uuid4()}{file_ext}" + file_path = upload_dir / filename + + # Read and save file + content = await file.read() + if not content: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="File is empty" + ) + + # Validate file size (max 1MB for certificate files) + max_size = 1024 * 1024 # 1MB + if len(content) > max_size: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=f"File size exceeds maximum allowed size of {max_size / 1024}KB" + ) + + # Save file + async with aiofiles.open(file_path, 'wb') as f: + await f.write(content) + + # Get absolute path + absolute_path = str(file_path.resolve()) + + # Update system settings with the new path + setting_key = "borica_private_key_path" if file_type == "private_key" else "borica_certificate_path" + + # Delete old file if exists + old_setting = db.query(SystemSettings).filter( + SystemSettings.key == setting_key + ).first() + + if old_setting and old_setting.value: + old_file_path = Path(old_setting.value) + # Only delete if it's in our uploads directory (safety check) + if old_file_path.exists() and str(old_file_path).startswith(str(upload_dir)): + try: + old_file_path.unlink() + except Exception as e: + logger.warning(f"Could not delete old file {old_setting.value}: {e}") + + # Update or create setting + if old_setting: + old_setting.value = absolute_path + old_setting.updated_by_id = current_user.id + else: + setting = SystemSettings( + key=setting_key, + value=absolute_path, + description=f"Path to Borica {file_type.replace('_', ' ')} file", + updated_by_id=current_user.id + ) + db.add(setting) + + db.commit() + + return { + "status": "success", + "message": f"Borica {file_type.replace('_', ' ')} uploaded successfully", + "data": { + "file_path": absolute_path, + "file_type": file_type, + "filename": filename + } + } + except HTTPException: + raise + except Exception as e: + db.rollback() + logger.error(f"Error uploading Borica certificate: {e}", exc_info=True) + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail=f"Error uploading file: {str(e)}" + ) + @router.get("/smtp") async def get_smtp_settings( current_user: User = Depends(authorize_roles("admin")), diff --git a/Backend/src/routes/task_routes.py b/Backend/src/routes/task_routes.py new file mode 100644 index 00000000..cf3d94a5 --- /dev/null +++ b/Backend/src/routes/task_routes.py @@ -0,0 +1,418 @@ +from fastapi import APIRouter, Depends, HTTPException, Query, Body +from sqlalchemy.orm import Session +from typing import Optional, List, Dict, Any +from ..config.database import get_db +from ..middleware.auth import authorize_roles, get_current_user +from ..models.user import User +from ..models.workflow import TaskStatus, TaskPriority +from ..services.task_service import TaskService +from pydantic import BaseModel +from datetime import datetime + +router = APIRouter(prefix='/tasks', tags=['tasks']) + +# Request/Response Models +class TaskCreate(BaseModel): + title: str + description: Optional[str] = None + task_type: str = 'general' + priority: Optional[str] = 'medium' + workflow_instance_id: Optional[int] = None + booking_id: Optional[int] = None + room_id: Optional[int] = None + assigned_to: Optional[int] = None + due_date: Optional[str] = None + estimated_duration_minutes: Optional[int] = None + metadata: Optional[Dict[str, Any]] = None + +class TaskUpdate(BaseModel): + title: Optional[str] = None + description: Optional[str] = None + status: Optional[str] = None + priority: Optional[str] = None + assigned_to: Optional[int] = None + due_date: Optional[str] = None + notes: Optional[str] = None + actual_duration_minutes: Optional[int] = None + +class TaskCommentCreate(BaseModel): + comment: str + +# Task CRUD +@router.post('/') +async def create_task( + task_data: TaskCreate, + current_user: User = Depends(authorize_roles('admin', 'staff')), + db: Session = Depends(get_db) +): + """Create a new task""" + try: + due_date = None + if task_data.due_date: + try: + due_date = datetime.fromisoformat(task_data.due_date.replace('Z', '+00:00')) + except: + due_date = datetime.strptime(task_data.due_date, '%Y-%m-%dT%H:%M:%S') + + task = TaskService.create_task( + db=db, + title=task_data.title, + created_by=current_user.id, + task_type=task_data.task_type, + description=task_data.description, + priority=TaskPriority(task_data.priority) if task_data.priority else TaskPriority.medium, + workflow_instance_id=task_data.workflow_instance_id, + booking_id=task_data.booking_id, + room_id=task_data.room_id, + assigned_to=task_data.assigned_to, + due_date=due_date, + estimated_duration_minutes=task_data.estimated_duration_minutes, + metadata=task_data.metadata + ) + return {'status': 'success', 'data': { + 'id': task.id, + 'title': task.title, + 'status': task.status.value, + 'priority': task.priority.value, + 'created_at': task.created_at.isoformat() + }} + except ValueError as e: + raise HTTPException(status_code=400, detail=str(e)) + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + +@router.get('/') +async def get_tasks( + assigned_to: Optional[int] = Query(None), + created_by: Optional[int] = Query(None), + status: Optional[str] = Query(None), + priority: Optional[str] = Query(None), + task_type: Optional[str] = Query(None), + booking_id: Optional[int] = Query(None), + room_id: Optional[int] = Query(None), + workflow_instance_id: Optional[int] = Query(None), + overdue_only: bool = Query(False), + skip: int = Query(0, ge=0), + limit: int = Query(100, ge=1, le=1000), + current_user: User = Depends(authorize_roles('admin', 'staff')), + db: Session = Depends(get_db) +): + """Get tasks""" + try: + tasks = TaskService.get_tasks( + db=db, + assigned_to=assigned_to, + created_by=created_by, + status=TaskStatus(status) if status else None, + priority=TaskPriority(priority) if priority else None, + task_type=task_type, + booking_id=booking_id, + room_id=room_id, + workflow_instance_id=workflow_instance_id, + overdue_only=overdue_only, + skip=skip, + limit=limit + ) + + # Build response data safely + result = [] + for t in tasks: + try: + result.append({ + 'id': t.id, + 'title': t.title, + 'description': t.description, + 'task_type': t.task_type, + 'status': t.status.value, + 'priority': t.priority.value, + 'workflow_instance_id': t.workflow_instance_id, + 'booking_id': t.booking_id, + 'room_id': t.room_id, + 'assigned_to': t.assigned_to, + 'assigned_to_name': t.assignee.full_name if t.assignee else None, + 'created_by': t.created_by, + 'due_date': t.due_date.isoformat() if t.due_date else None, + 'completed_at': t.completed_at.isoformat() if t.completed_at else None, + 'estimated_duration_minutes': t.estimated_duration_minutes, + 'actual_duration_minutes': t.actual_duration_minutes, + 'notes': t.notes, + 'created_at': t.created_at.isoformat() if t.created_at else None, + 'updated_at': t.updated_at.isoformat() if t.updated_at else None + }) + except Exception as task_error: + # Log the error for this specific task but continue with others + import logging + logger = logging.getLogger(__name__) + logger.error(f"Error serializing task {t.id}: {str(task_error)}") + continue + + return {'status': 'success', 'data': result} + except ValueError as e: + raise HTTPException(status_code=400, detail=str(e)) + except Exception as e: + import logging + logger = logging.getLogger(__name__) + logger.error(f"Error in get_tasks: {str(e)}", exc_info=True) + raise HTTPException(status_code=500, detail=str(e)) + +@router.get('/my-tasks') +async def get_my_tasks( + status: Optional[str] = Query(None), + current_user: User = Depends(get_current_user), + db: Session = Depends(get_db) +): + """Get tasks assigned to current user""" + try: + tasks = TaskService.get_my_tasks( + db=db, + user_id=current_user.id, + status=TaskStatus(status) if status else None + ) + return {'status': 'success', 'data': [{ + 'id': t.id, + 'title': t.title, + 'description': t.description, + 'task_type': t.task_type, + 'status': t.status.value, + 'priority': t.priority.value, + 'due_date': t.due_date.isoformat() if t.due_date else None, + 'created_at': t.created_at.isoformat() + } for t in tasks]} + except ValueError as e: + raise HTTPException(status_code=400, detail=str(e)) + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + +@router.get('/{task_id}') +async def get_task( + task_id: int, + current_user: User = Depends(authorize_roles('admin', 'staff')), + db: Session = Depends(get_db) +): + """Get task by ID""" + try: + task = TaskService.get_task_by_id(db, task_id) + if not task: + raise HTTPException(status_code=404, detail='Task not found') + + comments = TaskService.get_task_comments(db, task_id) + + return {'status': 'success', 'data': { + 'id': task.id, + 'title': task.title, + 'description': task.description, + 'task_type': task.task_type, + 'status': task.status.value, + 'priority': task.priority.value, + 'workflow_instance_id': task.workflow_instance_id, + 'booking_id': task.booking_id, + 'room_id': task.room_id, + 'assigned_to': task.assigned_to, + 'assigned_to_name': task.assignee.full_name if task.assignee else None, + 'created_by': task.created_by, + 'created_by_name': task.creator_user.full_name if task.creator_user else None, + 'due_date': task.due_date.isoformat() if task.due_date else None, + 'completed_at': task.completed_at.isoformat() if task.completed_at else None, + 'estimated_duration_minutes': task.estimated_duration_minutes, + 'actual_duration_minutes': task.actual_duration_minutes, + 'notes': task.notes, + 'metadata': task.meta_data, + 'comments': [{ + 'id': c.id, + 'user_id': c.user_id, + 'user_name': c.user.full_name if c.user else None, + 'comment': c.comment, + 'created_at': c.created_at.isoformat() + } for c in comments], + 'created_at': task.created_at.isoformat(), + 'updated_at': task.updated_at.isoformat() + }} + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + +@router.put('/{task_id}') +async def update_task( + task_id: int, + task_data: TaskUpdate, + current_user: User = Depends(authorize_roles('admin', 'staff')), + db: Session = Depends(get_db) +): + """Update task""" + try: + due_date = None + if task_data.due_date: + try: + due_date = datetime.fromisoformat(task_data.due_date.replace('Z', '+00:00')) + except: + due_date = datetime.strptime(task_data.due_date, '%Y-%m-%dT%H:%M:%S') + + task = TaskService.update_task( + db=db, + task_id=task_id, + title=task_data.title, + description=task_data.description, + status=TaskStatus(task_data.status) if task_data.status else None, + priority=TaskPriority(task_data.priority) if task_data.priority else None, + assigned_to=task_data.assigned_to, + due_date=due_date, + notes=task_data.notes, + actual_duration_minutes=task_data.actual_duration_minutes + ) + if not task: + raise HTTPException(status_code=404, detail='Task not found') + + return {'status': 'success', 'data': { + 'id': task.id, + 'status': task.status.value, + 'updated_at': task.updated_at.isoformat() + }} + except HTTPException: + raise + except ValueError as e: + raise HTTPException(status_code=400, detail=str(e)) + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + +@router.post('/{task_id}/assign') +async def assign_task( + task_id: int, + user_id: int = Body(..., embed=True), + current_user: User = Depends(authorize_roles('admin', 'staff')), + db: Session = Depends(get_db) +): + """Assign task to a user""" + try: + task = TaskService.assign_task(db, task_id, user_id) + if not task: + raise HTTPException(status_code=404, detail='Task not found') + + return {'status': 'success', 'data': { + 'id': task.id, + 'assigned_to': task.assigned_to, + 'status': task.status.value + }} + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + +@router.post('/{task_id}/start') +async def start_task( + task_id: int, + current_user: User = Depends(get_current_user), + db: Session = Depends(get_db) +): + """Start task (mark as in progress)""" + try: + task = TaskService.start_task(db, task_id) + if not task: + raise HTTPException(status_code=404, detail='Task not found') + + return {'status': 'success', 'data': { + 'id': task.id, + 'status': task.status.value + }} + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + +@router.post('/{task_id}/complete') +async def complete_task( + task_id: int, + notes: Optional[str] = Body(None, embed=True), + current_user: User = Depends(get_current_user), + db: Session = Depends(get_db) +): + """Complete task""" + try: + task = TaskService.complete_task(db, task_id, notes) + if not task: + raise HTTPException(status_code=404, detail='Task not found') + + return {'status': 'success', 'data': { + 'id': task.id, + 'status': task.status.value, + 'completed_at': task.completed_at.isoformat() if task.completed_at else None + }} + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + +@router.post('/{task_id}/cancel') +async def cancel_task( + task_id: int, + reason: Optional[str] = Body(None, embed=True), + current_user: User = Depends(authorize_roles('admin', 'staff')), + db: Session = Depends(get_db) +): + """Cancel task""" + try: + task = TaskService.cancel_task(db, task_id, reason) + if not task: + raise HTTPException(status_code=404, detail='Task not found') + + return {'status': 'success', 'data': { + 'id': task.id, + 'status': task.status.value + }} + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + +@router.post('/{task_id}/comments') +async def add_task_comment( + task_id: int, + comment_data: TaskCommentCreate, + current_user: User = Depends(get_current_user), + db: Session = Depends(get_db) +): + """Add comment to task""" + try: + comment = TaskService.add_task_comment( + db=db, + task_id=task_id, + user_id=current_user.id, + comment=comment_data.comment + ) + return {'status': 'success', 'data': { + 'id': comment.id, + 'user_id': comment.user_id, + 'user_name': comment.user.full_name if comment.user else None, + 'comment': comment.comment, + 'created_at': comment.created_at.isoformat() + }} + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + +@router.get('/statistics/') +async def get_task_statistics( + assigned_to: Optional[int] = Query(None), + start_date: Optional[str] = Query(None), + end_date: Optional[str] = Query(None), + current_user: User = Depends(authorize_roles('admin', 'staff')), + db: Session = Depends(get_db) +): + """Get task statistics""" + try: + start = None + end = None + if start_date: + start = datetime.fromisoformat(start_date.replace('Z', '+00:00')) + if end_date: + end = datetime.fromisoformat(end_date.replace('Z', '+00:00')) + + stats = TaskService.get_task_statistics( + db=db, + assigned_to=assigned_to or (current_user.id if current_user.role.name != 'admin' else None), + start_date=start, + end_date=end + ) + return {'status': 'success', 'data': stats} + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + diff --git a/Backend/src/routes/user_routes.py b/Backend/src/routes/user_routes.py index 7c4ae2ed..a0300fd1 100644 --- a/Backend/src/routes/user_routes.py +++ b/Backend/src/routes/user_routes.py @@ -8,6 +8,8 @@ from ..middleware.auth import get_current_user, authorize_roles from ..models.user import User from ..models.role import Role from ..models.booking import Booking, BookingStatus +from ..utils.role_helpers import can_manage_users +from ..utils.response_helpers import success_response router = APIRouter(prefix='/users', tags=['users']) @router.get('/', dependencies=[Depends(authorize_roles('admin'))]) @@ -30,7 +32,7 @@ async def get_users(search: Optional[str]=Query(None), role: Optional[str]=Query for user in users: user_dict = {'id': user.id, 'email': user.email, 'full_name': user.full_name, 'phone': user.phone, 'phone_number': user.phone, 'address': user.address, 'avatar': user.avatar, 'currency': getattr(user, 'currency', 'VND'), 'is_active': user.is_active, 'status': 'active' if user.is_active else 'inactive', 'role_id': user.role_id, 'role': user.role.name if user.role else 'customer', 'created_at': user.created_at.isoformat() if user.created_at else None, 'updated_at': user.updated_at.isoformat() if user.updated_at else None} result.append(user_dict) - return {'status': 'success', 'data': {'users': result, 'pagination': {'total': total, 'page': page, 'limit': limit, 'totalPages': (total + limit - 1) // limit}}} + return success_response(data={'users': result, 'pagination': {'total': total, 'page': page, 'limit': limit, 'totalPages': (total + limit - 1) // limit}}) except Exception as e: raise HTTPException(status_code=500, detail=str(e)) @@ -42,7 +44,7 @@ async def get_user_by_id(id: int, current_user: User=Depends(authorize_roles('ad raise HTTPException(status_code=404, detail='User not found') bookings = db.query(Booking).filter(Booking.user_id == id).order_by(Booking.created_at.desc()).limit(5).all() user_dict = {'id': user.id, 'email': user.email, 'full_name': user.full_name, 'phone': user.phone, 'phone_number': user.phone, 'address': user.address, 'avatar': user.avatar, 'currency': getattr(user, 'currency', 'VND'), 'is_active': user.is_active, 'status': 'active' if user.is_active else 'inactive', 'role_id': user.role_id, 'role': user.role.name if user.role else 'customer', 'created_at': user.created_at.isoformat() if user.created_at else None, 'updated_at': user.updated_at.isoformat() if user.updated_at else None, 'bookings': [{'id': b.id, 'booking_number': b.booking_number, 'status': b.status.value if isinstance(b.status, BookingStatus) else b.status, 'created_at': b.created_at.isoformat() if b.created_at else None} for b in bookings]} - return {'status': 'success', 'data': {'user': user_dict}} + return success_response(data={'user': user_dict}) except HTTPException: raise except Exception as e: @@ -70,7 +72,7 @@ async def create_user(user_data: dict, current_user: User=Depends(authorize_role db.commit() db.refresh(user) user_dict = {'id': user.id, 'email': user.email, 'full_name': user.full_name, 'phone': user.phone, 'phone_number': user.phone, 'currency': getattr(user, 'currency', 'VND'), 'role_id': user.role_id, 'is_active': user.is_active} - return {'status': 'success', 'message': 'User created successfully', 'data': {'user': user_dict}} + return success_response(data={'user': user_dict}, message='User created successfully') except HTTPException: raise except Exception as e: @@ -80,7 +82,7 @@ async def create_user(user_data: dict, current_user: User=Depends(authorize_role @router.put('/{id}') async def update_user(id: int, user_data: dict, current_user: User=Depends(get_current_user), db: Session=Depends(get_db)): try: - if current_user.role_id != 1 and current_user.id != id: + if not can_manage_users(current_user, db) and current_user.id != id: raise HTTPException(status_code=403, detail='Forbidden') user = db.query(User).filter(User.id == id).first() if not user: @@ -93,13 +95,13 @@ async def update_user(id: int, user_data: dict, current_user: User=Depends(get_c role_map = {'admin': 1, 'staff': 2, 'customer': 3, 'accountant': 4} if 'full_name' in user_data: user.full_name = user_data['full_name'] - if 'email' in user_data and current_user.role_id == 1: + if 'email' in user_data and can_manage_users(current_user, db): user.email = user_data['email'] if 'phone_number' in user_data: user.phone = user_data['phone_number'] - if 'role' in user_data and current_user.role_id == 1: + if 'role' in user_data and can_manage_users(current_user, db): user.role_id = role_map.get(user_data['role'], 3) - if 'status' in user_data and current_user.role_id == 1: + if 'status' in user_data and can_manage_users(current_user, db): user.is_active = user_data['status'] == 'active' if 'currency' in user_data: currency = user_data['currency'] @@ -112,7 +114,7 @@ async def update_user(id: int, user_data: dict, current_user: User=Depends(get_c db.commit() db.refresh(user) user_dict = {'id': user.id, 'email': user.email, 'full_name': user.full_name, 'phone': user.phone, 'phone_number': user.phone, 'currency': getattr(user, 'currency', 'VND'), 'role_id': user.role_id, 'is_active': user.is_active} - return {'status': 'success', 'message': 'User updated successfully', 'data': {'user': user_dict}} + return success_response(data={'user': user_dict}, message='User updated successfully') except HTTPException: raise except Exception as e: @@ -130,7 +132,7 @@ async def delete_user(id: int, current_user: User=Depends(authorize_roles('admin raise HTTPException(status_code=400, detail='Cannot delete user with active bookings') db.delete(user) db.commit() - return {'status': 'success', 'message': 'User deleted successfully'} + return success_response(message='User deleted successfully') except HTTPException: raise except Exception as e: diff --git a/Backend/src/routes/workflow_routes.py b/Backend/src/routes/workflow_routes.py new file mode 100644 index 00000000..1a1ef555 --- /dev/null +++ b/Backend/src/routes/workflow_routes.py @@ -0,0 +1,314 @@ +from fastapi import APIRouter, Depends, HTTPException, Query, Body +from sqlalchemy.orm import Session +from typing import Optional, List, Dict, Any +from ..config.database import get_db +from ..middleware.auth import authorize_roles, get_current_user +from ..models.user import User +from ..models.workflow import WorkflowType, WorkflowStatus, WorkflowTrigger +from ..services.workflow_service import WorkflowService +from pydantic import BaseModel + +router = APIRouter(prefix='/workflows', tags=['workflows']) + +# Request/Response Models +class WorkflowCreate(BaseModel): + name: str + description: Optional[str] = None + workflow_type: str + trigger: str + steps: List[Dict[str, Any]] + trigger_config: Optional[Dict[str, Any]] = None + sla_hours: Optional[int] = None + +class WorkflowUpdate(BaseModel): + name: Optional[str] = None + description: Optional[str] = None + steps: Optional[List[Dict[str, Any]]] = None + status: Optional[str] = None + trigger_config: Optional[Dict[str, Any]] = None + sla_hours: Optional[int] = None + +class WorkflowTriggerRequest(BaseModel): + booking_id: Optional[int] = None + room_id: Optional[int] = None + user_id: Optional[int] = None + metadata: Optional[Dict[str, Any]] = None + +# Workflow CRUD +@router.post('/') +async def create_workflow( + workflow_data: WorkflowCreate, + current_user: User = Depends(authorize_roles('admin')), + db: Session = Depends(get_db) +): + """Create a new workflow""" + try: + workflow = WorkflowService.create_workflow( + db=db, + name=workflow_data.name, + workflow_type=WorkflowType(workflow_data.workflow_type), + trigger=WorkflowTrigger(workflow_data.trigger), + steps=workflow_data.steps, + created_by=current_user.id, + description=workflow_data.description, + trigger_config=workflow_data.trigger_config, + sla_hours=workflow_data.sla_hours + ) + return {'status': 'success', 'data': { + 'id': workflow.id, + 'name': workflow.name, + 'workflow_type': workflow.workflow_type.value, + 'trigger': workflow.trigger.value, + 'status': workflow.status.value, + 'created_at': workflow.created_at.isoformat() + }} + except ValueError as e: + raise HTTPException(status_code=400, detail=str(e)) + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + +@router.get('/') +async def get_workflows( + workflow_type: Optional[str] = Query(None), + status: Optional[str] = Query(None), + skip: int = Query(0, ge=0), + limit: int = Query(100, ge=1, le=1000), + current_user: User = Depends(authorize_roles('admin', 'staff')), + db: Session = Depends(get_db) +): + """Get workflows""" + try: + workflows = WorkflowService.get_workflows( + db=db, + workflow_type=WorkflowType(workflow_type) if workflow_type else None, + status=WorkflowStatus(status) if status else None, + skip=skip, + limit=limit + ) + return {'status': 'success', 'data': [{ + 'id': w.id, + 'name': w.name, + 'description': w.description, + 'workflow_type': w.workflow_type.value, + 'trigger': w.trigger.value, + 'status': w.status.value, + 'sla_hours': w.sla_hours, + 'steps': w.steps, + 'trigger_config': w.trigger_config, + 'created_at': w.created_at.isoformat(), + 'updated_at': w.updated_at.isoformat() + } for w in workflows]} + except ValueError as e: + raise HTTPException(status_code=400, detail=str(e)) + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + +@router.get('/{workflow_id}') +async def get_workflow( + workflow_id: int, + current_user: User = Depends(authorize_roles('admin', 'staff')), + db: Session = Depends(get_db) +): + """Get workflow by ID""" + try: + workflow = WorkflowService.get_workflow_by_id(db, workflow_id) + if not workflow: + raise HTTPException(status_code=404, detail='Workflow not found') + + return {'status': 'success', 'data': { + 'id': workflow.id, + 'name': workflow.name, + 'description': workflow.description, + 'workflow_type': workflow.workflow_type.value, + 'trigger': workflow.trigger.value, + 'status': workflow.status.value, + 'sla_hours': workflow.sla_hours, + 'steps': workflow.steps, + 'trigger_config': workflow.trigger_config, + 'created_at': workflow.created_at.isoformat(), + 'updated_at': workflow.updated_at.isoformat() + }} + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + +@router.put('/{workflow_id}') +async def update_workflow( + workflow_id: int, + workflow_data: WorkflowUpdate, + current_user: User = Depends(authorize_roles('admin')), + db: Session = Depends(get_db) +): + """Update workflow""" + try: + workflow = WorkflowService.update_workflow( + db=db, + workflow_id=workflow_id, + name=workflow_data.name, + description=workflow_data.description, + steps=workflow_data.steps, + status=WorkflowStatus(workflow_data.status) if workflow_data.status else None, + trigger_config=workflow_data.trigger_config, + sla_hours=workflow_data.sla_hours + ) + if not workflow: + raise HTTPException(status_code=404, detail='Workflow not found') + + return {'status': 'success', 'data': { + 'id': workflow.id, + 'name': workflow.name, + 'status': workflow.status.value, + 'updated_at': workflow.updated_at.isoformat() + }} + except HTTPException: + raise + except ValueError as e: + raise HTTPException(status_code=400, detail=str(e)) + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + +@router.delete('/{workflow_id}') +async def delete_workflow( + workflow_id: int, + current_user: User = Depends(authorize_roles('admin')), + db: Session = Depends(get_db) +): + """Delete workflow""" + try: + success = WorkflowService.delete_workflow(db, workflow_id) + if not success: + raise HTTPException(status_code=404, detail='Workflow not found') + + return {'status': 'success', 'message': 'Workflow deleted successfully'} + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + +# Workflow Instances +@router.post('/{workflow_id}/trigger') +async def trigger_workflow( + workflow_id: int, + trigger_data: WorkflowTriggerRequest, + current_user: User = Depends(authorize_roles('admin', 'staff')), + db: Session = Depends(get_db) +): + """Trigger a workflow""" + try: + instance = WorkflowService.trigger_workflow( + db=db, + workflow_id=workflow_id, + booking_id=trigger_data.booking_id, + room_id=trigger_data.room_id, + user_id=trigger_data.user_id, + metadata=trigger_data.metadata + ) + if not instance: + raise HTTPException(status_code=404, detail='Workflow not found or inactive') + + return {'status': 'success', 'data': { + 'id': instance.id, + 'workflow_id': instance.workflow_id, + 'status': instance.status, + 'started_at': instance.started_at.isoformat(), + 'due_date': instance.due_date.isoformat() if instance.due_date else None + }} + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + +@router.get('/instances/') +async def get_workflow_instances( + workflow_id: Optional[int] = Query(None), + booking_id: Optional[int] = Query(None), + status: Optional[str] = Query(None), + skip: int = Query(0, ge=0), + limit: int = Query(100, ge=1, le=1000), + current_user: User = Depends(authorize_roles('admin', 'staff')), + db: Session = Depends(get_db) +): + """Get workflow instances""" + try: + instances = WorkflowService.get_workflow_instances( + db=db, + workflow_id=workflow_id, + booking_id=booking_id, + status=status, + skip=skip, + limit=limit + ) + return {'status': 'success', 'data': [{ + 'id': i.id, + 'workflow_id': i.workflow_id, + 'booking_id': i.booking_id, + 'room_id': i.room_id, + 'user_id': i.user_id, + 'status': i.status, + 'started_at': i.started_at.isoformat(), + 'completed_at': i.completed_at.isoformat() if i.completed_at else None, + 'due_date': i.due_date.isoformat() if i.due_date else None + } for i in instances]} + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + +@router.post('/instances/{instance_id}/complete') +async def complete_workflow_instance( + instance_id: int, + current_user: User = Depends(authorize_roles('admin', 'staff')), + db: Session = Depends(get_db) +): + """Complete workflow instance""" + try: + instance = WorkflowService.complete_workflow_instance(db, instance_id) + if not instance: + raise HTTPException(status_code=404, detail='Workflow instance not found') + + return {'status': 'success', 'data': { + 'id': instance.id, + 'status': instance.status, + 'completed_at': instance.completed_at.isoformat() if instance.completed_at else None + }} + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + +# Predefined workflow types +@router.get('/types/pre-arrival') +async def get_pre_arrival_workflows( + current_user: User = Depends(authorize_roles('admin', 'staff')), + db: Session = Depends(get_db) +): + """Get pre-arrival workflows""" + try: + workflows = WorkflowService.get_pre_arrival_workflows(db) + return {'status': 'success', 'data': [{ + 'id': w.id, + 'name': w.name, + 'description': w.description, + 'trigger': w.trigger.value, + 'sla_hours': w.sla_hours + } for w in workflows]} + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + +@router.get('/types/room-preparation') +async def get_room_preparation_workflows( + current_user: User = Depends(authorize_roles('admin', 'staff')), + db: Session = Depends(get_db) +): + """Get room preparation workflows""" + try: + workflows = WorkflowService.get_room_preparation_workflows(db) + return {'status': 'success', 'data': [{ + 'id': w.id, + 'name': w.name, + 'description': w.description, + 'trigger': w.trigger.value, + 'sla_hours': w.sla_hours + } for w in workflows]} + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + diff --git a/Backend/src/services/__pycache__/analytics_service.cpython-312.pyc b/Backend/src/services/__pycache__/analytics_service.cpython-312.pyc new file mode 100644 index 00000000..e21e3387 Binary files /dev/null and b/Backend/src/services/__pycache__/analytics_service.cpython-312.pyc differ diff --git a/Backend/src/services/__pycache__/borica_service.cpython-312.pyc b/Backend/src/services/__pycache__/borica_service.cpython-312.pyc new file mode 100644 index 00000000..3ee49d46 Binary files /dev/null and b/Backend/src/services/__pycache__/borica_service.cpython-312.pyc differ diff --git a/Backend/src/services/__pycache__/email_campaign_service.cpython-312.pyc b/Backend/src/services/__pycache__/email_campaign_service.cpython-312.pyc new file mode 100644 index 00000000..5f48fc38 Binary files /dev/null and b/Backend/src/services/__pycache__/email_campaign_service.cpython-312.pyc differ diff --git a/Backend/src/services/__pycache__/encryption_service.cpython-312.pyc b/Backend/src/services/__pycache__/encryption_service.cpython-312.pyc new file mode 100644 index 00000000..d80143f4 Binary files /dev/null and b/Backend/src/services/__pycache__/encryption_service.cpython-312.pyc differ diff --git a/Backend/src/services/__pycache__/gdpr_service.cpython-312.pyc b/Backend/src/services/__pycache__/gdpr_service.cpython-312.pyc new file mode 100644 index 00000000..af2e4634 Binary files /dev/null and b/Backend/src/services/__pycache__/gdpr_service.cpython-312.pyc differ diff --git a/Backend/src/services/__pycache__/group_booking_service.cpython-312.pyc b/Backend/src/services/__pycache__/group_booking_service.cpython-312.pyc new file mode 100644 index 00000000..c46592a7 Binary files /dev/null and b/Backend/src/services/__pycache__/group_booking_service.cpython-312.pyc differ diff --git a/Backend/src/services/__pycache__/guest_profile_service.cpython-312.pyc b/Backend/src/services/__pycache__/guest_profile_service.cpython-312.pyc index 1e7a985a..024f6f93 100644 Binary files a/Backend/src/services/__pycache__/guest_profile_service.cpython-312.pyc and b/Backend/src/services/__pycache__/guest_profile_service.cpython-312.pyc differ diff --git a/Backend/src/services/__pycache__/notification_service.cpython-312.pyc b/Backend/src/services/__pycache__/notification_service.cpython-312.pyc new file mode 100644 index 00000000..3135b6b7 Binary files /dev/null and b/Backend/src/services/__pycache__/notification_service.cpython-312.pyc differ diff --git a/Backend/src/services/__pycache__/oauth_service.cpython-312.pyc b/Backend/src/services/__pycache__/oauth_service.cpython-312.pyc new file mode 100644 index 00000000..a5dc5bc7 Binary files /dev/null and b/Backend/src/services/__pycache__/oauth_service.cpython-312.pyc differ diff --git a/Backend/src/services/__pycache__/room_assignment_service.cpython-312.pyc b/Backend/src/services/__pycache__/room_assignment_service.cpython-312.pyc new file mode 100644 index 00000000..a4873c1b Binary files /dev/null and b/Backend/src/services/__pycache__/room_assignment_service.cpython-312.pyc differ diff --git a/Backend/src/services/__pycache__/security_monitoring_service.cpython-312.pyc b/Backend/src/services/__pycache__/security_monitoring_service.cpython-312.pyc new file mode 100644 index 00000000..efc63630 Binary files /dev/null and b/Backend/src/services/__pycache__/security_monitoring_service.cpython-312.pyc differ diff --git a/Backend/src/services/__pycache__/security_scan_service.cpython-312.pyc b/Backend/src/services/__pycache__/security_scan_service.cpython-312.pyc new file mode 100644 index 00000000..77c766cd Binary files /dev/null and b/Backend/src/services/__pycache__/security_scan_service.cpython-312.pyc differ diff --git a/Backend/src/services/__pycache__/task_service.cpython-312.pyc b/Backend/src/services/__pycache__/task_service.cpython-312.pyc new file mode 100644 index 00000000..0cb4f518 Binary files /dev/null and b/Backend/src/services/__pycache__/task_service.cpython-312.pyc differ diff --git a/Backend/src/services/__pycache__/workflow_service.cpython-312.pyc b/Backend/src/services/__pycache__/workflow_service.cpython-312.pyc new file mode 100644 index 00000000..53c95362 Binary files /dev/null and b/Backend/src/services/__pycache__/workflow_service.cpython-312.pyc differ diff --git a/Backend/src/services/analytics_service.py b/Backend/src/services/analytics_service.py new file mode 100644 index 00000000..3bad04cb --- /dev/null +++ b/Backend/src/services/analytics_service.py @@ -0,0 +1,739 @@ +from sqlalchemy.orm import Session, load_only +from sqlalchemy import func, and_, or_, case, extract, distinct +from typing import Optional, Dict, List, Any +from datetime import datetime, timedelta, date +from ..models.booking import Booking, BookingStatus +from ..models.payment import Payment, PaymentStatus, PaymentMethod +from ..models.room import Room, RoomStatus +from ..models.room_type import RoomType +from ..models.user import User +from ..models.service_usage import ServiceUsage +from ..models.service import Service +from ..models.review import Review, ReviewStatus +from ..models.invoice import Invoice +import logging + +logger = logging.getLogger(__name__) + +class AnalyticsService: + """Advanced Analytics & Business Intelligence Service""" + + @staticmethod + def parse_date_range(start_date: Optional[str], end_date: Optional[str]) -> tuple[Optional[datetime], Optional[datetime]]: + """Parse date range strings to datetime objects""" + start = None + end = None + + if start_date: + try: + start = datetime.strptime(start_date, '%Y-%m-%d') + except ValueError: + start = datetime.fromisoformat(start_date.replace('Z', '+00:00')) + + if end_date: + try: + end = datetime.strptime(end_date, '%Y-%m-%d') + end = end.replace(hour=23, minute=59, second=59) + except ValueError: + end = datetime.fromisoformat(end_date.replace('Z', '+00:00')) + + return start, end + + # ==================== REVENUE ANALYTICS ==================== + + @staticmethod + def get_revpar(db: Session, start_date: Optional[datetime] = None, end_date: Optional[datetime] = None) -> Dict[str, Any]: + """Calculate RevPAR (Revenue Per Available Room)""" + total_rooms = db.query(Room).count() + + if not start_date or not end_date: + # Default to last 30 days + end_date = datetime.utcnow() + start_date = end_date - timedelta(days=30) + + days = (end_date - start_date).days + 1 + available_room_nights = total_rooms * days + + # Calculate total revenue from completed payments + revenue_query = db.query(func.sum(Payment.amount)).filter( + Payment.payment_status == PaymentStatus.completed + ) + if start_date: + revenue_query = revenue_query.filter(Payment.payment_date >= start_date) + if end_date: + revenue_query = revenue_query.filter(Payment.payment_date <= end_date) + + total_revenue = revenue_query.scalar() or 0.0 + + revpar = float(total_revenue) / available_room_nights if available_room_nights > 0 else 0.0 + + return { + 'revpar': round(revpar, 2), + 'total_revenue': float(total_revenue), + 'available_room_nights': available_room_nights, + 'period_days': days, + 'total_rooms': total_rooms + } + + @staticmethod + def get_adr(db: Session, start_date: Optional[datetime] = None, end_date: Optional[datetime] = None) -> Dict[str, Any]: + """Calculate ADR (Average Daily Rate)""" + try: + # Use load_only to exclude non-existent columns (rate_plan_id, group_booking_id) + query = db.query(Booking).options( + load_only(Booking.id, Booking.check_in_date, Booking.check_out_date, Booking.total_price, Booking.status) + ).filter( + Booking.status.in_([BookingStatus.confirmed, BookingStatus.checked_in, BookingStatus.checked_out]) + ) + + if start_date: + query = query.filter(Booking.check_in_date >= start_date) + if end_date: + query = query.filter(Booking.check_in_date <= end_date) + + bookings = query.all() + + if not bookings: + return { + 'adr': 0.0, + 'period': { + 'start': start_date.isoformat() if start_date else None, + 'end': end_date.isoformat() if end_date else None + } + } + + total_adr = 0.0 + count = 0 + + for booking in bookings: + try: + if booking.check_in_date and booking.check_out_date and booking.total_price: + nights = (booking.check_out_date - booking.check_in_date).days + 1 + if nights > 0: + daily_rate = float(booking.total_price) / nights + total_adr += daily_rate + count += 1 + except Exception as e: + logger.warning(f"Error processing booking {booking.id} for ADR: {str(e)}") + continue + + adr = total_adr / count if count > 0 else 0.0 + + return { + 'adr': round(adr, 2), + 'period': { + 'start': start_date.isoformat() if start_date else None, + 'end': end_date.isoformat() if end_date else None + } + } + except Exception as e: + logger.error(f"Error calculating ADR: {str(e)}") + raise + + @staticmethod + def get_occupancy_rate(db: Session, start_date: Optional[datetime] = None, end_date: Optional[datetime] = None) -> Dict[str, Any]: + """Calculate Occupancy Rate""" + try: + total_rooms = db.query(Room).count() + + if not start_date or not end_date: + end_date = datetime.utcnow() + start_date = end_date - timedelta(days=30) + + if start_date > end_date: + # Swap if dates are reversed + start_date, end_date = end_date, start_date + + days = (end_date - start_date).days + 1 + if days <= 0: + days = 1 + + available_room_nights = total_rooms * days if total_rooms > 0 else 0 + + # Calculate occupied room nights by fetching bookings and calculating in Python + # Use load_only to exclude non-existent columns (rate_plan_id, group_booking_id) + bookings_query = db.query(Booking).options( + load_only(Booking.id, Booking.check_in_date, Booking.check_out_date, Booking.status) + ).filter( + Booking.status.in_([BookingStatus.confirmed, BookingStatus.checked_in, BookingStatus.checked_out]) + ) + + # Filter bookings that overlap with the date range + bookings = bookings_query.filter( + and_(Booking.check_in_date <= end_date, Booking.check_out_date >= start_date) + ).all() + + occupied_room_nights = 0 + + for booking in bookings: + try: + if booking.check_in_date and booking.check_out_date: + # Calculate overlap with date range + overlap_start = max(booking.check_in_date, start_date) + overlap_end = min(booking.check_out_date, end_date) + + if overlap_start <= overlap_end: + nights = (overlap_end - overlap_start).days + 1 + if nights > 0: + occupied_room_nights += nights + except Exception as e: + logger.warning(f"Error processing booking {booking.id} for occupancy: {str(e)}") + continue + + occupancy_rate = (occupied_room_nights / available_room_nights * 100) if available_room_nights > 0 else 0.0 + + return { + 'occupancy_rate': round(occupancy_rate, 2), + 'occupied_room_nights': int(occupied_room_nights), + 'available_room_nights': available_room_nights, + 'period_days': days + } + except Exception as e: + logger.error(f"Error calculating occupancy rate: {str(e)}") + raise + + @staticmethod + def get_revenue_forecast(db: Session, forecast_days: int = 30) -> Dict[str, Any]: + """Revenue forecasting based on historical data""" + end_date = datetime.utcnow() + start_date = end_date - timedelta(days=90) # Use last 90 days for forecast + + # Get daily revenue for last 90 days + daily_revenue = db.query( + func.date(Payment.payment_date).label('date'), + func.sum(Payment.amount).label('revenue') + ).filter( + Payment.payment_status == PaymentStatus.completed, + Payment.payment_date >= start_date + ).group_by(func.date(Payment.payment_date)).order_by(func.date(Payment.payment_date)).all() + + if not daily_revenue: + return {'forecast': [], 'average_daily_revenue': 0, 'forecast_period': forecast_days} + + # Calculate average daily revenue + total_revenue = sum(float(rev) for _, rev in daily_revenue) + avg_daily_revenue = total_revenue / len(daily_revenue) if daily_revenue else 0 + + # Simple forecast: use average daily revenue + forecast = [] + for i in range(1, forecast_days + 1): + forecast_date = (end_date + timedelta(days=i)).date() + forecast.append({ + 'date': forecast_date.isoformat(), + 'forecasted_revenue': round(avg_daily_revenue, 2), + 'confidence': 'medium' + }) + + return { + 'forecast': forecast, + 'average_daily_revenue': round(avg_daily_revenue, 2), + 'forecast_period': forecast_days, + 'based_on_days': len(daily_revenue) + } + + @staticmethod + def get_market_penetration(db: Session, start_date: Optional[datetime] = None, end_date: Optional[datetime] = None) -> Dict[str, Any]: + """Market penetration analysis""" + if not start_date or not end_date: + end_date = datetime.utcnow() + start_date = end_date - timedelta(days=30) + + # Get bookings by source/channel (if available) + # For now, we'll analyze by booking creation method + # Use func.count() to avoid loading all columns (including non-existent rate_plan_id) + total_bookings = db.query(Booking).filter( + Booking.created_at >= start_date, + Booking.created_at <= end_date + ).with_entities(func.count(Booking.id)).scalar() or 0 + + # Analyze by room type popularity + room_type_bookings = db.query( + RoomType.name, + func.count(Booking.id).label('bookings'), + func.sum(Payment.amount).label('revenue') + ).join(Room, RoomType.id == Room.room_type_id).join( + Booking, Room.id == Booking.room_id + ).join( + Payment, Booking.id == Payment.booking_id + ).filter( + Payment.payment_status == PaymentStatus.completed, + Booking.created_at >= start_date, + Booking.created_at <= end_date + ).group_by(RoomType.name).all() + + penetration_by_type = [ + { + 'room_type': name, + 'bookings': int(count), + 'revenue': float(revenue or 0), + 'market_share': round((count / total_bookings * 100) if total_bookings > 0 else 0, 2) + } + for name, count, revenue in room_type_bookings + ] + + return { + 'total_bookings': total_bookings, + 'penetration_by_room_type': penetration_by_type, + 'period': { + 'start': start_date.isoformat(), + 'end': end_date.isoformat() + } + } + + # ==================== OPERATIONAL ANALYTICS ==================== + + @staticmethod + def get_staff_performance(db: Session, start_date: Optional[datetime] = None, end_date: Optional[datetime] = None) -> Dict[str, Any]: + """Staff performance metrics""" + if not start_date or not end_date: + end_date = datetime.utcnow() + start_date = end_date - timedelta(days=30) + + # Get staff users + from ..models.role import Role + staff_users = db.query(User).join(Role, User.role_id == Role.id).filter( + or_(Role.name == 'staff', Role.name == 'admin') + ).all() + + performance_metrics = [] + for staff in staff_users: + # Count bookings handled (if we track this) + # For now, we'll use check-ins/check-outs as proxy + checkins = db.query(func.count(Booking.id)).filter( + Booking.status == BookingStatus.checked_in, + Booking.created_at >= start_date, + Booking.created_at <= end_date + ).scalar() or 0 + + performance_metrics.append({ + 'staff_id': staff.id, + 'staff_name': staff.full_name, + 'email': staff.email, + 'check_ins_handled': checkins, + 'performance_score': checkins # Simple metric + }) + + return { + 'staff_performance': performance_metrics, + 'period': { + 'start': start_date.isoformat(), + 'end': end_date.isoformat() + } + } + + @staticmethod + def get_service_usage_analytics(db: Session, start_date: Optional[datetime] = None, end_date: Optional[datetime] = None) -> Dict[str, Any]: + """Service usage analytics""" + query = db.query( + Service.id, + Service.name, + Service.category, + func.count(ServiceUsage.id).label('usage_count'), + func.sum(ServiceUsage.total_price).label('total_revenue'), + func.avg(ServiceUsage.unit_price).label('avg_price') + ).join(ServiceUsage, Service.id == ServiceUsage.service_id) + + if start_date: + query = query.filter(ServiceUsage.usage_date >= start_date) + if end_date: + query = query.filter(ServiceUsage.usage_date <= end_date) + + service_data = query.group_by(Service.id, Service.name, Service.category).all() + + services = [ + { + 'service_id': sid, + 'service_name': name, + 'category': category, + 'usage_count': int(count), + 'total_revenue': float(revenue or 0), + 'average_price': float(avg_price or 0) + } + for sid, name, category, count, revenue, avg_price in service_data + ] + + return { + 'services': services, + 'total_services': len(services), + 'total_usage': sum(s['usage_count'] for s in services), + 'total_revenue': sum(s['total_revenue'] for s in services) + } + + @staticmethod + def get_operational_efficiency(db: Session, start_date: Optional[datetime] = None, end_date: Optional[datetime] = None) -> Dict[str, Any]: + """Operational efficiency metrics""" + if not start_date or not end_date: + end_date = datetime.utcnow() + start_date = end_date - timedelta(days=30) + + # Booking conversion rate + # Use func.count() to avoid loading all columns (including non-existent rate_plan_id) + total_bookings = db.query(Booking).filter( + Booking.created_at >= start_date, + Booking.created_at <= end_date + ).with_entities(func.count(Booking.id)).scalar() or 0 + + confirmed_bookings = db.query(Booking).filter( + Booking.status.in_([BookingStatus.confirmed, BookingStatus.checked_in, BookingStatus.checked_out]), + Booking.created_at >= start_date, + Booking.created_at <= end_date + ).with_entities(func.count(Booking.id)).scalar() or 0 + + conversion_rate = (confirmed_bookings / total_bookings * 100) if total_bookings > 0 else 0 + + # Average booking value + avg_booking_value = db.query(func.avg(Booking.total_price)).filter( + Booking.status.in_([BookingStatus.confirmed, BookingStatus.checked_in, BookingStatus.checked_out]), + Booking.created_at >= start_date, + Booking.created_at <= end_date + ).scalar() or 0.0 + + # Cancellation rate + cancelled_bookings = db.query(Booking).filter( + Booking.status == BookingStatus.cancelled, + Booking.created_at >= start_date, + Booking.created_at <= end_date + ).with_entities(func.count(Booking.id)).scalar() or 0 + + cancellation_rate = (cancelled_bookings / total_bookings * 100) if total_bookings > 0 else 0 + + return { + 'conversion_rate': round(conversion_rate, 2), + 'average_booking_value': round(float(avg_booking_value), 2), + 'cancellation_rate': round(cancellation_rate, 2), + 'total_bookings': total_bookings, + 'confirmed_bookings': confirmed_bookings, + 'cancelled_bookings': cancelled_bookings + } + + # ==================== GUEST ANALYTICS ==================== + + @staticmethod + def get_guest_lifetime_value(db: Session, start_date: Optional[datetime] = None, end_date: Optional[datetime] = None) -> Dict[str, Any]: + """Guest Lifetime Value (LTV) analysis""" + query = db.query( + User.id, + User.full_name, + User.email, + func.count(distinct(Booking.id)).label('total_bookings'), + func.sum(Payment.amount).label('total_spent') + ).join(Booking, User.id == Booking.user_id).join( + Payment, Booking.id == Payment.booking_id + ).filter( + Payment.payment_status == PaymentStatus.completed + ) + + if start_date: + query = query.filter(Payment.payment_date >= start_date) + if end_date: + query = query.filter(Payment.payment_date <= end_date) + + guest_data = query.group_by(User.id, User.full_name, User.email).order_by( + func.sum(Payment.amount).desc() + ).limit(100).all() + + guests = [ + { + 'user_id': uid, + 'name': name, + 'email': email, + 'total_bookings': int(bookings), + 'lifetime_value': float(spent or 0), + 'average_booking_value': round(float(spent or 0) / int(bookings) if bookings > 0 else 0, 2) + } + for uid, name, email, bookings, spent in guest_data + ] + + avg_ltv = sum(g['lifetime_value'] for g in guests) / len(guests) if guests else 0 + + return { + 'guests': guests, + 'average_ltv': round(avg_ltv, 2), + 'total_guests_analyzed': len(guests) + } + + @staticmethod + def get_customer_acquisition_cost(db: Session, start_date: Optional[datetime] = None, end_date: Optional[datetime] = None) -> Dict[str, Any]: + """Customer Acquisition Cost (CAC) analysis""" + if not start_date or not end_date: + end_date = datetime.utcnow() + start_date = end_date - timedelta(days=30) + + # Get new customers (first booking in period) + new_customers = db.query(distinct(Booking.user_id)).filter( + Booking.created_at >= start_date, + Booking.created_at <= end_date + ).subquery() + + # Get first booking date for each user + first_booking_dates = db.query( + Booking.user_id, + func.min(Booking.created_at).label('first_booking') + ).group_by(Booking.user_id).subquery() + + new_customers_in_period = db.query(Booking.user_id).join( + first_booking_dates, + Booking.user_id == first_booking_dates.c.user_id + ).filter( + first_booking_dates.c.first_booking >= start_date, + first_booking_dates.c.first_booking <= end_date + ).distinct().count() + + # For CAC calculation, we'd need marketing spend data + # For now, we'll return the number of new customers + # In a real system, CAC = Marketing Spend / New Customers + + return { + 'new_customers': new_customers_in_period, + 'period': { + 'start': start_date.isoformat(), + 'end': end_date.isoformat() + }, + 'note': 'CAC calculation requires marketing spend data' + } + + @staticmethod + def get_repeat_guest_rate(db: Session, start_date: Optional[datetime] = None, end_date: Optional[datetime] = None) -> Dict[str, Any]: + """Repeat guest rate analysis""" + if not start_date or not end_date: + end_date = datetime.utcnow() + start_date = end_date - timedelta(days=90) + + # Total unique guests + total_guests = db.query(distinct(Booking.user_id)).filter( + Booking.created_at >= start_date, + Booking.created_at <= end_date + ).count() + + # Guests with multiple bookings + repeat_guests = db.query(Booking.user_id).filter( + Booking.created_at >= start_date, + Booking.created_at <= end_date + ).group_by(Booking.user_id).having( + func.count(Booking.id) > 1 + ).count() + + repeat_rate = (repeat_guests / total_guests * 100) if total_guests > 0 else 0 + + return { + 'repeat_guest_rate': round(repeat_rate, 2), + 'total_guests': total_guests, + 'repeat_guests': repeat_guests, + 'one_time_guests': total_guests - repeat_guests + } + + @staticmethod + def get_guest_satisfaction_trends(db: Session, start_date: Optional[datetime] = None, end_date: Optional[datetime] = None) -> Dict[str, Any]: + """Guest satisfaction trends from reviews""" + query = db.query( + func.date(Review.created_at).label('date'), + func.avg(Review.rating).label('avg_rating'), + func.count(Review.id).label('review_count') + ).filter(Review.status == ReviewStatus.approved) + + if start_date: + query = query.filter(Review.created_at >= start_date) + if end_date: + query = query.filter(Review.created_at <= end_date) + + trends = query.group_by(func.date(Review.created_at)).order_by( + func.date(Review.created_at) + ).all() + + satisfaction_data = [ + { + 'date': str(date), + 'average_rating': round(float(avg_rating or 0), 2), + 'review_count': int(count) + } + for date, avg_rating, count in trends + ] + + overall_avg = sum(d['average_rating'] for d in satisfaction_data) / len(satisfaction_data) if satisfaction_data else 0 + + return { + 'trends': satisfaction_data, + 'overall_average_rating': round(overall_avg, 2), + 'total_reviews': sum(d['review_count'] for d in satisfaction_data) + } + + # ==================== FINANCIAL ANALYTICS ==================== + + @staticmethod + def get_profit_loss(db: Session, start_date: Optional[datetime] = None, end_date: Optional[datetime] = None) -> Dict[str, Any]: + """Profit & Loss report""" + if not start_date or not end_date: + end_date = datetime.utcnow() + start_date = end_date - timedelta(days=30) + + # Revenue + total_revenue = db.query(func.sum(Payment.amount)).filter( + Payment.payment_status == PaymentStatus.completed, + Payment.payment_date >= start_date, + Payment.payment_date <= end_date + ).scalar() or 0.0 + + # Refunds + refunds = db.query(func.sum(Payment.amount)).filter( + Payment.payment_status == PaymentStatus.refunded, + Payment.payment_date >= start_date, + Payment.payment_date <= end_date + ).scalar() or 0.0 + + # Service costs (if we track service costs) + # For now, we'll use service revenue as proxy for costs + service_revenue = db.query(func.sum(ServiceUsage.total_price)).filter( + ServiceUsage.usage_date >= start_date, + ServiceUsage.usage_date <= end_date + ).scalar() or 0.0 + + # Net revenue + net_revenue = float(total_revenue) - float(refunds) + + # Gross profit (simplified - would need actual cost data) + gross_profit = net_revenue # Assuming no direct costs tracked + + return { + 'total_revenue': round(float(total_revenue), 2), + 'refunds': round(float(refunds), 2), + 'net_revenue': round(net_revenue, 2), + 'gross_profit': round(gross_profit, 2), + 'period': { + 'start': start_date.isoformat(), + 'end': end_date.isoformat() + } + } + + @staticmethod + def get_payment_method_analytics(db: Session, start_date: Optional[datetime] = None, end_date: Optional[datetime] = None) -> Dict[str, Any]: + """Payment method analytics""" + query = db.query( + Payment.payment_method, + func.count(Payment.id).label('transaction_count'), + func.sum(Payment.amount).label('total_amount'), + func.avg(Payment.amount).label('avg_amount') + ).filter(Payment.payment_status == PaymentStatus.completed) + + if start_date: + query = query.filter(Payment.payment_date >= start_date) + if end_date: + query = query.filter(Payment.payment_date <= end_date) + + method_data = query.group_by(Payment.payment_method).all() + + methods = [ + { + 'payment_method': method.value if hasattr(method, 'value') else str(method), + 'transaction_count': int(count), + 'total_amount': float(total or 0), + 'average_amount': round(float(avg or 0), 2), + 'percentage': 0 # Will calculate below + } + for method, count, total, avg in method_data + ] + + total_amount = sum(m['total_amount'] for m in methods) + for method in methods: + method['percentage'] = round((method['total_amount'] / total_amount * 100) if total_amount > 0 else 0, 2) + + return { + 'payment_methods': methods, + 'total_transactions': sum(m['transaction_count'] for m in methods), + 'total_amount': round(total_amount, 2) + } + + @staticmethod + def get_refund_analysis(db: Session, start_date: Optional[datetime] = None, end_date: Optional[datetime] = None) -> Dict[str, Any]: + """Refund analysis""" + query = db.query( + func.date(Payment.payment_date).label('date'), + func.count(Payment.id).label('refund_count'), + func.sum(Payment.amount).label('refund_amount') + ).filter( + Payment.payment_status == PaymentStatus.refunded + ) + + if start_date: + query = query.filter(Payment.payment_date >= start_date) + if end_date: + query = query.filter(Payment.payment_date <= end_date) + + refund_data = query.group_by(func.date(Payment.payment_date)).order_by( + func.date(Payment.payment_date) + ).all() + + refunds = [ + { + 'date': str(date), + 'refund_count': int(count), + 'refund_amount': float(amount or 0) + } + for date, count, amount in refund_data + ] + + total_refunds = sum(r['refund_amount'] for r in refunds) + total_count = sum(r['refund_count'] for r in refunds) + avg_refund = total_refunds / total_count if total_count > 0 else 0 + + return { + 'refunds': refunds, + 'total_refund_amount': round(total_refunds, 2), + 'total_refund_count': total_count, + 'average_refund_amount': round(avg_refund, 2) + } + + # ==================== COMPREHENSIVE ANALYTICS ==================== + + @staticmethod + def get_comprehensive_analytics( + db: Session, + start_date: Optional[str] = None, + end_date: Optional[str] = None, + include_revenue: bool = True, + include_operational: bool = True, + include_guest: bool = True, + include_financial: bool = True + ) -> Dict[str, Any]: + """Get comprehensive analytics across all categories""" + start, end = AnalyticsService.parse_date_range(start_date, end_date) + + result = { + 'period': { + 'start': start.isoformat() if start else None, + 'end': end.isoformat() if end else None + } + } + + if include_revenue: + result['revenue'] = { + 'revpar': AnalyticsService.get_revpar(db, start, end), + 'adr': AnalyticsService.get_adr(db, start, end), + 'occupancy_rate': AnalyticsService.get_occupancy_rate(db, start, end), + 'revenue_forecast': AnalyticsService.get_revenue_forecast(db), + 'market_penetration': AnalyticsService.get_market_penetration(db, start, end) + } + + if include_operational: + result['operational'] = { + 'staff_performance': AnalyticsService.get_staff_performance(db, start, end), + 'service_usage': AnalyticsService.get_service_usage_analytics(db, start, end), + 'operational_efficiency': AnalyticsService.get_operational_efficiency(db, start, end) + } + + if include_guest: + result['guest'] = { + 'lifetime_value': AnalyticsService.get_guest_lifetime_value(db, start, end), + 'customer_acquisition_cost': AnalyticsService.get_customer_acquisition_cost(db, start, end), + 'repeat_guest_rate': AnalyticsService.get_repeat_guest_rate(db, start, end), + 'satisfaction_trends': AnalyticsService.get_guest_satisfaction_trends(db, start, end) + } + + if include_financial: + result['financial'] = { + 'profit_loss': AnalyticsService.get_profit_loss(db, start, end), + 'payment_methods': AnalyticsService.get_payment_method_analytics(db, start, end), + 'refund_analysis': AnalyticsService.get_refund_analysis(db, start, end) + } + + return result + diff --git a/Backend/src/services/borica_service.py b/Backend/src/services/borica_service.py new file mode 100644 index 00000000..3e732dd8 --- /dev/null +++ b/Backend/src/services/borica_service.py @@ -0,0 +1,388 @@ +import logging +import hashlib +import hmac +import base64 +import urllib.parse +from typing import Optional, Dict, Any +from datetime import datetime +from ..config.settings import settings +from ..models.payment import Payment, PaymentMethod, PaymentType, PaymentStatus +from ..models.booking import Booking, BookingStatus +from ..models.system_settings import SystemSettings +from sqlalchemy.orm import Session +import os + +logger = logging.getLogger(__name__) + +def get_borica_terminal_id(db: Session) -> Optional[str]: + try: + setting = db.query(SystemSettings).filter(SystemSettings.key == 'borica_terminal_id').first() + if setting and setting.value: + return setting.value + except Exception: + pass + return settings.BORICA_TERMINAL_ID if settings.BORICA_TERMINAL_ID else None + +def get_borica_merchant_id(db: Session) -> Optional[str]: + try: + setting = db.query(SystemSettings).filter(SystemSettings.key == 'borica_merchant_id').first() + if setting and setting.value: + return setting.value + except Exception: + pass + return settings.BORICA_MERCHANT_ID if settings.BORICA_MERCHANT_ID else None + +def get_borica_private_key_path(db: Session) -> Optional[str]: + try: + setting = db.query(SystemSettings).filter(SystemSettings.key == 'borica_private_key_path').first() + if setting and setting.value: + return setting.value + except Exception: + pass + return settings.BORICA_PRIVATE_KEY_PATH if settings.BORICA_PRIVATE_KEY_PATH else None + +def get_borica_certificate_path(db: Session) -> Optional[str]: + try: + setting = db.query(SystemSettings).filter(SystemSettings.key == 'borica_certificate_path').first() + if setting and setting.value: + return setting.value + except Exception: + pass + return settings.BORICA_CERTIFICATE_PATH if settings.BORICA_CERTIFICATE_PATH else None + +def get_borica_gateway_url(db: Session) -> str: + try: + setting = db.query(SystemSettings).filter(SystemSettings.key == 'borica_gateway_url').first() + if setting and setting.value: + return setting.value + except Exception: + pass + mode = get_borica_mode(db) + if mode == 'production': + return settings.BORICA_GATEWAY_URL.replace('dev', 'gate') if 'dev' in settings.BORICA_GATEWAY_URL else settings.BORICA_GATEWAY_URL + return settings.BORICA_GATEWAY_URL + +def get_borica_mode(db: Session) -> str: + try: + setting = db.query(SystemSettings).filter(SystemSettings.key == 'borica_mode').first() + if setting and setting.value: + return setting.value + except Exception: + pass + return settings.BORICA_MODE if settings.BORICA_MODE else 'test' + +class BoricaService: + """ + Borica payment gateway service for processing online payments. + Borica is the Bulgarian payment gateway system. + """ + + @staticmethod + def generate_transaction_id(booking_id: int) -> str: + """Generate a unique transaction ID for Borica""" + timestamp = datetime.utcnow().strftime('%Y%m%d%H%M%S') + return f"BOOK{booking_id:06d}{timestamp}" + + @staticmethod + def create_payment_request( + amount: float, + currency: str, + order_id: str, + description: str, + return_url: str, + db: Optional[Session] = None + ) -> Dict[str, Any]: + """ + Create a Borica payment request. + Returns the payment form data that needs to be submitted to Borica gateway. + """ + terminal_id = None + merchant_id = None + gateway_url = None + + if db: + terminal_id = get_borica_terminal_id(db) + merchant_id = get_borica_merchant_id(db) + gateway_url = get_borica_gateway_url(db) + + if not terminal_id: + terminal_id = settings.BORICA_TERMINAL_ID + if not merchant_id: + merchant_id = settings.BORICA_MERCHANT_ID + if not gateway_url: + gateway_url = get_borica_gateway_url(db) if db else settings.BORICA_GATEWAY_URL + + if not terminal_id or not merchant_id: + raise ValueError('Borica Terminal ID and Merchant ID are required') + + # Convert amount to minor units (cents/stotinki) + amount_minor = int(round(amount * 100)) + + # Format amount as string with leading zeros (12 digits) + amount_str = f"{amount_minor:012d}" + + # Create transaction timestamp (YYYYMMDDHHMMSS) + transaction_timestamp = datetime.utcnow().strftime('%Y%m%d%H%M%S') + + # Create order description (max 125 chars) + order_description = description[:125] if description else f"Booking Payment {order_id}" + + # Create signature data string + # Format: TERMINAL=TERMINAL_ID,TRTYPE=1,ORDER=ORDER_ID,AMOUNT=AMOUNT,TIMESTAMP=TIMESTAMP + signature_data = f"TERMINAL={terminal_id},TRTYPE=1,ORDER={order_id},AMOUNT={amount_str},TIMESTAMP={transaction_timestamp}" + + # For test mode, we'll use a simple HMAC signature + # In production, you would use the private key to sign + private_key_path = get_borica_private_key_path(db) if db else settings.BORICA_PRIVATE_KEY_PATH + + # Generate signature (simplified for testing - in production use proper certificate signing) + signature = BoricaService._generate_signature(signature_data, private_key_path) + + return { + 'terminal_id': terminal_id, + 'merchant_id': merchant_id, + 'order_id': order_id, + 'amount': amount_str, + 'currency': currency.upper(), + 'description': order_description, + 'timestamp': transaction_timestamp, + 'signature': signature, + 'gateway_url': gateway_url, + 'return_url': return_url, + 'trtype': '1' # Sale transaction + } + + @staticmethod + def _generate_signature(data: str, private_key_path: Optional[str] = None) -> str: + """ + Generate signature for Borica request. + In production, this should use the actual private key certificate. + For testing, we'll use a simple hash. + """ + if private_key_path and os.path.exists(private_key_path): + try: + # In production, you would load the private key and sign the data + # This is a simplified version for testing + from cryptography.hazmat.primitives import hashes, serialization + from cryptography.hazmat.primitives.asymmetric import padding + from cryptography.hazmat.backends import default_backend + + with open(private_key_path, 'rb') as key_file: + private_key = serialization.load_pem_private_key( + key_file.read(), + password=None, + backend=default_backend() + ) + + signature = private_key.sign( + data.encode('utf-8'), + padding.PKCS1v15(), + hashes.SHA1() + ) + return base64.b64encode(signature).decode('utf-8') + except Exception as e: + logger.warning(f'Failed to sign with private key, using test signature: {e}') + + # Fallback: simple hash for testing + return hashlib.sha256(data.encode('utf-8')).hexdigest()[:40] + + @staticmethod + def verify_response_signature(response_data: Dict[str, Any], db: Optional[Session] = None) -> bool: + """ + Verify the signature of a Borica response. + """ + try: + # Extract signature from response + signature = response_data.get('P_SIGN', '') + if not signature: + return False + + # Reconstruct signature data from response + terminal_id = response_data.get('TERMINAL', '') + trtype = response_data.get('TRTYPE', '') + order_id = response_data.get('ORDER', '') + amount = response_data.get('AMOUNT', '') + timestamp = response_data.get('TIMESTAMP', '') + nonce = response_data.get('NONCE', '') + rcode = response_data.get('RC', '') + + # Build signature string + signature_data = f"TERMINAL={terminal_id},TRTYPE={trtype},ORDER={order_id},AMOUNT={amount},TIMESTAMP={timestamp},NONCE={nonce},RC={rcode}" + + # Verify signature using certificate + certificate_path = get_borica_certificate_path(db) if db else settings.BORICA_CERTIFICATE_PATH + + if certificate_path and os.path.exists(certificate_path): + try: + from cryptography import x509 + from cryptography.hazmat.backends import default_backend + from cryptography.hazmat.primitives import hashes + from cryptography.hazmat.primitives.asymmetric import padding + + with open(certificate_path, 'rb') as cert_file: + cert = x509.load_pem_x509_certificate( + cert_file.read(), + default_backend() + ) + + public_key = cert.public_key() + signature_bytes = base64.b64decode(signature) + + public_key.verify( + signature_bytes, + signature_data.encode('utf-8'), + padding.PKCS1v15(), + hashes.SHA1() + ) + return True + except Exception as e: + logger.error(f'Signature verification failed: {e}') + return False + + # For testing, accept if signature exists + return bool(signature) + except Exception as e: + logger.error(f'Error verifying signature: {e}') + return False + + @staticmethod + async def confirm_payment( + response_data: Dict[str, Any], + db: Session, + booking_id: Optional[int] = None + ) -> Dict[str, Any]: + """ + Confirm a Borica payment from the response data. + """ + try: + # Verify signature + if not BoricaService.verify_response_signature(response_data, db): + raise ValueError('Invalid payment response signature') + + # Extract response data + order_id = response_data.get('ORDER', '') + amount_str = response_data.get('AMOUNT', '') + rcode = response_data.get('RC', '') + rcode_msg = response_data.get('RCTEXT', '') + + # Convert amount from minor units + amount = float(amount_str) / 100 if amount_str else 0.0 + + # Get booking ID from order_id if not provided + if not booking_id and order_id: + # Extract booking ID from order_id (format: BOOK{booking_id}{timestamp}) + try: + if order_id.startswith('BOOK'): + booking_id = int(order_id[4:10]) + except (ValueError, IndexError): + pass + + if not booking_id: + raise ValueError('Booking ID is required') + + booking = db.query(Booking).filter(Booking.id == booking_id).first() + if not booking: + raise ValueError('Booking not found') + + # Check response code (00 = success) + if rcode != '00': + # Payment failed + payment = db.query(Payment).filter( + Payment.booking_id == booking_id, + Payment.transaction_id == order_id, + Payment.payment_method == PaymentMethod.borica + ).first() + + if payment: + payment.payment_status = PaymentStatus.failed + payment.notes = f'Borica payment failed: {rcode} - {rcode_msg}' + db.commit() + db.refresh(payment) + + raise ValueError(f'Payment failed: {rcode} - {rcode_msg}') + + # Payment successful + payment = db.query(Payment).filter( + Payment.booking_id == booking_id, + Payment.transaction_id == order_id, + Payment.payment_method == PaymentMethod.borica + ).first() + + if not payment: + payment = db.query(Payment).filter( + Payment.booking_id == booking_id, + Payment.payment_method == PaymentMethod.borica, + Payment.payment_status == PaymentStatus.pending + ).order_by(Payment.created_at.desc()).first() + + if payment: + payment.payment_status = PaymentStatus.completed + payment.payment_date = datetime.utcnow() + payment.amount = amount + payment.transaction_id = order_id + payment.notes = f'Borica payment completed: {rcode_msg}' + else: + payment_type = PaymentType.full + if booking.requires_deposit and not booking.deposit_paid: + payment_type = PaymentType.deposit + + payment = Payment( + booking_id=booking_id, + amount=amount, + payment_method=PaymentMethod.borica, + payment_type=payment_type, + payment_status=PaymentStatus.completed, + transaction_id=order_id, + payment_date=datetime.utcnow(), + notes=f'Borica payment completed: {rcode_msg}' + ) + db.add(payment) + + db.commit() + db.refresh(payment) + + # Update booking status if payment completed + if payment.payment_status == PaymentStatus.completed: + db.refresh(booking) + total_paid = sum( + float(p.amount) for p in booking.payments + if p.payment_status == PaymentStatus.completed + ) + + if payment.payment_type == PaymentType.deposit: + booking.deposit_paid = True + if booking.status in [BookingStatus.pending, BookingStatus.cancelled]: + booking.status = BookingStatus.confirmed + elif payment.payment_type == PaymentType.full: + if total_paid >= float(booking.total_price): + if booking.status in [BookingStatus.pending, BookingStatus.cancelled]: + booking.status = BookingStatus.confirmed + + db.commit() + db.refresh(booking) + + def get_enum_value(enum_obj): + if enum_obj is None: + return None + if isinstance(enum_obj, (PaymentMethod, PaymentType, PaymentStatus)): + return enum_obj.value + return enum_obj + + return { + 'id': payment.id, + 'booking_id': payment.booking_id, + 'amount': float(payment.amount) if payment.amount else 0.0, + 'payment_method': get_enum_value(payment.payment_method), + 'payment_type': get_enum_value(payment.payment_type), + 'payment_status': get_enum_value(payment.payment_status), + 'transaction_id': payment.transaction_id, + 'payment_date': payment.payment_date.isoformat() if payment.payment_date else None + } + except ValueError as e: + db.rollback() + raise + except Exception as e: + logger.error(f'Error confirming Borica payment: {e}', exc_info=True) + db.rollback() + raise ValueError(f'Error confirming payment: {str(e)}') + diff --git a/Backend/src/services/email_campaign_service.py b/Backend/src/services/email_campaign_service.py new file mode 100644 index 00000000..0a9d138e --- /dev/null +++ b/Backend/src/services/email_campaign_service.py @@ -0,0 +1,517 @@ +from sqlalchemy.orm import Session +from typing import List, Dict, Any, Optional +from datetime import datetime, timedelta +from ..models.email_campaign import ( + Campaign, CampaignStatus, CampaignType, EmailStatus, + CampaignSegment, EmailTemplate, CampaignEmail, EmailClick, + DripSequence, DripSequenceStep, DripSequenceEnrollment, Unsubscribe +) +from ..models.user import User +from ..models.booking import Booking +from ..config.logging_config import get_logger +from ..utils.mailer import send_email + +logger = get_logger(__name__) + +class EmailCampaignService: + """Service for managing email campaigns""" + + @staticmethod + def create_campaign( + db: Session, + name: str, + subject: str, + html_content: str, + text_content: Optional[str] = None, + campaign_type: CampaignType = CampaignType.newsletter, + segment_id: Optional[int] = None, + scheduled_at: Optional[datetime] = None, + created_by: Optional[int] = None, + **kwargs + ) -> Campaign: + """Create a new email campaign""" + campaign = Campaign( + name=name, + subject=subject, + html_content=html_content, + text_content=text_content, + campaign_type=campaign_type, + segment_id=segment_id, + scheduled_at=scheduled_at, + created_by=created_by, + **kwargs + ) + db.add(campaign) + db.commit() + db.refresh(campaign) + return campaign + + @staticmethod + def get_campaign_recipients( + db: Session, + campaign: Campaign + ) -> List[User]: + """Get list of recipients for a campaign based on segment""" + if campaign.segment_id: + segment = db.query(CampaignSegment).filter( + CampaignSegment.id == campaign.segment_id + ).first() + if segment: + return EmailCampaignService._apply_segment_criteria(db, segment.criteria) + + # If no segment, return all active users (or based on campaign type) + if campaign.campaign_type == CampaignType.newsletter: + return db.query(User).filter(User.is_active == True).all() + + return [] + + @staticmethod + def _apply_segment_criteria(db: Session, criteria: Dict[str, Any]) -> List[User]: + """Apply segment criteria to get matching users""" + query = db.query(User).filter(User.is_active == True) + + # Role filter + if 'role' in criteria: + from ..models.role import Role + role = db.query(Role).filter(Role.name == criteria['role']).first() + if role: + query = query.filter(User.role_id == role.id) + + # Last booking days + if 'last_booking_days' in criteria: + cutoff_date = datetime.utcnow() - timedelta(days=criteria['last_booking_days']) + query = query.join(Booking).filter(Booking.created_at >= cutoff_date) + + # VIP status + if 'is_vip' in criteria: + query = query.filter(User.is_vip == criteria['is_vip']) + + # Has bookings + if 'has_bookings' in criteria: + if criteria['has_bookings']: + query = query.join(Booking).distinct() + else: + query = query.outerjoin(Booking).filter(Booking.id.is_(None)) + + return query.distinct().all() + + @staticmethod + def send_campaign(db: Session, campaign_id: int) -> Dict[str, Any]: + """Send an email campaign""" + campaign = db.query(Campaign).filter(Campaign.id == campaign_id).first() + if not campaign: + raise ValueError("Campaign not found") + + if campaign.status not in [CampaignStatus.draft, CampaignStatus.scheduled]: + raise ValueError(f"Cannot send campaign with status: {campaign.status}") + + # Get recipients + recipients = EmailCampaignService.get_campaign_recipients(db, campaign) + campaign.total_recipients = len(recipients) + + # Update status + campaign.status = CampaignStatus.sending + db.commit() + + sent_count = 0 + failed_count = 0 + + for user in recipients: + # Check if user unsubscribed + if EmailCampaignService._is_unsubscribed(db, user.email, campaign): + continue + + try: + # Create campaign email record + campaign_email = CampaignEmail( + campaign_id=campaign.id, + user_id=user.id, + email=user.email, + status=EmailStatus.pending + ) + db.add(campaign_email) + db.flush() + + # Replace template variables + html_content = EmailCampaignService._replace_variables( + campaign.html_content or '', + user + ) + subject = EmailCampaignService._replace_variables( + campaign.subject, + user + ) + + # Send email (async function) + import asyncio + try: + loop = asyncio.get_event_loop() + except RuntimeError: + loop = asyncio.new_event_loop() + asyncio.set_event_loop(loop) + + loop.run_until_complete(send_email( + to=user.email, + subject=subject, + html=html_content, + text=campaign.text_content + )) + + campaign_email.status = EmailStatus.sent + campaign_email.sent_at = datetime.utcnow() + sent_count += 1 + + except Exception as e: + logger.error(f"Failed to send email to {user.email}: {str(e)}") + if 'campaign_email' in locals(): + campaign_email.status = EmailStatus.failed + campaign_email.error_message = str(e) + failed_count += 1 + + # Update campaign stats + campaign.total_sent = sent_count + campaign.status = CampaignStatus.sent + campaign.sent_at = datetime.utcnow() + db.commit() + + return { + "sent": sent_count, + "failed": failed_count, + "total": len(recipients) + } + + @staticmethod + def _replace_variables(content: str, user: User) -> str: + """Replace template variables with user data""" + replacements = { + '{{name}}': user.full_name or 'Guest', + '{{email}}': user.email, + '{{first_name}}': user.full_name.split()[0] if user.full_name else 'Guest', + } + + for key, value in replacements.items(): + content = content.replace(key, str(value)) + + return content + + @staticmethod + def _is_unsubscribed(db: Session, email: str, campaign: Optional[Campaign] = None) -> bool: + """Check if email is unsubscribed""" + query = db.query(Unsubscribe).filter(Unsubscribe.email == email) + + # Check for global unsubscribe + global_unsubscribe = query.filter(Unsubscribe.unsubscribe_all == True).first() + if global_unsubscribe: + return True + + # Check for campaign-specific unsubscribe + if campaign: + campaign_unsubscribe = query.filter( + Unsubscribe.campaign_id == campaign.id + ).first() + if campaign_unsubscribe: + return True + + # Check for type-specific unsubscribe + type_unsubscribe = query.filter( + Unsubscribe.unsubscribe_type == campaign.campaign_type.value + ).first() + if type_unsubscribe: + return True + + return False + + @staticmethod + def track_email_open(db: Session, campaign_email_id: int): + """Track email open""" + campaign_email = db.query(CampaignEmail).filter( + CampaignEmail.id == campaign_email_id + ).first() + + if campaign_email and campaign_email.status == EmailStatus.sent: + campaign_email.status = EmailStatus.opened + campaign_email.opened_at = datetime.utcnow() + campaign_email.open_count += 1 + campaign_email.last_opened_at = datetime.utcnow() + + # Update campaign stats + campaign = campaign_email.campaign + campaign.total_opened += 1 + if campaign.total_delivered > 0: + campaign.open_rate = (campaign.total_opened / campaign.total_delivered) * 100 + + db.commit() + + @staticmethod + def track_email_click(db: Session, campaign_email_id: int, url: str, ip_address: Optional[str] = None, user_agent: Optional[str] = None): + """Track email click""" + campaign_email = db.query(CampaignEmail).filter( + CampaignEmail.id == campaign_email_id + ).first() + + if campaign_email: + # Create click record + click = EmailClick( + campaign_email_id=campaign_email_id, + url=url, + ip_address=ip_address, + user_agent=user_agent + ) + db.add(click) + + # Update email status + if campaign_email.status == EmailStatus.opened: + campaign_email.status = EmailStatus.clicked + campaign_email.clicked_at = datetime.utcnow() + + campaign_email.click_count += 1 + campaign_email.last_clicked_at = datetime.utcnow() + + # Update campaign stats + campaign = campaign_email.campaign + campaign.total_clicked += 1 + if campaign.total_opened > 0: + campaign.click_rate = (campaign.total_clicked / campaign.total_opened) * 100 + + db.commit() + + @staticmethod + def create_segment( + db: Session, + name: str, + criteria: Dict[str, Any], + description: Optional[str] = None, + created_by: Optional[int] = None + ) -> CampaignSegment: + """Create a new campaign segment""" + segment = CampaignSegment( + name=name, + description=description, + criteria=criteria, + created_by=created_by + ) + db.add(segment) + db.commit() + db.refresh(segment) + + # Calculate estimated count + EmailCampaignService._calculate_segment_count(db, segment) + + return segment + + @staticmethod + def _calculate_segment_count(db: Session, segment: CampaignSegment): + """Calculate and update segment estimated count""" + users = EmailCampaignService._apply_segment_criteria(db, segment.criteria) + segment.estimated_count = len(users) + segment.last_calculated_at = datetime.utcnow() + db.commit() + + @staticmethod + def create_drip_sequence( + db: Session, + name: str, + trigger_event: Optional[str] = None, + description: Optional[str] = None, + created_by: Optional[int] = None + ) -> DripSequence: + """Create a new drip sequence""" + sequence = DripSequence( + name=name, + description=description, + trigger_event=trigger_event, + created_by=created_by + ) + db.add(sequence) + db.commit() + db.refresh(sequence) + return sequence + + @staticmethod + def add_drip_step( + db: Session, + sequence_id: int, + subject: str, + html_content: str, + delay_days: int = 0, + delay_hours: int = 0, + step_order: Optional[int] = None, + text_content: Optional[str] = None, + template_id: Optional[int] = None + ) -> DripSequenceStep: + """Add a step to a drip sequence""" + if step_order is None: + # Get next step order + last_step = db.query(DripSequenceStep).filter( + DripSequenceStep.sequence_id == sequence_id + ).order_by(DripSequenceStep.step_order.desc()).first() + step_order = (last_step.step_order + 1) if last_step else 1 + + step = DripSequenceStep( + sequence_id=sequence_id, + step_order=step_order, + subject=subject, + html_content=html_content, + text_content=text_content, + template_id=template_id, + delay_days=delay_days, + delay_hours=delay_hours + ) + db.add(step) + db.commit() + db.refresh(step) + return step + + @staticmethod + def enroll_user_in_drip( + db: Session, + sequence_id: int, + user_id: int, + trigger_data: Optional[Dict[str, Any]] = None + ) -> DripSequenceEnrollment: + """Enroll a user in a drip sequence""" + # Check if already enrolled + existing = db.query(DripSequenceEnrollment).filter( + DripSequenceEnrollment.sequence_id == sequence_id, + DripSequenceEnrollment.user_id == user_id, + DripSequenceEnrollment.completed == False + ).first() + + if existing: + return existing + + sequence = db.query(DripSequence).filter(DripSequence.id == sequence_id).first() + if not sequence: + raise ValueError("Drip sequence not found") + + # Get first step + first_step = db.query(DripSequenceStep).filter( + DripSequenceStep.sequence_id == sequence_id, + DripSequenceStep.step_order == 1 + ).first() + + if not first_step: + raise ValueError("Drip sequence has no steps") + + # Calculate next send time + next_send_at = datetime.utcnow() + timedelta(days=first_step.delay_days, hours=first_step.delay_hours) + + enrollment = DripSequenceEnrollment( + sequence_id=sequence_id, + user_id=user_id, + current_step=0, + next_send_at=next_send_at, + trigger_data=trigger_data + ) + db.add(enrollment) + db.commit() + db.refresh(enrollment) + return enrollment + + @staticmethod + def process_drip_sequences(db: Session): + """Process drip sequences and send pending emails""" + # Get enrollments ready to send + enrollments = db.query(DripSequenceEnrollment).filter( + DripSequenceEnrollment.completed == False, + DripSequenceEnrollment.next_send_at <= datetime.utcnow() + ).all() + + for enrollment in enrollments: + try: + sequence = enrollment.sequence + if not sequence.is_active: + continue + + # Get next step + next_step_order = enrollment.current_step + 1 + step = db.query(DripSequenceStep).filter( + DripSequenceStep.sequence_id == sequence.id, + DripSequenceStep.step_order == next_step_order, + DripSequenceStep.is_active == True + ).first() + + if not step: + # Sequence completed + enrollment.completed = True + enrollment.completed_at = datetime.utcnow() + db.commit() + continue + + # Get user + user = db.query(User).filter(User.id == enrollment.user_id).first() + if not user or not user.is_active: + continue + + # Check unsubscribe + if EmailCampaignService._is_unsubscribed(db, user.email): + enrollment.completed = True + db.commit() + continue + + # Send email + html_content = EmailCampaignService._replace_variables(step.html_content, user) + subject = EmailCampaignService._replace_variables(step.subject, user) + + # Send email (async function) + import asyncio + try: + loop = asyncio.get_event_loop() + except RuntimeError: + loop = asyncio.new_event_loop() + asyncio.set_event_loop(loop) + + loop.run_until_complete(send_email( + to=user.email, + subject=subject, + html=html_content, + text=step.text_content + )) + + # Update enrollment + enrollment.current_step = next_step_order + + # Calculate next send time + next_step = db.query(DripSequenceStep).filter( + DripSequenceStep.sequence_id == sequence.id, + DripSequenceStep.step_order == next_step_order + 1 + ).first() + + if next_step: + enrollment.next_send_at = datetime.utcnow() + timedelta( + days=next_step.delay_days, + hours=next_step.delay_hours + ) + else: + enrollment.completed = True + enrollment.completed_at = datetime.utcnow() + + db.commit() + + except Exception as e: + logger.error(f"Error processing drip enrollment {enrollment.id}: {str(e)}") + db.rollback() + + @staticmethod + def handle_abandoned_booking(db: Session, booking_id: int): + """Handle abandoned booking recovery email""" + booking = db.query(Booking).filter(Booking.id == booking_id).first() + if not booking: + return + + # Find abandoned booking recovery sequence + sequence = db.query(DripSequence).filter( + DripSequence.trigger_event == 'checkout_abandoned', + DripSequence.is_active == True + ).first() + + if sequence and booking.user_id: + EmailCampaignService.enroll_user_in_drip( + db=db, + sequence_id=sequence.id, + user_id=booking.user_id, + trigger_data={"booking_id": booking_id} + ) + +email_campaign_service = EmailCampaignService() + diff --git a/Backend/src/services/encryption_service.py b/Backend/src/services/encryption_service.py new file mode 100644 index 00000000..2d254b7f --- /dev/null +++ b/Backend/src/services/encryption_service.py @@ -0,0 +1,89 @@ +from cryptography.fernet import Fernet +from cryptography.hazmat.primitives import hashes +from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC +from cryptography.hazmat.backends import default_backend +import base64 +import os +from typing import Optional +import logging +from ..config.settings import settings + +logger = logging.getLogger(__name__) + +class EncryptionService: + """Service for data encryption at rest""" + + def __init__(self, encryption_key: Optional[str] = None): + """ + Initialize encryption service + + Args: + encryption_key: Base64-encoded encryption key. If not provided, will use ENCRYPTION_KEY from settings or env var + """ + if encryption_key: + self.key = encryption_key.encode() + else: + # Try to get key from settings first (loads from .env), then fall back to os.getenv + key_str = getattr(settings, 'ENCRYPTION_KEY', None) or os.getenv('ENCRYPTION_KEY') + if not key_str or key_str.strip() == '': + # Generate a key if not provided (for development only) + logger.warning("ENCRYPTION_KEY not set. Generating temporary key. This should be set in production!") + key = Fernet.generate_key() + self.key = key + else: + self.key = key_str.encode() + + try: + self.cipher = Fernet(self.key) + except Exception as e: + logger.error(f"Failed to initialize encryption: {str(e)}") + raise + + def encrypt(self, data: str) -> str: + """Encrypt a string""" + try: + if not data: + return data + encrypted = self.cipher.encrypt(data.encode()) + return base64.urlsafe_b64encode(encrypted).decode() + except Exception as e: + logger.error(f"Encryption failed: {str(e)}") + raise + + def decrypt(self, encrypted_data: str) -> str: + """Decrypt a string""" + try: + if not encrypted_data: + return encrypted_data + decoded = base64.urlsafe_b64decode(encrypted_data.encode()) + decrypted = self.cipher.decrypt(decoded) + return decrypted.decode() + except Exception as e: + logger.error(f"Decryption failed: {str(e)}") + raise + + def encrypt_dict(self, data: dict) -> dict: + """Encrypt sensitive fields in a dictionary""" + encrypted = {} + sensitive_fields = ['password', 'token', 'secret', 'key', 'api_key', 'access_token', 'refresh_token'] + + for key, value in data.items(): + if any(sensitive in key.lower() for sensitive in sensitive_fields): + if isinstance(value, str): + encrypted[key] = self.encrypt(value) + else: + encrypted[key] = value + else: + encrypted[key] = value + + return encrypted + + @staticmethod + def generate_key() -> str: + """Generate a new encryption key""" + key = Fernet.generate_key() + return key.decode() + +# Global instance +encryption_service = EncryptionService() + diff --git a/Backend/src/services/gdpr_service.py b/Backend/src/services/gdpr_service.py new file mode 100644 index 00000000..fec41425 --- /dev/null +++ b/Backend/src/services/gdpr_service.py @@ -0,0 +1,215 @@ +from sqlalchemy.orm import Session +from typing import Optional, Dict, Any, List +from datetime import datetime +import secrets +import logging + +from ..models.gdpr_compliance import ( + DataSubjectRequest, + DataSubjectRequestType, + DataSubjectRequestStatus, + DataRetentionPolicy, + ConsentRecord +) +from ..models.user import User +from ..models.booking import Booking +from ..models.payment import Payment +from ..models.review import Review +from ..config.logging_config import get_logger + +logger = get_logger(__name__) + +class GDPRService: + """Service for GDPR compliance operations""" + + @staticmethod + def create_data_subject_request( + db: Session, + email: str, + request_type: DataSubjectRequestType, + description: Optional[str] = None, + ip_address: Optional[str] = None, + user_agent: Optional[str] = None + ) -> DataSubjectRequest: + """Create a new data subject request""" + # Find user by email + user = db.query(User).filter(User.email == email.lower()).first() + + # Generate verification token + verification_token = secrets.token_urlsafe(32) + + request = DataSubjectRequest( + user_id=user.id if user else None, + email=email.lower(), + request_type=request_type, + status=DataSubjectRequestStatus.pending, + description=description, + verification_token=verification_token, + ip_address=ip_address, + user_agent=user_agent + ) + + db.add(request) + db.commit() + db.refresh(request) + + logger.info(f"Data subject request created: {request_type.value} for {email}") + return request + + @staticmethod + def verify_request(db: Session, verification_token: str) -> bool: + """Verify a data subject request""" + request = db.query(DataSubjectRequest).filter( + DataSubjectRequest.verification_token == verification_token + ).first() + + if not request: + return False + + request.verified = True + request.verified_at = datetime.utcnow() + db.commit() + + return True + + @staticmethod + def get_user_data(db: Session, user_id: int) -> Dict[str, Any]: + """Get all data for a user (for access request)""" + user = db.query(User).filter(User.id == user_id).first() + if not user: + raise ValueError("User not found") + + # Collect all user data + data = { + "user": { + "id": user.id, + "email": user.email, + "full_name": user.full_name, + "phone": user.phone, + "created_at": user.created_at.isoformat() if user.created_at else None, + }, + "bookings": [], + "payments": [], + "reviews": [], + } + + # Get bookings + bookings = db.query(Booking).filter(Booking.user_id == user_id).all() + for booking in bookings: + data["bookings"].append({ + "id": booking.id, + "booking_number": booking.booking_number, + "check_in_date": booking.check_in_date.isoformat() if booking.check_in_date else None, + "check_out_date": booking.check_out_date.isoformat() if booking.check_out_date else None, + "total_price": float(booking.total_price) if booking.total_price else None, + "status": booking.status.value if hasattr(booking.status, 'value') else booking.status, + }) + + # Get payments + payments = db.query(Payment).filter(Payment.booking.has(user_id=user_id)).all() + for payment in payments: + data["payments"].append({ + "id": payment.id, + "amount": float(payment.amount) if payment.amount else None, + "payment_method": payment.payment_method, + "payment_status": payment.payment_status, + "payment_date": payment.payment_date.isoformat() if payment.payment_date else None, + }) + + # Get reviews + reviews = db.query(Review).filter(Review.user_id == user_id).all() + for review in reviews: + data["reviews"].append({ + "id": review.id, + "rating": review.rating, + "comment": review.comment, + "created_at": review.created_at.isoformat() if review.created_at else None, + }) + + return data + + @staticmethod + def delete_user_data(db: Session, user_id: int) -> bool: + """Delete all user data (for erasure request)""" + try: + user = db.query(User).filter(User.id == user_id).first() + if not user: + return False + + # Anonymize user data instead of deleting (for audit trail) + user.email = f"deleted_{user.id}@deleted.local" + user.full_name = "Deleted User" + user.phone = None + user.password = "deleted" # Invalidate password + + # Delete related data + # Note: In production, you might want to soft-delete or anonymize instead + db.query(Booking).filter(Booking.user_id == user_id).delete() + db.query(Review).filter(Review.user_id == user_id).delete() + + db.commit() + logger.info(f"User data deleted/anonymized for user {user_id}") + return True + except Exception as e: + logger.error(f"Error deleting user data: {str(e)}") + db.rollback() + return False + + @staticmethod + def export_user_data(db: Session, user_id: int) -> Dict[str, Any]: + """Export user data in portable format (for portability request)""" + return GDPRService.get_user_data(db, user_id) + + @staticmethod + def record_consent( + db: Session, + user_id: int, + consent_type: str, + granted: bool, + ip_address: Optional[str] = None, + user_agent: Optional[str] = None, + version: Optional[str] = None + ) -> ConsentRecord: + """Record user consent""" + # Revoke previous consent if granting new one + if granted: + previous = db.query(ConsentRecord).filter( + ConsentRecord.user_id == user_id, + ConsentRecord.consent_type == consent_type, + ConsentRecord.revoked_at.is_(None) + ).first() + + if previous: + previous.revoked_at = datetime.utcnow() + + consent = ConsentRecord( + user_id=user_id, + consent_type=consent_type, + granted=granted, + granted_at=datetime.utcnow() if granted else None, + revoked_at=datetime.utcnow() if not granted else None, + ip_address=ip_address, + user_agent=user_agent, + version=version + ) + + db.add(consent) + db.commit() + db.refresh(consent) + + return consent + + @staticmethod + def check_consent(db: Session, user_id: int, consent_type: str) -> bool: + """Check if user has granted consent""" + consent = db.query(ConsentRecord).filter( + ConsentRecord.user_id == user_id, + ConsentRecord.consent_type == consent_type, + ConsentRecord.granted == True, + ConsentRecord.revoked_at.is_(None) + ).order_by(ConsentRecord.granted_at.desc()).first() + + return consent is not None + +gdpr_service = GDPRService() + diff --git a/Backend/src/services/group_booking_service.py b/Backend/src/services/group_booking_service.py new file mode 100644 index 00000000..a736a7b9 --- /dev/null +++ b/Backend/src/services/group_booking_service.py @@ -0,0 +1,574 @@ +from sqlalchemy.orm import Session +from datetime import datetime, timedelta +from typing import Optional, List, Dict, Any +import random +import string +from decimal import Decimal +from ..models.group_booking import ( + GroupBooking, GroupBookingMember, GroupRoomBlock, GroupPayment, + GroupBookingStatus, PaymentOption +) +from ..models.booking import Booking, BookingStatus +from ..models.room import Room, RoomStatus +from ..models.room_type import RoomType +from ..models.user import User +from ..models.payment import Payment, PaymentStatus, PaymentMethod +import logging + +logger = logging.getLogger(__name__) + +class GroupBookingService: + + @staticmethod + def generate_group_booking_number(db: Session) -> str: + """Generate unique group booking number""" + max_attempts = 10 + for _ in range(max_attempts): + timestamp = datetime.utcnow().strftime('%Y%m%d') + random_suffix = ''.join(random.choices(string.ascii_uppercase + string.digits, k=6)) + booking_number = f"GRP-{timestamp}-{random_suffix}" + + existing = db.query(GroupBooking).filter( + GroupBooking.group_booking_number == booking_number + ).first() + + if not existing: + return booking_number + + # Fallback + return f"GRP-{int(datetime.utcnow().timestamp())}" + + @staticmethod + def calculate_group_discount( + total_rooms: int, + base_rate: Decimal, + discount_percentage: Optional[float] = None + ) -> Dict[str, Any]: + """Calculate group discount based on number of rooms""" + original_total = base_rate * total_rooms + + # Default discount tiers + if discount_percentage is None: + if total_rooms >= 20: + discount_percentage = 15.0 + elif total_rooms >= 10: + discount_percentage = 10.0 + elif total_rooms >= 5: + discount_percentage = 5.0 + else: + discount_percentage = 0.0 + + discount_amount = original_total * Decimal(str(discount_percentage)) / Decimal('100') + total_price = original_total - discount_amount + + return { + 'original_total': float(original_total), + 'discount_percentage': discount_percentage, + 'discount_amount': float(discount_amount), + 'total_price': float(total_price) + } + + @staticmethod + def check_room_availability( + db: Session, + room_type_id: int, + check_in: datetime, + check_out: datetime, + num_rooms: int + ) -> Dict[str, Any]: + """Check if enough rooms are available for blocking""" + # Get all rooms of this type + rooms = db.query(Room).filter(Room.room_type_id == room_type_id).all() + + if len(rooms) < num_rooms: + return { + 'available': False, + 'available_count': len(rooms), + 'required_count': num_rooms, + 'message': f'Only {len(rooms)} rooms available, {num_rooms} required' + } + + # Check for conflicting bookings + available_rooms = [] + for room in rooms: + # Check if room has any bookings during the period + conflicting_bookings = db.query(Booking).filter( + Booking.room_id == room.id, + Booking.status.in_([BookingStatus.confirmed, BookingStatus.checked_in]), + Booking.check_in_date < check_out, + Booking.check_out_date > check_in + ).count() + + # Check for other group blocks + conflicting_blocks = db.query(GroupRoomBlock).join(GroupBooking).filter( + GroupRoomBlock.room_type_id == room_type_id, + GroupBooking.status.in_([ + GroupBookingStatus.confirmed, + GroupBookingStatus.partially_confirmed, + GroupBookingStatus.checked_in + ]), + GroupBooking.check_in_date < check_out, + GroupBooking.check_out_date > check_in, + GroupRoomBlock.is_active == True + ).count() + + if conflicting_bookings == 0 and conflicting_blocks == 0: + available_rooms.append(room) + + if len(available_rooms) < num_rooms: + return { + 'available': False, + 'available_count': len(available_rooms), + 'required_count': num_rooms, + 'message': f'Only {len(available_rooms)} rooms available for the selected dates' + } + + return { + 'available': True, + 'available_count': len(available_rooms), + 'required_count': num_rooms + } + + @staticmethod + def create_group_booking( + db: Session, + coordinator_id: int, + coordinator_name: str, + coordinator_email: str, + coordinator_phone: Optional[str], + check_in_date: datetime, + check_out_date: datetime, + room_blocks: List[Dict[str, Any]], + group_name: Optional[str] = None, + group_type: Optional[str] = None, + payment_option: PaymentOption = PaymentOption.coordinator_pays_all, + deposit_required: bool = False, + deposit_percentage: Optional[int] = None, + special_requests: Optional[str] = None, + notes: Optional[str] = None, + cancellation_policy: Optional[str] = None, + cancellation_deadline: Optional[datetime] = None, + cancellation_penalty_percentage: Optional[float] = None, + group_discount_percentage: Optional[float] = None + ) -> GroupBooking: + """Create a new group booking with room blocks""" + + # Validate dates + if check_out_date <= check_in_date: + raise ValueError("Check-out date must be after check-in date") + + # Calculate total rooms and base pricing + total_rooms = sum(block.get('num_rooms', 0) for block in room_blocks) + if total_rooms == 0: + raise ValueError("At least one room must be blocked") + + # Calculate pricing for each room block + total_original_price = Decimal('0') + room_block_objects = [] + + for block_data in room_blocks: + room_type_id = block_data.get('room_type_id') + num_rooms = block_data.get('num_rooms', 0) + rate_per_room = Decimal(str(block_data.get('rate_per_room', 0))) + + if not room_type_id or num_rooms <= 0: + continue + + # Check availability + availability = GroupBookingService.check_room_availability( + db, room_type_id, check_in_date, check_out_date, num_rooms + ) + + if not availability['available']: + raise ValueError(availability.get('message', 'Rooms not available')) + + # Get room type + room_type = db.query(RoomType).filter(RoomType.id == room_type_id).first() + if not room_type: + raise ValueError(f"Room type {room_type_id} not found") + + block_total = rate_per_room * num_rooms + total_original_price += block_total + + # Create room block object (will be saved later) + room_block = GroupRoomBlock( + room_type_id=room_type_id, + rooms_blocked=num_rooms, + rooms_confirmed=0, + rooms_available=num_rooms, + rate_per_room=rate_per_room, + total_block_price=block_total, + is_active=True + ) + room_block_objects.append(room_block) + + # Calculate group discount + base_rate = total_original_price / total_rooms if total_rooms > 0 else Decimal('0') + pricing = GroupBookingService.calculate_group_discount( + total_rooms, base_rate, group_discount_percentage + ) + + # Calculate deposit + deposit_amount = None + if deposit_required: + if deposit_percentage: + deposit_amount = Decimal(str(pricing['total_price'])) * Decimal(str(deposit_percentage)) / Decimal('100') + else: + deposit_amount = Decimal(str(pricing['total_price'])) * Decimal('0.2') # Default 20% + + # Create group booking + group_booking_number = GroupBookingService.generate_group_booking_number(db) + + group_booking = GroupBooking( + group_booking_number=group_booking_number, + coordinator_id=coordinator_id, + coordinator_name=coordinator_name, + coordinator_email=coordinator_email, + coordinator_phone=coordinator_phone, + group_name=group_name, + group_type=group_type, + total_rooms=total_rooms, + total_guests=0, # Will be updated when members are added + check_in_date=check_in_date, + check_out_date=check_out_date, + base_rate_per_room=base_rate, + group_discount_percentage=Decimal(str(pricing['discount_percentage'])), + group_discount_amount=Decimal(str(pricing['discount_amount'])), + original_total_price=Decimal(str(pricing['original_total'])), + discount_amount=Decimal(str(pricing['discount_amount'])), + total_price=Decimal(str(pricing['total_price'])), + payment_option=payment_option, + deposit_required=deposit_required, + deposit_percentage=deposit_percentage, + deposit_amount=deposit_amount, + amount_paid=Decimal('0'), + balance_due=Decimal(str(pricing['total_price'])), + status=GroupBookingStatus.draft, + cancellation_policy=cancellation_policy, + cancellation_deadline=cancellation_deadline, + cancellation_penalty_percentage=Decimal(str(cancellation_penalty_percentage)) if cancellation_penalty_percentage else None, + special_requests=special_requests, + notes=notes + ) + + db.add(group_booking) + db.flush() + + # Add room blocks + for room_block in room_block_objects: + room_block.group_booking_id = group_booking.id + db.add(room_block) + + db.commit() + db.refresh(group_booking) + + return group_booking + + @staticmethod + def add_member_to_group( + db: Session, + group_booking_id: int, + full_name: str, + email: Optional[str] = None, + phone: Optional[str] = None, + user_id: Optional[int] = None, + room_block_id: Optional[int] = None, + special_requests: Optional[str] = None, + preferences: Optional[Dict[str, Any]] = None + ) -> GroupBookingMember: + """Add a member to a group booking""" + + group_booking = db.query(GroupBooking).filter( + GroupBooking.id == group_booking_id + ).first() + + if not group_booking: + raise ValueError("Group booking not found") + + # Calculate individual amount if individual payment + individual_amount = None + if group_booking.payment_option == PaymentOption.individual_payments: + # Distribute cost evenly among members + current_member_count = db.query(GroupBookingMember).filter( + GroupBookingMember.group_booking_id == group_booking_id + ).count() + individual_amount = group_booking.total_price / (current_member_count + 1) + + member = GroupBookingMember( + group_booking_id=group_booking_id, + full_name=full_name, + email=email, + phone=phone, + user_id=user_id, + room_block_id=room_block_id, + special_requests=special_requests, + preferences=preferences, + individual_amount=individual_amount, + individual_paid=Decimal('0'), + individual_balance=individual_amount if individual_amount else Decimal('0') + ) + + db.add(member) + + # Update total guests + group_booking.total_guests += 1 + + db.commit() + db.refresh(member) + + return member + + @staticmethod + def confirm_group_booking( + db: Session, + group_booking_id: int + ) -> GroupBooking: + """Confirm a group booking and activate room blocks""" + + group_booking = db.query(GroupBooking).filter( + GroupBooking.id == group_booking_id + ).first() + + if not group_booking: + raise ValueError("Group booking not found") + + if group_booking.status not in [GroupBookingStatus.draft, GroupBookingStatus.pending]: + raise ValueError(f"Cannot confirm booking with status {group_booking.status}") + + # Re-check availability + room_blocks = db.query(GroupRoomBlock).filter( + GroupRoomBlock.group_booking_id == group_booking_id + ).all() + + for room_block in room_blocks: + availability = GroupBookingService.check_room_availability( + db, + room_block.room_type_id, + group_booking.check_in_date, + group_booking.check_out_date, + room_block.rooms_blocked + ) + + if not availability['available']: + raise ValueError(f"Rooms no longer available: {availability.get('message')}") + + # Update status + group_booking.status = GroupBookingStatus.confirmed + group_booking.confirmed_at = datetime.utcnow() + + db.commit() + db.refresh(group_booking) + + return group_booking + + @staticmethod + def create_individual_booking_from_member( + db: Session, + member_id: int, + room_id: int + ) -> Booking: + """Create an individual booking for a group member""" + + member = db.query(GroupBookingMember).filter( + GroupBookingMember.id == member_id + ).first() + + if not member: + raise ValueError("Group member not found") + + group_booking = member.group_booking + + if not group_booking: + raise ValueError("Group booking not found") + + # Check if room is available + room = db.query(Room).filter(Room.id == room_id).first() + if not room: + raise ValueError("Room not found") + + # Verify room type matches + if member.room_block_id: + room_block = db.query(GroupRoomBlock).filter( + GroupRoomBlock.id == member.room_block_id + ).first() + if room_block and room.room_type_id != room_block.room_type_id: + raise ValueError("Room type does not match the assigned room block") + + # Calculate price for this booking + nights = (group_booking.check_out_date - group_booking.check_in_date).days + if nights <= 0: + nights = 1 + + if member.individual_amount: + booking_price = member.individual_amount + else: + # Use proportional share + booking_price = group_booking.total_price / group_booking.total_rooms + + # Generate booking number + import random + prefix = 'BK' + ts = int(datetime.utcnow().timestamp() * 1000) + rand = random.randint(1000, 9999) + booking_number = f'{prefix}-{ts}-{rand}' + + # Ensure uniqueness + existing = db.query(Booking).filter(Booking.booking_number == booking_number).first() + if existing: + booking_number = f'{prefix}-{ts}-{rand + 1}' + + # Create booking + booking = Booking( + booking_number=booking_number, + user_id=member.user_id if member.user_id else group_booking.coordinator_id, + room_id=room_id, + check_in_date=group_booking.check_in_date, + check_out_date=group_booking.check_out_date, + num_guests=1, + total_price=booking_price, + original_price=booking_price, + discount_amount=Decimal('0'), + status=BookingStatus.confirmed, + deposit_paid=False, + requires_deposit=False, + special_requests=member.special_requests, + group_booking_id=group_booking_id + ) + + db.add(booking) + + # Update member + member.assigned_room_id = room_id + member.individual_booking_id = booking.id + + # Update room block + if member.room_block_id: + room_block = db.query(GroupRoomBlock).filter( + GroupRoomBlock.id == member.room_block_id + ).first() + if room_block: + room_block.rooms_confirmed += 1 + room_block.rooms_available -= 1 + + # Update group booking status + confirmed_count = db.query(GroupBookingMember).filter( + GroupBookingMember.group_booking_id == group_booking_id, + GroupBookingMember.individual_booking_id.isnot(None) + ).count() + + if confirmed_count == group_booking.total_rooms: + group_booking.status = GroupBookingStatus.confirmed + elif confirmed_count > 0: + group_booking.status = GroupBookingStatus.partially_confirmed + + db.commit() + db.refresh(booking) + + return booking + + @staticmethod + def add_group_payment( + db: Session, + group_booking_id: int, + amount: Decimal, + payment_method: str, + payment_type: str = 'deposit', + transaction_id: Optional[str] = None, + paid_by_member_id: Optional[int] = None, + paid_by_user_id: Optional[int] = None, + notes: Optional[str] = None + ) -> GroupPayment: + """Add a payment to a group booking""" + + group_booking = db.query(GroupBooking).filter( + GroupBooking.id == group_booking_id + ).first() + + if not group_booking: + raise ValueError("Group booking not found") + + payment = GroupPayment( + group_booking_id=group_booking_id, + amount=amount, + payment_method=payment_method, + payment_type=payment_type, + payment_status='completed', + transaction_id=transaction_id, + payment_date=datetime.utcnow(), + paid_by_member_id=paid_by_member_id, + paid_by_user_id=paid_by_user_id, + notes=notes + ) + + db.add(payment) + + # Update group booking payment totals + group_booking.amount_paid += amount + group_booking.balance_due = group_booking.total_price - group_booking.amount_paid + + # Update member payment if individual payment + if paid_by_member_id: + member = db.query(GroupBookingMember).filter( + GroupBookingMember.id == paid_by_member_id + ).first() + if member: + member.individual_paid += amount + member.individual_balance = (member.individual_amount or Decimal('0')) - member.individual_paid + + db.commit() + db.refresh(payment) + + return payment + + @staticmethod + def cancel_group_booking( + db: Session, + group_booking_id: int, + cancellation_reason: Optional[str] = None + ) -> GroupBooking: + """Cancel a group booking""" + + group_booking = db.query(GroupBooking).filter( + GroupBooking.id == group_booking_id + ).first() + + if not group_booking: + raise ValueError("Group booking not found") + + if group_booking.status in [GroupBookingStatus.checked_out, GroupBookingStatus.cancelled]: + raise ValueError(f"Cannot cancel booking with status {group_booking.status}") + + # Calculate cancellation penalty + penalty_amount = Decimal('0') + if group_booking.cancellation_penalty_percentage: + penalty_amount = group_booking.total_price * ( + Decimal(str(group_booking.cancellation_penalty_percentage)) / Decimal('100') + ) + + # Update status + group_booking.status = GroupBookingStatus.cancelled + group_booking.cancelled_at = datetime.utcnow() + + # Release room blocks + room_blocks = db.query(GroupRoomBlock).filter( + GroupRoomBlock.group_booking_id == group_booking_id + ).all() + + for room_block in room_blocks: + room_block.is_active = False + room_block.block_released_at = datetime.utcnow() + + # Cancel individual bookings if any + individual_bookings = db.query(Booking).filter( + Booking.group_booking_id == group_booking_id + ).all() + + for booking in individual_bookings: + if booking.status not in [BookingStatus.checked_out, BookingStatus.cancelled]: + booking.status = BookingStatus.cancelled + + db.commit() + db.refresh(group_booking) + + return group_booking + diff --git a/Backend/src/services/guest_profile_service.py b/Backend/src/services/guest_profile_service.py index 01679be3..5ba067f4 100644 --- a/Backend/src/services/guest_profile_service.py +++ b/Backend/src/services/guest_profile_service.py @@ -1,4 +1,4 @@ -from sqlalchemy.orm import Session +from sqlalchemy.orm import Session, load_only from sqlalchemy import func, and_, or_, desc from typing import List, Dict, Optional from datetime import datetime, timedelta @@ -50,7 +50,17 @@ class GuestProfileService: @staticmethod def get_booking_history(user_id: int, db: Session, limit: Optional[int] = None) -> List[Booking]: """Get complete booking history for a guest""" - query = db.query(Booking).filter(Booking.user_id == user_id).order_by(desc(Booking.created_at)) + # Use load_only to exclude non-existent columns (rate_plan_id, group_booking_id) + query = db.query(Booking).options( + load_only( + Booking.id, Booking.booking_number, Booking.user_id, Booking.room_id, + Booking.check_in_date, Booking.check_out_date, Booking.num_guests, + Booking.total_price, Booking.original_price, Booking.discount_amount, + Booking.promotion_code, Booking.status, Booking.deposit_paid, + Booking.requires_deposit, Booking.special_requests, + Booking.created_at, Booking.updated_at + ) + ).filter(Booking.user_id == user_id).order_by(desc(Booking.created_at)) if limit: query = query.limit(limit) return query.all() @@ -58,18 +68,30 @@ class GuestProfileService: @staticmethod def get_booking_statistics(user_id: int, db: Session) -> Dict: """Get booking statistics for a guest""" - total_bookings = db.query(Booking).filter(Booking.user_id == user_id).count() - completed_bookings = db.query(Booking).filter( + # Use func.count with load_only to exclude non-existent columns (rate_plan_id, group_booking_id) + # This avoids SQLAlchemy generating subqueries with all columns + total_bookings = db.query(func.count(Booking.id)).filter(Booking.user_id == user_id).scalar() or 0 + completed_bookings = db.query(func.count(Booking.id)).filter( Booking.user_id == user_id, Booking.status == BookingStatus.checked_out - ).count() - cancelled_bookings = db.query(Booking).filter( + ).scalar() or 0 + cancelled_bookings = db.query(func.count(Booking.id)).filter( Booking.user_id == user_id, Booking.status == BookingStatus.cancelled - ).count() + ).scalar() or 0 # Get last visit date - last_booking = db.query(Booking).filter( + # Use load_only to exclude non-existent columns (rate_plan_id, group_booking_id) + last_booking = db.query(Booking).options( + load_only( + Booking.id, Booking.booking_number, Booking.user_id, Booking.room_id, + Booking.check_in_date, Booking.check_out_date, Booking.num_guests, + Booking.total_price, Booking.original_price, Booking.discount_amount, + Booking.promotion_code, Booking.status, Booking.deposit_paid, + Booking.requires_deposit, Booking.special_requests, + Booking.created_at, Booking.updated_at + ) + ).filter( Booking.user_id == user_id, Booking.status == BookingStatus.checked_out ).order_by(desc(Booking.check_in_date)).first() @@ -77,6 +99,7 @@ class GuestProfileService: last_visit_date = last_booking.check_in_date if last_booking else None # Get total nights stayed + # Aggregate queries don't need load_only as they don't load full objects total_nights = db.query( func.sum(func.extract('day', Booking.check_out_date - Booking.check_in_date)) ).filter( @@ -177,7 +200,17 @@ class GuestProfileService: satisfaction_score = GuestProfileService.calculate_satisfaction_score(user_id, db) # Get preferred room types - bookings = db.query(Booking).filter(Booking.user_id == user_id).all() + # Use load_only to exclude non-existent columns (rate_plan_id, group_booking_id) + bookings = db.query(Booking).options( + load_only( + Booking.id, Booking.booking_number, Booking.user_id, Booking.room_id, + Booking.check_in_date, Booking.check_out_date, Booking.num_guests, + Booking.total_price, Booking.original_price, Booking.discount_amount, + Booking.promotion_code, Booking.status, Booking.deposit_paid, + Booking.requires_deposit, Booking.special_requests, + Booking.created_at, Booking.updated_at + ) + ).filter(Booking.user_id == user_id).all() room_type_counts = {} for booking in bookings: if booking.room and booking.room.room_type: diff --git a/Backend/src/services/notification_service.py b/Backend/src/services/notification_service.py new file mode 100644 index 00000000..7cc9d665 --- /dev/null +++ b/Backend/src/services/notification_service.py @@ -0,0 +1,373 @@ +from sqlalchemy.orm import Session +from sqlalchemy import and_, or_, func, desc +from typing import Optional, Dict, List, Any +from datetime import datetime, timedelta +from ..models.notification import ( + Notification, NotificationTemplate, NotificationPreference, NotificationDeliveryLog, + NotificationChannel, NotificationStatus, NotificationType +) +from ..models.user import User +from ..models.booking import Booking +from ..models.payment import Payment +import logging +import re + +logger = logging.getLogger(__name__) + +class NotificationService: + """Multi-channel Notification Service""" + + @staticmethod + def get_user_preferences(db: Session, user_id: int) -> NotificationPreference: + """Get or create user notification preferences""" + preferences = db.query(NotificationPreference).filter( + NotificationPreference.user_id == user_id + ).first() + + if not preferences: + preferences = NotificationPreference(user_id=user_id) + db.add(preferences) + db.commit() + db.refresh(preferences) + + return preferences + + @staticmethod + def update_user_preferences( + db: Session, + user_id: int, + preferences_data: Dict[str, Any] + ) -> NotificationPreference: + """Update user notification preferences""" + preferences = NotificationService.get_user_preferences(db, user_id) + + for key, value in preferences_data.items(): + if hasattr(preferences, key): + setattr(preferences, key, value) + + preferences.updated_at = datetime.utcnow() + db.commit() + db.refresh(preferences) + return preferences + + @staticmethod + def should_send_notification( + db: Session, + user_id: int, + notification_type: NotificationType, + channel: NotificationChannel + ) -> bool: + """Check if notification should be sent based on user preferences""" + preferences = NotificationService.get_user_preferences(db, user_id) + + # Check global channel preference + channel_attr = f'{channel.value}_enabled' + if not getattr(preferences, channel_attr, False): + return False + + # Check type-specific preference + type_attr = f'{notification_type.value}_{channel.value}' + return getattr(preferences, type_attr, True) + + @staticmethod + def create_template( + db: Session, + name: str, + notification_type: NotificationType, + channel: NotificationChannel, + content: str, + created_by: int, + subject: Optional[str] = None, + variables: Optional[List[str]] = None + ) -> NotificationTemplate: + """Create notification template""" + template = NotificationTemplate( + name=name, + notification_type=notification_type, + channel=channel, + subject=subject, + content=content, + variables=variables or [], + created_by=created_by, + is_active=True + ) + db.add(template) + db.commit() + db.refresh(template) + return template + + @staticmethod + def get_template( + db: Session, + notification_type: NotificationType, + channel: NotificationChannel + ) -> Optional[NotificationTemplate]: + """Get active template for notification type and channel""" + return db.query(NotificationTemplate).filter( + and_( + NotificationTemplate.notification_type == notification_type, + NotificationTemplate.channel == channel, + NotificationTemplate.is_active == True + ) + ).first() + + @staticmethod + def render_template(template: NotificationTemplate, variables: Dict[str, Any]) -> Dict[str, str]: + """Render template with variables""" + content = template.content + subject = template.subject or '' + + # Replace variables in format {{variable_name}} + for key, value in variables.items(): + placeholder = f'{{{{{key}}}}}' + content = content.replace(placeholder, str(value)) + subject = subject.replace(placeholder, str(value)) + + return {'subject': subject, 'content': content} + + @staticmethod + def send_notification( + db: Session, + user_id: Optional[int], + notification_type: NotificationType, + channel: NotificationChannel, + content: str, + subject: Optional[str] = None, + template_id: Optional[int] = None, + priority: str = 'normal', + scheduled_at: Optional[datetime] = None, + booking_id: Optional[int] = None, + payment_id: Optional[int] = None, + meta_data: Optional[Dict[str, Any]] = None + ) -> Notification: + """Create and send notification""" + # Check user preferences if user_id is provided + if user_id and not NotificationService.should_send_notification(db, user_id, notification_type, channel): + logger.info(f"Notification skipped due to user preferences: user_id={user_id}, type={notification_type}, channel={channel}") + # Still create notification but mark as skipped + notification = Notification( + user_id=user_id, + notification_type=notification_type, + channel=channel, + subject=subject, + content=content, + template_id=template_id, + status=NotificationStatus.failed, + priority=priority, + scheduled_at=scheduled_at, + booking_id=booking_id, + payment_id=payment_id, + meta_data={**(meta_data or {}), 'skipped': True, 'reason': 'user_preference'} + ) + db.add(notification) + db.commit() + return notification + + notification = Notification( + user_id=user_id, + notification_type=notification_type, + channel=channel, + subject=subject, + content=content, + template_id=template_id, + status=NotificationStatus.pending, + priority=priority, + scheduled_at=scheduled_at, + booking_id=booking_id, + payment_id=payment_id, + meta_data=meta_data or {} + ) + db.add(notification) + db.commit() + db.refresh(notification) + + # Send notification if not scheduled + if not scheduled_at or scheduled_at <= datetime.utcnow(): + NotificationService._deliver_notification(db, notification) + + return notification + + @staticmethod + def _deliver_notification(db: Session, notification: Notification): + """Deliver notification through appropriate channel""" + try: + if notification.channel == NotificationChannel.email: + NotificationService._send_email(db, notification) + elif notification.channel == NotificationChannel.sms: + NotificationService._send_sms(db, notification) + elif notification.channel == NotificationChannel.push: + NotificationService._send_push(db, notification) + elif notification.channel == NotificationChannel.whatsapp: + NotificationService._send_whatsapp(db, notification) + elif notification.channel == NotificationChannel.in_app: + NotificationService._send_in_app(db, notification) + except Exception as e: + logger.error(f"Failed to deliver notification {notification.id}: {str(e)}") + notification.status = NotificationStatus.failed + notification.error_message = str(e) + db.commit() + + @staticmethod + def _send_email(db: Session, notification: Notification): + """Send email notification""" + # TODO: Integrate with email service (SendGrid, AWS SES, etc.) + # For now, just mark as sent + notification.status = NotificationStatus.sent + notification.sent_at = datetime.utcnow() + db.commit() + logger.info(f"Email notification {notification.id} sent (mock)") + + @staticmethod + def _send_sms(db: Session, notification: Notification): + """Send SMS notification""" + # TODO: Integrate with SMS service (Twilio, AWS SNS, etc.) + # For now, just mark as sent + notification.status = NotificationStatus.sent + notification.sent_at = datetime.utcnow() + db.commit() + logger.info(f"SMS notification {notification.id} sent (mock)") + + @staticmethod + def _send_push(db: Session, notification: Notification): + """Send push notification""" + # TODO: Integrate with push notification service (FCM, APNS, etc.) + # For now, just mark as sent + notification.status = NotificationStatus.sent + notification.sent_at = datetime.utcnow() + db.commit() + logger.info(f"Push notification {notification.id} sent (mock)") + + @staticmethod + def _send_whatsapp(db: Session, notification: Notification): + """Send WhatsApp notification""" + # TODO: Integrate with WhatsApp Business API + # For now, just mark as sent + notification.status = NotificationStatus.sent + notification.sent_at = datetime.utcnow() + db.commit() + logger.info(f"WhatsApp notification {notification.id} sent (mock)") + + @staticmethod + def _send_in_app(db: Session, notification: Notification): + """Send in-app notification""" + # In-app notifications are always "delivered" immediately + notification.status = NotificationStatus.delivered + notification.sent_at = datetime.utcnow() + notification.delivered_at = datetime.utcnow() + db.commit() + logger.info(f"In-app notification {notification.id} delivered") + + @staticmethod + def send_booking_confirmation(db: Session, booking: Booking): + """Send booking confirmation notifications""" + user = booking.user + if not user: + return + + # Get template or use default + template = NotificationService.get_template(db, NotificationType.booking_confirmation, NotificationChannel.email) + + if template: + variables = { + 'booking_number': booking.booking_number, + 'guest_name': user.full_name, + 'check_in': booking.check_in_date.strftime('%Y-%m-%d') if booking.check_in_date else '', + 'check_out': booking.check_out_date.strftime('%Y-%m-%d') if booking.check_out_date else '', + 'total_price': str(booking.total_price), + } + rendered = NotificationService.render_template(template, variables) + NotificationService.send_notification( + db=db, + user_id=user.id, + notification_type=NotificationType.booking_confirmation, + channel=NotificationChannel.email, + content=rendered['content'], + subject=rendered['subject'], + template_id=template.id, + booking_id=booking.id + ) + else: + # Fallback to default message + NotificationService.send_notification( + db=db, + user_id=user.id, + notification_type=NotificationType.booking_confirmation, + channel=NotificationChannel.email, + subject=f'Booking Confirmation - {booking.booking_number}', + content=f'Your booking {booking.booking_number} has been confirmed.', + booking_id=booking.id + ) + + @staticmethod + def send_payment_receipt(db: Session, payment: Payment): + """Send payment receipt notifications""" + booking = payment.booking + if not booking or not booking.user: + return + + user = booking.user + template = NotificationService.get_template(db, NotificationType.payment_receipt, NotificationChannel.email) + + if template: + variables = { + 'payment_amount': str(payment.amount), + 'payment_method': payment.payment_method.value if hasattr(payment.payment_method, 'value') else str(payment.payment_method), + 'transaction_id': payment.transaction_id or '', + 'booking_number': booking.booking_number, + 'guest_name': user.full_name, + } + rendered = NotificationService.render_template(template, variables) + NotificationService.send_notification( + db=db, + user_id=user.id, + notification_type=NotificationType.payment_receipt, + channel=NotificationChannel.email, + content=rendered['content'], + subject=rendered['subject'], + template_id=template.id, + payment_id=payment.id, + booking_id=booking.id + ) + + @staticmethod + def get_notifications( + db: Session, + user_id: Optional[int] = None, + notification_type: Optional[NotificationType] = None, + channel: Optional[NotificationChannel] = None, + status: Optional[NotificationStatus] = None, + skip: int = 0, + limit: int = 100 + ) -> List[Notification]: + """Get notifications with filters""" + query = db.query(Notification) + + if user_id: + query = query.filter(Notification.user_id == user_id) + if notification_type: + query = query.filter(Notification.notification_type == notification_type) + if channel: + query = query.filter(Notification.channel == channel) + if status: + query = query.filter(Notification.status == status) + + return query.order_by(desc(Notification.created_at)).offset(skip).limit(limit).all() + + @staticmethod + def mark_as_read(db: Session, notification_id: int, user_id: int) -> Optional[Notification]: + """Mark notification as read""" + notification = db.query(Notification).filter( + and_( + Notification.id == notification_id, + Notification.user_id == user_id + ) + ).first() + + if notification: + notification.status = NotificationStatus.read + notification.read_at = datetime.utcnow() + db.commit() + db.refresh(notification) + + return notification + diff --git a/Backend/src/services/oauth_service.py b/Backend/src/services/oauth_service.py new file mode 100644 index 00000000..71a09fa1 --- /dev/null +++ b/Backend/src/services/oauth_service.py @@ -0,0 +1,209 @@ +from typing import Optional, Dict, Any +from sqlalchemy.orm import Session +from datetime import datetime, timedelta +import httpx +import secrets +from urllib.parse import urlencode +import logging + +from ..models.security_event import OAuthProvider, OAuthToken +from ..models.user import User +from ..config.logging_config import get_logger + +logger = get_logger(__name__) + +class OAuthService: + """Service for handling OAuth 2.0 / OpenID Connect authentication""" + + @staticmethod + def get_authorization_url(db: Session, provider_name: str, redirect_uri: str, state: Optional[str] = None) -> str: + """Generate OAuth authorization URL""" + provider = db.query(OAuthProvider).filter( + OAuthProvider.name == provider_name, + OAuthProvider.is_active == True + ).first() + + if not provider: + raise ValueError(f"OAuth provider '{provider_name}' not found or inactive") + + if not state: + state = secrets.token_urlsafe(32) + + params = { + 'client_id': provider.client_id, + 'redirect_uri': redirect_uri, + 'response_type': 'code', + 'scope': provider.scopes or 'openid profile email', + 'state': state, + } + + return f"{provider.authorization_url}?{urlencode(params)}" + + @staticmethod + async def exchange_code_for_token( + db: Session, + provider_name: str, + code: str, + redirect_uri: str + ) -> Dict[str, Any]: + """Exchange authorization code for access token""" + provider = db.query(OAuthProvider).filter( + OAuthProvider.name == provider_name, + OAuthProvider.is_active == True + ).first() + + if not provider: + raise ValueError(f"OAuth provider '{provider_name}' not found or inactive") + + async with httpx.AsyncClient() as client: + response = await client.post( + provider.token_url, + data={ + 'grant_type': 'authorization_code', + 'code': code, + 'redirect_uri': redirect_uri, + 'client_id': provider.client_id, + 'client_secret': provider.client_secret, + }, + headers={'Accept': 'application/json'} + ) + + if response.status_code != 200: + logger.error(f"OAuth token exchange failed: {response.text}") + raise ValueError("Failed to exchange authorization code for token") + + token_data = response.json() + return token_data + + @staticmethod + async def get_user_info( + db: Session, + provider_name: str, + access_token: str + ) -> Dict[str, Any]: + """Get user information from OAuth provider""" + provider = db.query(OAuthProvider).filter( + OAuthProvider.name == provider_name, + OAuthProvider.is_active == True + ).first() + + if not provider: + raise ValueError(f"OAuth provider '{provider_name}' not found or inactive") + + async with httpx.AsyncClient() as client: + response = await client.get( + provider.userinfo_url, + headers={ + 'Authorization': f'Bearer {access_token}', + 'Accept': 'application/json' + } + ) + + if response.status_code != 200: + logger.error(f"Failed to get user info: {response.text}") + raise ValueError("Failed to get user information from OAuth provider") + + return response.json() + + @staticmethod + def save_oauth_token( + db: Session, + user_id: int, + provider_id: int, + provider_user_id: str, + access_token: str, + refresh_token: Optional[str] = None, + expires_in: Optional[int] = None, + scopes: Optional[str] = None + ) -> OAuthToken: + """Save or update OAuth token for user""" + expires_at = None + if expires_in: + expires_at = datetime.utcnow() + timedelta(seconds=expires_in) + + # Check if token already exists + existing_token = db.query(OAuthToken).filter( + OAuthToken.user_id == user_id, + OAuthToken.provider_id == provider_id + ).first() + + if existing_token: + existing_token.access_token = access_token + existing_token.refresh_token = refresh_token + existing_token.expires_at = expires_at + existing_token.scopes = scopes + existing_token.updated_at = datetime.utcnow() + db.commit() + db.refresh(existing_token) + return existing_token + else: + new_token = OAuthToken( + user_id=user_id, + provider_id=provider_id, + provider_user_id=provider_user_id, + access_token=access_token, + refresh_token=refresh_token, + expires_at=expires_at, + scopes=scopes + ) + db.add(new_token) + db.commit() + db.refresh(new_token) + return new_token + + @staticmethod + def find_or_create_user_from_oauth( + db: Session, + provider_name: str, + user_info: Dict[str, Any] + ) -> User: + """Find existing user or create new user from OAuth user info""" + provider = db.query(OAuthProvider).filter( + OAuthProvider.name == provider_name + ).first() + + if not provider: + raise ValueError(f"OAuth provider '{provider_name}' not found") + + # Try to find user by OAuth token + provider_user_id = user_info.get('sub') or user_info.get('id') + oauth_token = db.query(OAuthToken).filter( + OAuthToken.provider_id == provider.id, + OAuthToken.provider_user_id == str(provider_user_id) + ).first() + + if oauth_token: + return oauth_token.user + + # Try to find user by email + email = user_info.get('email') + if email: + user = db.query(User).filter(User.email == email.lower()).first() + if user: + return user + + # Create new user + from ..models.role import Role + customer_role = db.query(Role).filter(Role.name == 'customer').first() + if not customer_role: + raise ValueError("Customer role not found") + + name = user_info.get('name') or user_info.get('given_name', '') + ' ' + user_info.get('family_name', '') + if not name.strip(): + name = email.split('@')[0] if email else 'User' + + new_user = User( + email=email.lower() if email else f"{provider_user_id}@{provider_name}.oauth", + full_name=name.strip(), + role_id=customer_role.id, + is_active=True, + email_verified=True # OAuth providers verify emails + ) + db.add(new_user) + db.commit() + db.refresh(new_user) + + return new_user + +oauth_service = OAuthService() + diff --git a/Backend/src/services/room_assignment_service.py b/Backend/src/services/room_assignment_service.py new file mode 100644 index 00000000..5ab650e8 --- /dev/null +++ b/Backend/src/services/room_assignment_service.py @@ -0,0 +1,241 @@ +""" +Room Assignment Optimization Service +Provides intelligent room assignment based on guest preferences, room attributes, and availability +""" +from sqlalchemy.orm import Session +from sqlalchemy import and_, or_, func +from datetime import datetime +from typing import List, Optional, Dict +from ..models.room import Room, RoomStatus +from ..models.booking import Booking, BookingStatus +from ..models.room_attribute import RoomAttribute +from ..models.user import User + + +class RoomAssignmentService: + """Service for optimizing room assignments""" + + @staticmethod + def find_best_room( + db: Session, + room_type_id: int, + check_in: datetime, + check_out: datetime, + num_guests: int, + guest_preferences: Optional[Dict] = None, + exclude_room_ids: Optional[List[int]] = None + ) -> Optional[Room]: + """ + Find the best available room for a booking based on multiple criteria + + Args: + db: Database session + room_type_id: Required room type ID + check_in: Check-in date + check_out: Check-out date + num_guests: Number of guests + guest_preferences: Optional dict with preferences like {'view': 'ocean', 'floor': 'high', 'quiet': True} + exclude_room_ids: List of room IDs to exclude from consideration + + Returns: + Best matching Room or None if no room available + """ + # Base query: available rooms of the correct type + query = db.query(Room).filter( + Room.room_type_id == room_type_id, + Room.status == RoomStatus.available + ) + + # Exclude specific rooms if provided + if exclude_room_ids: + query = query.filter(~Room.id.in_(exclude_room_ids)) + + # Exclude rooms with overlapping bookings + overlapping_rooms = db.query(Booking.room_id).filter( + and_( + Booking.status != BookingStatus.cancelled, + Booking.check_in_date < check_out, + Booking.check_out_date > check_in + ) + ).subquery() + query = query.filter(~Room.id.in_(db.query(overlapping_rooms.c.room_id))) + + # Exclude rooms blocked by maintenance + from ..models.room_maintenance import RoomMaintenance, MaintenanceStatus + blocked_rooms = db.query(RoomMaintenance.room_id).filter( + and_( + RoomMaintenance.blocks_room == True, + RoomMaintenance.status.in_([MaintenanceStatus.scheduled, MaintenanceStatus.in_progress]), + or_( + and_( + RoomMaintenance.block_start.isnot(None), + RoomMaintenance.block_end.isnot(None), + RoomMaintenance.block_start < check_out, + RoomMaintenance.block_end > check_in + ), + and_( + RoomMaintenance.scheduled_start < check_out, + RoomMaintenance.scheduled_end.isnot(None), + RoomMaintenance.scheduled_end > check_in + ) + ) + ) + ).subquery() + query = query.filter(~Room.id.in_(db.query(blocked_rooms.c.room_id))) + + available_rooms = query.all() + + if not available_rooms: + return None + + # Score rooms based on preferences + if guest_preferences: + scored_rooms = [] + for room in available_rooms: + score = RoomAssignmentService._calculate_room_score(room, guest_preferences, db) + scored_rooms.append((score, room)) + + # Sort by score (highest first) and return best match + scored_rooms.sort(key=lambda x: x[0], reverse=True) + return scored_rooms[0][1] if scored_rooms else None + + # If no preferences, return first available room + return available_rooms[0] if available_rooms else None + + @staticmethod + def _calculate_room_score(room: Room, preferences: Dict, db: Session) -> float: + """ + Calculate a score for a room based on guest preferences + Higher score = better match + """ + score = 0.0 + + # View preference + if 'view' in preferences and room.view: + if preferences['view'].lower() in room.view.lower(): + score += 10.0 + + # Floor preference + if 'floor' in preferences: + preferred_floor = preferences['floor'] + if preferred_floor == 'high' and room.floor >= 3: + score += 5.0 + elif preferred_floor == 'low' and room.floor <= 2: + score += 5.0 + elif isinstance(preferred_floor, int) and room.floor == preferred_floor: + score += 10.0 + + # Quiet preference + if preferences.get('quiet'): + # Check room attributes for noise level + quiet_attr = db.query(RoomAttribute).filter( + and_( + RoomAttribute.room_id == room.id, + RoomAttribute.attribute_name == 'noise_level', + RoomAttribute.is_active == True + ) + ).first() + if quiet_attr and 'quiet' in quiet_attr.attribute_value.lower(): + score += 8.0 + + # Accessibility preference + if preferences.get('accessible'): + accessible_attr = db.query(RoomAttribute).filter( + and_( + RoomAttribute.room_id == room.id, + RoomAttribute.attribute_name == 'accessibility', + RoomAttribute.is_active == True + ) + ).first() + if accessible_attr and 'accessible' in accessible_attr.attribute_value.lower(): + score += 15.0 # High priority for accessibility + + # Featured rooms get a small boost + if room.featured: + score += 2.0 + + # Rooms with higher ratings get a boost + from ..models.review import Review, ReviewStatus + avg_rating = db.query(func.avg(Review.rating)).filter( + and_( + Review.room_id == room.id, + Review.status == ReviewStatus.approved + ) + ).scalar() + if avg_rating: + score += float(avg_rating) * 2.0 + + return score + + @staticmethod + def get_room_availability_calendar( + db: Session, + room_id: int, + start_date: datetime, + end_date: datetime + ) -> Dict: + """ + Get detailed availability information for a room over a date range + """ + room = db.query(Room).filter(Room.id == room_id).first() + if not room: + return None + + # Get bookings + bookings = db.query(Booking).filter( + and_( + Booking.room_id == room_id, + Booking.status != BookingStatus.cancelled, + Booking.check_in_date < end_date, + Booking.check_out_date > start_date + ) + ).all() + + # Get maintenance blocks + from ..models.room_maintenance import RoomMaintenance, MaintenanceStatus + maintenance_blocks = db.query(RoomMaintenance).filter( + and_( + RoomMaintenance.room_id == room_id, + RoomMaintenance.blocks_room == True, + RoomMaintenance.status.in_([MaintenanceStatus.scheduled, MaintenanceStatus.in_progress]), + or_( + and_( + RoomMaintenance.block_start.isnot(None), + RoomMaintenance.block_end.isnot(None), + RoomMaintenance.block_start < end_date, + RoomMaintenance.block_end > start_date + ), + and_( + RoomMaintenance.scheduled_start < end_date, + RoomMaintenance.scheduled_end.isnot(None), + RoomMaintenance.scheduled_end > start_date + ) + ) + ) + ).all() + + return { + 'room_id': room_id, + 'room_number': room.room_number, + 'status': room.status.value, + 'bookings': [ + { + 'id': b.id, + 'check_in': b.check_in_date.isoformat(), + 'check_out': b.check_out_date.isoformat(), + 'status': b.status.value + } + for b in bookings + ], + 'maintenance_blocks': [ + { + 'id': m.id, + 'title': m.title, + 'start': (m.block_start or m.scheduled_start).isoformat(), + 'end': (m.block_end or m.scheduled_end).isoformat() if (m.block_end or m.scheduled_end) else None, + 'type': m.maintenance_type.value + } + for m in maintenance_blocks + ] + } + diff --git a/Backend/src/services/security_monitoring_service.py b/Backend/src/services/security_monitoring_service.py new file mode 100644 index 00000000..4b9a4c7a --- /dev/null +++ b/Backend/src/services/security_monitoring_service.py @@ -0,0 +1,189 @@ +from sqlalchemy.orm import Session +from typing import Optional, List, Dict, Any +from datetime import datetime, timedelta +from ..models.security_event import SecurityEvent, SecurityEventType, SecurityEventSeverity +from ..models.user import User +from ..config.logging_config import get_logger + +logger = get_logger(__name__) + +class SecurityMonitoringService: + """Service for monitoring and analyzing security events""" + + @staticmethod + def log_security_event( + db: Session, + event_type: SecurityEventType, + severity: SecurityEventSeverity, + user_id: Optional[int] = None, + ip_address: Optional[str] = None, + user_agent: Optional[str] = None, + request_path: Optional[str] = None, + request_method: Optional[str] = None, + request_id: Optional[str] = None, + description: Optional[str] = None, + details: Optional[Dict[str, Any]] = None + ) -> SecurityEvent: + """Log a security event""" + try: + event = SecurityEvent( + user_id=user_id, + event_type=event_type, + severity=severity, + ip_address=ip_address, + user_agent=user_agent, + request_path=request_path, + request_method=request_method, + request_id=request_id, + description=description, + details=details + ) + db.add(event) + db.commit() + db.refresh(event) + + # Check for suspicious patterns + SecurityMonitoringService._check_suspicious_patterns(db, event) + + return event + except Exception as e: + logger.error(f"Error logging security event: {str(e)}") + db.rollback() + raise + + @staticmethod + def _check_suspicious_patterns(db: Session, event: SecurityEvent): + """Check for suspicious activity patterns""" + # Multiple failed login attempts from same IP + if event.event_type == SecurityEventType.login_failure: + recent_failures = db.query(SecurityEvent).filter( + SecurityEvent.event_type == SecurityEventType.login_failure, + SecurityEvent.ip_address == event.ip_address, + SecurityEvent.created_at >= datetime.utcnow() - timedelta(minutes=15) + ).count() + + if recent_failures >= 5: + # Log suspicious activity + SecurityMonitoringService.log_security_event( + db, + SecurityEventType.suspicious_activity, + SecurityEventSeverity.high, + ip_address=event.ip_address, + description=f"Multiple failed login attempts ({recent_failures}) from IP {event.ip_address}", + details={"failure_count": recent_failures} + ) + + # Multiple permission denied from same user + if event.event_type == SecurityEventType.permission_denied and event.user_id: + recent_denials = db.query(SecurityEvent).filter( + SecurityEvent.event_type == SecurityEventType.permission_denied, + SecurityEvent.user_id == event.user_id, + SecurityEvent.created_at >= datetime.utcnow() - timedelta(hours=1) + ).count() + + if recent_denials >= 10: + SecurityMonitoringService.log_security_event( + db, + SecurityEventType.suspicious_activity, + SecurityEventSeverity.medium, + user_id=event.user_id, + description=f"User {event.user_id} has {recent_denials} permission denials in the last hour", + details={"denial_count": recent_denials} + ) + + @staticmethod + def get_security_events( + db: Session, + user_id: Optional[int] = None, + event_type: Optional[SecurityEventType] = None, + severity: Optional[SecurityEventSeverity] = None, + ip_address: Optional[str] = None, + resolved: Optional[bool] = None, + start_date: Optional[datetime] = None, + end_date: Optional[datetime] = None, + limit: int = 100, + offset: int = 0 + ) -> List[SecurityEvent]: + """Get security events with filters""" + query = db.query(SecurityEvent) + + if user_id: + query = query.filter(SecurityEvent.user_id == user_id) + if event_type: + query = query.filter(SecurityEvent.event_type == event_type) + if severity: + query = query.filter(SecurityEvent.severity == severity) + if ip_address: + query = query.filter(SecurityEvent.ip_address == ip_address) + if resolved is not None: + query = query.filter(SecurityEvent.resolved == resolved) + if start_date: + query = query.filter(SecurityEvent.created_at >= start_date) + if end_date: + query = query.filter(SecurityEvent.created_at <= end_date) + + return query.order_by(SecurityEvent.created_at.desc()).offset(offset).limit(limit).all() + + @staticmethod + def get_security_stats( + db: Session, + days: int = 7 + ) -> Dict[str, Any]: + """Get security statistics for the last N days""" + start_date = datetime.utcnow() - timedelta(days=days) + + total_events = db.query(SecurityEvent).filter( + SecurityEvent.created_at >= start_date + ).count() + + by_type = {} + by_severity = {} + + events = db.query(SecurityEvent).filter( + SecurityEvent.created_at >= start_date + ).all() + + for event in events: + event_type = event.event_type.value + severity = event.severity.value + + by_type[event_type] = by_type.get(event_type, 0) + 1 + by_severity[severity] = by_severity.get(severity, 0) + 1 + + unresolved_critical = db.query(SecurityEvent).filter( + SecurityEvent.severity == SecurityEventSeverity.critical, + SecurityEvent.resolved == False, + SecurityEvent.created_at >= start_date + ).count() + + return { + "total_events": total_events, + "by_type": by_type, + "by_severity": by_severity, + "unresolved_critical": unresolved_critical, + "period_days": days + } + + @staticmethod + def resolve_event( + db: Session, + event_id: int, + resolved_by: int, + resolution_notes: Optional[str] = None + ) -> SecurityEvent: + """Mark a security event as resolved""" + event = db.query(SecurityEvent).filter(SecurityEvent.id == event_id).first() + if not event: + raise ValueError("Security event not found") + + event.resolved = True + event.resolved_at = datetime.utcnow() + event.resolved_by = resolved_by + event.resolution_notes = resolution_notes + + db.commit() + db.refresh(event) + return event + +security_monitoring_service = SecurityMonitoringService() + diff --git a/Backend/src/services/security_scan_service.py b/Backend/src/services/security_scan_service.py new file mode 100644 index 00000000..d2797fa9 --- /dev/null +++ b/Backend/src/services/security_scan_service.py @@ -0,0 +1,314 @@ +from sqlalchemy.orm import Session +from typing import List, Dict, Any, Optional +from datetime import datetime, timedelta +import logging +from ..models.security_event import SecurityEvent, SecurityEventType, SecurityEventSeverity +from ..models.user import User +from ..models.booking import Booking +from ..models.payment import Payment +from ..config.logging_config import get_logger + +logger = get_logger(__name__) + +class SecurityScanService: + """Service for automated security scanning""" + + @staticmethod + def run_full_scan(db: Session) -> Dict[str, Any]: + """Run a full security scan""" + results = { + "scan_id": f"scan_{datetime.utcnow().isoformat()}", + "started_at": datetime.utcnow().isoformat(), + "checks": [], + "total_issues": 0, + "critical_issues": 0, + "high_issues": 0, + "medium_issues": 0, + "low_issues": 0 + } + + # Run all security checks + checks = [ + SecurityScanService._check_weak_passwords(db), + SecurityScanService._check_inactive_users(db), + SecurityScanService._check_failed_login_attempts(db), + SecurityScanService._check_suspicious_activity(db), + SecurityScanService._check_unresolved_security_events(db), + SecurityScanService._check_expired_tokens(db), + SecurityScanService._check_unusual_payment_patterns(db), + SecurityScanService._check_data_retention_compliance(db), + ] + + for check in checks: + if check: + results["checks"].append(check) + results["total_issues"] += check.get("issue_count", 0) + severity = check.get("severity", "low") + if severity == "critical": + results["critical_issues"] += check.get("issue_count", 0) + elif severity == "high": + results["high_issues"] += check.get("issue_count", 0) + elif severity == "medium": + results["medium_issues"] += check.get("issue_count", 0) + else: + results["low_issues"] += check.get("issue_count", 0) + + completed_at = datetime.utcnow() + results["completed_at"] = completed_at.isoformat() + # Parse start time - handle ISO format with/without microseconds + start_str = results["started_at"] + try: + # Remove timezone info and microseconds for simpler parsing + if '.' in start_str: + start_str = start_str.split('.')[0] + if 'Z' in start_str: + start_str = start_str.replace('Z', '') + if '+' in start_str: + start_str = start_str.split('+')[0] + started_at = datetime.fromisoformat(start_str) + except Exception: + # Fallback: use current time if parsing fails + started_at = completed_at + results["duration_seconds"] = (completed_at - started_at).total_seconds() + + # Log critical and high issues as security events + for check in results["checks"]: + if check.get("severity") in ["critical", "high"] and check.get("issue_count", 0) > 0: + SecurityScanService._log_scan_finding(db, check) + + return results + + @staticmethod + def _check_weak_passwords(db: Session) -> Optional[Dict[str, Any]]: + """Check for users with weak passwords""" + # This is a placeholder - in production, you'd check password strength + # For now, we'll check for users without password changes in a long time + cutoff_date = datetime.utcnow() - timedelta(days=365) + users = db.query(User).filter( + User.created_at < cutoff_date, + User.is_active == True + ).all() + + if len(users) > 10: # Threshold + return { + "check_name": "Weak Passwords", + "check_type": "password_security", + "severity": "medium", + "status": "failed", + "issue_count": len(users), + "description": f"{len(users)} users have not changed passwords in over a year", + "recommendation": "Enforce password rotation policy", + "affected_items": [{"user_id": u.id, "email": u.email} for u in users[:10]] + } + return None + + @staticmethod + def _check_inactive_users(db: Session) -> Optional[Dict[str, Any]]: + """Check for inactive users that should be deactivated""" + cutoff_date = datetime.utcnow() - timedelta(days=180) + # Users who haven't logged in for 6 months + inactive_users = db.query(User).filter( + User.is_active == True + ).all() + + # This is simplified - in production, track last login + if len(inactive_users) > 50: + return { + "check_name": "Inactive Users", + "check_type": "user_management", + "severity": "low", + "status": "warning", + "issue_count": len(inactive_users), + "description": f"Found {len(inactive_users)} potentially inactive users", + "recommendation": "Review and deactivate inactive accounts", + "affected_items": [] + } + return None + + @staticmethod + def _check_failed_login_attempts(db: Session) -> Optional[Dict[str, Any]]: + """Check for excessive failed login attempts""" + from ..models.security_event import SecurityEvent, SecurityEventType + + recent_failures = db.query(SecurityEvent).filter( + SecurityEvent.event_type == SecurityEventType.login_failure, + SecurityEvent.created_at >= datetime.utcnow() - timedelta(hours=24) + ).count() + + if recent_failures > 50: + return { + "check_name": "Excessive Failed Logins", + "check_type": "authentication", + "severity": "high", + "status": "failed", + "issue_count": recent_failures, + "description": f"{recent_failures} failed login attempts in the last 24 hours", + "recommendation": "Review failed login attempts and consider IP blocking", + "affected_items": [] + } + return None + + @staticmethod + def _check_suspicious_activity(db: Session) -> Optional[Dict[str, Any]]: + """Check for suspicious activity patterns""" + from ..models.security_event import SecurityEvent, SecurityEventType, SecurityEventSeverity + + suspicious_events = db.query(SecurityEvent).filter( + SecurityEvent.event_type == SecurityEventType.suspicious_activity, + SecurityEvent.resolved == False, + SecurityEvent.created_at >= datetime.utcnow() - timedelta(days=7) + ).count() + + if suspicious_events > 0: + return { + "check_name": "Unresolved Suspicious Activity", + "check_type": "threat_detection", + "severity": "critical" if suspicious_events > 5 else "high", + "status": "failed", + "issue_count": suspicious_events, + "description": f"{suspicious_events} unresolved suspicious activity events in the last 7 days", + "recommendation": "Review and resolve suspicious activity events immediately", + "affected_items": [] + } + return None + + @staticmethod + def _check_unresolved_security_events(db: Session) -> Optional[Dict[str, Any]]: + """Check for unresolved critical security events""" + from ..models.security_event import SecurityEvent, SecurityEventSeverity + + unresolved_critical = db.query(SecurityEvent).filter( + SecurityEvent.severity == SecurityEventSeverity.critical, + SecurityEvent.resolved == False, + SecurityEvent.created_at >= datetime.utcnow() - timedelta(days=7) + ).count() + + if unresolved_critical > 0: + return { + "check_name": "Unresolved Critical Events", + "check_type": "incident_management", + "severity": "critical", + "status": "failed", + "issue_count": unresolved_critical, + "description": f"{unresolved_critical} unresolved critical security events", + "recommendation": "Resolve critical security events immediately", + "affected_items": [] + } + return None + + @staticmethod + def _check_expired_tokens(db: Session) -> Optional[Dict[str, Any]]: + """Check for expired tokens that should be cleaned up""" + from ..models.refresh_token import RefreshToken + + expired_tokens = db.query(RefreshToken).filter( + RefreshToken.expires_at < datetime.utcnow() + ).count() + + if expired_tokens > 1000: + return { + "check_name": "Expired Tokens", + "check_type": "token_management", + "severity": "low", + "status": "warning", + "issue_count": expired_tokens, + "description": f"{expired_tokens} expired tokens found in database", + "recommendation": "Clean up expired tokens to improve database performance", + "affected_items": [] + } + return None + + @staticmethod + def _check_unusual_payment_patterns(db: Session) -> Optional[Dict[str, Any]]: + """Check for unusual payment patterns that might indicate fraud""" + from ..models.payment import PaymentStatus + + # Check for multiple failed payments from same IP + recent_payments = db.query(Payment).filter( + Payment.payment_date >= datetime.utcnow() - timedelta(hours=24) + ).all() + + # Simplified check - in production, use more sophisticated fraud detection + failed_payments = [p for p in recent_payments if p.payment_status == PaymentStatus.failed] + + if len(failed_payments) > 20: + return { + "check_name": "Unusual Payment Patterns", + "check_type": "fraud_detection", + "severity": "medium", + "status": "warning", + "issue_count": len(failed_payments), + "description": f"{len(failed_payments)} failed payments in the last 24 hours", + "recommendation": "Review failed payment patterns for potential fraud", + "affected_items": [] + } + return None + + @staticmethod + def _check_data_retention_compliance(db: Session) -> Optional[Dict[str, Any]]: + """Check data retention policy compliance""" + from ..models.gdpr_compliance import DataRetentionPolicy + + policies = db.query(DataRetentionPolicy).filter( + DataRetentionPolicy.is_active == True, + DataRetentionPolicy.auto_delete == True + ).all() + + # Check if there's data that should have been deleted + issues = [] + for policy in policies: + # This is simplified - in production, check actual data age + if policy.retention_days < 30: # Very short retention + issues.append({ + "policy": policy.data_type, + "retention_days": policy.retention_days + }) + + if issues: + return { + "check_name": "Data Retention Compliance", + "check_type": "gdpr_compliance", + "severity": "high", + "status": "warning", + "issue_count": len(issues), + "description": f"Found {len(issues)} data retention policies that may need review", + "recommendation": "Review data retention policies for GDPR compliance", + "affected_items": issues + } + return None + + @staticmethod + def _log_scan_finding(db: Session, check: Dict[str, Any]): + """Log scan findings as security events""" + try: + event = SecurityEvent( + event_type=SecurityEventType.suspicious_activity, + severity=SecurityEventSeverity(check["severity"]), + description=f"Security Scan: {check['check_name']} - {check['description']}", + details={ + "check_type": check.get("check_type"), + "issue_count": check.get("issue_count"), + "recommendation": check.get("recommendation"), + "affected_items": check.get("affected_items", []) + } + ) + db.add(event) + db.commit() + except Exception as e: + logger.error(f"Error logging scan finding: {str(e)}") + db.rollback() + + @staticmethod + def schedule_scan(db: Session, interval_hours: int = 24) -> Dict[str, Any]: + """Schedule automatic security scans""" + # In production, use a task scheduler like Celery or APScheduler + # For now, this is a placeholder that returns scan configuration + return { + "scheduled": True, + "interval_hours": interval_hours, + "next_scan": (datetime.utcnow() + timedelta(hours=interval_hours)).isoformat(), + "message": "Scan scheduled. In production, use a task scheduler to run scans automatically." + } + +security_scan_service = SecurityScanService() + diff --git a/Backend/src/services/task_service.py b/Backend/src/services/task_service.py new file mode 100644 index 00000000..47676ccc --- /dev/null +++ b/Backend/src/services/task_service.py @@ -0,0 +1,338 @@ +from sqlalchemy.orm import Session, joinedload, selectinload +from sqlalchemy import and_, or_, func, desc, case +from typing import Optional, Dict, List, Any +from datetime import datetime, timedelta +from ..models.workflow import Task, TaskComment, TaskStatus, TaskPriority +from ..models.user import User +import logging + +logger = logging.getLogger(__name__) + +class TaskService: + """Task Management Service""" + + @staticmethod + def create_task( + db: Session, + title: str, + created_by: int, + task_type: str = 'general', + description: Optional[str] = None, + priority: TaskPriority = TaskPriority.medium, + workflow_instance_id: Optional[int] = None, + booking_id: Optional[int] = None, + room_id: Optional[int] = None, + assigned_to: Optional[int] = None, + due_date: Optional[datetime] = None, + estimated_duration_minutes: Optional[int] = None, + metadata: Optional[Dict[str, Any]] = None + ) -> Task: + """Create a new task""" + task = Task( + title=title, + description=description, + task_type=task_type, + priority=priority, + workflow_instance_id=workflow_instance_id, + booking_id=booking_id, + room_id=room_id, + assigned_to=assigned_to, + created_by=created_by, + due_date=due_date, + estimated_duration_minutes=estimated_duration_minutes, + meta_data=metadata or {}, + status=TaskStatus.pending if not assigned_to else TaskStatus.assigned + ) + db.add(task) + db.commit() + db.refresh(task) + return task + + @staticmethod + def get_tasks( + db: Session, + assigned_to: Optional[int] = None, + created_by: Optional[int] = None, + status: Optional[TaskStatus] = None, + priority: Optional[TaskPriority] = None, + task_type: Optional[str] = None, + booking_id: Optional[int] = None, + room_id: Optional[int] = None, + workflow_instance_id: Optional[int] = None, + overdue_only: bool = False, + skip: int = 0, + limit: int = 100 + ) -> List[Task]: + """Get tasks with optional filters""" + query = db.query(Task).options( + joinedload(Task.assignee).selectinload(User.role), + joinedload(Task.creator_user).selectinload(User.role) + ) + + if assigned_to: + query = query.filter(Task.assigned_to == assigned_to) + if created_by: + query = query.filter(Task.created_by == created_by) + if status: + query = query.filter(Task.status == status) + if priority: + query = query.filter(Task.priority == priority) + if task_type: + query = query.filter(Task.task_type == task_type) + if booking_id: + query = query.filter(Task.booking_id == booking_id) + if room_id: + query = query.filter(Task.room_id == room_id) + if workflow_instance_id: + query = query.filter(Task.workflow_instance_id == workflow_instance_id) + if overdue_only: + now = datetime.utcnow() + query = query.filter( + and_( + Task.due_date < now, + Task.status != TaskStatus.completed, + Task.status != TaskStatus.cancelled + ) + ) + + # MySQL doesn't support NULLS LAST, so we use CASE to handle NULLs + return query.order_by( + desc(Task.priority == TaskPriority.urgent), + desc(Task.priority == TaskPriority.high), + case((Task.due_date.is_(None), 1), else_=0), # NULLs last + Task.due_date.asc(), + Task.created_at.desc() + ).offset(skip).limit(limit).all() + + @staticmethod + def get_task_by_id(db: Session, task_id: int) -> Optional[Task]: + """Get task by ID""" + return db.query(Task).options( + joinedload(Task.assignee).selectinload(User.role), + joinedload(Task.creator_user).selectinload(User.role) + ).filter(Task.id == task_id).first() + + @staticmethod + def update_task( + db: Session, + task_id: int, + title: Optional[str] = None, + description: Optional[str] = None, + status: Optional[TaskStatus] = None, + priority: Optional[TaskPriority] = None, + assigned_to: Optional[int] = None, + due_date: Optional[datetime] = None, + notes: Optional[str] = None, + actual_duration_minutes: Optional[int] = None + ) -> Optional[Task]: + """Update task""" + task = db.query(Task).filter(Task.id == task_id).first() + if not task: + return None + + if title is not None: + task.title = title + if description is not None: + task.description = description + if status is not None: + task.status = status + if status == TaskStatus.completed: + task.completed_at = datetime.utcnow() + if not task.actual_duration_minutes and task.created_at: + duration = (datetime.utcnow() - task.created_at).total_seconds() / 60 + task.actual_duration_minutes = int(duration) + if priority is not None: + task.priority = priority + if assigned_to is not None: + task.assigned_to = assigned_to + if assigned_to and task.status == TaskStatus.pending: + task.status = TaskStatus.assigned + if due_date is not None: + task.due_date = due_date + if notes is not None: + task.notes = notes + if actual_duration_minutes is not None: + task.actual_duration_minutes = actual_duration_minutes + + # Update status to overdue if past due date + if task.due_date and task.due_date < datetime.utcnow(): + if task.status not in [TaskStatus.completed, TaskStatus.cancelled]: + task.status = TaskStatus.overdue + + task.updated_at = datetime.utcnow() + db.commit() + db.refresh(task) + return task + + @staticmethod + def assign_task(db: Session, task_id: int, user_id: int) -> Optional[Task]: + """Assign task to a user""" + task = db.query(Task).filter(Task.id == task_id).first() + if not task: + return None + + task.assigned_to = user_id + if task.status == TaskStatus.pending: + task.status = TaskStatus.assigned + task.updated_at = datetime.utcnow() + db.commit() + db.refresh(task) + return task + + @staticmethod + def start_task(db: Session, task_id: int) -> Optional[Task]: + """Mark task as in progress""" + task = db.query(Task).filter(Task.id == task_id).first() + if not task: + return None + + task.status = TaskStatus.in_progress + task.updated_at = datetime.utcnow() + db.commit() + db.refresh(task) + return task + + @staticmethod + def complete_task(db: Session, task_id: int, notes: Optional[str] = None) -> Optional[Task]: + """Mark task as completed""" + task = db.query(Task).filter(Task.id == task_id).first() + if not task: + return None + + task.status = TaskStatus.completed + task.completed_at = datetime.utcnow() + if notes: + task.notes = (task.notes or '') + f'\n\nCompleted: {notes}' + + # Calculate actual duration + if task.created_at: + duration = (datetime.utcnow() - task.created_at).total_seconds() / 60 + task.actual_duration_minutes = int(duration) + + task.updated_at = datetime.utcnow() + db.commit() + db.refresh(task) + + # Update workflow instance status if this task belongs to one + if task.workflow_instance_id: + from ..services.workflow_service import WorkflowService + WorkflowService.complete_workflow_instance(db, task.workflow_instance_id) + + return task + + @staticmethod + def cancel_task(db: Session, task_id: int, reason: Optional[str] = None) -> Optional[Task]: + """Cancel task""" + task = db.query(Task).filter(Task.id == task_id).first() + if not task: + return None + + task.status = TaskStatus.cancelled + if reason: + task.notes = (task.notes or '') + f'\n\nCancelled: {reason}' + task.updated_at = datetime.utcnow() + db.commit() + db.refresh(task) + return task + + @staticmethod + def add_task_comment( + db: Session, + task_id: int, + user_id: int, + comment: str + ) -> TaskComment: + """Add comment to task""" + task_comment = TaskComment( + task_id=task_id, + user_id=user_id, + comment=comment + ) + db.add(task_comment) + db.commit() + db.refresh(task_comment) + return task_comment + + @staticmethod + def get_task_comments(db: Session, task_id: int) -> List[TaskComment]: + """Get all comments for a task""" + return db.query(TaskComment).options( + joinedload(TaskComment.user) + ).filter( + TaskComment.task_id == task_id + ).order_by(TaskComment.created_at.asc()).all() + + @staticmethod + def get_task_statistics( + db: Session, + assigned_to: Optional[int] = None, + start_date: Optional[datetime] = None, + end_date: Optional[datetime] = None + ) -> Dict[str, Any]: + """Get task statistics""" + query = db.query(Task) + + if assigned_to: + query = query.filter(Task.assigned_to == assigned_to) + if start_date: + query = query.filter(Task.created_at >= start_date) + if end_date: + query = query.filter(Task.created_at <= end_date) + + total = query.count() + pending = query.filter(Task.status == TaskStatus.pending).count() + assigned = query.filter(Task.status == TaskStatus.assigned).count() + in_progress = query.filter(Task.status == TaskStatus.in_progress).count() + completed = query.filter(Task.status == TaskStatus.completed).count() + overdue = query.filter(Task.status == TaskStatus.overdue).count() + cancelled = query.filter(Task.status == TaskStatus.cancelled).count() + + # Calculate average completion time + completed_tasks = query.filter(Task.status == TaskStatus.completed).all() + avg_completion_time = None + if completed_tasks: + total_duration = sum( + t.actual_duration_minutes or 0 + for t in completed_tasks + if t.actual_duration_minutes + ) + count_with_duration = sum(1 for t in completed_tasks if t.actual_duration_minutes) + if count_with_duration > 0: + avg_completion_time = total_duration / count_with_duration + + return { + 'total': total, + 'pending': pending, + 'assigned': assigned, + 'in_progress': in_progress, + 'completed': completed, + 'overdue': overdue, + 'cancelled': cancelled, + 'completion_rate': (completed / total * 100) if total > 0 else 0, + 'average_completion_time_minutes': avg_completion_time + } + + @staticmethod + def get_my_tasks(db: Session, user_id: int, status: Optional[TaskStatus] = None) -> List[Task]: + """Get tasks assigned to a user""" + query = db.query(Task).options( + joinedload(Task.assignee).selectinload(User.role), + joinedload(Task.creator_user).selectinload(User.role) + ).filter(Task.assigned_to == user_id) + + if status: + query = query.filter(Task.status == status) + else: + # Exclude completed and cancelled by default + query = query.filter( + Task.status.notin_([TaskStatus.completed, TaskStatus.cancelled]) + ) + + # MySQL doesn't support NULLS LAST, so we use CASE to handle NULLs + return query.order_by( + desc(Task.priority == TaskPriority.urgent), + desc(Task.priority == TaskPriority.high), + case((Task.due_date.is_(None), 1), else_=0), # NULLs last + Task.due_date.asc() + ).all() + diff --git a/Backend/src/services/workflow_service.py b/Backend/src/services/workflow_service.py new file mode 100644 index 00000000..e92b68e0 --- /dev/null +++ b/Backend/src/services/workflow_service.py @@ -0,0 +1,314 @@ +from sqlalchemy.orm import Session +from sqlalchemy import and_, or_, func +from typing import Optional, Dict, List, Any +from datetime import datetime, timedelta +from ..models.workflow import ( + Workflow, WorkflowInstance, Task, TaskComment, + WorkflowType, WorkflowStatus, WorkflowTrigger, TaskStatus, TaskPriority +) +from ..models.booking import Booking, BookingStatus +from ..models.room import Room +from ..models.user import User +import logging + +logger = logging.getLogger(__name__) + +class WorkflowService: + """Workflow Automation & Task Management Service""" + + @staticmethod + def create_workflow( + db: Session, + name: str, + workflow_type: WorkflowType, + trigger: WorkflowTrigger, + steps: List[Dict[str, Any]], + created_by: int, + description: Optional[str] = None, + trigger_config: Optional[Dict[str, Any]] = None, + sla_hours: Optional[int] = None + ) -> Workflow: + """Create a new workflow""" + workflow = Workflow( + name=name, + description=description, + workflow_type=workflow_type, + trigger=trigger, + trigger_config=trigger_config or {}, + steps=steps, + sla_hours=sla_hours, + created_by=created_by, + status=WorkflowStatus.active, + is_active=True + ) + db.add(workflow) + db.commit() + db.refresh(workflow) + return workflow + + @staticmethod + def get_workflows( + db: Session, + workflow_type: Optional[WorkflowType] = None, + status: Optional[WorkflowStatus] = None, + skip: int = 0, + limit: int = 100 + ) -> List[Workflow]: + """Get workflows with optional filters""" + query = db.query(Workflow) + + if workflow_type: + query = query.filter(Workflow.workflow_type == workflow_type) + if status: + query = query.filter(Workflow.status == status) + + return query.filter(Workflow.is_active == True).offset(skip).limit(limit).all() + + @staticmethod + def get_workflow_by_id(db: Session, workflow_id: int) -> Optional[Workflow]: + """Get workflow by ID""" + return db.query(Workflow).filter(Workflow.id == workflow_id).first() + + @staticmethod + def update_workflow( + db: Session, + workflow_id: int, + name: Optional[str] = None, + description: Optional[str] = None, + steps: Optional[List[Dict[str, Any]]] = None, + status: Optional[WorkflowStatus] = None, + trigger_config: Optional[Dict[str, Any]] = None, + sla_hours: Optional[int] = None + ) -> Optional[Workflow]: + """Update workflow""" + workflow = db.query(Workflow).filter(Workflow.id == workflow_id).first() + if not workflow: + return None + + if name is not None: + workflow.name = name + if description is not None: + workflow.description = description + if steps is not None: + workflow.steps = steps + if status is not None: + workflow.status = status + if trigger_config is not None: + workflow.trigger_config = trigger_config + if sla_hours is not None: + workflow.sla_hours = sla_hours + + workflow.updated_at = datetime.utcnow() + db.commit() + db.refresh(workflow) + return workflow + + @staticmethod + def delete_workflow(db: Session, workflow_id: int) -> bool: + """Soft delete workflow""" + workflow = db.query(Workflow).filter(Workflow.id == workflow_id).first() + if not workflow: + return False + + workflow.is_active = False + workflow.status = WorkflowStatus.archived + workflow.updated_at = datetime.utcnow() + db.commit() + return True + + @staticmethod + def trigger_workflow( + db: Session, + workflow_id: int, + booking_id: Optional[int] = None, + room_id: Optional[int] = None, + user_id: Optional[int] = None, + metadata: Optional[Dict[str, Any]] = None + ) -> Optional[WorkflowInstance]: + """Trigger a workflow and create an instance""" + workflow = db.query(Workflow).filter( + and_(Workflow.id == workflow_id, Workflow.is_active == True) + ).first() + + if not workflow or workflow.status != WorkflowStatus.active: + return None + + # Calculate due date based on SLA + due_date = None + if workflow.sla_hours: + due_date = datetime.utcnow() + timedelta(hours=workflow.sla_hours) + + # Create workflow instance + instance = WorkflowInstance( + workflow_id=workflow_id, + booking_id=booking_id, + room_id=room_id, + user_id=user_id, + status='pending', + due_date=due_date, + meta_data=metadata or {} + ) + db.add(instance) + db.commit() + db.refresh(instance) + + # Create tasks from workflow steps + WorkflowService._create_tasks_from_workflow(db, instance, workflow, created_by=workflow.created_by) + + return instance + + @staticmethod + def _create_tasks_from_workflow( + db: Session, + instance: WorkflowInstance, + workflow: Workflow, + created_by: int + ): + """Create tasks from workflow steps""" + for step in workflow.steps: + task = Task( + title=step.get('title', 'Untitled Task'), + description=step.get('description'), + task_type=step.get('task_type', 'general'), + priority=TaskPriority(step.get('priority', 'medium')), + workflow_instance_id=instance.id, + booking_id=instance.booking_id, + room_id=instance.room_id, + assigned_to=step.get('assigned_to'), + created_by=created_by, + due_date=WorkflowService._calculate_task_due_date(step, instance.due_date), + estimated_duration_minutes=step.get('estimated_duration_minutes'), + meta_data=step.get('metadata', {}) + ) + db.add(task) + + db.commit() + + @staticmethod + def _calculate_task_due_date(step: Dict[str, Any], workflow_due_date: Optional[datetime]) -> Optional[datetime]: + """Calculate task due date based on step configuration""" + if not workflow_due_date: + return None + + offset_hours = step.get('due_date_offset_hours', 0) + return workflow_due_date - timedelta(hours=offset_hours) + + @staticmethod + def get_workflow_instances( + db: Session, + workflow_id: Optional[int] = None, + booking_id: Optional[int] = None, + status: Optional[str] = None, + skip: int = 0, + limit: int = 100 + ) -> List[WorkflowInstance]: + """Get workflow instances with optional filters""" + query = db.query(WorkflowInstance) + + if workflow_id: + query = query.filter(WorkflowInstance.workflow_id == workflow_id) + if booking_id: + query = query.filter(WorkflowInstance.booking_id == booking_id) + if status: + query = query.filter(WorkflowInstance.status == status) + + return query.order_by(WorkflowInstance.created_at.desc()).offset(skip).limit(limit).all() + + @staticmethod + def complete_workflow_instance(db: Session, instance_id: int) -> Optional[WorkflowInstance]: + """Mark workflow instance as completed""" + instance = db.query(WorkflowInstance).filter(WorkflowInstance.id == instance_id).first() + if not instance: + return None + + # Check if all tasks are completed + incomplete_tasks = db.query(Task).filter( + and_( + Task.workflow_instance_id == instance_id, + Task.status != TaskStatus.completed, + Task.status != TaskStatus.cancelled + ) + ).count() + + if incomplete_tasks > 0: + instance.status = 'in_progress' + else: + instance.status = 'completed' + instance.completed_at = datetime.utcnow() + + instance.updated_at = datetime.utcnow() + db.commit() + db.refresh(instance) + return instance + + @staticmethod + def get_pre_arrival_workflows(db: Session) -> List[Workflow]: + """Get pre-arrival workflows""" + return db.query(Workflow).filter( + and_( + Workflow.workflow_type == WorkflowType.pre_arrival, + Workflow.status == WorkflowStatus.active, + Workflow.is_active == True + ) + ).all() + + @staticmethod + def get_room_preparation_workflows(db: Session) -> List[Workflow]: + """Get room preparation workflows""" + return db.query(Workflow).filter( + and_( + Workflow.workflow_type == WorkflowType.room_preparation, + Workflow.status == WorkflowStatus.active, + Workflow.is_active == True + ) + ).all() + + @staticmethod + def auto_trigger_workflows_for_booking(db: Session, booking: Booking): + """Automatically trigger workflows for a booking based on trigger conditions""" + # Get workflows that should be triggered for this booking + workflows = db.query(Workflow).filter( + and_( + Workflow.is_active == True, + Workflow.status == WorkflowStatus.active, + or_( + Workflow.trigger == WorkflowTrigger.booking_created, + Workflow.trigger == WorkflowTrigger.booking_confirmed + ) + ) + ).all() + + for workflow in workflows: + # Check trigger conditions + if WorkflowService._should_trigger_workflow(workflow, booking): + WorkflowService.trigger_workflow( + db=db, + workflow_id=workflow.id, + booking_id=booking.id, + room_id=booking.room_id, + user_id=booking.user_id, + meta_data={'booking_status': booking.status.value if hasattr(booking.status, 'value') else str(booking.status)} + ) + + @staticmethod + def _should_trigger_workflow(workflow: Workflow, booking: Booking) -> bool: + """Check if workflow should be triggered for a booking""" + trigger_config = workflow.trigger_config or {} + + # Check booking status filter + if 'booking_status' in trigger_config: + required_status = trigger_config['booking_status'] + booking_status = booking.status.value if hasattr(booking.status, 'value') else str(booking.status) + if booking_status != required_status: + return False + + # Check time-based triggers (e.g., X hours before check-in) + if 'hours_before_checkin' in trigger_config: + hours_before = trigger_config['hours_before_checkin'] + if booking.check_in_date: + trigger_time = booking.check_in_date - timedelta(hours=hours_before) + if datetime.utcnow() < trigger_time: + return False + + return True + diff --git a/Backend/src/tasks/security_scan_task.py b/Backend/src/tasks/security_scan_task.py new file mode 100644 index 00000000..e8abaeac --- /dev/null +++ b/Backend/src/tasks/security_scan_task.py @@ -0,0 +1,66 @@ +""" +Automated Security Scan Task +This module can be integrated with task schedulers like Celery or APScheduler +to run security scans automatically at scheduled intervals. +""" + +from datetime import datetime +import logging +from ..services.security_scan_service import security_scan_service +from ..config.database import get_db +from ..config.logging_config import get_logger + +logger = get_logger(__name__) + +def run_scheduled_security_scan(): + """ + Task function to run scheduled security scans. + This can be called by: + - Celery periodic tasks + - APScheduler + - Cron jobs + - Systemd timers + """ + try: + logger.info("Starting scheduled security scan") + db_gen = get_db() + db = next(db_gen) + + try: + results = security_scan_service.run_full_scan(db=db) + logger.info( + f"Security scan completed: {results['total_issues']} issues found " + f"({results['critical_issues']} critical, {results['high_issues']} high)" + ) + return results + finally: + db.close() + except Exception as e: + logger.error(f"Error running scheduled security scan: {str(e)}", exc_info=True) + raise + +# Example Celery task (uncomment if using Celery): +# from celery import shared_task +# +# @shared_task +# def scheduled_security_scan_task(): +# """Celery task for scheduled security scans""" +# return run_scheduled_security_scan() + +# Example APScheduler job (uncomment if using APScheduler): +# from apscheduler.schedulers.background import BackgroundScheduler +# +# def setup_scheduled_scans(): +# """Setup scheduled security scans using APScheduler""" +# scheduler = BackgroundScheduler() +# scheduler.add_job( +# run_scheduled_security_scan, +# 'interval', +# hours=24, # Run every 24 hours +# id='security_scan', +# name='Automated Security Scan', +# replace_existing=True +# ) +# scheduler.start() +# return scheduler + diff --git a/Backend/src/utils/__pycache__/currency_helpers.cpython-312.pyc b/Backend/src/utils/__pycache__/currency_helpers.cpython-312.pyc new file mode 100644 index 00000000..ca704c5c Binary files /dev/null and b/Backend/src/utils/__pycache__/currency_helpers.cpython-312.pyc differ diff --git a/Backend/src/utils/__pycache__/response_helpers.cpython-312.pyc b/Backend/src/utils/__pycache__/response_helpers.cpython-312.pyc new file mode 100644 index 00000000..be9648ee Binary files /dev/null and b/Backend/src/utils/__pycache__/response_helpers.cpython-312.pyc differ diff --git a/Backend/src/utils/__pycache__/role_helpers.cpython-312.pyc b/Backend/src/utils/__pycache__/role_helpers.cpython-312.pyc new file mode 100644 index 00000000..d80bb7cf Binary files /dev/null and b/Backend/src/utils/__pycache__/role_helpers.cpython-312.pyc differ diff --git a/Backend/src/utils/currency_helpers.py b/Backend/src/utils/currency_helpers.py new file mode 100644 index 00000000..8aa3e2c7 --- /dev/null +++ b/Backend/src/utils/currency_helpers.py @@ -0,0 +1,24 @@ +""" +Utility functions for currency handling +""" +CURRENCY_SYMBOLS = { + 'USD': '$', + 'EUR': '€', + 'GBP': '£', + 'JPY': '¥', + 'CNY': '¥', + 'KRW': '₩', + 'SGD': 'S$', + 'THB': '฿', + 'AUD': 'A$', + 'CAD': 'C$', + 'VND': '₫', + 'INR': '₹', + 'CHF': 'CHF', + 'NZD': 'NZ$' +} + +def get_currency_symbol(currency: str) -> str: + """Get currency symbol for a given currency code""" + return CURRENCY_SYMBOLS.get(currency.upper(), currency) + diff --git a/Backend/src/utils/response_helpers.py b/Backend/src/utils/response_helpers.py new file mode 100644 index 00000000..b7729c1f --- /dev/null +++ b/Backend/src/utils/response_helpers.py @@ -0,0 +1,51 @@ +""" +Utility functions for standardizing API responses +""" +from typing import Any, Dict, Optional + +def success_response( + data: Any = None, + message: Optional[str] = None, + **kwargs +) -> Dict[str, Any]: + """ + Create a standardized success response. + Returns both 'success' (boolean) and 'status' (string) for backward compatibility. + """ + response: Dict[str, Any] = { + 'success': True, + 'status': 'success' + } + + if data is not None: + response['data'] = data + + if message: + response['message'] = message + + # Add any additional fields + response.update(kwargs) + + return response + +def error_response( + message: str, + errors: Optional[list] = None, + **kwargs +) -> Dict[str, Any]: + """ + Create a standardized error response. + """ + response: Dict[str, Any] = { + 'success': False, + 'status': 'error', + 'message': message + } + + if errors: + response['errors'] = errors + + response.update(kwargs) + + return response + diff --git a/Backend/src/utils/role_helpers.py b/Backend/src/utils/role_helpers.py new file mode 100644 index 00000000..ab8a8677 --- /dev/null +++ b/Backend/src/utils/role_helpers.py @@ -0,0 +1,47 @@ +""" +Utility functions for role-based access control +""" +from sqlalchemy.orm import Session +from ..models.user import User +from ..models.role import Role + +def get_user_role_name(user: User, db: Session) -> str: + """Get the role name for a user""" + role = db.query(Role).filter(Role.id == user.role_id).first() + return role.name if role else 'customer' + +def is_admin(user: User, db: Session) -> bool: + """Check if user is admin""" + return get_user_role_name(user, db) == 'admin' + +def is_staff(user: User, db: Session) -> bool: + """Check if user is staff""" + return get_user_role_name(user, db) == 'staff' + +def is_accountant(user: User, db: Session) -> bool: + """Check if user is accountant""" + return get_user_role_name(user, db) == 'accountant' + +def is_customer(user: User, db: Session) -> bool: + """Check if user is customer""" + return get_user_role_name(user, db) == 'customer' + +def can_access_all_payments(user: User, db: Session) -> bool: + """Check if user can see all payments (admin or accountant)""" + role_name = get_user_role_name(user, db) + return role_name in ['admin', 'accountant'] + +def can_access_all_invoices(user: User, db: Session) -> bool: + """Check if user can see all invoices (admin or accountant)""" + role_name = get_user_role_name(user, db) + return role_name in ['admin', 'accountant'] + +def can_create_invoices(user: User, db: Session) -> bool: + """Check if user can create invoices (admin, staff, or accountant)""" + role_name = get_user_role_name(user, db) + return role_name in ['admin', 'staff', 'accountant'] + +def can_manage_users(user: User, db: Session) -> bool: + """Check if user can manage users (admin only)""" + return is_admin(user, db) + diff --git a/Backend/venv/bin/coverage b/Backend/venv/bin/coverage new file mode 100755 index 00000000..413244ed --- /dev/null +++ b/Backend/venv/bin/coverage @@ -0,0 +1,7 @@ +#!/home/gnx/Desktop/Hotel-Booking/Backend/venv/bin/python +import sys +from coverage.cmdline import main +if __name__ == '__main__': + if sys.argv[0].endswith('.exe'): + sys.argv[0] = sys.argv[0][:-4] + sys.exit(main()) diff --git a/Backend/venv/bin/coverage-3.12 b/Backend/venv/bin/coverage-3.12 new file mode 100755 index 00000000..413244ed --- /dev/null +++ b/Backend/venv/bin/coverage-3.12 @@ -0,0 +1,7 @@ +#!/home/gnx/Desktop/Hotel-Booking/Backend/venv/bin/python +import sys +from coverage.cmdline import main +if __name__ == '__main__': + if sys.argv[0].endswith('.exe'): + sys.argv[0] = sys.argv[0][:-4] + sys.exit(main()) diff --git a/Backend/venv/bin/coverage3 b/Backend/venv/bin/coverage3 new file mode 100755 index 00000000..413244ed --- /dev/null +++ b/Backend/venv/bin/coverage3 @@ -0,0 +1,7 @@ +#!/home/gnx/Desktop/Hotel-Booking/Backend/venv/bin/python +import sys +from coverage.cmdline import main +if __name__ == '__main__': + if sys.argv[0].endswith('.exe'): + sys.argv[0] = sys.argv[0][:-4] + sys.exit(main()) diff --git a/Backend/venv/bin/httpx b/Backend/venv/bin/httpx new file mode 100755 index 00000000..06a9c9ab --- /dev/null +++ b/Backend/venv/bin/httpx @@ -0,0 +1,7 @@ +#!/home/gnx/Desktop/Hotel-Booking/Backend/venv/bin/python +import sys +from httpx import main +if __name__ == '__main__': + if sys.argv[0].endswith('.exe'): + sys.argv[0] = sys.argv[0][:-4] + sys.exit(main()) diff --git a/Backend/venv/bin/pip b/Backend/venv/bin/pip index 97cb4193..b3de0772 100755 --- a/Backend/venv/bin/pip +++ b/Backend/venv/bin/pip @@ -1,8 +1,7 @@ -#!/home/gnx/Desktop/Hotel-Booking/Backend/venv/bin/python3 -# -*- coding: utf-8 -*- -import re +#!/home/gnx/Desktop/Hotel-Booking/Backend/venv/bin/python import sys from pip._internal.cli.main import main if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + if sys.argv[0].endswith('.exe'): + sys.argv[0] = sys.argv[0][:-4] sys.exit(main()) diff --git a/Backend/venv/bin/pip3 b/Backend/venv/bin/pip3 index 97cb4193..b3de0772 100755 --- a/Backend/venv/bin/pip3 +++ b/Backend/venv/bin/pip3 @@ -1,8 +1,7 @@ -#!/home/gnx/Desktop/Hotel-Booking/Backend/venv/bin/python3 -# -*- coding: utf-8 -*- -import re +#!/home/gnx/Desktop/Hotel-Booking/Backend/venv/bin/python import sys from pip._internal.cli.main import main if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + if sys.argv[0].endswith('.exe'): + sys.argv[0] = sys.argv[0][:-4] sys.exit(main()) diff --git a/Backend/venv/bin/pip3.12 b/Backend/venv/bin/pip3.12 index 97cb4193..b3de0772 100755 --- a/Backend/venv/bin/pip3.12 +++ b/Backend/venv/bin/pip3.12 @@ -1,8 +1,7 @@ -#!/home/gnx/Desktop/Hotel-Booking/Backend/venv/bin/python3 -# -*- coding: utf-8 -*- -import re +#!/home/gnx/Desktop/Hotel-Booking/Backend/venv/bin/python import sys from pip._internal.cli.main import main if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + if sys.argv[0].endswith('.exe'): + sys.argv[0] = sys.argv[0][:-4] sys.exit(main()) diff --git a/Backend/venv/bin/py.test b/Backend/venv/bin/py.test new file mode 100755 index 00000000..765d415d --- /dev/null +++ b/Backend/venv/bin/py.test @@ -0,0 +1,7 @@ +#!/home/gnx/Desktop/Hotel-Booking/Backend/venv/bin/python +import sys +from pytest import console_main +if __name__ == '__main__': + if sys.argv[0].endswith('.exe'): + sys.argv[0] = sys.argv[0][:-4] + sys.exit(console_main()) diff --git a/Backend/venv/bin/pygmentize b/Backend/venv/bin/pygmentize new file mode 100755 index 00000000..2c00d8d3 --- /dev/null +++ b/Backend/venv/bin/pygmentize @@ -0,0 +1,7 @@ +#!/home/gnx/Desktop/Hotel-Booking/Backend/venv/bin/python +import sys +from pygments.cmdline import main +if __name__ == '__main__': + if sys.argv[0].endswith('.exe'): + sys.argv[0] = sys.argv[0][:-4] + sys.exit(main()) diff --git a/Backend/venv/bin/pytest b/Backend/venv/bin/pytest new file mode 100755 index 00000000..765d415d --- /dev/null +++ b/Backend/venv/bin/pytest @@ -0,0 +1,7 @@ +#!/home/gnx/Desktop/Hotel-Booking/Backend/venv/bin/python +import sys +from pytest import console_main +if __name__ == '__main__': + if sys.argv[0].endswith('.exe'): + sys.argv[0] = sys.argv[0][:-4] + sys.exit(console_main()) diff --git a/Backend/venv/lib/python3.12/site-packages/__pycache__/py.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/__pycache__/py.cpython-312.pyc new file mode 100644 index 00000000..ae542517 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/__pycache__/py.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/__pycache__/typing_extensions.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/__pycache__/typing_extensions.cpython-312.pyc index c808b902..3a31168a 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/__pycache__/typing_extensions.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/__pycache__/typing_extensions.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/__init__.py b/Backend/venv/lib/python3.12/site-packages/_pytest/__init__.py new file mode 100644 index 00000000..8eb8ec96 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/_pytest/__init__.py @@ -0,0 +1,13 @@ +from __future__ import annotations + + +__all__ = ["__version__", "version_tuple"] + +try: + from ._version import version as __version__ + from ._version import version_tuple +except ImportError: # pragma: no cover + # broken installation, we don't even try + # unknown only works because we do poor mans version compare + __version__ = "unknown" + version_tuple = (0, 0, "unknown") diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..e37f5a70 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/_argcomplete.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/_argcomplete.cpython-312.pyc new file mode 100644 index 00000000..7337f601 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/_argcomplete.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/_version.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/_version.cpython-312.pyc new file mode 100644 index 00000000..3452eb32 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/_version.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/cacheprovider.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/cacheprovider.cpython-312.pyc new file mode 100644 index 00000000..d4a1bf7d Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/cacheprovider.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/capture.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/capture.cpython-312.pyc new file mode 100644 index 00000000..94c58033 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/capture.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/compat.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/compat.cpython-312.pyc new file mode 100644 index 00000000..6b40de4e Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/compat.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/debugging.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/debugging.cpython-312.pyc new file mode 100644 index 00000000..05ce468e Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/debugging.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/deprecated.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/deprecated.cpython-312.pyc new file mode 100644 index 00000000..55a1546a Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/deprecated.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/doctest.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/doctest.cpython-312.pyc new file mode 100644 index 00000000..e3ed9232 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/doctest.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/faulthandler.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/faulthandler.cpython-312.pyc new file mode 100644 index 00000000..0d7002a4 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/faulthandler.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/fixtures.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/fixtures.cpython-312.pyc new file mode 100644 index 00000000..5fb01e7a Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/fixtures.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/freeze_support.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/freeze_support.cpython-312.pyc new file mode 100644 index 00000000..87691453 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/freeze_support.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/helpconfig.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/helpconfig.cpython-312.pyc new file mode 100644 index 00000000..d111e7c5 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/helpconfig.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/hookspec.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/hookspec.cpython-312.pyc new file mode 100644 index 00000000..b2c858e5 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/hookspec.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/junitxml.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/junitxml.cpython-312.pyc new file mode 100644 index 00000000..bd85960a Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/junitxml.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/legacypath.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/legacypath.cpython-312.pyc new file mode 100644 index 00000000..deaa00d0 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/legacypath.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/logging.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/logging.cpython-312.pyc new file mode 100644 index 00000000..5a5da84a Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/logging.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/main.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/main.cpython-312.pyc new file mode 100644 index 00000000..64e96750 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/main.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/monkeypatch.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/monkeypatch.cpython-312.pyc new file mode 100644 index 00000000..afbd2e80 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/monkeypatch.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/nodes.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/nodes.cpython-312.pyc new file mode 100644 index 00000000..4bdc87b7 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/nodes.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/outcomes.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/outcomes.cpython-312.pyc new file mode 100644 index 00000000..4fa11505 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/outcomes.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/pastebin.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/pastebin.cpython-312.pyc new file mode 100644 index 00000000..c077406a Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/pastebin.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/pathlib.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/pathlib.cpython-312.pyc new file mode 100644 index 00000000..5ac6da06 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/pathlib.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/pytester.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/pytester.cpython-312.pyc new file mode 100644 index 00000000..e04dacc6 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/pytester.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/pytester_assertions.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/pytester_assertions.cpython-312.pyc new file mode 100644 index 00000000..f6ab9608 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/pytester_assertions.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/python.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/python.cpython-312.pyc new file mode 100644 index 00000000..22dc1540 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/python.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/python_api.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/python_api.cpython-312.pyc new file mode 100644 index 00000000..35261a2c Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/python_api.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/raises.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/raises.cpython-312.pyc new file mode 100644 index 00000000..c9803da8 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/raises.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/recwarn.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/recwarn.cpython-312.pyc new file mode 100644 index 00000000..89cb558c Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/recwarn.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/reports.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/reports.cpython-312.pyc new file mode 100644 index 00000000..05370881 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/reports.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/runner.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/runner.cpython-312.pyc new file mode 100644 index 00000000..427d0a32 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/runner.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/scope.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/scope.cpython-312.pyc new file mode 100644 index 00000000..fc09430e Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/scope.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/setuponly.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/setuponly.cpython-312.pyc new file mode 100644 index 00000000..a0433ac8 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/setuponly.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/setupplan.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/setupplan.cpython-312.pyc new file mode 100644 index 00000000..a1fef745 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/setupplan.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/skipping.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/skipping.cpython-312.pyc new file mode 100644 index 00000000..d157298c Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/skipping.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/stash.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/stash.cpython-312.pyc new file mode 100644 index 00000000..de452b59 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/stash.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/stepwise.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/stepwise.cpython-312.pyc new file mode 100644 index 00000000..70f54d19 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/stepwise.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/subtests.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/subtests.cpython-312.pyc new file mode 100644 index 00000000..64830e0b Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/subtests.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/terminal.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/terminal.cpython-312.pyc new file mode 100644 index 00000000..f9137a10 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/terminal.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/threadexception.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/threadexception.cpython-312.pyc new file mode 100644 index 00000000..c8bbade5 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/threadexception.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/timing.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/timing.cpython-312.pyc new file mode 100644 index 00000000..7c5b7c75 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/timing.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/tmpdir.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/tmpdir.cpython-312.pyc new file mode 100644 index 00000000..f81eefec Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/tmpdir.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/tracemalloc.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/tracemalloc.cpython-312.pyc new file mode 100644 index 00000000..f2fb41a4 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/tracemalloc.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/unittest.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/unittest.cpython-312.pyc new file mode 100644 index 00000000..a1a64018 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/unittest.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/unraisableexception.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/unraisableexception.cpython-312.pyc new file mode 100644 index 00000000..6f78525f Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/unraisableexception.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/warning_types.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/warning_types.cpython-312.pyc new file mode 100644 index 00000000..9494b614 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/warning_types.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/warnings.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/warnings.cpython-312.pyc new file mode 100644 index 00000000..559ae5d1 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/__pycache__/warnings.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/_argcomplete.py b/Backend/venv/lib/python3.12/site-packages/_pytest/_argcomplete.py new file mode 100644 index 00000000..59426ef9 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/_pytest/_argcomplete.py @@ -0,0 +1,117 @@ +"""Allow bash-completion for argparse with argcomplete if installed. + +Needs argcomplete>=0.5.6 for python 3.2/3.3 (older versions fail +to find the magic string, so _ARGCOMPLETE env. var is never set, and +this does not need special code). + +Function try_argcomplete(parser) should be called directly before +the call to ArgumentParser.parse_args(). + +The filescompleter is what you normally would use on the positional +arguments specification, in order to get "dirname/" after "dirn" +instead of the default "dirname ": + + optparser.add_argument(Config._file_or_dir, nargs='*').completer=filescompleter + +Other, application specific, completers should go in the file +doing the add_argument calls as they need to be specified as .completer +attributes as well. (If argcomplete is not installed, the function the +attribute points to will not be used). + +SPEEDUP +======= + +The generic argcomplete script for bash-completion +(/etc/bash_completion.d/python-argcomplete.sh) +uses a python program to determine startup script generated by pip. +You can speed up completion somewhat by changing this script to include + # PYTHON_ARGCOMPLETE_OK +so the python-argcomplete-check-easy-install-script does not +need to be called to find the entry point of the code and see if that is +marked with PYTHON_ARGCOMPLETE_OK. + +INSTALL/DEBUGGING +================= + +To include this support in another application that has setup.py generated +scripts: + +- Add the line: + # PYTHON_ARGCOMPLETE_OK + near the top of the main python entry point. + +- Include in the file calling parse_args(): + from _argcomplete import try_argcomplete, filescompleter + Call try_argcomplete just before parse_args(), and optionally add + filescompleter to the positional arguments' add_argument(). + +If things do not work right away: + +- Switch on argcomplete debugging with (also helpful when doing custom + completers): + export _ARC_DEBUG=1 + +- Run: + python-argcomplete-check-easy-install-script $(which appname) + echo $? + will echo 0 if the magic line has been found, 1 if not. + +- Sometimes it helps to find early on errors using: + _ARGCOMPLETE=1 _ARC_DEBUG=1 appname + which should throw a KeyError: 'COMPLINE' (which is properly set by the + global argcomplete script). +""" + +from __future__ import annotations + +import argparse +from glob import glob +import os +import sys +from typing import Any + + +class FastFilesCompleter: + """Fast file completer class.""" + + def __init__(self, directories: bool = True) -> None: + self.directories = directories + + def __call__(self, prefix: str, **kwargs: Any) -> list[str]: + # Only called on non option completions. + if os.sep in prefix[1:]: + prefix_dir = len(os.path.dirname(prefix) + os.sep) + else: + prefix_dir = 0 + completion = [] + globbed = [] + if "*" not in prefix and "?" not in prefix: + # We are on unix, otherwise no bash. + if not prefix or prefix[-1] == os.sep: + globbed.extend(glob(prefix + ".*")) + prefix += "*" + globbed.extend(glob(prefix)) + for x in sorted(globbed): + if os.path.isdir(x): + x += "/" + # Append stripping the prefix (like bash, not like compgen). + completion.append(x[prefix_dir:]) + return completion + + +if os.environ.get("_ARGCOMPLETE"): + try: + import argcomplete.completers + except ImportError: + sys.exit(-1) + filescompleter: FastFilesCompleter | None = FastFilesCompleter() + + def try_argcomplete(parser: argparse.ArgumentParser) -> None: + argcomplete.autocomplete(parser, always_complete_options=False) + +else: + + def try_argcomplete(parser: argparse.ArgumentParser) -> None: + pass + + filescompleter = None diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/_code/__init__.py b/Backend/venv/lib/python3.12/site-packages/_pytest/_code/__init__.py new file mode 100644 index 00000000..7f67a2e3 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/_pytest/_code/__init__.py @@ -0,0 +1,26 @@ +"""Python inspection/code generation API.""" + +from __future__ import annotations + +from .code import Code +from .code import ExceptionInfo +from .code import filter_traceback +from .code import Frame +from .code import getfslineno +from .code import Traceback +from .code import TracebackEntry +from .source import getrawcode +from .source import Source + + +__all__ = [ + "Code", + "ExceptionInfo", + "Frame", + "Source", + "Traceback", + "TracebackEntry", + "filter_traceback", + "getfslineno", + "getrawcode", +] diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/_code/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/_code/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..4cdfb645 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/_code/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/_code/__pycache__/code.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/_code/__pycache__/code.cpython-312.pyc new file mode 100644 index 00000000..a33216c6 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/_code/__pycache__/code.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/_code/__pycache__/source.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/_code/__pycache__/source.cpython-312.pyc new file mode 100644 index 00000000..4596af08 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/_code/__pycache__/source.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/_code/code.py b/Backend/venv/lib/python3.12/site-packages/_pytest/_code/code.py new file mode 100644 index 00000000..add2a493 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/_pytest/_code/code.py @@ -0,0 +1,1565 @@ +# mypy: allow-untyped-defs +from __future__ import annotations + +import ast +from collections.abc import Callable +from collections.abc import Iterable +from collections.abc import Mapping +from collections.abc import Sequence +import dataclasses +import inspect +from inspect import CO_VARARGS +from inspect import CO_VARKEYWORDS +from io import StringIO +import os +from pathlib import Path +import re +import sys +from traceback import extract_tb +from traceback import format_exception +from traceback import format_exception_only +from traceback import FrameSummary +from types import CodeType +from types import FrameType +from types import TracebackType +from typing import Any +from typing import ClassVar +from typing import Final +from typing import final +from typing import Generic +from typing import Literal +from typing import overload +from typing import SupportsIndex +from typing import TypeAlias +from typing import TypeVar + +import pluggy + +import _pytest +from _pytest._code.source import findsource +from _pytest._code.source import getrawcode +from _pytest._code.source import getstatementrange_ast +from _pytest._code.source import Source +from _pytest._io import TerminalWriter +from _pytest._io.saferepr import safeformat +from _pytest._io.saferepr import saferepr +from _pytest.compat import get_real_func +from _pytest.deprecated import check_ispytest +from _pytest.pathlib import absolutepath +from _pytest.pathlib import bestrelpath + + +if sys.version_info < (3, 11): + from exceptiongroup import BaseExceptionGroup + +TracebackStyle = Literal["long", "short", "line", "no", "native", "value", "auto"] + +EXCEPTION_OR_MORE = type[BaseException] | tuple[type[BaseException], ...] + + +class Code: + """Wrapper around Python code objects.""" + + __slots__ = ("raw",) + + def __init__(self, obj: CodeType) -> None: + self.raw = obj + + @classmethod + def from_function(cls, obj: object) -> Code: + return cls(getrawcode(obj)) + + def __eq__(self, other): + return self.raw == other.raw + + # Ignore type because of https://github.com/python/mypy/issues/4266. + __hash__ = None # type: ignore + + @property + def firstlineno(self) -> int: + return self.raw.co_firstlineno - 1 + + @property + def name(self) -> str: + return self.raw.co_name + + @property + def path(self) -> Path | str: + """Return a path object pointing to source code, or an ``str`` in + case of ``OSError`` / non-existing file.""" + if not self.raw.co_filename: + return "" + try: + p = absolutepath(self.raw.co_filename) + # maybe don't try this checking + if not p.exists(): + raise OSError("path check failed.") + return p + except OSError: + # XXX maybe try harder like the weird logic + # in the standard lib [linecache.updatecache] does? + return self.raw.co_filename + + @property + def fullsource(self) -> Source | None: + """Return a _pytest._code.Source object for the full source file of the code.""" + full, _ = findsource(self.raw) + return full + + def source(self) -> Source: + """Return a _pytest._code.Source object for the code object's source only.""" + # return source only for that part of code + return Source(self.raw) + + def getargs(self, var: bool = False) -> tuple[str, ...]: + """Return a tuple with the argument names for the code object. + + If 'var' is set True also return the names of the variable and + keyword arguments when present. + """ + # Handy shortcut for getting args. + raw = self.raw + argcount = raw.co_argcount + if var: + argcount += raw.co_flags & CO_VARARGS + argcount += raw.co_flags & CO_VARKEYWORDS + return raw.co_varnames[:argcount] + + +class Frame: + """Wrapper around a Python frame holding f_locals and f_globals + in which expressions can be evaluated.""" + + __slots__ = ("raw",) + + def __init__(self, frame: FrameType) -> None: + self.raw = frame + + @property + def lineno(self) -> int: + return self.raw.f_lineno - 1 + + @property + def f_globals(self) -> dict[str, Any]: + return self.raw.f_globals + + @property + def f_locals(self) -> dict[str, Any]: + return self.raw.f_locals + + @property + def code(self) -> Code: + return Code(self.raw.f_code) + + @property + def statement(self) -> Source: + """Statement this frame is at.""" + if self.code.fullsource is None: + return Source("") + return self.code.fullsource.getstatement(self.lineno) + + def eval(self, code, **vars): + """Evaluate 'code' in the frame. + + 'vars' are optional additional local variables. + + Returns the result of the evaluation. + """ + f_locals = self.f_locals.copy() + f_locals.update(vars) + return eval(code, self.f_globals, f_locals) + + def repr(self, object: object) -> str: + """Return a 'safe' (non-recursive, one-line) string repr for 'object'.""" + return saferepr(object) + + def getargs(self, var: bool = False): + """Return a list of tuples (name, value) for all arguments. + + If 'var' is set True, also include the variable and keyword arguments + when present. + """ + retval = [] + for arg in self.code.getargs(var): + try: + retval.append((arg, self.f_locals[arg])) + except KeyError: + pass # this can occur when using Psyco + return retval + + +class TracebackEntry: + """A single entry in a Traceback.""" + + __slots__ = ("_rawentry", "_repr_style") + + def __init__( + self, + rawentry: TracebackType, + repr_style: Literal["short", "long"] | None = None, + ) -> None: + self._rawentry: Final = rawentry + self._repr_style: Final = repr_style + + def with_repr_style( + self, repr_style: Literal["short", "long"] | None + ) -> TracebackEntry: + return TracebackEntry(self._rawentry, repr_style) + + @property + def lineno(self) -> int: + return self._rawentry.tb_lineno - 1 + + def get_python_framesummary(self) -> FrameSummary: + # Python's built-in traceback module implements all the nitty gritty + # details to get column numbers of out frames. + stack_summary = extract_tb(self._rawentry, limit=1) + return stack_summary[0] + + # Column and end line numbers introduced in python 3.11 + if sys.version_info < (3, 11): + + @property + def end_lineno_relative(self) -> int | None: + return None + + @property + def colno(self) -> int | None: + return None + + @property + def end_colno(self) -> int | None: + return None + else: + + @property + def end_lineno_relative(self) -> int | None: + frame_summary = self.get_python_framesummary() + if frame_summary.end_lineno is None: # pragma: no cover + return None + return frame_summary.end_lineno - 1 - self.frame.code.firstlineno + + @property + def colno(self) -> int | None: + """Starting byte offset of the expression in the traceback entry.""" + return self.get_python_framesummary().colno + + @property + def end_colno(self) -> int | None: + """Ending byte offset of the expression in the traceback entry.""" + return self.get_python_framesummary().end_colno + + @property + def frame(self) -> Frame: + return Frame(self._rawentry.tb_frame) + + @property + def relline(self) -> int: + return self.lineno - self.frame.code.firstlineno + + def __repr__(self) -> str: + return f"" + + @property + def statement(self) -> Source: + """_pytest._code.Source object for the current statement.""" + source = self.frame.code.fullsource + assert source is not None + return source.getstatement(self.lineno) + + @property + def path(self) -> Path | str: + """Path to the source code.""" + return self.frame.code.path + + @property + def locals(self) -> dict[str, Any]: + """Locals of underlying frame.""" + return self.frame.f_locals + + def getfirstlinesource(self) -> int: + return self.frame.code.firstlineno + + def getsource( + self, astcache: dict[str | Path, ast.AST] | None = None + ) -> Source | None: + """Return failing source code.""" + # we use the passed in astcache to not reparse asttrees + # within exception info printing + source = self.frame.code.fullsource + if source is None: + return None + key = astnode = None + if astcache is not None: + key = self.frame.code.path + if key is not None: + astnode = astcache.get(key, None) + start = self.getfirstlinesource() + try: + astnode, _, end = getstatementrange_ast( + self.lineno, source, astnode=astnode + ) + except SyntaxError: + end = self.lineno + 1 + else: + if key is not None and astcache is not None: + astcache[key] = astnode + return source[start:end] + + source = property(getsource) + + def ishidden(self, excinfo: ExceptionInfo[BaseException] | None) -> bool: + """Return True if the current frame has a var __tracebackhide__ + resolving to True. + + If __tracebackhide__ is a callable, it gets called with the + ExceptionInfo instance and can decide whether to hide the traceback. + + Mostly for internal use. + """ + tbh: bool | Callable[[ExceptionInfo[BaseException] | None], bool] = False + for maybe_ns_dct in (self.frame.f_locals, self.frame.f_globals): + # in normal cases, f_locals and f_globals are dictionaries + # however via `exec(...)` / `eval(...)` they can be other types + # (even incorrect types!). + # as such, we suppress all exceptions while accessing __tracebackhide__ + try: + tbh = maybe_ns_dct["__tracebackhide__"] + except Exception: + pass + else: + break + if tbh and callable(tbh): + return tbh(excinfo) + return tbh + + def __str__(self) -> str: + name = self.frame.code.name + try: + line = str(self.statement).lstrip() + except KeyboardInterrupt: + raise + except BaseException: + line = "???" + # This output does not quite match Python's repr for traceback entries, + # but changing it to do so would break certain plugins. See + # https://github.com/pytest-dev/pytest/pull/7535/ for details. + return f" File '{self.path}':{self.lineno + 1} in {name}\n {line}\n" + + @property + def name(self) -> str: + """co_name of underlying code.""" + return self.frame.code.raw.co_name + + +class Traceback(list[TracebackEntry]): + """Traceback objects encapsulate and offer higher level access to Traceback entries.""" + + def __init__( + self, + tb: TracebackType | Iterable[TracebackEntry], + ) -> None: + """Initialize from given python traceback object and ExceptionInfo.""" + if isinstance(tb, TracebackType): + + def f(cur: TracebackType) -> Iterable[TracebackEntry]: + cur_: TracebackType | None = cur + while cur_ is not None: + yield TracebackEntry(cur_) + cur_ = cur_.tb_next + + super().__init__(f(tb)) + else: + super().__init__(tb) + + def cut( + self, + path: os.PathLike[str] | str | None = None, + lineno: int | None = None, + firstlineno: int | None = None, + excludepath: os.PathLike[str] | None = None, + ) -> Traceback: + """Return a Traceback instance wrapping part of this Traceback. + + By providing any combination of path, lineno and firstlineno, the + first frame to start the to-be-returned traceback is determined. + + This allows cutting the first part of a Traceback instance e.g. + for formatting reasons (removing some uninteresting bits that deal + with handling of the exception/traceback). + """ + path_ = None if path is None else os.fspath(path) + excludepath_ = None if excludepath is None else os.fspath(excludepath) + for x in self: + code = x.frame.code + codepath = code.path + if path is not None and str(codepath) != path_: + continue + if ( + excludepath is not None + and isinstance(codepath, Path) + and excludepath_ in (str(p) for p in codepath.parents) # type: ignore[operator] + ): + continue + if lineno is not None and x.lineno != lineno: + continue + if firstlineno is not None and x.frame.code.firstlineno != firstlineno: + continue + return Traceback(x._rawentry) + return self + + @overload + def __getitem__(self, key: SupportsIndex) -> TracebackEntry: ... + + @overload + def __getitem__(self, key: slice) -> Traceback: ... + + def __getitem__(self, key: SupportsIndex | slice) -> TracebackEntry | Traceback: + if isinstance(key, slice): + return self.__class__(super().__getitem__(key)) + else: + return super().__getitem__(key) + + def filter( + self, + excinfo_or_fn: ExceptionInfo[BaseException] | Callable[[TracebackEntry], bool], + /, + ) -> Traceback: + """Return a Traceback instance with certain items removed. + + If the filter is an `ExceptionInfo`, removes all the ``TracebackEntry``s + which are hidden (see ishidden() above). + + Otherwise, the filter is a function that gets a single argument, a + ``TracebackEntry`` instance, and should return True when the item should + be added to the ``Traceback``, False when not. + """ + if isinstance(excinfo_or_fn, ExceptionInfo): + fn = lambda x: not x.ishidden(excinfo_or_fn) # noqa: E731 + else: + fn = excinfo_or_fn + return Traceback(filter(fn, self)) + + def recursionindex(self) -> int | None: + """Return the index of the frame/TracebackEntry where recursion originates if + appropriate, None if no recursion occurred.""" + cache: dict[tuple[Any, int, int], list[dict[str, Any]]] = {} + for i, entry in enumerate(self): + # id for the code.raw is needed to work around + # the strange metaprogramming in the decorator lib from pypi + # which generates code objects that have hash/value equality + # XXX needs a test + key = entry.frame.code.path, id(entry.frame.code.raw), entry.lineno + values = cache.setdefault(key, []) + # Since Python 3.13 f_locals is a proxy, freeze it. + loc = dict(entry.frame.f_locals) + if values: + for otherloc in values: + if otherloc == loc: + return i + values.append(loc) + return None + + +def stringify_exception( + exc: BaseException, include_subexception_msg: bool = True +) -> str: + try: + notes = getattr(exc, "__notes__", []) + except KeyError: + # Workaround for https://github.com/python/cpython/issues/98778 on + # some 3.10 and 3.11 patch versions. + HTTPError = getattr(sys.modules.get("urllib.error", None), "HTTPError", ()) + if sys.version_info < (3, 12) and isinstance(exc, HTTPError): + notes = [] + else: # pragma: no cover + # exception not related to above bug, reraise + raise + if not include_subexception_msg and isinstance(exc, BaseExceptionGroup): + message = exc.message + else: + message = str(exc) + + return "\n".join( + [ + message, + *notes, + ] + ) + + +E = TypeVar("E", bound=BaseException, covariant=True) + + +@final +@dataclasses.dataclass +class ExceptionInfo(Generic[E]): + """Wraps sys.exc_info() objects and offers help for navigating the traceback.""" + + _assert_start_repr: ClassVar = "AssertionError('assert " + + _excinfo: tuple[type[E], E, TracebackType] | None + _striptext: str + _traceback: Traceback | None + + def __init__( + self, + excinfo: tuple[type[E], E, TracebackType] | None, + striptext: str = "", + traceback: Traceback | None = None, + *, + _ispytest: bool = False, + ) -> None: + check_ispytest(_ispytest) + self._excinfo = excinfo + self._striptext = striptext + self._traceback = traceback + + @classmethod + def from_exception( + cls, + # Ignoring error: "Cannot use a covariant type variable as a parameter". + # This is OK to ignore because this class is (conceptually) readonly. + # See https://github.com/python/mypy/issues/7049. + exception: E, # type: ignore[misc] + exprinfo: str | None = None, + ) -> ExceptionInfo[E]: + """Return an ExceptionInfo for an existing exception. + + The exception must have a non-``None`` ``__traceback__`` attribute, + otherwise this function fails with an assertion error. This means that + the exception must have been raised, or added a traceback with the + :py:meth:`~BaseException.with_traceback()` method. + + :param exprinfo: + A text string helping to determine if we should strip + ``AssertionError`` from the output. Defaults to the exception + message/``__str__()``. + + .. versionadded:: 7.4 + """ + assert exception.__traceback__, ( + "Exceptions passed to ExcInfo.from_exception(...)" + " must have a non-None __traceback__." + ) + exc_info = (type(exception), exception, exception.__traceback__) + return cls.from_exc_info(exc_info, exprinfo) + + @classmethod + def from_exc_info( + cls, + exc_info: tuple[type[E], E, TracebackType], + exprinfo: str | None = None, + ) -> ExceptionInfo[E]: + """Like :func:`from_exception`, but using old-style exc_info tuple.""" + _striptext = "" + if exprinfo is None and isinstance(exc_info[1], AssertionError): + exprinfo = getattr(exc_info[1], "msg", None) + if exprinfo is None: + exprinfo = saferepr(exc_info[1]) + if exprinfo and exprinfo.startswith(cls._assert_start_repr): + _striptext = "AssertionError: " + + return cls(exc_info, _striptext, _ispytest=True) + + @classmethod + def from_current(cls, exprinfo: str | None = None) -> ExceptionInfo[BaseException]: + """Return an ExceptionInfo matching the current traceback. + + .. warning:: + + Experimental API + + :param exprinfo: + A text string helping to determine if we should strip + ``AssertionError`` from the output. Defaults to the exception + message/``__str__()``. + """ + tup = sys.exc_info() + assert tup[0] is not None, "no current exception" + assert tup[1] is not None, "no current exception" + assert tup[2] is not None, "no current exception" + exc_info = (tup[0], tup[1], tup[2]) + return ExceptionInfo.from_exc_info(exc_info, exprinfo) + + @classmethod + def for_later(cls) -> ExceptionInfo[E]: + """Return an unfilled ExceptionInfo.""" + return cls(None, _ispytest=True) + + def fill_unfilled(self, exc_info: tuple[type[E], E, TracebackType]) -> None: + """Fill an unfilled ExceptionInfo created with ``for_later()``.""" + assert self._excinfo is None, "ExceptionInfo was already filled" + self._excinfo = exc_info + + @property + def type(self) -> type[E]: + """The exception class.""" + assert self._excinfo is not None, ( + ".type can only be used after the context manager exits" + ) + return self._excinfo[0] + + @property + def value(self) -> E: + """The exception value.""" + assert self._excinfo is not None, ( + ".value can only be used after the context manager exits" + ) + return self._excinfo[1] + + @property + def tb(self) -> TracebackType: + """The exception raw traceback.""" + assert self._excinfo is not None, ( + ".tb can only be used after the context manager exits" + ) + return self._excinfo[2] + + @property + def typename(self) -> str: + """The type name of the exception.""" + assert self._excinfo is not None, ( + ".typename can only be used after the context manager exits" + ) + return self.type.__name__ + + @property + def traceback(self) -> Traceback: + """The traceback.""" + if self._traceback is None: + self._traceback = Traceback(self.tb) + return self._traceback + + @traceback.setter + def traceback(self, value: Traceback) -> None: + self._traceback = value + + def __repr__(self) -> str: + if self._excinfo is None: + return "" + return f"<{self.__class__.__name__} {saferepr(self._excinfo[1])} tblen={len(self.traceback)}>" + + def exconly(self, tryshort: bool = False) -> str: + """Return the exception as a string. + + When 'tryshort' resolves to True, and the exception is an + AssertionError, only the actual exception part of the exception + representation is returned (so 'AssertionError: ' is removed from + the beginning). + """ + + def _get_single_subexc( + eg: BaseExceptionGroup[BaseException], + ) -> BaseException | None: + if len(eg.exceptions) != 1: + return None + if isinstance(e := eg.exceptions[0], BaseExceptionGroup): + return _get_single_subexc(e) + return e + + if ( + tryshort + and isinstance(self.value, BaseExceptionGroup) + and (subexc := _get_single_subexc(self.value)) is not None + ): + return f"{subexc!r} [single exception in {type(self.value).__name__}]" + + lines = format_exception_only(self.type, self.value) + text = "".join(lines) + text = text.rstrip() + if tryshort: + if text.startswith(self._striptext): + text = text[len(self._striptext) :] + return text + + def errisinstance(self, exc: EXCEPTION_OR_MORE) -> bool: + """Return True if the exception is an instance of exc. + + Consider using ``isinstance(excinfo.value, exc)`` instead. + """ + return isinstance(self.value, exc) + + def _getreprcrash(self) -> ReprFileLocation | None: + # Find last non-hidden traceback entry that led to the exception of the + # traceback, or None if all hidden. + for i in range(-1, -len(self.traceback) - 1, -1): + entry = self.traceback[i] + if not entry.ishidden(self): + path, lineno = entry.frame.code.raw.co_filename, entry.lineno + exconly = self.exconly(tryshort=True) + return ReprFileLocation(path, lineno + 1, exconly) + return None + + def getrepr( + self, + showlocals: bool = False, + style: TracebackStyle = "long", + abspath: bool = False, + tbfilter: bool | Callable[[ExceptionInfo[BaseException]], Traceback] = True, + funcargs: bool = False, + truncate_locals: bool = True, + truncate_args: bool = True, + chain: bool = True, + ) -> ReprExceptionInfo | ExceptionChainRepr: + """Return str()able representation of this exception info. + + :param bool showlocals: + Show locals per traceback entry. + Ignored if ``style=="native"``. + + :param str style: + long|short|line|no|native|value traceback style. + + :param bool abspath: + If paths should be changed to absolute or left unchanged. + + :param tbfilter: + A filter for traceback entries. + + * If false, don't hide any entries. + * If true, hide internal entries and entries that contain a local + variable ``__tracebackhide__ = True``. + * If a callable, delegates the filtering to the callable. + + Ignored if ``style`` is ``"native"``. + + :param bool funcargs: + Show fixtures ("funcargs" for legacy purposes) per traceback entry. + + :param bool truncate_locals: + With ``showlocals==True``, make sure locals can be safely represented as strings. + + :param bool truncate_args: + With ``showargs==True``, make sure args can be safely represented as strings. + + :param bool chain: + If chained exceptions in Python 3 should be shown. + + .. versionchanged:: 3.9 + + Added the ``chain`` parameter. + """ + if style == "native": + return ReprExceptionInfo( + reprtraceback=ReprTracebackNative( + format_exception( + self.type, + self.value, + self.traceback[0]._rawentry if self.traceback else None, + ) + ), + reprcrash=self._getreprcrash(), + ) + + fmt = FormattedExcinfo( + showlocals=showlocals, + style=style, + abspath=abspath, + tbfilter=tbfilter, + funcargs=funcargs, + truncate_locals=truncate_locals, + truncate_args=truncate_args, + chain=chain, + ) + return fmt.repr_excinfo(self) + + def match(self, regexp: str | re.Pattern[str]) -> Literal[True]: + """Check whether the regular expression `regexp` matches the string + representation of the exception using :func:`python:re.search`. + + If it matches `True` is returned, otherwise an `AssertionError` is raised. + """ + __tracebackhide__ = True + value = stringify_exception(self.value) + msg = ( + f"Regex pattern did not match.\n" + f" Expected regex: {regexp!r}\n" + f" Actual message: {value!r}" + ) + if regexp == value: + msg += "\n Did you mean to `re.escape()` the regex?" + assert re.search(regexp, value), msg + # Return True to allow for "assert excinfo.match()". + return True + + def _group_contains( + self, + exc_group: BaseExceptionGroup[BaseException], + expected_exception: EXCEPTION_OR_MORE, + match: str | re.Pattern[str] | None, + target_depth: int | None = None, + current_depth: int = 1, + ) -> bool: + """Return `True` if a `BaseExceptionGroup` contains a matching exception.""" + if (target_depth is not None) and (current_depth > target_depth): + # already descended past the target depth + return False + for exc in exc_group.exceptions: + if isinstance(exc, BaseExceptionGroup): + if self._group_contains( + exc, expected_exception, match, target_depth, current_depth + 1 + ): + return True + if (target_depth is not None) and (current_depth != target_depth): + # not at the target depth, no match + continue + if not isinstance(exc, expected_exception): + continue + if match is not None: + value = stringify_exception(exc) + if not re.search(match, value): + continue + return True + return False + + def group_contains( + self, + expected_exception: EXCEPTION_OR_MORE, + *, + match: str | re.Pattern[str] | None = None, + depth: int | None = None, + ) -> bool: + """Check whether a captured exception group contains a matching exception. + + :param Type[BaseException] | Tuple[Type[BaseException]] expected_exception: + The expected exception type, or a tuple if one of multiple possible + exception types are expected. + + :param str | re.Pattern[str] | None match: + If specified, a string containing a regular expression, + or a regular expression object, that is tested against the string + representation of the exception and its `PEP-678 ` `__notes__` + using :func:`re.search`. + + To match a literal string that may contain :ref:`special characters + `, the pattern can first be escaped with :func:`re.escape`. + + :param Optional[int] depth: + If `None`, will search for a matching exception at any nesting depth. + If >= 1, will only match an exception if it's at the specified depth (depth = 1 being + the exceptions contained within the topmost exception group). + + .. versionadded:: 8.0 + + .. warning:: + This helper makes it easy to check for the presence of specific exceptions, + but it is very bad for checking that the group does *not* contain + *any other exceptions*. + You should instead consider using :class:`pytest.RaisesGroup` + + """ + msg = "Captured exception is not an instance of `BaseExceptionGroup`" + assert isinstance(self.value, BaseExceptionGroup), msg + msg = "`depth` must be >= 1 if specified" + assert (depth is None) or (depth >= 1), msg + return self._group_contains(self.value, expected_exception, match, depth) + + +# Type alias for the `tbfilter` setting: +# bool: If True, it should be filtered using Traceback.filter() +# callable: A callable that takes an ExceptionInfo and returns the filtered traceback. +TracebackFilter: TypeAlias = bool | Callable[[ExceptionInfo[BaseException]], Traceback] + + +@dataclasses.dataclass +class FormattedExcinfo: + """Presenting information about failing Functions and Generators.""" + + # for traceback entries + flow_marker: ClassVar = ">" + fail_marker: ClassVar = "E" + + showlocals: bool = False + style: TracebackStyle = "long" + abspath: bool = True + tbfilter: TracebackFilter = True + funcargs: bool = False + truncate_locals: bool = True + truncate_args: bool = True + chain: bool = True + astcache: dict[str | Path, ast.AST] = dataclasses.field( + default_factory=dict, init=False, repr=False + ) + + def _getindent(self, source: Source) -> int: + # Figure out indent for the given source. + try: + s = str(source.getstatement(len(source) - 1)) + except KeyboardInterrupt: + raise + except BaseException: + try: + s = str(source[-1]) + except KeyboardInterrupt: + raise + except BaseException: + return 0 + return 4 + (len(s) - len(s.lstrip())) + + def _getentrysource(self, entry: TracebackEntry) -> Source | None: + source = entry.getsource(self.astcache) + if source is not None: + source = source.deindent() + return source + + def repr_args(self, entry: TracebackEntry) -> ReprFuncArgs | None: + if self.funcargs: + args = [] + for argname, argvalue in entry.frame.getargs(var=True): + if self.truncate_args: + str_repr = saferepr(argvalue) + else: + str_repr = saferepr(argvalue, maxsize=None) + args.append((argname, str_repr)) + return ReprFuncArgs(args) + return None + + def get_source( + self, + source: Source | None, + line_index: int = -1, + excinfo: ExceptionInfo[BaseException] | None = None, + short: bool = False, + end_line_index: int | None = None, + colno: int | None = None, + end_colno: int | None = None, + ) -> list[str]: + """Return formatted and marked up source lines.""" + lines = [] + if source is not None and line_index < 0: + line_index += len(source) + if source is None or line_index >= len(source.lines) or line_index < 0: + # `line_index` could still be outside `range(len(source.lines))` if + # we're processing AST with pathological position attributes. + source = Source("???") + line_index = 0 + space_prefix = " " + if short: + lines.append(space_prefix + source.lines[line_index].strip()) + lines.extend( + self.get_highlight_arrows_for_line( + raw_line=source.raw_lines[line_index], + line=source.lines[line_index].strip(), + lineno=line_index, + end_lineno=end_line_index, + colno=colno, + end_colno=end_colno, + ) + ) + else: + for line in source.lines[:line_index]: + lines.append(space_prefix + line) + lines.append(self.flow_marker + " " + source.lines[line_index]) + lines.extend( + self.get_highlight_arrows_for_line( + raw_line=source.raw_lines[line_index], + line=source.lines[line_index], + lineno=line_index, + end_lineno=end_line_index, + colno=colno, + end_colno=end_colno, + ) + ) + for line in source.lines[line_index + 1 :]: + lines.append(space_prefix + line) + if excinfo is not None: + indent = 4 if short else self._getindent(source) + lines.extend(self.get_exconly(excinfo, indent=indent, markall=True)) + return lines + + def get_highlight_arrows_for_line( + self, + line: str, + raw_line: str, + lineno: int | None, + end_lineno: int | None, + colno: int | None, + end_colno: int | None, + ) -> list[str]: + """Return characters highlighting a source line. + + Example with colno and end_colno pointing to the bar expression: + "foo() + bar()" + returns " ^^^^^" + """ + if lineno != end_lineno: + # Don't handle expressions that span multiple lines. + return [] + if colno is None or end_colno is None: + # Can't do anything without column information. + return [] + + num_stripped_chars = len(raw_line) - len(line) + + start_char_offset = _byte_offset_to_character_offset(raw_line, colno) + end_char_offset = _byte_offset_to_character_offset(raw_line, end_colno) + num_carets = end_char_offset - start_char_offset + # If the highlight would span the whole line, it is redundant, don't + # show it. + if num_carets >= len(line.strip()): + return [] + + highlights = " " + highlights += " " * (start_char_offset - num_stripped_chars + 1) + highlights += "^" * num_carets + return [highlights] + + def get_exconly( + self, + excinfo: ExceptionInfo[BaseException], + indent: int = 4, + markall: bool = False, + ) -> list[str]: + lines = [] + indentstr = " " * indent + # Get the real exception information out. + exlines = excinfo.exconly(tryshort=True).split("\n") + failindent = self.fail_marker + indentstr[1:] + for line in exlines: + lines.append(failindent + line) + if not markall: + failindent = indentstr + return lines + + def repr_locals(self, locals: Mapping[str, object]) -> ReprLocals | None: + if self.showlocals: + lines = [] + keys = [loc for loc in locals if loc[0] != "@"] + keys.sort() + for name in keys: + value = locals[name] + if name == "__builtins__": + lines.append("__builtins__ = ") + else: + # This formatting could all be handled by the + # _repr() function, which is only reprlib.Repr in + # disguise, so is very configurable. + if self.truncate_locals: + str_repr = saferepr(value) + else: + str_repr = safeformat(value) + # if len(str_repr) < 70 or not isinstance(value, (list, tuple, dict)): + lines.append(f"{name:<10} = {str_repr}") + # else: + # self._line("%-10s =\\" % (name,)) + # # XXX + # pprint.pprint(value, stream=self.excinfowriter) + return ReprLocals(lines) + return None + + def repr_traceback_entry( + self, + entry: TracebackEntry | None, + excinfo: ExceptionInfo[BaseException] | None = None, + ) -> ReprEntry: + lines: list[str] = [] + style = ( + entry._repr_style + if entry is not None and entry._repr_style is not None + else self.style + ) + if style in ("short", "long") and entry is not None: + source = self._getentrysource(entry) + if source is None: + source = Source("???") + line_index = 0 + end_line_index, colno, end_colno = None, None, None + else: + line_index = entry.relline + end_line_index = entry.end_lineno_relative + colno = entry.colno + end_colno = entry.end_colno + short = style == "short" + reprargs = self.repr_args(entry) if not short else None + s = self.get_source( + source=source, + line_index=line_index, + excinfo=excinfo, + short=short, + end_line_index=end_line_index, + colno=colno, + end_colno=end_colno, + ) + lines.extend(s) + if short: + message = f"in {entry.name}" + else: + message = (excinfo and excinfo.typename) or "" + entry_path = entry.path + path = self._makepath(entry_path) + reprfileloc = ReprFileLocation(path, entry.lineno + 1, message) + localsrepr = self.repr_locals(entry.locals) + return ReprEntry(lines, reprargs, localsrepr, reprfileloc, style) + elif style == "value": + if excinfo: + lines.extend(str(excinfo.value).split("\n")) + return ReprEntry(lines, None, None, None, style) + else: + if excinfo: + lines.extend(self.get_exconly(excinfo, indent=4)) + return ReprEntry(lines, None, None, None, style) + + def _makepath(self, path: Path | str) -> str: + if not self.abspath and isinstance(path, Path): + try: + np = bestrelpath(Path.cwd(), path) + except OSError: + return str(path) + if len(np) < len(str(path)): + return np + return str(path) + + def repr_traceback(self, excinfo: ExceptionInfo[BaseException]) -> ReprTraceback: + traceback = filter_excinfo_traceback(self.tbfilter, excinfo) + + if isinstance(excinfo.value, RecursionError): + traceback, extraline = self._truncate_recursive_traceback(traceback) + else: + extraline = None + + if not traceback: + if extraline is None: + extraline = "All traceback entries are hidden. Pass `--full-trace` to see hidden and internal frames." + entries = [self.repr_traceback_entry(None, excinfo)] + return ReprTraceback(entries, extraline, style=self.style) + + last = traceback[-1] + if self.style == "value": + entries = [self.repr_traceback_entry(last, excinfo)] + return ReprTraceback(entries, None, style=self.style) + + entries = [ + self.repr_traceback_entry(entry, excinfo if last == entry else None) + for entry in traceback + ] + return ReprTraceback(entries, extraline, style=self.style) + + def _truncate_recursive_traceback( + self, traceback: Traceback + ) -> tuple[Traceback, str | None]: + """Truncate the given recursive traceback trying to find the starting + point of the recursion. + + The detection is done by going through each traceback entry and + finding the point in which the locals of the frame are equal to the + locals of a previous frame (see ``recursionindex()``). + + Handle the situation where the recursion process might raise an + exception (for example comparing numpy arrays using equality raises a + TypeError), in which case we do our best to warn the user of the + error and show a limited traceback. + """ + try: + recursionindex = traceback.recursionindex() + except Exception as e: + max_frames = 10 + extraline: str | None = ( + "!!! Recursion error detected, but an error occurred locating the origin of recursion.\n" + " The following exception happened when comparing locals in the stack frame:\n" + f" {type(e).__name__}: {e!s}\n" + f" Displaying first and last {max_frames} stack frames out of {len(traceback)}." + ) + # Type ignored because adding two instances of a List subtype + # currently incorrectly has type List instead of the subtype. + traceback = traceback[:max_frames] + traceback[-max_frames:] # type: ignore + else: + if recursionindex is not None: + extraline = "!!! Recursion detected (same locals & position)" + traceback = traceback[: recursionindex + 1] + else: + extraline = None + + return traceback, extraline + + def repr_excinfo(self, excinfo: ExceptionInfo[BaseException]) -> ExceptionChainRepr: + repr_chain: list[tuple[ReprTraceback, ReprFileLocation | None, str | None]] = [] + e: BaseException | None = excinfo.value + excinfo_: ExceptionInfo[BaseException] | None = excinfo + descr = None + seen: set[int] = set() + while e is not None and id(e) not in seen: + seen.add(id(e)) + + if excinfo_: + # Fall back to native traceback as a temporary workaround until + # full support for exception groups added to ExceptionInfo. + # See https://github.com/pytest-dev/pytest/issues/9159 + reprtraceback: ReprTraceback | ReprTracebackNative + if isinstance(e, BaseExceptionGroup): + # don't filter any sub-exceptions since they shouldn't have any internal frames + traceback = filter_excinfo_traceback(self.tbfilter, excinfo) + reprtraceback = ReprTracebackNative( + format_exception( + type(excinfo.value), + excinfo.value, + traceback[0]._rawentry, + ) + ) + else: + reprtraceback = self.repr_traceback(excinfo_) + reprcrash = excinfo_._getreprcrash() + else: + # Fallback to native repr if the exception doesn't have a traceback: + # ExceptionInfo objects require a full traceback to work. + reprtraceback = ReprTracebackNative(format_exception(type(e), e, None)) + reprcrash = None + repr_chain += [(reprtraceback, reprcrash, descr)] + + if e.__cause__ is not None and self.chain: + e = e.__cause__ + excinfo_ = ExceptionInfo.from_exception(e) if e.__traceback__ else None + descr = "The above exception was the direct cause of the following exception:" + elif ( + e.__context__ is not None and not e.__suppress_context__ and self.chain + ): + e = e.__context__ + excinfo_ = ExceptionInfo.from_exception(e) if e.__traceback__ else None + descr = "During handling of the above exception, another exception occurred:" + else: + e = None + repr_chain.reverse() + return ExceptionChainRepr(repr_chain) + + +@dataclasses.dataclass(eq=False) +class TerminalRepr: + def __str__(self) -> str: + # FYI this is called from pytest-xdist's serialization of exception + # information. + io = StringIO() + tw = TerminalWriter(file=io) + self.toterminal(tw) + return io.getvalue().strip() + + def __repr__(self) -> str: + return f"<{self.__class__} instance at {id(self):0x}>" + + def toterminal(self, tw: TerminalWriter) -> None: + raise NotImplementedError() + + +# This class is abstract -- only subclasses are instantiated. +@dataclasses.dataclass(eq=False) +class ExceptionRepr(TerminalRepr): + # Provided by subclasses. + reprtraceback: ReprTraceback + reprcrash: ReprFileLocation | None + sections: list[tuple[str, str, str]] = dataclasses.field( + init=False, default_factory=list + ) + + def addsection(self, name: str, content: str, sep: str = "-") -> None: + self.sections.append((name, content, sep)) + + def toterminal(self, tw: TerminalWriter) -> None: + for name, content, sep in self.sections: + tw.sep(sep, name) + tw.line(content) + + +@dataclasses.dataclass(eq=False) +class ExceptionChainRepr(ExceptionRepr): + chain: Sequence[tuple[ReprTraceback, ReprFileLocation | None, str | None]] + + def __init__( + self, + chain: Sequence[tuple[ReprTraceback, ReprFileLocation | None, str | None]], + ) -> None: + # reprcrash and reprtraceback of the outermost (the newest) exception + # in the chain. + super().__init__( + reprtraceback=chain[-1][0], + reprcrash=chain[-1][1], + ) + self.chain = chain + + def toterminal(self, tw: TerminalWriter) -> None: + for element in self.chain: + element[0].toterminal(tw) + if element[2] is not None: + tw.line("") + tw.line(element[2], yellow=True) + super().toterminal(tw) + + +@dataclasses.dataclass(eq=False) +class ReprExceptionInfo(ExceptionRepr): + reprtraceback: ReprTraceback + reprcrash: ReprFileLocation | None + + def toterminal(self, tw: TerminalWriter) -> None: + self.reprtraceback.toterminal(tw) + super().toterminal(tw) + + +@dataclasses.dataclass(eq=False) +class ReprTraceback(TerminalRepr): + reprentries: Sequence[ReprEntry | ReprEntryNative] + extraline: str | None + style: TracebackStyle + + entrysep: ClassVar = "_ " + + def toterminal(self, tw: TerminalWriter) -> None: + # The entries might have different styles. + for i, entry in enumerate(self.reprentries): + if entry.style == "long": + tw.line("") + entry.toterminal(tw) + if i < len(self.reprentries) - 1: + next_entry = self.reprentries[i + 1] + if entry.style == "long" or ( + entry.style == "short" and next_entry.style == "long" + ): + tw.sep(self.entrysep) + + if self.extraline: + tw.line(self.extraline) + + +class ReprTracebackNative(ReprTraceback): + def __init__(self, tblines: Sequence[str]) -> None: + self.reprentries = [ReprEntryNative(tblines)] + self.extraline = None + self.style = "native" + + +@dataclasses.dataclass(eq=False) +class ReprEntryNative(TerminalRepr): + lines: Sequence[str] + + style: ClassVar[TracebackStyle] = "native" + + def toterminal(self, tw: TerminalWriter) -> None: + tw.write("".join(self.lines)) + + +@dataclasses.dataclass(eq=False) +class ReprEntry(TerminalRepr): + lines: Sequence[str] + reprfuncargs: ReprFuncArgs | None + reprlocals: ReprLocals | None + reprfileloc: ReprFileLocation | None + style: TracebackStyle + + def _write_entry_lines(self, tw: TerminalWriter) -> None: + """Write the source code portions of a list of traceback entries with syntax highlighting. + + Usually entries are lines like these: + + " x = 1" + "> assert x == 2" + "E assert 1 == 2" + + This function takes care of rendering the "source" portions of it (the lines without + the "E" prefix) using syntax highlighting, taking care to not highlighting the ">" + character, as doing so might break line continuations. + """ + if not self.lines: + return + + if self.style == "value": + # Using tw.write instead of tw.line for testing purposes due to TWMock implementation; + # lines written with TWMock.line and TWMock._write_source cannot be distinguished + # from each other, whereas lines written with TWMock.write are marked with TWMock.WRITE + for line in self.lines: + tw.write(line) + tw.write("\n") + return + + # separate indents and source lines that are not failures: we want to + # highlight the code but not the indentation, which may contain markers + # such as "> assert 0" + fail_marker = f"{FormattedExcinfo.fail_marker} " + indent_size = len(fail_marker) + indents: list[str] = [] + source_lines: list[str] = [] + failure_lines: list[str] = [] + for index, line in enumerate(self.lines): + is_failure_line = line.startswith(fail_marker) + if is_failure_line: + # from this point on all lines are considered part of the failure + failure_lines.extend(self.lines[index:]) + break + else: + indents.append(line[:indent_size]) + source_lines.append(line[indent_size:]) + + tw._write_source(source_lines, indents) + + # failure lines are always completely red and bold + for line in failure_lines: + tw.line(line, bold=True, red=True) + + def toterminal(self, tw: TerminalWriter) -> None: + if self.style == "short": + if self.reprfileloc: + self.reprfileloc.toterminal(tw) + self._write_entry_lines(tw) + if self.reprlocals: + self.reprlocals.toterminal(tw, indent=" " * 8) + return + + if self.reprfuncargs: + self.reprfuncargs.toterminal(tw) + + self._write_entry_lines(tw) + + if self.reprlocals: + tw.line("") + self.reprlocals.toterminal(tw) + if self.reprfileloc: + if self.lines: + tw.line("") + self.reprfileloc.toterminal(tw) + + def __str__(self) -> str: + return "{}\n{}\n{}".format( + "\n".join(self.lines), self.reprlocals, self.reprfileloc + ) + + +@dataclasses.dataclass(eq=False) +class ReprFileLocation(TerminalRepr): + path: str + lineno: int + message: str + + def __post_init__(self) -> None: + self.path = str(self.path) + + def toterminal(self, tw: TerminalWriter) -> None: + # Filename and lineno output for each entry, using an output format + # that most editors understand. + msg = self.message + i = msg.find("\n") + if i != -1: + msg = msg[:i] + tw.write(self.path, bold=True, red=True) + tw.line(f":{self.lineno}: {msg}") + + +@dataclasses.dataclass(eq=False) +class ReprLocals(TerminalRepr): + lines: Sequence[str] + + def toterminal(self, tw: TerminalWriter, indent="") -> None: + for line in self.lines: + tw.line(indent + line) + + +@dataclasses.dataclass(eq=False) +class ReprFuncArgs(TerminalRepr): + args: Sequence[tuple[str, object]] + + def toterminal(self, tw: TerminalWriter) -> None: + if self.args: + linesofar = "" + for name, value in self.args: + ns = f"{name} = {value}" + if len(ns) + len(linesofar) + 2 > tw.fullwidth: + if linesofar: + tw.line(linesofar) + linesofar = ns + else: + if linesofar: + linesofar += ", " + ns + else: + linesofar = ns + if linesofar: + tw.line(linesofar) + tw.line("") + + +def getfslineno(obj: object) -> tuple[str | Path, int]: + """Return source location (path, lineno) for the given object. + + If the source cannot be determined return ("", -1). + + The line number is 0-based. + """ + # xxx let decorators etc specify a sane ordering + # NOTE: this used to be done in _pytest.compat.getfslineno, initially added + # in 6ec13a2b9. It ("place_as") appears to be something very custom. + obj = get_real_func(obj) + if hasattr(obj, "place_as"): + obj = obj.place_as + + try: + code = Code.from_function(obj) + except TypeError: + try: + fn = inspect.getsourcefile(obj) or inspect.getfile(obj) # type: ignore[arg-type] + except TypeError: + return "", -1 + + fspath = (fn and absolutepath(fn)) or "" + lineno = -1 + if fspath: + try: + _, lineno = findsource(obj) + except OSError: + pass + return fspath, lineno + + return code.path, code.firstlineno + + +def _byte_offset_to_character_offset(str, offset): + """Converts a byte based offset in a string to a code-point.""" + as_utf8 = str.encode("utf-8") + return len(as_utf8[:offset].decode("utf-8", errors="replace")) + + +# Relative paths that we use to filter traceback entries from appearing to the user; +# see filter_traceback. +# note: if we need to add more paths than what we have now we should probably use a list +# for better maintenance. + +_PLUGGY_DIR = Path(pluggy.__file__.rstrip("oc")) +# pluggy is either a package or a single module depending on the version +if _PLUGGY_DIR.name == "__init__.py": + _PLUGGY_DIR = _PLUGGY_DIR.parent +_PYTEST_DIR = Path(_pytest.__file__).parent + + +def filter_traceback(entry: TracebackEntry) -> bool: + """Return True if a TracebackEntry instance should be included in tracebacks. + + We hide traceback entries of: + + * dynamically generated code (no code to show up for it); + * internal traceback from pytest or its internal libraries, py and pluggy. + """ + # entry.path might sometimes return a str object when the entry + # points to dynamically generated code. + # See https://bitbucket.org/pytest-dev/py/issues/71. + raw_filename = entry.frame.code.raw.co_filename + is_generated = "<" in raw_filename and ">" in raw_filename + if is_generated: + return False + + # entry.path might point to a non-existing file, in which case it will + # also return a str object. See #1133. + p = Path(entry.path) + + parents = p.parents + if _PLUGGY_DIR in parents: + return False + if _PYTEST_DIR in parents: + return False + + return True + + +def filter_excinfo_traceback( + tbfilter: TracebackFilter, excinfo: ExceptionInfo[BaseException] +) -> Traceback: + """Filter the exception traceback in ``excinfo`` according to ``tbfilter``.""" + if callable(tbfilter): + return tbfilter(excinfo) + elif tbfilter: + return excinfo.traceback.filter(excinfo) + else: + return excinfo.traceback diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/_code/source.py b/Backend/venv/lib/python3.12/site-packages/_pytest/_code/source.py new file mode 100644 index 00000000..99c242dd --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/_pytest/_code/source.py @@ -0,0 +1,225 @@ +# mypy: allow-untyped-defs +from __future__ import annotations + +import ast +from bisect import bisect_right +from collections.abc import Iterable +from collections.abc import Iterator +import inspect +import textwrap +import tokenize +import types +from typing import overload +import warnings + + +class Source: + """An immutable object holding a source code fragment. + + When using Source(...), the source lines are deindented. + """ + + def __init__(self, obj: object = None) -> None: + if not obj: + self.lines: list[str] = [] + self.raw_lines: list[str] = [] + elif isinstance(obj, Source): + self.lines = obj.lines + self.raw_lines = obj.raw_lines + elif isinstance(obj, tuple | list): + self.lines = deindent(x.rstrip("\n") for x in obj) + self.raw_lines = list(x.rstrip("\n") for x in obj) + elif isinstance(obj, str): + self.lines = deindent(obj.split("\n")) + self.raw_lines = obj.split("\n") + else: + try: + rawcode = getrawcode(obj) + src = inspect.getsource(rawcode) + except TypeError: + src = inspect.getsource(obj) # type: ignore[arg-type] + self.lines = deindent(src.split("\n")) + self.raw_lines = src.split("\n") + + def __eq__(self, other: object) -> bool: + if not isinstance(other, Source): + return NotImplemented + return self.lines == other.lines + + # Ignore type because of https://github.com/python/mypy/issues/4266. + __hash__ = None # type: ignore + + @overload + def __getitem__(self, key: int) -> str: ... + + @overload + def __getitem__(self, key: slice) -> Source: ... + + def __getitem__(self, key: int | slice) -> str | Source: + if isinstance(key, int): + return self.lines[key] + else: + if key.step not in (None, 1): + raise IndexError("cannot slice a Source with a step") + newsource = Source() + newsource.lines = self.lines[key.start : key.stop] + newsource.raw_lines = self.raw_lines[key.start : key.stop] + return newsource + + def __iter__(self) -> Iterator[str]: + return iter(self.lines) + + def __len__(self) -> int: + return len(self.lines) + + def strip(self) -> Source: + """Return new Source object with trailing and leading blank lines removed.""" + start, end = 0, len(self) + while start < end and not self.lines[start].strip(): + start += 1 + while end > start and not self.lines[end - 1].strip(): + end -= 1 + source = Source() + source.raw_lines = self.raw_lines + source.lines[:] = self.lines[start:end] + return source + + def indent(self, indent: str = " " * 4) -> Source: + """Return a copy of the source object with all lines indented by the + given indent-string.""" + newsource = Source() + newsource.raw_lines = self.raw_lines + newsource.lines = [(indent + line) for line in self.lines] + return newsource + + def getstatement(self, lineno: int) -> Source: + """Return Source statement which contains the given linenumber + (counted from 0).""" + start, end = self.getstatementrange(lineno) + return self[start:end] + + def getstatementrange(self, lineno: int) -> tuple[int, int]: + """Return (start, end) tuple which spans the minimal statement region + which containing the given lineno.""" + if not (0 <= lineno < len(self)): + raise IndexError("lineno out of range") + _ast, start, end = getstatementrange_ast(lineno, self) + return start, end + + def deindent(self) -> Source: + """Return a new Source object deindented.""" + newsource = Source() + newsource.lines[:] = deindent(self.lines) + newsource.raw_lines = self.raw_lines + return newsource + + def __str__(self) -> str: + return "\n".join(self.lines) + + +# +# helper functions +# + + +def findsource(obj) -> tuple[Source | None, int]: + try: + sourcelines, lineno = inspect.findsource(obj) + except Exception: + return None, -1 + source = Source() + source.lines = [line.rstrip() for line in sourcelines] + source.raw_lines = sourcelines + return source, lineno + + +def getrawcode(obj: object, trycall: bool = True) -> types.CodeType: + """Return code object for given function.""" + try: + return obj.__code__ # type: ignore[attr-defined,no-any-return] + except AttributeError: + pass + if trycall: + call = getattr(obj, "__call__", None) + if call and not isinstance(obj, type): + return getrawcode(call, trycall=False) + raise TypeError(f"could not get code object for {obj!r}") + + +def deindent(lines: Iterable[str]) -> list[str]: + return textwrap.dedent("\n".join(lines)).splitlines() + + +def get_statement_startend2(lineno: int, node: ast.AST) -> tuple[int, int | None]: + # Flatten all statements and except handlers into one lineno-list. + # AST's line numbers start indexing at 1. + values: list[int] = [] + for x in ast.walk(node): + if isinstance(x, ast.stmt | ast.ExceptHandler): + # The lineno points to the class/def, so need to include the decorators. + if isinstance(x, ast.ClassDef | ast.FunctionDef | ast.AsyncFunctionDef): + for d in x.decorator_list: + values.append(d.lineno - 1) + values.append(x.lineno - 1) + for name in ("finalbody", "orelse"): + val: list[ast.stmt] | None = getattr(x, name, None) + if val: + # Treat the finally/orelse part as its own statement. + values.append(val[0].lineno - 1 - 1) + values.sort() + insert_index = bisect_right(values, lineno) + start = values[insert_index - 1] + if insert_index >= len(values): + end = None + else: + end = values[insert_index] + return start, end + + +def getstatementrange_ast( + lineno: int, + source: Source, + assertion: bool = False, + astnode: ast.AST | None = None, +) -> tuple[ast.AST, int, int]: + if astnode is None: + content = str(source) + # See #4260: + # Don't produce duplicate warnings when compiling source to find AST. + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + astnode = ast.parse(content, "source", "exec") + + start, end = get_statement_startend2(lineno, astnode) + # We need to correct the end: + # - ast-parsing strips comments + # - there might be empty lines + # - we might have lesser indented code blocks at the end + if end is None: + end = len(source.lines) + + if end > start + 1: + # Make sure we don't span differently indented code blocks + # by using the BlockFinder helper used which inspect.getsource() uses itself. + block_finder = inspect.BlockFinder() + # If we start with an indented line, put blockfinder to "started" mode. + block_finder.started = ( + bool(source.lines[start]) and source.lines[start][0].isspace() + ) + it = ((x + "\n") for x in source.lines[start:end]) + try: + for tok in tokenize.generate_tokens(lambda: next(it)): + block_finder.tokeneater(*tok) + except (inspect.EndOfBlock, IndentationError): + end = block_finder.last + start + except Exception: + pass + + # The end might still point to a comment or empty line, correct it. + while end: + line = source.lines[end - 1].lstrip() + if line.startswith("#") or not line: + end -= 1 + else: + break + return astnode, start, end diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/_io/__init__.py b/Backend/venv/lib/python3.12/site-packages/_pytest/_io/__init__.py new file mode 100644 index 00000000..b0155b18 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/_pytest/_io/__init__.py @@ -0,0 +1,10 @@ +from __future__ import annotations + +from .terminalwriter import get_terminal_width +from .terminalwriter import TerminalWriter + + +__all__ = [ + "TerminalWriter", + "get_terminal_width", +] diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/_io/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/_io/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..3850e74d Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/_io/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/_io/__pycache__/pprint.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/_io/__pycache__/pprint.cpython-312.pyc new file mode 100644 index 00000000..13eca886 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/_io/__pycache__/pprint.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/_io/__pycache__/saferepr.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/_io/__pycache__/saferepr.cpython-312.pyc new file mode 100644 index 00000000..a9be7392 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/_io/__pycache__/saferepr.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/_io/__pycache__/terminalwriter.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/_io/__pycache__/terminalwriter.cpython-312.pyc new file mode 100644 index 00000000..09110697 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/_io/__pycache__/terminalwriter.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/_io/__pycache__/wcwidth.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/_io/__pycache__/wcwidth.cpython-312.pyc new file mode 100644 index 00000000..0983b666 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/_io/__pycache__/wcwidth.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/_io/pprint.py b/Backend/venv/lib/python3.12/site-packages/_pytest/_io/pprint.py new file mode 100644 index 00000000..28f06909 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/_pytest/_io/pprint.py @@ -0,0 +1,673 @@ +# mypy: allow-untyped-defs +# This module was imported from the cpython standard library +# (https://github.com/python/cpython/) at commit +# c5140945c723ae6c4b7ee81ff720ac8ea4b52cfd (python3.12). +# +# +# Original Author: Fred L. Drake, Jr. +# fdrake@acm.org +# +# This is a simple little module I wrote to make life easier. I didn't +# see anything quite like it in the library, though I may have overlooked +# something. I wrote this when I was trying to read some heavily nested +# tuples with fairly non-descriptive content. This is modeled very much +# after Lisp/Scheme - style pretty-printing of lists. If you find it +# useful, thank small children who sleep at night. +from __future__ import annotations + +import collections as _collections +from collections.abc import Callable +from collections.abc import Iterator +import dataclasses as _dataclasses +from io import StringIO as _StringIO +import re +import types as _types +from typing import Any +from typing import IO + + +class _safe_key: + """Helper function for key functions when sorting unorderable objects. + + The wrapped-object will fallback to a Py2.x style comparison for + unorderable types (sorting first comparing the type name and then by + the obj ids). Does not work recursively, so dict.items() must have + _safe_key applied to both the key and the value. + + """ + + __slots__ = ["obj"] + + def __init__(self, obj): + self.obj = obj + + def __lt__(self, other): + try: + return self.obj < other.obj + except TypeError: + return (str(type(self.obj)), id(self.obj)) < ( + str(type(other.obj)), + id(other.obj), + ) + + +def _safe_tuple(t): + """Helper function for comparing 2-tuples""" + return _safe_key(t[0]), _safe_key(t[1]) + + +class PrettyPrinter: + def __init__( + self, + indent: int = 4, + width: int = 80, + depth: int | None = None, + ) -> None: + """Handle pretty printing operations onto a stream using a set of + configured parameters. + + indent + Number of spaces to indent for each level of nesting. + + width + Attempted maximum number of columns in the output. + + depth + The maximum depth to print out nested structures. + + """ + if indent < 0: + raise ValueError("indent must be >= 0") + if depth is not None and depth <= 0: + raise ValueError("depth must be > 0") + if not width: + raise ValueError("width must be != 0") + self._depth = depth + self._indent_per_level = indent + self._width = width + + def pformat(self, object: Any) -> str: + sio = _StringIO() + self._format(object, sio, 0, 0, set(), 0) + return sio.getvalue() + + def _format( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: set[int], + level: int, + ) -> None: + objid = id(object) + if objid in context: + stream.write(_recursion(object)) + return + + p = self._dispatch.get(type(object).__repr__, None) + if p is not None: + context.add(objid) + p(self, object, stream, indent, allowance, context, level + 1) + context.remove(objid) + elif ( + _dataclasses.is_dataclass(object) + and not isinstance(object, type) + and object.__dataclass_params__.repr # type:ignore[attr-defined] + and + # Check dataclass has generated repr method. + hasattr(object.__repr__, "__wrapped__") + and "__create_fn__" in object.__repr__.__wrapped__.__qualname__ + ): + context.add(objid) + self._pprint_dataclass( + object, stream, indent, allowance, context, level + 1 + ) + context.remove(objid) + else: + stream.write(self._repr(object, context, level)) + + def _pprint_dataclass( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: set[int], + level: int, + ) -> None: + cls_name = object.__class__.__name__ + items = [ + (f.name, getattr(object, f.name)) + for f in _dataclasses.fields(object) + if f.repr + ] + stream.write(cls_name + "(") + self._format_namespace_items(items, stream, indent, allowance, context, level) + stream.write(")") + + _dispatch: dict[ + Callable[..., str], + Callable[[PrettyPrinter, Any, IO[str], int, int, set[int], int], None], + ] = {} + + def _pprint_dict( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: set[int], + level: int, + ) -> None: + write = stream.write + write("{") + items = sorted(object.items(), key=_safe_tuple) + self._format_dict_items(items, stream, indent, allowance, context, level) + write("}") + + _dispatch[dict.__repr__] = _pprint_dict + + def _pprint_ordered_dict( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: set[int], + level: int, + ) -> None: + if not len(object): + stream.write(repr(object)) + return + cls = object.__class__ + stream.write(cls.__name__ + "(") + self._pprint_dict(object, stream, indent, allowance, context, level) + stream.write(")") + + _dispatch[_collections.OrderedDict.__repr__] = _pprint_ordered_dict + + def _pprint_list( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: set[int], + level: int, + ) -> None: + stream.write("[") + self._format_items(object, stream, indent, allowance, context, level) + stream.write("]") + + _dispatch[list.__repr__] = _pprint_list + + def _pprint_tuple( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: set[int], + level: int, + ) -> None: + stream.write("(") + self._format_items(object, stream, indent, allowance, context, level) + stream.write(")") + + _dispatch[tuple.__repr__] = _pprint_tuple + + def _pprint_set( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: set[int], + level: int, + ) -> None: + if not len(object): + stream.write(repr(object)) + return + typ = object.__class__ + if typ is set: + stream.write("{") + endchar = "}" + else: + stream.write(typ.__name__ + "({") + endchar = "})" + object = sorted(object, key=_safe_key) + self._format_items(object, stream, indent, allowance, context, level) + stream.write(endchar) + + _dispatch[set.__repr__] = _pprint_set + _dispatch[frozenset.__repr__] = _pprint_set + + def _pprint_str( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: set[int], + level: int, + ) -> None: + write = stream.write + if not len(object): + write(repr(object)) + return + chunks = [] + lines = object.splitlines(True) + if level == 1: + indent += 1 + allowance += 1 + max_width1 = max_width = self._width - indent + for i, line in enumerate(lines): + rep = repr(line) + if i == len(lines) - 1: + max_width1 -= allowance + if len(rep) <= max_width1: + chunks.append(rep) + else: + # A list of alternating (non-space, space) strings + parts = re.findall(r"\S*\s*", line) + assert parts + assert not parts[-1] + parts.pop() # drop empty last part + max_width2 = max_width + current = "" + for j, part in enumerate(parts): + candidate = current + part + if j == len(parts) - 1 and i == len(lines) - 1: + max_width2 -= allowance + if len(repr(candidate)) > max_width2: + if current: + chunks.append(repr(current)) + current = part + else: + current = candidate + if current: + chunks.append(repr(current)) + if len(chunks) == 1: + write(rep) + return + if level == 1: + write("(") + for i, rep in enumerate(chunks): + if i > 0: + write("\n" + " " * indent) + write(rep) + if level == 1: + write(")") + + _dispatch[str.__repr__] = _pprint_str + + def _pprint_bytes( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: set[int], + level: int, + ) -> None: + write = stream.write + if len(object) <= 4: + write(repr(object)) + return + parens = level == 1 + if parens: + indent += 1 + allowance += 1 + write("(") + delim = "" + for rep in _wrap_bytes_repr(object, self._width - indent, allowance): + write(delim) + write(rep) + if not delim: + delim = "\n" + " " * indent + if parens: + write(")") + + _dispatch[bytes.__repr__] = _pprint_bytes + + def _pprint_bytearray( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: set[int], + level: int, + ) -> None: + write = stream.write + write("bytearray(") + self._pprint_bytes( + bytes(object), stream, indent + 10, allowance + 1, context, level + 1 + ) + write(")") + + _dispatch[bytearray.__repr__] = _pprint_bytearray + + def _pprint_mappingproxy( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: set[int], + level: int, + ) -> None: + stream.write("mappingproxy(") + self._format(object.copy(), stream, indent, allowance, context, level) + stream.write(")") + + _dispatch[_types.MappingProxyType.__repr__] = _pprint_mappingproxy + + def _pprint_simplenamespace( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: set[int], + level: int, + ) -> None: + if type(object) is _types.SimpleNamespace: + # The SimpleNamespace repr is "namespace" instead of the class + # name, so we do the same here. For subclasses; use the class name. + cls_name = "namespace" + else: + cls_name = object.__class__.__name__ + items = object.__dict__.items() + stream.write(cls_name + "(") + self._format_namespace_items(items, stream, indent, allowance, context, level) + stream.write(")") + + _dispatch[_types.SimpleNamespace.__repr__] = _pprint_simplenamespace + + def _format_dict_items( + self, + items: list[tuple[Any, Any]], + stream: IO[str], + indent: int, + allowance: int, + context: set[int], + level: int, + ) -> None: + if not items: + return + + write = stream.write + item_indent = indent + self._indent_per_level + delimnl = "\n" + " " * item_indent + for key, ent in items: + write(delimnl) + write(self._repr(key, context, level)) + write(": ") + self._format(ent, stream, item_indent, 1, context, level) + write(",") + + write("\n" + " " * indent) + + def _format_namespace_items( + self, + items: list[tuple[Any, Any]], + stream: IO[str], + indent: int, + allowance: int, + context: set[int], + level: int, + ) -> None: + if not items: + return + + write = stream.write + item_indent = indent + self._indent_per_level + delimnl = "\n" + " " * item_indent + for key, ent in items: + write(delimnl) + write(key) + write("=") + if id(ent) in context: + # Special-case representation of recursion to match standard + # recursive dataclass repr. + write("...") + else: + self._format( + ent, + stream, + item_indent + len(key) + 1, + 1, + context, + level, + ) + + write(",") + + write("\n" + " " * indent) + + def _format_items( + self, + items: list[Any], + stream: IO[str], + indent: int, + allowance: int, + context: set[int], + level: int, + ) -> None: + if not items: + return + + write = stream.write + item_indent = indent + self._indent_per_level + delimnl = "\n" + " " * item_indent + + for item in items: + write(delimnl) + self._format(item, stream, item_indent, 1, context, level) + write(",") + + write("\n" + " " * indent) + + def _repr(self, object: Any, context: set[int], level: int) -> str: + return self._safe_repr(object, context.copy(), self._depth, level) + + def _pprint_default_dict( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: set[int], + level: int, + ) -> None: + rdf = self._repr(object.default_factory, context, level) + stream.write(f"{object.__class__.__name__}({rdf}, ") + self._pprint_dict(object, stream, indent, allowance, context, level) + stream.write(")") + + _dispatch[_collections.defaultdict.__repr__] = _pprint_default_dict + + def _pprint_counter( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: set[int], + level: int, + ) -> None: + stream.write(object.__class__.__name__ + "(") + + if object: + stream.write("{") + items = object.most_common() + self._format_dict_items(items, stream, indent, allowance, context, level) + stream.write("}") + + stream.write(")") + + _dispatch[_collections.Counter.__repr__] = _pprint_counter + + def _pprint_chain_map( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: set[int], + level: int, + ) -> None: + if not len(object.maps) or (len(object.maps) == 1 and not len(object.maps[0])): + stream.write(repr(object)) + return + + stream.write(object.__class__.__name__ + "(") + self._format_items(object.maps, stream, indent, allowance, context, level) + stream.write(")") + + _dispatch[_collections.ChainMap.__repr__] = _pprint_chain_map + + def _pprint_deque( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: set[int], + level: int, + ) -> None: + stream.write(object.__class__.__name__ + "(") + if object.maxlen is not None: + stream.write(f"maxlen={object.maxlen}, ") + stream.write("[") + + self._format_items(object, stream, indent, allowance + 1, context, level) + stream.write("])") + + _dispatch[_collections.deque.__repr__] = _pprint_deque + + def _pprint_user_dict( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: set[int], + level: int, + ) -> None: + self._format(object.data, stream, indent, allowance, context, level - 1) + + _dispatch[_collections.UserDict.__repr__] = _pprint_user_dict + + def _pprint_user_list( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: set[int], + level: int, + ) -> None: + self._format(object.data, stream, indent, allowance, context, level - 1) + + _dispatch[_collections.UserList.__repr__] = _pprint_user_list + + def _pprint_user_string( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: set[int], + level: int, + ) -> None: + self._format(object.data, stream, indent, allowance, context, level - 1) + + _dispatch[_collections.UserString.__repr__] = _pprint_user_string + + def _safe_repr( + self, object: Any, context: set[int], maxlevels: int | None, level: int + ) -> str: + typ = type(object) + if typ in _builtin_scalars: + return repr(object) + + r = getattr(typ, "__repr__", None) + + if issubclass(typ, dict) and r is dict.__repr__: + if not object: + return "{}" + objid = id(object) + if maxlevels and level >= maxlevels: + return "{...}" + if objid in context: + return _recursion(object) + context.add(objid) + components: list[str] = [] + append = components.append + level += 1 + for k, v in sorted(object.items(), key=_safe_tuple): + krepr = self._safe_repr(k, context, maxlevels, level) + vrepr = self._safe_repr(v, context, maxlevels, level) + append(f"{krepr}: {vrepr}") + context.remove(objid) + return "{{{}}}".format(", ".join(components)) + + if (issubclass(typ, list) and r is list.__repr__) or ( + issubclass(typ, tuple) and r is tuple.__repr__ + ): + if issubclass(typ, list): + if not object: + return "[]" + format = "[%s]" + elif len(object) == 1: + format = "(%s,)" + else: + if not object: + return "()" + format = "(%s)" + objid = id(object) + if maxlevels and level >= maxlevels: + return format % "..." + if objid in context: + return _recursion(object) + context.add(objid) + components = [] + append = components.append + level += 1 + for o in object: + orepr = self._safe_repr(o, context, maxlevels, level) + append(orepr) + context.remove(objid) + return format % ", ".join(components) + + return repr(object) + + +_builtin_scalars = frozenset( + {str, bytes, bytearray, float, complex, bool, type(None), int} +) + + +def _recursion(object: Any) -> str: + return f"" + + +def _wrap_bytes_repr(object: Any, width: int, allowance: int) -> Iterator[str]: + current = b"" + last = len(object) // 4 * 4 + for i in range(0, len(object), 4): + part = object[i : i + 4] + candidate = current + part + if i == last: + width -= allowance + if len(repr(candidate)) > width: + if current: + yield repr(current) + current = part + else: + current = candidate + if current: + yield repr(current) diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/_io/saferepr.py b/Backend/venv/lib/python3.12/site-packages/_pytest/_io/saferepr.py new file mode 100644 index 00000000..cee70e33 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/_pytest/_io/saferepr.py @@ -0,0 +1,130 @@ +from __future__ import annotations + +import pprint +import reprlib + + +def _try_repr_or_str(obj: object) -> str: + try: + return repr(obj) + except (KeyboardInterrupt, SystemExit): + raise + except BaseException: + return f'{type(obj).__name__}("{obj}")' + + +def _format_repr_exception(exc: BaseException, obj: object) -> str: + try: + exc_info = _try_repr_or_str(exc) + except (KeyboardInterrupt, SystemExit): + raise + except BaseException as inner_exc: + exc_info = f"unpresentable exception ({_try_repr_or_str(inner_exc)})" + return ( + f"<[{exc_info} raised in repr()] {type(obj).__name__} object at 0x{id(obj):x}>" + ) + + +def _ellipsize(s: str, maxsize: int) -> str: + if len(s) > maxsize: + i = max(0, (maxsize - 3) // 2) + j = max(0, maxsize - 3 - i) + return s[:i] + "..." + s[len(s) - j :] + return s + + +class SafeRepr(reprlib.Repr): + """ + repr.Repr that limits the resulting size of repr() and includes + information on exceptions raised during the call. + """ + + def __init__(self, maxsize: int | None, use_ascii: bool = False) -> None: + """ + :param maxsize: + If not None, will truncate the resulting repr to that specific size, using ellipsis + somewhere in the middle to hide the extra text. + If None, will not impose any size limits on the returning repr. + """ + super().__init__() + # ``maxstring`` is used by the superclass, and needs to be an int; using a + # very large number in case maxsize is None, meaning we want to disable + # truncation. + self.maxstring = maxsize if maxsize is not None else 1_000_000_000 + self.maxsize = maxsize + self.use_ascii = use_ascii + + def repr(self, x: object) -> str: + try: + if self.use_ascii: + s = ascii(x) + else: + s = super().repr(x) + except (KeyboardInterrupt, SystemExit): + raise + except BaseException as exc: + s = _format_repr_exception(exc, x) + if self.maxsize is not None: + s = _ellipsize(s, self.maxsize) + return s + + def repr_instance(self, x: object, level: int) -> str: + try: + s = repr(x) + except (KeyboardInterrupt, SystemExit): + raise + except BaseException as exc: + s = _format_repr_exception(exc, x) + if self.maxsize is not None: + s = _ellipsize(s, self.maxsize) + return s + + +def safeformat(obj: object) -> str: + """Return a pretty printed string for the given object. + + Failing __repr__ functions of user instances will be represented + with a short exception info. + """ + try: + return pprint.pformat(obj) + except Exception as exc: + return _format_repr_exception(exc, obj) + + +# Maximum size of overall repr of objects to display during assertion errors. +DEFAULT_REPR_MAX_SIZE = 240 + + +def saferepr( + obj: object, maxsize: int | None = DEFAULT_REPR_MAX_SIZE, use_ascii: bool = False +) -> str: + """Return a size-limited safe repr-string for the given object. + + Failing __repr__ functions of user instances will be represented + with a short exception info and 'saferepr' generally takes + care to never raise exceptions itself. + + This function is a wrapper around the Repr/reprlib functionality of the + stdlib. + """ + return SafeRepr(maxsize, use_ascii).repr(obj) + + +def saferepr_unlimited(obj: object, use_ascii: bool = True) -> str: + """Return an unlimited-size safe repr-string for the given object. + + As with saferepr, failing __repr__ functions of user instances + will be represented with a short exception info. + + This function is a wrapper around simple repr. + + Note: a cleaner solution would be to alter ``saferepr``this way + when maxsize=None, but that might affect some other code. + """ + try: + if use_ascii: + return ascii(obj) + return repr(obj) + except Exception as exc: + return _format_repr_exception(exc, obj) diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/_io/terminalwriter.py b/Backend/venv/lib/python3.12/site-packages/_pytest/_io/terminalwriter.py new file mode 100644 index 00000000..9191b4ed --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/_pytest/_io/terminalwriter.py @@ -0,0 +1,258 @@ +"""Helper functions for writing to terminals and files.""" + +from __future__ import annotations + +from collections.abc import Sequence +import os +import shutil +import sys +from typing import final +from typing import Literal +from typing import TextIO + +import pygments +from pygments.formatters.terminal import TerminalFormatter +from pygments.lexer import Lexer +from pygments.lexers.diff import DiffLexer +from pygments.lexers.python import PythonLexer + +from ..compat import assert_never +from .wcwidth import wcswidth + + +# This code was initially copied from py 1.8.1, file _io/terminalwriter.py. + + +def get_terminal_width() -> int: + width, _ = shutil.get_terminal_size(fallback=(80, 24)) + + # The Windows get_terminal_size may be bogus, let's sanify a bit. + if width < 40: + width = 80 + + return width + + +def should_do_markup(file: TextIO) -> bool: + if os.environ.get("PY_COLORS") == "1": + return True + if os.environ.get("PY_COLORS") == "0": + return False + if os.environ.get("NO_COLOR"): + return False + if os.environ.get("FORCE_COLOR"): + return True + return ( + hasattr(file, "isatty") and file.isatty() and os.environ.get("TERM") != "dumb" + ) + + +@final +class TerminalWriter: + _esctable = dict( + black=30, + red=31, + green=32, + yellow=33, + blue=34, + purple=35, + cyan=36, + white=37, + Black=40, + Red=41, + Green=42, + Yellow=43, + Blue=44, + Purple=45, + Cyan=46, + White=47, + bold=1, + light=2, + blink=5, + invert=7, + ) + + def __init__(self, file: TextIO | None = None) -> None: + if file is None: + file = sys.stdout + if hasattr(file, "isatty") and file.isatty() and sys.platform == "win32": + try: + import colorama + except ImportError: + pass + else: + file = colorama.AnsiToWin32(file).stream + assert file is not None + self._file = file + self.hasmarkup = should_do_markup(file) + self._current_line = "" + self._terminal_width: int | None = None + self.code_highlight = True + + @property + def fullwidth(self) -> int: + if self._terminal_width is not None: + return self._terminal_width + return get_terminal_width() + + @fullwidth.setter + def fullwidth(self, value: int) -> None: + self._terminal_width = value + + @property + def width_of_current_line(self) -> int: + """Return an estimate of the width so far in the current line.""" + return wcswidth(self._current_line) + + def markup(self, text: str, **markup: bool) -> str: + for name in markup: + if name not in self._esctable: + raise ValueError(f"unknown markup: {name!r}") + if self.hasmarkup: + esc = [self._esctable[name] for name, on in markup.items() if on] + if esc: + text = "".join(f"\x1b[{cod}m" for cod in esc) + text + "\x1b[0m" + return text + + def sep( + self, + sepchar: str, + title: str | None = None, + fullwidth: int | None = None, + **markup: bool, + ) -> None: + if fullwidth is None: + fullwidth = self.fullwidth + # The goal is to have the line be as long as possible + # under the condition that len(line) <= fullwidth. + if sys.platform == "win32": + # If we print in the last column on windows we are on a + # new line but there is no way to verify/neutralize this + # (we may not know the exact line width). + # So let's be defensive to avoid empty lines in the output. + fullwidth -= 1 + if title is not None: + # we want 2 + 2*len(fill) + len(title) <= fullwidth + # i.e. 2 + 2*len(sepchar)*N + len(title) <= fullwidth + # 2*len(sepchar)*N <= fullwidth - len(title) - 2 + # N <= (fullwidth - len(title) - 2) // (2*len(sepchar)) + N = max((fullwidth - len(title) - 2) // (2 * len(sepchar)), 1) + fill = sepchar * N + line = f"{fill} {title} {fill}" + else: + # we want len(sepchar)*N <= fullwidth + # i.e. N <= fullwidth // len(sepchar) + line = sepchar * (fullwidth // len(sepchar)) + # In some situations there is room for an extra sepchar at the right, + # in particular if we consider that with a sepchar like "_ " the + # trailing space is not important at the end of the line. + if len(line) + len(sepchar.rstrip()) <= fullwidth: + line += sepchar.rstrip() + + self.line(line, **markup) + + def write(self, msg: str, *, flush: bool = False, **markup: bool) -> None: + if msg: + current_line = msg.rsplit("\n", 1)[-1] + if "\n" in msg: + self._current_line = current_line + else: + self._current_line += current_line + + msg = self.markup(msg, **markup) + + self.write_raw(msg, flush=flush) + + def write_raw(self, msg: str, *, flush: bool = False) -> None: + try: + self._file.write(msg) + except UnicodeEncodeError: + # Some environments don't support printing general Unicode + # strings, due to misconfiguration or otherwise; in that case, + # print the string escaped to ASCII. + # When the Unicode situation improves we should consider + # letting the error propagate instead of masking it (see #7475 + # for one brief attempt). + msg = msg.encode("unicode-escape").decode("ascii") + self._file.write(msg) + + if flush: + self.flush() + + def line(self, s: str = "", **markup: bool) -> None: + self.write(s, **markup) + self.write("\n") + + def flush(self) -> None: + self._file.flush() + + def _write_source(self, lines: Sequence[str], indents: Sequence[str] = ()) -> None: + """Write lines of source code possibly highlighted. + + Keeping this private for now because the API is clunky. We should discuss how + to evolve the terminal writer so we can have more precise color support, for example + being able to write part of a line in one color and the rest in another, and so on. + """ + if indents and len(indents) != len(lines): + raise ValueError( + f"indents size ({len(indents)}) should have same size as lines ({len(lines)})" + ) + if not indents: + indents = [""] * len(lines) + source = "\n".join(lines) + new_lines = self._highlight(source).splitlines() + # Would be better to strict=True but that fails some CI jobs. + for indent, new_line in zip(indents, new_lines, strict=False): + self.line(indent + new_line) + + def _get_pygments_lexer(self, lexer: Literal["python", "diff"]) -> Lexer: + if lexer == "python": + return PythonLexer() + elif lexer == "diff": + return DiffLexer() + else: + assert_never(lexer) + + def _get_pygments_formatter(self) -> TerminalFormatter: + from _pytest.config.exceptions import UsageError + + theme = os.getenv("PYTEST_THEME") + theme_mode = os.getenv("PYTEST_THEME_MODE", "dark") + + try: + return TerminalFormatter(bg=theme_mode, style=theme) + except pygments.util.ClassNotFound as e: + raise UsageError( + f"PYTEST_THEME environment variable has an invalid value: '{theme}'. " + "Hint: See available pygments styles with `pygmentize -L styles`." + ) from e + except pygments.util.OptionError as e: + raise UsageError( + f"PYTEST_THEME_MODE environment variable has an invalid value: '{theme_mode}'. " + "The allowed values are 'dark' (default) and 'light'." + ) from e + + def _highlight( + self, source: str, lexer: Literal["diff", "python"] = "python" + ) -> str: + """Highlight the given source if we have markup support.""" + if not source or not self.hasmarkup or not self.code_highlight: + return source + + pygments_lexer = self._get_pygments_lexer(lexer) + pygments_formatter = self._get_pygments_formatter() + + highlighted: str = pygments.highlight( + source, pygments_lexer, pygments_formatter + ) + # pygments terminal formatter may add a newline when there wasn't one. + # We don't want this, remove. + if highlighted[-1] == "\n" and source[-1] != "\n": + highlighted = highlighted[:-1] + + # Some lexers will not set the initial color explicitly + # which may lead to the previous color being propagated to the + # start of the expression, so reset first. + highlighted = "\x1b[0m" + highlighted + + return highlighted diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/_io/wcwidth.py b/Backend/venv/lib/python3.12/site-packages/_pytest/_io/wcwidth.py new file mode 100644 index 00000000..23886ff1 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/_pytest/_io/wcwidth.py @@ -0,0 +1,57 @@ +from __future__ import annotations + +from functools import lru_cache +import unicodedata + + +@lru_cache(100) +def wcwidth(c: str) -> int: + """Determine how many columns are needed to display a character in a terminal. + + Returns -1 if the character is not printable. + Returns 0, 1 or 2 for other characters. + """ + o = ord(c) + + # ASCII fast path. + if 0x20 <= o < 0x07F: + return 1 + + # Some Cf/Zp/Zl characters which should be zero-width. + if ( + o == 0x0000 + or 0x200B <= o <= 0x200F + or 0x2028 <= o <= 0x202E + or 0x2060 <= o <= 0x2063 + ): + return 0 + + category = unicodedata.category(c) + + # Control characters. + if category == "Cc": + return -1 + + # Combining characters with zero width. + if category in ("Me", "Mn"): + return 0 + + # Full/Wide east asian characters. + if unicodedata.east_asian_width(c) in ("F", "W"): + return 2 + + return 1 + + +def wcswidth(s: str) -> int: + """Determine how many columns are needed to display a string in a terminal. + + Returns -1 if the string contains non-printable characters. + """ + width = 0 + for c in unicodedata.normalize("NFC", s): + wc = wcwidth(c) + if wc < 0: + return -1 + width += wc + return width diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/_py/__init__.py b/Backend/venv/lib/python3.12/site-packages/_pytest/_py/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/_py/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/_py/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..99cbfa42 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/_py/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/_py/__pycache__/error.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/_py/__pycache__/error.cpython-312.pyc new file mode 100644 index 00000000..6cc2e8bc Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/_py/__pycache__/error.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/_py/__pycache__/path.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/_py/__pycache__/path.cpython-312.pyc new file mode 100644 index 00000000..a70e713f Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/_py/__pycache__/path.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/_py/error.py b/Backend/venv/lib/python3.12/site-packages/_pytest/_py/error.py new file mode 100644 index 00000000..dace2376 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/_pytest/_py/error.py @@ -0,0 +1,119 @@ +"""create errno-specific classes for IO or os calls.""" + +from __future__ import annotations + +from collections.abc import Callable +import errno +import os +import sys +from typing import TYPE_CHECKING +from typing import TypeVar + + +if TYPE_CHECKING: + from typing_extensions import ParamSpec + + P = ParamSpec("P") + +R = TypeVar("R") + + +class Error(EnvironmentError): + def __repr__(self) -> str: + return "{}.{} {!r}: {} ".format( + self.__class__.__module__, + self.__class__.__name__, + self.__class__.__doc__, + " ".join(map(str, self.args)), + # repr(self.args) + ) + + def __str__(self) -> str: + s = "[{}]: {}".format( + self.__class__.__doc__, + " ".join(map(str, self.args)), + ) + return s + + +_winerrnomap = { + 2: errno.ENOENT, + 3: errno.ENOENT, + 17: errno.EEXIST, + 18: errno.EXDEV, + 13: errno.EBUSY, # empty cd drive, but ENOMEDIUM seems unavailable + 22: errno.ENOTDIR, + 20: errno.ENOTDIR, + 267: errno.ENOTDIR, + 5: errno.EACCES, # anything better? +} + + +class ErrorMaker: + """lazily provides Exception classes for each possible POSIX errno + (as defined per the 'errno' module). All such instances + subclass EnvironmentError. + """ + + _errno2class: dict[int, type[Error]] = {} + + def __getattr__(self, name: str) -> type[Error]: + if name[0] == "_": + raise AttributeError(name) + eno = getattr(errno, name) + cls = self._geterrnoclass(eno) + setattr(self, name, cls) + return cls + + def _geterrnoclass(self, eno: int) -> type[Error]: + try: + return self._errno2class[eno] + except KeyError: + clsname = errno.errorcode.get(eno, f"UnknownErrno{eno}") + errorcls = type( + clsname, + (Error,), + {"__module__": "py.error", "__doc__": os.strerror(eno)}, + ) + self._errno2class[eno] = errorcls + return errorcls + + def checked_call( + self, func: Callable[P, R], *args: P.args, **kwargs: P.kwargs + ) -> R: + """Call a function and raise an errno-exception if applicable.""" + __tracebackhide__ = True + try: + return func(*args, **kwargs) + except Error: + raise + except OSError as value: + if not hasattr(value, "errno"): + raise + if sys.platform == "win32": + try: + # error: Invalid index type "Optional[int]" for "dict[int, int]"; expected type "int" [index] + # OK to ignore because we catch the KeyError below. + cls = self._geterrnoclass(_winerrnomap[value.errno]) # type:ignore[index] + except KeyError: + raise value + else: + # we are not on Windows, or we got a proper OSError + if value.errno is None: + cls = type( + "UnknownErrnoNone", + (Error,), + {"__module__": "py.error", "__doc__": None}, + ) + else: + cls = self._geterrnoclass(value.errno) + + raise cls(f"{func.__name__}{args!r}") + + +_error_maker = ErrorMaker() +checked_call = _error_maker.checked_call + + +def __getattr__(attr: str) -> type[Error]: + return getattr(_error_maker, attr) # type: ignore[no-any-return] diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/_py/path.py b/Backend/venv/lib/python3.12/site-packages/_pytest/_py/path.py new file mode 100644 index 00000000..b7131b08 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/_pytest/_py/path.py @@ -0,0 +1,1475 @@ +# mypy: allow-untyped-defs +"""local path implementation.""" + +from __future__ import annotations + +import atexit +from collections.abc import Callable +from contextlib import contextmanager +import fnmatch +import importlib.util +import io +import os +from os.path import abspath +from os.path import dirname +from os.path import exists +from os.path import isabs +from os.path import isdir +from os.path import isfile +from os.path import islink +from os.path import normpath +import posixpath +from stat import S_ISDIR +from stat import S_ISLNK +from stat import S_ISREG +import sys +from typing import Any +from typing import cast +from typing import Literal +from typing import overload +from typing import TYPE_CHECKING +import uuid +import warnings + +from . import error + + +# Moved from local.py. +iswin32 = sys.platform == "win32" or (getattr(os, "_name", False) == "nt") + + +class Checkers: + _depend_on_existence = "exists", "link", "dir", "file" + + def __init__(self, path): + self.path = path + + def dotfile(self): + return self.path.basename.startswith(".") + + def ext(self, arg): + if not arg.startswith("."): + arg = "." + arg + return self.path.ext == arg + + def basename(self, arg): + return self.path.basename == arg + + def basestarts(self, arg): + return self.path.basename.startswith(arg) + + def relto(self, arg): + return self.path.relto(arg) + + def fnmatch(self, arg): + return self.path.fnmatch(arg) + + def endswith(self, arg): + return str(self.path).endswith(arg) + + def _evaluate(self, kw): + from .._code.source import getrawcode + + for name, value in kw.items(): + invert = False + meth = None + try: + meth = getattr(self, name) + except AttributeError: + if name[:3] == "not": + invert = True + try: + meth = getattr(self, name[3:]) + except AttributeError: + pass + if meth is None: + raise TypeError(f"no {name!r} checker available for {self.path!r}") + try: + if getrawcode(meth).co_argcount > 1: + if (not meth(value)) ^ invert: + return False + else: + if bool(value) ^ bool(meth()) ^ invert: + return False + except (error.ENOENT, error.ENOTDIR, error.EBUSY): + # EBUSY feels not entirely correct, + # but its kind of necessary since ENOMEDIUM + # is not accessible in python + for name in self._depend_on_existence: + if name in kw: + if kw.get(name): + return False + name = "not" + name + if name in kw: + if not kw.get(name): + return False + return True + + _statcache: Stat + + def _stat(self) -> Stat: + try: + return self._statcache + except AttributeError: + try: + self._statcache = self.path.stat() + except error.ELOOP: + self._statcache = self.path.lstat() + return self._statcache + + def dir(self): + return S_ISDIR(self._stat().mode) + + def file(self): + return S_ISREG(self._stat().mode) + + def exists(self): + return self._stat() + + def link(self): + st = self.path.lstat() + return S_ISLNK(st.mode) + + +class NeverRaised(Exception): + pass + + +class Visitor: + def __init__(self, fil, rec, ignore, bf, sort): + if isinstance(fil, str): + fil = FNMatcher(fil) + if isinstance(rec, str): + self.rec: Callable[[LocalPath], bool] = FNMatcher(rec) + elif not hasattr(rec, "__call__") and rec: + self.rec = lambda path: True + else: + self.rec = rec + self.fil = fil + self.ignore = ignore + self.breadthfirst = bf + self.optsort = cast(Callable[[Any], Any], sorted) if sort else (lambda x: x) + + def gen(self, path): + try: + entries = path.listdir() + except self.ignore: + return + rec = self.rec + dirs = self.optsort( + [p for p in entries if p.check(dir=1) and (rec is None or rec(p))] + ) + if not self.breadthfirst: + for subdir in dirs: + yield from self.gen(subdir) + for p in self.optsort(entries): + if self.fil is None or self.fil(p): + yield p + if self.breadthfirst: + for subdir in dirs: + yield from self.gen(subdir) + + +class FNMatcher: + def __init__(self, pattern): + self.pattern = pattern + + def __call__(self, path): + pattern = self.pattern + + if ( + pattern.find(path.sep) == -1 + and iswin32 + and pattern.find(posixpath.sep) != -1 + ): + # Running on Windows, the pattern has no Windows path separators, + # and the pattern has one or more Posix path separators. Replace + # the Posix path separators with the Windows path separator. + pattern = pattern.replace(posixpath.sep, path.sep) + + if pattern.find(path.sep) == -1: + name = path.basename + else: + name = str(path) # path.strpath # XXX svn? + if not os.path.isabs(pattern): + pattern = "*" + path.sep + pattern + return fnmatch.fnmatch(name, pattern) + + +def map_as_list(func, iter): + return list(map(func, iter)) + + +class Stat: + if TYPE_CHECKING: + + @property + def size(self) -> int: ... + + @property + def mtime(self) -> float: ... + + def __getattr__(self, name: str) -> Any: + return getattr(self._osstatresult, "st_" + name) + + def __init__(self, path, osstatresult): + self.path = path + self._osstatresult = osstatresult + + @property + def owner(self): + if iswin32: + raise NotImplementedError("XXX win32") + import pwd + + entry = error.checked_call(pwd.getpwuid, self.uid) # type:ignore[attr-defined,unused-ignore] + return entry[0] + + @property + def group(self): + """Return group name of file.""" + if iswin32: + raise NotImplementedError("XXX win32") + import grp + + entry = error.checked_call(grp.getgrgid, self.gid) # type:ignore[attr-defined,unused-ignore] + return entry[0] + + def isdir(self): + return S_ISDIR(self._osstatresult.st_mode) + + def isfile(self): + return S_ISREG(self._osstatresult.st_mode) + + def islink(self): + self.path.lstat() + return S_ISLNK(self._osstatresult.st_mode) + + +def getuserid(user): + import pwd + + if not isinstance(user, int): + user = pwd.getpwnam(user)[2] # type:ignore[attr-defined,unused-ignore] + return user + + +def getgroupid(group): + import grp + + if not isinstance(group, int): + group = grp.getgrnam(group)[2] # type:ignore[attr-defined,unused-ignore] + return group + + +class LocalPath: + """Object oriented interface to os.path and other local filesystem + related information. + """ + + class ImportMismatchError(ImportError): + """raised on pyimport() if there is a mismatch of __file__'s""" + + sep = os.sep + + def __init__(self, path=None, expanduser=False): + """Initialize and return a local Path instance. + + Path can be relative to the current directory. + If path is None it defaults to the current working directory. + If expanduser is True, tilde-expansion is performed. + Note that Path instances always carry an absolute path. + Note also that passing in a local path object will simply return + the exact same path object. Use new() to get a new copy. + """ + if path is None: + self.strpath = error.checked_call(os.getcwd) + else: + try: + path = os.fspath(path) + except TypeError: + raise ValueError( + "can only pass None, Path instances " + "or non-empty strings to LocalPath" + ) + if expanduser: + path = os.path.expanduser(path) + self.strpath = abspath(path) + + if sys.platform != "win32": + + def chown(self, user, group, rec=0): + """Change ownership to the given user and group. + user and group may be specified by a number or + by a name. if rec is True change ownership + recursively. + """ + uid = getuserid(user) + gid = getgroupid(group) + if rec: + for x in self.visit(rec=lambda x: x.check(link=0)): + if x.check(link=0): + error.checked_call(os.chown, str(x), uid, gid) + error.checked_call(os.chown, str(self), uid, gid) + + def readlink(self) -> str: + """Return value of a symbolic link.""" + # https://github.com/python/mypy/issues/12278 + return error.checked_call(os.readlink, self.strpath) # type: ignore[arg-type,return-value,unused-ignore] + + def mklinkto(self, oldname): + """Posix style hard link to another name.""" + error.checked_call(os.link, str(oldname), str(self)) + + def mksymlinkto(self, value, absolute=1): + """Create a symbolic link with the given value (pointing to another name).""" + if absolute: + error.checked_call(os.symlink, str(value), self.strpath) + else: + base = self.common(value) + # with posix local paths '/' is always a common base + relsource = self.__class__(value).relto(base) + reldest = self.relto(base) + n = reldest.count(self.sep) + target = self.sep.join(("..",) * n + (relsource,)) + error.checked_call(os.symlink, target, self.strpath) + + def __div__(self, other): + return self.join(os.fspath(other)) + + __truediv__ = __div__ # py3k + + @property + def basename(self): + """Basename part of path.""" + return self._getbyspec("basename")[0] + + @property + def dirname(self): + """Dirname part of path.""" + return self._getbyspec("dirname")[0] + + @property + def purebasename(self): + """Pure base name of the path.""" + return self._getbyspec("purebasename")[0] + + @property + def ext(self): + """Extension of the path (including the '.').""" + return self._getbyspec("ext")[0] + + def read_binary(self): + """Read and return a bytestring from reading the path.""" + with self.open("rb") as f: + return f.read() + + def read_text(self, encoding): + """Read and return a Unicode string from reading the path.""" + with self.open("r", encoding=encoding) as f: + return f.read() + + def read(self, mode="r"): + """Read and return a bytestring from reading the path.""" + with self.open(mode) as f: + return f.read() + + def readlines(self, cr=1): + """Read and return a list of lines from the path. if cr is False, the + newline will be removed from the end of each line.""" + mode = "r" + + if not cr: + content = self.read(mode) + return content.split("\n") + else: + f = self.open(mode) + try: + return f.readlines() + finally: + f.close() + + def load(self): + """(deprecated) return object unpickled from self.read()""" + f = self.open("rb") + try: + import pickle + + return error.checked_call(pickle.load, f) + finally: + f.close() + + def move(self, target): + """Move this path to target.""" + if target.relto(self): + raise error.EINVAL(target, "cannot move path into a subdirectory of itself") + try: + self.rename(target) + except error.EXDEV: # invalid cross-device link + self.copy(target) + self.remove() + + def fnmatch(self, pattern): + """Return true if the basename/fullname matches the glob-'pattern'. + + valid pattern characters:: + + * matches everything + ? matches any single character + [seq] matches any character in seq + [!seq] matches any char not in seq + + If the pattern contains a path-separator then the full path + is used for pattern matching and a '*' is prepended to the + pattern. + + if the pattern doesn't contain a path-separator the pattern + is only matched against the basename. + """ + return FNMatcher(pattern)(self) + + def relto(self, relpath): + """Return a string which is the relative part of the path + to the given 'relpath'. + """ + if not isinstance(relpath, str | LocalPath): + raise TypeError(f"{relpath!r}: not a string or path object") + strrelpath = str(relpath) + if strrelpath and strrelpath[-1] != self.sep: + strrelpath += self.sep + # assert strrelpath[-1] == self.sep + # assert strrelpath[-2] != self.sep + strself = self.strpath + if sys.platform == "win32" or getattr(os, "_name", None) == "nt": + if os.path.normcase(strself).startswith(os.path.normcase(strrelpath)): + return strself[len(strrelpath) :] + elif strself.startswith(strrelpath): + return strself[len(strrelpath) :] + return "" + + def ensure_dir(self, *args): + """Ensure the path joined with args is a directory.""" + return self.ensure(*args, dir=True) + + def bestrelpath(self, dest): + """Return a string which is a relative path from self + (assumed to be a directory) to dest such that + self.join(bestrelpath) == dest and if not such + path can be determined return dest. + """ + try: + if self == dest: + return os.curdir + base = self.common(dest) + if not base: # can be the case on windows + return str(dest) + self2base = self.relto(base) + reldest = dest.relto(base) + if self2base: + n = self2base.count(self.sep) + 1 + else: + n = 0 + lst = [os.pardir] * n + if reldest: + lst.append(reldest) + target = dest.sep.join(lst) + return target + except AttributeError: + return str(dest) + + def exists(self): + return self.check() + + def isdir(self): + return self.check(dir=1) + + def isfile(self): + return self.check(file=1) + + def parts(self, reverse=False): + """Return a root-first list of all ancestor directories + plus the path itself. + """ + current = self + lst = [self] + while 1: + last = current + current = current.dirpath() + if last == current: + break + lst.append(current) + if not reverse: + lst.reverse() + return lst + + def common(self, other): + """Return the common part shared with the other path + or None if there is no common part. + """ + last = None + for x, y in zip(self.parts(), other.parts()): + if x != y: + return last + last = x + return last + + def __add__(self, other): + """Return new path object with 'other' added to the basename""" + return self.new(basename=self.basename + str(other)) + + def visit(self, fil=None, rec=None, ignore=NeverRaised, bf=False, sort=False): + """Yields all paths below the current one + + fil is a filter (glob pattern or callable), if not matching the + path will not be yielded, defaulting to None (everything is + returned) + + rec is a filter (glob pattern or callable) that controls whether + a node is descended, defaulting to None + + ignore is an Exception class that is ignoredwhen calling dirlist() + on any of the paths (by default, all exceptions are reported) + + bf if True will cause a breadthfirst search instead of the + default depthfirst. Default: False + + sort if True will sort entries within each directory level. + """ + yield from Visitor(fil, rec, ignore, bf, sort).gen(self) + + def _sortlist(self, res, sort): + if sort: + if hasattr(sort, "__call__"): + warnings.warn( + DeprecationWarning( + "listdir(sort=callable) is deprecated and breaks on python3" + ), + stacklevel=3, + ) + res.sort(sort) + else: + res.sort() + + def __fspath__(self): + return self.strpath + + def __hash__(self): + s = self.strpath + if iswin32: + s = s.lower() + return hash(s) + + def __eq__(self, other): + s1 = os.fspath(self) + try: + s2 = os.fspath(other) + except TypeError: + return False + if iswin32: + s1 = s1.lower() + try: + s2 = s2.lower() + except AttributeError: + return False + return s1 == s2 + + def __ne__(self, other): + return not (self == other) + + def __lt__(self, other): + return os.fspath(self) < os.fspath(other) + + def __gt__(self, other): + return os.fspath(self) > os.fspath(other) + + def samefile(self, other): + """Return True if 'other' references the same file as 'self'.""" + other = os.fspath(other) + if not isabs(other): + other = abspath(other) + if self == other: + return True + if not hasattr(os.path, "samefile"): + return False + return error.checked_call(os.path.samefile, self.strpath, other) + + def remove(self, rec=1, ignore_errors=False): + """Remove a file or directory (or a directory tree if rec=1). + if ignore_errors is True, errors while removing directories will + be ignored. + """ + if self.check(dir=1, link=0): + if rec: + # force remove of readonly files on windows + if iswin32: + self.chmod(0o700, rec=1) + import shutil + + error.checked_call( + shutil.rmtree, self.strpath, ignore_errors=ignore_errors + ) + else: + error.checked_call(os.rmdir, self.strpath) + else: + if iswin32: + self.chmod(0o700) + error.checked_call(os.remove, self.strpath) + + def computehash(self, hashtype="md5", chunksize=524288): + """Return hexdigest of hashvalue for this file.""" + try: + try: + import hashlib as mod + except ImportError: + if hashtype == "sha1": + hashtype = "sha" + mod = __import__(hashtype) + hash = getattr(mod, hashtype)() + except (AttributeError, ImportError): + raise ValueError(f"Don't know how to compute {hashtype!r} hash") + f = self.open("rb") + try: + while 1: + buf = f.read(chunksize) + if not buf: + return hash.hexdigest() + hash.update(buf) + finally: + f.close() + + def new(self, **kw): + """Create a modified version of this path. + the following keyword arguments modify various path parts:: + + a:/some/path/to/a/file.ext + xx drive + xxxxxxxxxxxxxxxxx dirname + xxxxxxxx basename + xxxx purebasename + xxx ext + """ + obj = object.__new__(self.__class__) + if not kw: + obj.strpath = self.strpath + return obj + drive, dirname, _basename, purebasename, ext = self._getbyspec( + "drive,dirname,basename,purebasename,ext" + ) + if "basename" in kw: + if "purebasename" in kw or "ext" in kw: + raise ValueError(f"invalid specification {kw!r}") + else: + pb = kw.setdefault("purebasename", purebasename) + try: + ext = kw["ext"] + except KeyError: + pass + else: + if ext and not ext.startswith("."): + ext = "." + ext + kw["basename"] = pb + ext + + if "dirname" in kw and not kw["dirname"]: + kw["dirname"] = drive + else: + kw.setdefault("dirname", dirname) + kw.setdefault("sep", self.sep) + obj.strpath = normpath("{dirname}{sep}{basename}".format(**kw)) + return obj + + def _getbyspec(self, spec: str) -> list[str]: + """See new for what 'spec' can be.""" + res = [] + parts = self.strpath.split(self.sep) + + args = filter(None, spec.split(",")) + for name in args: + if name == "drive": + res.append(parts[0]) + elif name == "dirname": + res.append(self.sep.join(parts[:-1])) + else: + basename = parts[-1] + if name == "basename": + res.append(basename) + else: + i = basename.rfind(".") + if i == -1: + purebasename, ext = basename, "" + else: + purebasename, ext = basename[:i], basename[i:] + if name == "purebasename": + res.append(purebasename) + elif name == "ext": + res.append(ext) + else: + raise ValueError(f"invalid part specification {name!r}") + return res + + def dirpath(self, *args, **kwargs): + """Return the directory path joined with any given path arguments.""" + if not kwargs: + path = object.__new__(self.__class__) + path.strpath = dirname(self.strpath) + if args: + path = path.join(*args) + return path + return self.new(basename="").join(*args, **kwargs) + + def join(self, *args: os.PathLike[str], abs: bool = False) -> LocalPath: + """Return a new path by appending all 'args' as path + components. if abs=1 is used restart from root if any + of the args is an absolute path. + """ + sep = self.sep + strargs = [os.fspath(arg) for arg in args] + strpath = self.strpath + if abs: + newargs: list[str] = [] + for arg in reversed(strargs): + if isabs(arg): + strpath = arg + strargs = newargs + break + newargs.insert(0, arg) + # special case for when we have e.g. strpath == "/" + actual_sep = "" if strpath.endswith(sep) else sep + for arg in strargs: + arg = arg.strip(sep) + if iswin32: + # allow unix style paths even on windows. + arg = arg.strip("/") + arg = arg.replace("/", sep) + strpath = strpath + actual_sep + arg + actual_sep = sep + obj = object.__new__(self.__class__) + obj.strpath = normpath(strpath) + return obj + + def open(self, mode="r", ensure=False, encoding=None): + """Return an opened file with the given mode. + + If ensure is True, create parent directories if needed. + """ + if ensure: + self.dirpath().ensure(dir=1) + if encoding: + return error.checked_call( + io.open, + self.strpath, + mode, + encoding=encoding, + ) + return error.checked_call(open, self.strpath, mode) + + def _fastjoin(self, name): + child = object.__new__(self.__class__) + child.strpath = self.strpath + self.sep + name + return child + + def islink(self): + return islink(self.strpath) + + def check(self, **kw): + """Check a path for existence and properties. + + Without arguments, return True if the path exists, otherwise False. + + valid checkers:: + + file = 1 # is a file + file = 0 # is not a file (may not even exist) + dir = 1 # is a dir + link = 1 # is a link + exists = 1 # exists + + You can specify multiple checker definitions, for example:: + + path.check(file=1, link=1) # a link pointing to a file + """ + if not kw: + return exists(self.strpath) + if len(kw) == 1: + if "dir" in kw: + return not kw["dir"] ^ isdir(self.strpath) + if "file" in kw: + return not kw["file"] ^ isfile(self.strpath) + if not kw: + kw = {"exists": 1} + return Checkers(self)._evaluate(kw) + + _patternchars = set("*?[" + os.sep) + + def listdir(self, fil=None, sort=None): + """List directory contents, possibly filter by the given fil func + and possibly sorted. + """ + if fil is None and sort is None: + names = error.checked_call(os.listdir, self.strpath) + return map_as_list(self._fastjoin, names) + if isinstance(fil, str): + if not self._patternchars.intersection(fil): + child = self._fastjoin(fil) + if exists(child.strpath): + return [child] + return [] + fil = FNMatcher(fil) + names = error.checked_call(os.listdir, self.strpath) + res = [] + for name in names: + child = self._fastjoin(name) + if fil is None or fil(child): + res.append(child) + self._sortlist(res, sort) + return res + + def size(self) -> int: + """Return size of the underlying file object""" + return self.stat().size + + def mtime(self) -> float: + """Return last modification time of the path.""" + return self.stat().mtime + + def copy(self, target, mode=False, stat=False): + """Copy path to target. + + If mode is True, will copy permission from path to target. + If stat is True, copy permission, last modification + time, last access time, and flags from path to target. + """ + if self.check(file=1): + if target.check(dir=1): + target = target.join(self.basename) + assert self != target + copychunked(self, target) + if mode: + copymode(self.strpath, target.strpath) + if stat: + copystat(self, target) + else: + + def rec(p): + return p.check(link=0) + + for x in self.visit(rec=rec): + relpath = x.relto(self) + newx = target.join(relpath) + newx.dirpath().ensure(dir=1) + if x.check(link=1): + newx.mksymlinkto(x.readlink()) + continue + elif x.check(file=1): + copychunked(x, newx) + elif x.check(dir=1): + newx.ensure(dir=1) + if mode: + copymode(x.strpath, newx.strpath) + if stat: + copystat(x, newx) + + def rename(self, target): + """Rename this path to target.""" + target = os.fspath(target) + return error.checked_call(os.rename, self.strpath, target) + + def dump(self, obj, bin=1): + """Pickle object into path location""" + f = self.open("wb") + import pickle + + try: + error.checked_call(pickle.dump, obj, f, bin) + finally: + f.close() + + def mkdir(self, *args): + """Create & return the directory joined with args.""" + p = self.join(*args) + error.checked_call(os.mkdir, os.fspath(p)) + return p + + def write_binary(self, data, ensure=False): + """Write binary data into path. If ensure is True create + missing parent directories. + """ + if ensure: + self.dirpath().ensure(dir=1) + with self.open("wb") as f: + f.write(data) + + def write_text(self, data, encoding, ensure=False): + """Write text data into path using the specified encoding. + If ensure is True create missing parent directories. + """ + if ensure: + self.dirpath().ensure(dir=1) + with self.open("w", encoding=encoding) as f: + f.write(data) + + def write(self, data, mode="w", ensure=False): + """Write data into path. If ensure is True create + missing parent directories. + """ + if ensure: + self.dirpath().ensure(dir=1) + if "b" in mode: + if not isinstance(data, bytes): + raise ValueError("can only process bytes") + else: + if not isinstance(data, str): + if not isinstance(data, bytes): + data = str(data) + else: + data = data.decode(sys.getdefaultencoding()) + f = self.open(mode) + try: + f.write(data) + finally: + f.close() + + def _ensuredirs(self): + parent = self.dirpath() + if parent == self: + return self + if parent.check(dir=0): + parent._ensuredirs() + if self.check(dir=0): + try: + self.mkdir() + except error.EEXIST: + # race condition: file/dir created by another thread/process. + # complain if it is not a dir + if self.check(dir=0): + raise + return self + + def ensure(self, *args, **kwargs): + """Ensure that an args-joined path exists (by default as + a file). if you specify a keyword argument 'dir=True' + then the path is forced to be a directory path. + """ + p = self.join(*args) + if kwargs.get("dir", 0): + return p._ensuredirs() + else: + p.dirpath()._ensuredirs() + if not p.check(file=1): + p.open("wb").close() + return p + + @overload + def stat(self, raising: Literal[True] = ...) -> Stat: ... + + @overload + def stat(self, raising: Literal[False]) -> Stat | None: ... + + def stat(self, raising: bool = True) -> Stat | None: + """Return an os.stat() tuple.""" + if raising: + return Stat(self, error.checked_call(os.stat, self.strpath)) + try: + return Stat(self, os.stat(self.strpath)) + except KeyboardInterrupt: + raise + except Exception: + return None + + def lstat(self) -> Stat: + """Return an os.lstat() tuple.""" + return Stat(self, error.checked_call(os.lstat, self.strpath)) + + def setmtime(self, mtime=None): + """Set modification time for the given path. if 'mtime' is None + (the default) then the file's mtime is set to current time. + + Note that the resolution for 'mtime' is platform dependent. + """ + if mtime is None: + return error.checked_call(os.utime, self.strpath, mtime) + try: + return error.checked_call(os.utime, self.strpath, (-1, mtime)) + except error.EINVAL: + return error.checked_call(os.utime, self.strpath, (self.atime(), mtime)) + + def chdir(self): + """Change directory to self and return old current directory""" + try: + old = self.__class__() + except error.ENOENT: + old = None + error.checked_call(os.chdir, self.strpath) + return old + + @contextmanager + def as_cwd(self): + """ + Return a context manager, which changes to the path's dir during the + managed "with" context. + On __enter__ it returns the old dir, which might be ``None``. + """ + old = self.chdir() + try: + yield old + finally: + if old is not None: + old.chdir() + + def realpath(self): + """Return a new path which contains no symbolic links.""" + return self.__class__(os.path.realpath(self.strpath)) + + def atime(self): + """Return last access time of the path.""" + return self.stat().atime + + def __repr__(self): + return f"local({self.strpath!r})" + + def __str__(self): + """Return string representation of the Path.""" + return self.strpath + + def chmod(self, mode, rec=0): + """Change permissions to the given mode. If mode is an + integer it directly encodes the os-specific modes. + if rec is True perform recursively. + """ + if not isinstance(mode, int): + raise TypeError(f"mode {mode!r} must be an integer") + if rec: + for x in self.visit(rec=rec): + error.checked_call(os.chmod, str(x), mode) + error.checked_call(os.chmod, self.strpath, mode) + + def pypkgpath(self): + """Return the Python package path by looking for the last + directory upwards which still contains an __init__.py. + Return None if a pkgpath cannot be determined. + """ + pkgpath = None + for parent in self.parts(reverse=True): + if parent.isdir(): + if not parent.join("__init__.py").exists(): + break + if not isimportable(parent.basename): + break + pkgpath = parent + return pkgpath + + def _ensuresyspath(self, ensuremode, path): + if ensuremode: + s = str(path) + if ensuremode == "append": + if s not in sys.path: + sys.path.append(s) + else: + if s != sys.path[0]: + sys.path.insert(0, s) + + def pyimport(self, modname=None, ensuresyspath=True): + """Return path as an imported python module. + + If modname is None, look for the containing package + and construct an according module name. + The module will be put/looked up in sys.modules. + if ensuresyspath is True then the root dir for importing + the file (taking __init__.py files into account) will + be prepended to sys.path if it isn't there already. + If ensuresyspath=="append" the root dir will be appended + if it isn't already contained in sys.path. + if ensuresyspath is False no modification of syspath happens. + + Special value of ensuresyspath=="importlib" is intended + purely for using in pytest, it is capable only of importing + separate .py files outside packages, e.g. for test suite + without any __init__.py file. It effectively allows having + same-named test modules in different places and offers + mild opt-in via this option. Note that it works only in + recent versions of python. + """ + if not self.check(): + raise error.ENOENT(self) + + if ensuresyspath == "importlib": + if modname is None: + modname = self.purebasename + spec = importlib.util.spec_from_file_location(modname, str(self)) + if spec is None or spec.loader is None: + raise ImportError(f"Can't find module {modname} at location {self!s}") + mod = importlib.util.module_from_spec(spec) + spec.loader.exec_module(mod) + return mod + + pkgpath = None + if modname is None: + pkgpath = self.pypkgpath() + if pkgpath is not None: + pkgroot = pkgpath.dirpath() + names = self.new(ext="").relto(pkgroot).split(self.sep) + if names[-1] == "__init__": + names.pop() + modname = ".".join(names) + else: + pkgroot = self.dirpath() + modname = self.purebasename + + self._ensuresyspath(ensuresyspath, pkgroot) + __import__(modname) + mod = sys.modules[modname] + if self.basename == "__init__.py": + return mod # we don't check anything as we might + # be in a namespace package ... too icky to check + modfile = mod.__file__ + assert modfile is not None + if modfile[-4:] in (".pyc", ".pyo"): + modfile = modfile[:-1] + elif modfile.endswith("$py.class"): + modfile = modfile[:-9] + ".py" + if modfile.endswith(os.sep + "__init__.py"): + if self.basename != "__init__.py": + modfile = modfile[:-12] + try: + issame = self.samefile(modfile) + except error.ENOENT: + issame = False + if not issame: + ignore = os.getenv("PY_IGNORE_IMPORTMISMATCH") + if ignore != "1": + raise self.ImportMismatchError(modname, modfile, self) + return mod + else: + try: + return sys.modules[modname] + except KeyError: + # we have a custom modname, do a pseudo-import + import types + + mod = types.ModuleType(modname) + mod.__file__ = str(self) + sys.modules[modname] = mod + try: + with open(str(self), "rb") as f: + exec(f.read(), mod.__dict__) + except BaseException: + del sys.modules[modname] + raise + return mod + + def sysexec(self, *argv: os.PathLike[str], **popen_opts: Any) -> str: + """Return stdout text from executing a system child process, + where the 'self' path points to executable. + The process is directly invoked and not through a system shell. + """ + from subprocess import PIPE + from subprocess import Popen + + popen_opts.pop("stdout", None) + popen_opts.pop("stderr", None) + proc = Popen( + [str(self)] + [str(arg) for arg in argv], + **popen_opts, + stdout=PIPE, + stderr=PIPE, + ) + stdout: str | bytes + stdout, stderr = proc.communicate() + ret = proc.wait() + if isinstance(stdout, bytes): + stdout = stdout.decode(sys.getdefaultencoding()) + if ret != 0: + if isinstance(stderr, bytes): + stderr = stderr.decode(sys.getdefaultencoding()) + raise RuntimeError( + ret, + ret, + str(self), + stdout, + stderr, + ) + return stdout + + @classmethod + def sysfind(cls, name, checker=None, paths=None): + """Return a path object found by looking at the systems + underlying PATH specification. If the checker is not None + it will be invoked to filter matching paths. If a binary + cannot be found, None is returned + Note: This is probably not working on plain win32 systems + but may work on cygwin. + """ + if isabs(name): + p = local(name) + if p.check(file=1): + return p + else: + if paths is None: + if iswin32: + paths = os.environ["Path"].split(";") + if "" not in paths and "." not in paths: + paths.append(".") + try: + systemroot = os.environ["SYSTEMROOT"] + except KeyError: + pass + else: + paths = [ + path.replace("%SystemRoot%", systemroot) for path in paths + ] + else: + paths = os.environ["PATH"].split(":") + tryadd = [] + if iswin32: + tryadd += os.environ["PATHEXT"].split(os.pathsep) + tryadd.append("") + + for x in paths: + for addext in tryadd: + p = local(x).join(name, abs=True) + addext + try: + if p.check(file=1): + if checker: + if not checker(p): + continue + return p + except error.EACCES: + pass + return None + + @classmethod + def _gethomedir(cls): + try: + x = os.environ["HOME"] + except KeyError: + try: + x = os.environ["HOMEDRIVE"] + os.environ["HOMEPATH"] + except KeyError: + return None + return cls(x) + + # """ + # special class constructors for local filesystem paths + # """ + @classmethod + def get_temproot(cls): + """Return the system's temporary directory + (where tempfiles are usually created in) + """ + import tempfile + + return local(tempfile.gettempdir()) + + @classmethod + def mkdtemp(cls, rootdir=None): + """Return a Path object pointing to a fresh new temporary directory + (which we created ourselves). + """ + import tempfile + + if rootdir is None: + rootdir = cls.get_temproot() + path = error.checked_call(tempfile.mkdtemp, dir=str(rootdir)) + return cls(path) + + @classmethod + def make_numbered_dir( + cls, prefix="session-", rootdir=None, keep=3, lock_timeout=172800 + ): # two days + """Return unique directory with a number greater than the current + maximum one. The number is assumed to start directly after prefix. + if keep is true directories with a number less than (maxnum-keep) + will be removed. If .lock files are used (lock_timeout non-zero), + algorithm is multi-process safe. + """ + if rootdir is None: + rootdir = cls.get_temproot() + + nprefix = prefix.lower() + + def parse_num(path): + """Parse the number out of a path (if it matches the prefix)""" + nbasename = path.basename.lower() + if nbasename.startswith(nprefix): + try: + return int(nbasename[len(nprefix) :]) + except ValueError: + pass + + def create_lockfile(path): + """Exclusively create lockfile. Throws when failed""" + mypid = os.getpid() + lockfile = path.join(".lock") + if hasattr(lockfile, "mksymlinkto"): + lockfile.mksymlinkto(str(mypid)) + else: + fd = error.checked_call( + os.open, str(lockfile), os.O_WRONLY | os.O_CREAT | os.O_EXCL, 0o644 + ) + with os.fdopen(fd, "w") as f: + f.write(str(mypid)) + return lockfile + + def atexit_remove_lockfile(lockfile): + """Ensure lockfile is removed at process exit""" + mypid = os.getpid() + + def try_remove_lockfile(): + # in a fork() situation, only the last process should + # remove the .lock, otherwise the other processes run the + # risk of seeing their temporary dir disappear. For now + # we remove the .lock in the parent only (i.e. we assume + # that the children finish before the parent). + if os.getpid() != mypid: + return + try: + lockfile.remove() + except error.Error: + pass + + atexit.register(try_remove_lockfile) + + # compute the maximum number currently in use with the prefix + lastmax = None + while True: + maxnum = -1 + for path in rootdir.listdir(): + num = parse_num(path) + if num is not None: + maxnum = max(maxnum, num) + + # make the new directory + try: + udir = rootdir.mkdir(prefix + str(maxnum + 1)) + if lock_timeout: + lockfile = create_lockfile(udir) + atexit_remove_lockfile(lockfile) + except (error.EEXIST, error.ENOENT, error.EBUSY): + # race condition (1): another thread/process created the dir + # in the meantime - try again + # race condition (2): another thread/process spuriously acquired + # lock treating empty directory as candidate + # for removal - try again + # race condition (3): another thread/process tried to create the lock at + # the same time (happened in Python 3.3 on Windows) + # https://ci.appveyor.com/project/pytestbot/py/build/1.0.21/job/ffi85j4c0lqwsfwa + if lastmax == maxnum: + raise + lastmax = maxnum + continue + break + + def get_mtime(path): + """Read file modification time""" + try: + return path.lstat().mtime + except error.Error: + pass + + garbage_prefix = prefix + "garbage-" + + def is_garbage(path): + """Check if path denotes directory scheduled for removal""" + bn = path.basename + return bn.startswith(garbage_prefix) + + # prune old directories + udir_time = get_mtime(udir) + if keep and udir_time: + for path in rootdir.listdir(): + num = parse_num(path) + if num is not None and num <= (maxnum - keep): + try: + # try acquiring lock to remove directory as exclusive user + if lock_timeout: + create_lockfile(path) + except (error.EEXIST, error.ENOENT, error.EBUSY): + path_time = get_mtime(path) + if not path_time: + # assume directory doesn't exist now + continue + if abs(udir_time - path_time) < lock_timeout: + # assume directory with lockfile exists + # and lock timeout hasn't expired yet + continue + + # path dir locked for exclusive use + # and scheduled for removal to avoid another thread/process + # treating it as a new directory or removal candidate + garbage_path = rootdir.join(garbage_prefix + str(uuid.uuid4())) + try: + path.rename(garbage_path) + garbage_path.remove(rec=1) + except KeyboardInterrupt: + raise + except Exception: # this might be error.Error, WindowsError ... + pass + if is_garbage(path): + try: + path.remove(rec=1) + except KeyboardInterrupt: + raise + except Exception: # this might be error.Error, WindowsError ... + pass + + # make link... + try: + username = os.environ["USER"] # linux, et al + except KeyError: + try: + username = os.environ["USERNAME"] # windows + except KeyError: + username = "current" + + src = str(udir) + dest = src[: src.rfind("-")] + "-" + username + try: + os.unlink(dest) + except OSError: + pass + try: + os.symlink(src, dest) + except (OSError, AttributeError, NotImplementedError): + pass + + return udir + + +def copymode(src, dest): + """Copy permission from src to dst.""" + import shutil + + shutil.copymode(src, dest) + + +def copystat(src, dest): + """Copy permission, last modification time, + last access time, and flags from src to dst.""" + import shutil + + shutil.copystat(str(src), str(dest)) + + +def copychunked(src, dest): + chunksize = 524288 # half a meg of bytes + fsrc = src.open("rb") + try: + fdest = dest.open("wb") + try: + while 1: + buf = fsrc.read(chunksize) + if not buf: + break + fdest.write(buf) + finally: + fdest.close() + finally: + fsrc.close() + + +def isimportable(name): + if name and (name[0].isalpha() or name[0] == "_"): + name = name.replace("_", "") + return not name or name.isalnum() + + +local = LocalPath diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/_version.py b/Backend/venv/lib/python3.12/site-packages/_pytest/_version.py new file mode 100644 index 00000000..25a26b42 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/_pytest/_version.py @@ -0,0 +1,34 @@ +# file generated by setuptools-scm +# don't change, don't track in version control + +__all__ = [ + "__version__", + "__version_tuple__", + "version", + "version_tuple", + "__commit_id__", + "commit_id", +] + +TYPE_CHECKING = False +if TYPE_CHECKING: + from typing import Tuple + from typing import Union + + VERSION_TUPLE = Tuple[Union[int, str], ...] + COMMIT_ID = Union[str, None] +else: + VERSION_TUPLE = object + COMMIT_ID = object + +version: str +__version__: str +__version_tuple__: VERSION_TUPLE +version_tuple: VERSION_TUPLE +commit_id: COMMIT_ID +__commit_id__: COMMIT_ID + +__version__ = version = '9.0.1' +__version_tuple__ = version_tuple = (9, 0, 1) + +__commit_id__ = commit_id = None diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/assertion/__init__.py b/Backend/venv/lib/python3.12/site-packages/_pytest/assertion/__init__.py new file mode 100644 index 00000000..22f3ca8e --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/_pytest/assertion/__init__.py @@ -0,0 +1,208 @@ +# mypy: allow-untyped-defs +"""Support for presenting detailed information in failing assertions.""" + +from __future__ import annotations + +from collections.abc import Generator +import sys +from typing import Any +from typing import Protocol +from typing import TYPE_CHECKING + +from _pytest.assertion import rewrite +from _pytest.assertion import truncate +from _pytest.assertion import util +from _pytest.assertion.rewrite import assertstate_key +from _pytest.config import Config +from _pytest.config import hookimpl +from _pytest.config.argparsing import Parser +from _pytest.nodes import Item + + +if TYPE_CHECKING: + from _pytest.main import Session + + +def pytest_addoption(parser: Parser) -> None: + group = parser.getgroup("debugconfig") + group.addoption( + "--assert", + action="store", + dest="assertmode", + choices=("rewrite", "plain"), + default="rewrite", + metavar="MODE", + help=( + "Control assertion debugging tools.\n" + "'plain' performs no assertion debugging.\n" + "'rewrite' (the default) rewrites assert statements in test modules" + " on import to provide assert expression information." + ), + ) + parser.addini( + "enable_assertion_pass_hook", + type="bool", + default=False, + help="Enables the pytest_assertion_pass hook. " + "Make sure to delete any previously generated pyc cache files.", + ) + + parser.addini( + "truncation_limit_lines", + default=None, + help="Set threshold of LINES after which truncation will take effect", + ) + parser.addini( + "truncation_limit_chars", + default=None, + help=("Set threshold of CHARS after which truncation will take effect"), + ) + + Config._add_verbosity_ini( + parser, + Config.VERBOSITY_ASSERTIONS, + help=( + "Specify a verbosity level for assertions, overriding the main level. " + "Higher levels will provide more detailed explanation when an assertion fails." + ), + ) + + +def register_assert_rewrite(*names: str) -> None: + """Register one or more module names to be rewritten on import. + + This function will make sure that this module or all modules inside + the package will get their assert statements rewritten. + Thus you should make sure to call this before the module is + actually imported, usually in your __init__.py if you are a plugin + using a package. + + :param names: The module names to register. + """ + for name in names: + if not isinstance(name, str): + msg = "expected module names as *args, got {0} instead" # type: ignore[unreachable] + raise TypeError(msg.format(repr(names))) + rewrite_hook: RewriteHook + for hook in sys.meta_path: + if isinstance(hook, rewrite.AssertionRewritingHook): + rewrite_hook = hook + break + else: + rewrite_hook = DummyRewriteHook() + rewrite_hook.mark_rewrite(*names) + + +class RewriteHook(Protocol): + def mark_rewrite(self, *names: str) -> None: ... + + +class DummyRewriteHook: + """A no-op import hook for when rewriting is disabled.""" + + def mark_rewrite(self, *names: str) -> None: + pass + + +class AssertionState: + """State for the assertion plugin.""" + + def __init__(self, config: Config, mode) -> None: + self.mode = mode + self.trace = config.trace.root.get("assertion") + self.hook: rewrite.AssertionRewritingHook | None = None + + +def install_importhook(config: Config) -> rewrite.AssertionRewritingHook: + """Try to install the rewrite hook, raise SystemError if it fails.""" + config.stash[assertstate_key] = AssertionState(config, "rewrite") + config.stash[assertstate_key].hook = hook = rewrite.AssertionRewritingHook(config) + sys.meta_path.insert(0, hook) + config.stash[assertstate_key].trace("installed rewrite import hook") + + def undo() -> None: + hook = config.stash[assertstate_key].hook + if hook is not None and hook in sys.meta_path: + sys.meta_path.remove(hook) + + config.add_cleanup(undo) + return hook + + +def pytest_collection(session: Session) -> None: + # This hook is only called when test modules are collected + # so for example not in the managing process of pytest-xdist + # (which does not collect test modules). + assertstate = session.config.stash.get(assertstate_key, None) + if assertstate: + if assertstate.hook is not None: + assertstate.hook.set_session(session) + + +@hookimpl(wrapper=True, tryfirst=True) +def pytest_runtest_protocol(item: Item) -> Generator[None, object, object]: + """Setup the pytest_assertrepr_compare and pytest_assertion_pass hooks. + + The rewrite module will use util._reprcompare if it exists to use custom + reporting via the pytest_assertrepr_compare hook. This sets up this custom + comparison for the test. + """ + ihook = item.ihook + + def callbinrepr(op, left: object, right: object) -> str | None: + """Call the pytest_assertrepr_compare hook and prepare the result. + + This uses the first result from the hook and then ensures the + following: + * Overly verbose explanations are truncated unless configured otherwise + (eg. if running in verbose mode). + * Embedded newlines are escaped to help util.format_explanation() + later. + * If the rewrite mode is used embedded %-characters are replaced + to protect later % formatting. + + The result can be formatted by util.format_explanation() for + pretty printing. + """ + hook_result = ihook.pytest_assertrepr_compare( + config=item.config, op=op, left=left, right=right + ) + for new_expl in hook_result: + if new_expl: + new_expl = truncate.truncate_if_required(new_expl, item) + new_expl = [line.replace("\n", "\\n") for line in new_expl] + res = "\n~".join(new_expl) + if item.config.getvalue("assertmode") == "rewrite": + res = res.replace("%", "%%") + return res + return None + + saved_assert_hooks = util._reprcompare, util._assertion_pass + util._reprcompare = callbinrepr + util._config = item.config + + if ihook.pytest_assertion_pass.get_hookimpls(): + + def call_assertion_pass_hook(lineno: int, orig: str, expl: str) -> None: + ihook.pytest_assertion_pass(item=item, lineno=lineno, orig=orig, expl=expl) + + util._assertion_pass = call_assertion_pass_hook + + try: + return (yield) + finally: + util._reprcompare, util._assertion_pass = saved_assert_hooks + util._config = None + + +def pytest_sessionfinish(session: Session) -> None: + assertstate = session.config.stash.get(assertstate_key, None) + if assertstate: + if assertstate.hook is not None: + assertstate.hook.set_session(None) + + +def pytest_assertrepr_compare( + config: Config, op: str, left: Any, right: Any +) -> list[str] | None: + return util.assertrepr_compare(config=config, op=op, left=left, right=right) diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/assertion/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/assertion/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..ca0c2513 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/assertion/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/assertion/__pycache__/rewrite.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/assertion/__pycache__/rewrite.cpython-312.pyc new file mode 100644 index 00000000..b42457dd Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/assertion/__pycache__/rewrite.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/assertion/__pycache__/truncate.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/assertion/__pycache__/truncate.cpython-312.pyc new file mode 100644 index 00000000..9706f341 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/assertion/__pycache__/truncate.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/assertion/__pycache__/util.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/assertion/__pycache__/util.cpython-312.pyc new file mode 100644 index 00000000..fbfec16b Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/assertion/__pycache__/util.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/assertion/rewrite.py b/Backend/venv/lib/python3.12/site-packages/_pytest/assertion/rewrite.py new file mode 100644 index 00000000..566549d6 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/_pytest/assertion/rewrite.py @@ -0,0 +1,1202 @@ +"""Rewrite assertion AST to produce nice error messages.""" + +from __future__ import annotations + +import ast +from collections import defaultdict +from collections.abc import Callable +from collections.abc import Iterable +from collections.abc import Iterator +from collections.abc import Sequence +import errno +import functools +import importlib.abc +import importlib.machinery +import importlib.util +import io +import itertools +import marshal +import os +from pathlib import Path +from pathlib import PurePath +import struct +import sys +import tokenize +import types +from typing import IO +from typing import TYPE_CHECKING + + +if sys.version_info >= (3, 12): + from importlib.resources.abc import TraversableResources +else: + from importlib.abc import TraversableResources +if sys.version_info < (3, 11): + from importlib.readers import FileReader +else: + from importlib.resources.readers import FileReader + + +from _pytest._io.saferepr import DEFAULT_REPR_MAX_SIZE +from _pytest._io.saferepr import saferepr +from _pytest._io.saferepr import saferepr_unlimited +from _pytest._version import version +from _pytest.assertion import util +from _pytest.config import Config +from _pytest.fixtures import FixtureFunctionDefinition +from _pytest.main import Session +from _pytest.pathlib import absolutepath +from _pytest.pathlib import fnmatch_ex +from _pytest.stash import StashKey + + +# fmt: off +from _pytest.assertion.util import format_explanation as _format_explanation # noqa:F401, isort:skip +# fmt:on + +if TYPE_CHECKING: + from _pytest.assertion import AssertionState + + +class Sentinel: + pass + + +assertstate_key = StashKey["AssertionState"]() + +# pytest caches rewritten pycs in pycache dirs +PYTEST_TAG = f"{sys.implementation.cache_tag}-pytest-{version}" +PYC_EXT = ".py" + ((__debug__ and "c") or "o") +PYC_TAIL = "." + PYTEST_TAG + PYC_EXT + +# Special marker that denotes we have just left a scope definition +_SCOPE_END_MARKER = Sentinel() + + +class AssertionRewritingHook(importlib.abc.MetaPathFinder, importlib.abc.Loader): + """PEP302/PEP451 import hook which rewrites asserts.""" + + def __init__(self, config: Config) -> None: + self.config = config + try: + self.fnpats = config.getini("python_files") + except ValueError: + self.fnpats = ["test_*.py", "*_test.py"] + self.session: Session | None = None + self._rewritten_names: dict[str, Path] = {} + self._must_rewrite: set[str] = set() + # flag to guard against trying to rewrite a pyc file while we are already writing another pyc file, + # which might result in infinite recursion (#3506) + self._writing_pyc = False + self._basenames_to_check_rewrite = {"conftest"} + self._marked_for_rewrite_cache: dict[str, bool] = {} + self._session_paths_checked = False + + def set_session(self, session: Session | None) -> None: + self.session = session + self._session_paths_checked = False + + # Indirection so we can mock calls to find_spec originated from the hook during testing + _find_spec = importlib.machinery.PathFinder.find_spec + + def find_spec( + self, + name: str, + path: Sequence[str | bytes] | None = None, + target: types.ModuleType | None = None, + ) -> importlib.machinery.ModuleSpec | None: + if self._writing_pyc: + return None + state = self.config.stash[assertstate_key] + if self._early_rewrite_bailout(name, state): + return None + state.trace(f"find_module called for: {name}") + + # Type ignored because mypy is confused about the `self` binding here. + spec = self._find_spec(name, path) # type: ignore + + if spec is None and path is not None: + # With --import-mode=importlib, PathFinder cannot find spec without modifying `sys.path`, + # causing inability to assert rewriting (#12659). + # At this point, try using the file path to find the module spec. + for _path_str in path: + spec = importlib.util.spec_from_file_location(name, _path_str) + if spec is not None: + break + + if ( + # the import machinery could not find a file to import + spec is None + # this is a namespace package (without `__init__.py`) + # there's nothing to rewrite there + or spec.origin is None + # we can only rewrite source files + or not isinstance(spec.loader, importlib.machinery.SourceFileLoader) + # if the file doesn't exist, we can't rewrite it + or not os.path.exists(spec.origin) + ): + return None + else: + fn = spec.origin + + if not self._should_rewrite(name, fn, state): + return None + + return importlib.util.spec_from_file_location( + name, + fn, + loader=self, + submodule_search_locations=spec.submodule_search_locations, + ) + + def create_module( + self, spec: importlib.machinery.ModuleSpec + ) -> types.ModuleType | None: + return None # default behaviour is fine + + def exec_module(self, module: types.ModuleType) -> None: + assert module.__spec__ is not None + assert module.__spec__.origin is not None + fn = Path(module.__spec__.origin) + state = self.config.stash[assertstate_key] + + self._rewritten_names[module.__name__] = fn + + # The requested module looks like a test file, so rewrite it. This is + # the most magical part of the process: load the source, rewrite the + # asserts, and load the rewritten source. We also cache the rewritten + # module code in a special pyc. We must be aware of the possibility of + # concurrent pytest processes rewriting and loading pycs. To avoid + # tricky race conditions, we maintain the following invariant: The + # cached pyc is always a complete, valid pyc. Operations on it must be + # atomic. POSIX's atomic rename comes in handy. + write = not sys.dont_write_bytecode + cache_dir = get_cache_dir(fn) + if write: + ok = try_makedirs(cache_dir) + if not ok: + write = False + state.trace(f"read only directory: {cache_dir}") + + cache_name = fn.name[:-3] + PYC_TAIL + pyc = cache_dir / cache_name + # Notice that even if we're in a read-only directory, I'm going + # to check for a cached pyc. This may not be optimal... + co = _read_pyc(fn, pyc, state.trace) + if co is None: + state.trace(f"rewriting {fn!r}") + source_stat, co = _rewrite_test(fn, self.config) + if write: + self._writing_pyc = True + try: + _write_pyc(state, co, source_stat, pyc) + finally: + self._writing_pyc = False + else: + state.trace(f"found cached rewritten pyc for {fn}") + exec(co, module.__dict__) + + def _early_rewrite_bailout(self, name: str, state: AssertionState) -> bool: + """A fast way to get out of rewriting modules. + + Profiling has shown that the call to PathFinder.find_spec (inside of + the find_spec from this class) is a major slowdown, so, this method + tries to filter what we're sure won't be rewritten before getting to + it. + """ + if self.session is not None and not self._session_paths_checked: + self._session_paths_checked = True + for initial_path in self.session._initialpaths: + # Make something as c:/projects/my_project/path.py -> + # ['c:', 'projects', 'my_project', 'path.py'] + parts = str(initial_path).split(os.sep) + # add 'path' to basenames to be checked. + self._basenames_to_check_rewrite.add(os.path.splitext(parts[-1])[0]) + + # Note: conftest already by default in _basenames_to_check_rewrite. + parts = name.split(".") + if parts[-1] in self._basenames_to_check_rewrite: + return False + + # For matching the name it must be as if it was a filename. + path = PurePath(*parts).with_suffix(".py") + + for pat in self.fnpats: + # if the pattern contains subdirectories ("tests/**.py" for example) we can't bail out based + # on the name alone because we need to match against the full path + if os.path.dirname(pat): + return False + if fnmatch_ex(pat, path): + return False + + if self._is_marked_for_rewrite(name, state): + return False + + state.trace(f"early skip of rewriting module: {name}") + return True + + def _should_rewrite(self, name: str, fn: str, state: AssertionState) -> bool: + # always rewrite conftest files + if os.path.basename(fn) == "conftest.py": + state.trace(f"rewriting conftest file: {fn!r}") + return True + + if self.session is not None: + if self.session.isinitpath(absolutepath(fn)): + state.trace(f"matched test file (was specified on cmdline): {fn!r}") + return True + + # modules not passed explicitly on the command line are only + # rewritten if they match the naming convention for test files + fn_path = PurePath(fn) + for pat in self.fnpats: + if fnmatch_ex(pat, fn_path): + state.trace(f"matched test file {fn!r}") + return True + + return self._is_marked_for_rewrite(name, state) + + def _is_marked_for_rewrite(self, name: str, state: AssertionState) -> bool: + try: + return self._marked_for_rewrite_cache[name] + except KeyError: + for marked in self._must_rewrite: + if name == marked or name.startswith(marked + "."): + state.trace(f"matched marked file {name!r} (from {marked!r})") + self._marked_for_rewrite_cache[name] = True + return True + + self._marked_for_rewrite_cache[name] = False + return False + + def mark_rewrite(self, *names: str) -> None: + """Mark import names as needing to be rewritten. + + The named module or package as well as any nested modules will + be rewritten on import. + """ + already_imported = ( + set(names).intersection(sys.modules).difference(self._rewritten_names) + ) + for name in already_imported: + mod = sys.modules[name] + if not AssertionRewriter.is_rewrite_disabled( + mod.__doc__ or "" + ) and not isinstance(mod.__loader__, type(self)): + self._warn_already_imported(name) + self._must_rewrite.update(names) + self._marked_for_rewrite_cache.clear() + + def _warn_already_imported(self, name: str) -> None: + from _pytest.warning_types import PytestAssertRewriteWarning + + self.config.issue_config_time_warning( + PytestAssertRewriteWarning( + f"Module already imported so cannot be rewritten; {name}" + ), + stacklevel=5, + ) + + def get_data(self, pathname: str | bytes) -> bytes: + """Optional PEP302 get_data API.""" + with open(pathname, "rb") as f: + return f.read() + + def get_resource_reader(self, name: str) -> TraversableResources: + return FileReader(types.SimpleNamespace(path=self._rewritten_names[name])) # type: ignore[arg-type] + + +def _write_pyc_fp( + fp: IO[bytes], source_stat: os.stat_result, co: types.CodeType +) -> None: + # Technically, we don't have to have the same pyc format as + # (C)Python, since these "pycs" should never be seen by builtin + # import. However, there's little reason to deviate. + fp.write(importlib.util.MAGIC_NUMBER) + # https://www.python.org/dev/peps/pep-0552/ + flags = b"\x00\x00\x00\x00" + fp.write(flags) + # as of now, bytecode header expects 32-bit numbers for size and mtime (#4903) + mtime = int(source_stat.st_mtime) & 0xFFFFFFFF + size = source_stat.st_size & 0xFFFFFFFF + # " bool: + proc_pyc = f"{pyc}.{os.getpid()}" + try: + with open(proc_pyc, "wb") as fp: + _write_pyc_fp(fp, source_stat, co) + except OSError as e: + state.trace(f"error writing pyc file at {proc_pyc}: errno={e.errno}") + return False + + try: + os.replace(proc_pyc, pyc) + except OSError as e: + state.trace(f"error writing pyc file at {pyc}: {e}") + # we ignore any failure to write the cache file + # there are many reasons, permission-denied, pycache dir being a + # file etc. + return False + return True + + +def _rewrite_test(fn: Path, config: Config) -> tuple[os.stat_result, types.CodeType]: + """Read and rewrite *fn* and return the code object.""" + stat = os.stat(fn) + source = fn.read_bytes() + strfn = str(fn) + tree = ast.parse(source, filename=strfn) + rewrite_asserts(tree, source, strfn, config) + co = compile(tree, strfn, "exec", dont_inherit=True) + return stat, co + + +def _read_pyc( + source: Path, pyc: Path, trace: Callable[[str], None] = lambda x: None +) -> types.CodeType | None: + """Possibly read a pytest pyc containing rewritten code. + + Return rewritten code if successful or None if not. + """ + try: + fp = open(pyc, "rb") + except OSError: + return None + with fp: + try: + stat_result = os.stat(source) + mtime = int(stat_result.st_mtime) + size = stat_result.st_size + data = fp.read(16) + except OSError as e: + trace(f"_read_pyc({source}): OSError {e}") + return None + # Check for invalid or out of date pyc file. + if len(data) != (16): + trace(f"_read_pyc({source}): invalid pyc (too short)") + return None + if data[:4] != importlib.util.MAGIC_NUMBER: + trace(f"_read_pyc({source}): invalid pyc (bad magic number)") + return None + if data[4:8] != b"\x00\x00\x00\x00": + trace(f"_read_pyc({source}): invalid pyc (unsupported flags)") + return None + mtime_data = data[8:12] + if int.from_bytes(mtime_data, "little") != mtime & 0xFFFFFFFF: + trace(f"_read_pyc({source}): out of date") + return None + size_data = data[12:16] + if int.from_bytes(size_data, "little") != size & 0xFFFFFFFF: + trace(f"_read_pyc({source}): invalid pyc (incorrect size)") + return None + try: + co = marshal.load(fp) + except Exception as e: + trace(f"_read_pyc({source}): marshal.load error {e}") + return None + if not isinstance(co, types.CodeType): + trace(f"_read_pyc({source}): not a code object") + return None + return co + + +def rewrite_asserts( + mod: ast.Module, + source: bytes, + module_path: str | None = None, + config: Config | None = None, +) -> None: + """Rewrite the assert statements in mod.""" + AssertionRewriter(module_path, config, source).run(mod) + + +def _saferepr(obj: object) -> str: + r"""Get a safe repr of an object for assertion error messages. + + The assertion formatting (util.format_explanation()) requires + newlines to be escaped since they are a special character for it. + Normally assertion.util.format_explanation() does this but for a + custom repr it is possible to contain one of the special escape + sequences, especially '\n{' and '\n}' are likely to be present in + JSON reprs. + """ + if isinstance(obj, types.MethodType): + # for bound methods, skip redundant information + return obj.__name__ + + maxsize = _get_maxsize_for_saferepr(util._config) + if not maxsize: + return saferepr_unlimited(obj).replace("\n", "\\n") + return saferepr(obj, maxsize=maxsize).replace("\n", "\\n") + + +def _get_maxsize_for_saferepr(config: Config | None) -> int | None: + """Get `maxsize` configuration for saferepr based on the given config object.""" + if config is None: + verbosity = 0 + else: + verbosity = config.get_verbosity(Config.VERBOSITY_ASSERTIONS) + if verbosity >= 2: + return None + if verbosity >= 1: + return DEFAULT_REPR_MAX_SIZE * 10 + return DEFAULT_REPR_MAX_SIZE + + +def _format_assertmsg(obj: object) -> str: + r"""Format the custom assertion message given. + + For strings this simply replaces newlines with '\n~' so that + util.format_explanation() will preserve them instead of escaping + newlines. For other objects saferepr() is used first. + """ + # reprlib appears to have a bug which means that if a string + # contains a newline it gets escaped, however if an object has a + # .__repr__() which contains newlines it does not get escaped. + # However in either case we want to preserve the newline. + replaces = [("\n", "\n~"), ("%", "%%")] + if not isinstance(obj, str): + obj = saferepr(obj, _get_maxsize_for_saferepr(util._config)) + replaces.append(("\\n", "\n~")) + + for r1, r2 in replaces: + obj = obj.replace(r1, r2) + + return obj + + +def _should_repr_global_name(obj: object) -> bool: + if callable(obj): + # For pytest fixtures the __repr__ method provides more information than the function name. + return isinstance(obj, FixtureFunctionDefinition) + + try: + return not hasattr(obj, "__name__") + except Exception: + return True + + +def _format_boolop(explanations: Iterable[str], is_or: bool) -> str: + explanation = "(" + ((is_or and " or ") or " and ").join(explanations) + ")" + return explanation.replace("%", "%%") + + +def _call_reprcompare( + ops: Sequence[str], + results: Sequence[bool], + expls: Sequence[str], + each_obj: Sequence[object], +) -> str: + for i, res, expl in zip(range(len(ops)), results, expls, strict=True): + try: + done = not res + except Exception: + done = True + if done: + break + if util._reprcompare is not None: + custom = util._reprcompare(ops[i], each_obj[i], each_obj[i + 1]) + if custom is not None: + return custom + return expl + + +def _call_assertion_pass(lineno: int, orig: str, expl: str) -> None: + if util._assertion_pass is not None: + util._assertion_pass(lineno, orig, expl) + + +def _check_if_assertion_pass_impl() -> bool: + """Check if any plugins implement the pytest_assertion_pass hook + in order not to generate explanation unnecessarily (might be expensive).""" + return True if util._assertion_pass else False + + +UNARY_MAP = {ast.Not: "not %s", ast.Invert: "~%s", ast.USub: "-%s", ast.UAdd: "+%s"} + +BINOP_MAP = { + ast.BitOr: "|", + ast.BitXor: "^", + ast.BitAnd: "&", + ast.LShift: "<<", + ast.RShift: ">>", + ast.Add: "+", + ast.Sub: "-", + ast.Mult: "*", + ast.Div: "/", + ast.FloorDiv: "//", + ast.Mod: "%%", # escaped for string formatting + ast.Eq: "==", + ast.NotEq: "!=", + ast.Lt: "<", + ast.LtE: "<=", + ast.Gt: ">", + ast.GtE: ">=", + ast.Pow: "**", + ast.Is: "is", + ast.IsNot: "is not", + ast.In: "in", + ast.NotIn: "not in", + ast.MatMult: "@", +} + + +def traverse_node(node: ast.AST) -> Iterator[ast.AST]: + """Recursively yield node and all its children in depth-first order.""" + yield node + for child in ast.iter_child_nodes(node): + yield from traverse_node(child) + + +@functools.lru_cache(maxsize=1) +def _get_assertion_exprs(src: bytes) -> dict[int, str]: + """Return a mapping from {lineno: "assertion test expression"}.""" + ret: dict[int, str] = {} + + depth = 0 + lines: list[str] = [] + assert_lineno: int | None = None + seen_lines: set[int] = set() + + def _write_and_reset() -> None: + nonlocal depth, lines, assert_lineno, seen_lines + assert assert_lineno is not None + ret[assert_lineno] = "".join(lines).rstrip().rstrip("\\") + depth = 0 + lines = [] + assert_lineno = None + seen_lines = set() + + tokens = tokenize.tokenize(io.BytesIO(src).readline) + for tp, source, (lineno, offset), _, line in tokens: + if tp == tokenize.NAME and source == "assert": + assert_lineno = lineno + elif assert_lineno is not None: + # keep track of depth for the assert-message `,` lookup + if tp == tokenize.OP and source in "([{": + depth += 1 + elif tp == tokenize.OP and source in ")]}": + depth -= 1 + + if not lines: + lines.append(line[offset:]) + seen_lines.add(lineno) + # a non-nested comma separates the expression from the message + elif depth == 0 and tp == tokenize.OP and source == ",": + # one line assert with message + if lineno in seen_lines and len(lines) == 1: + offset_in_trimmed = offset + len(lines[-1]) - len(line) + lines[-1] = lines[-1][:offset_in_trimmed] + # multi-line assert with message + elif lineno in seen_lines: + lines[-1] = lines[-1][:offset] + # multi line assert with escaped newline before message + else: + lines.append(line[:offset]) + _write_and_reset() + elif tp in {tokenize.NEWLINE, tokenize.ENDMARKER}: + _write_and_reset() + elif lines and lineno not in seen_lines: + lines.append(line) + seen_lines.add(lineno) + + return ret + + +class AssertionRewriter(ast.NodeVisitor): + """Assertion rewriting implementation. + + The main entrypoint is to call .run() with an ast.Module instance, + this will then find all the assert statements and rewrite them to + provide intermediate values and a detailed assertion error. See + http://pybites.blogspot.be/2011/07/behind-scenes-of-pytests-new-assertion.html + for an overview of how this works. + + The entry point here is .run() which will iterate over all the + statements in an ast.Module and for each ast.Assert statement it + finds call .visit() with it. Then .visit_Assert() takes over and + is responsible for creating new ast statements to replace the + original assert statement: it rewrites the test of an assertion + to provide intermediate values and replace it with an if statement + which raises an assertion error with a detailed explanation in + case the expression is false and calls pytest_assertion_pass hook + if expression is true. + + For this .visit_Assert() uses the visitor pattern to visit all the + AST nodes of the ast.Assert.test field, each visit call returning + an AST node and the corresponding explanation string. During this + state is kept in several instance attributes: + + :statements: All the AST statements which will replace the assert + statement. + + :variables: This is populated by .variable() with each variable + used by the statements so that they can all be set to None at + the end of the statements. + + :variable_counter: Counter to create new unique variables needed + by statements. Variables are created using .variable() and + have the form of "@py_assert0". + + :expl_stmts: The AST statements which will be executed to get + data from the assertion. This is the code which will construct + the detailed assertion message that is used in the AssertionError + or for the pytest_assertion_pass hook. + + :explanation_specifiers: A dict filled by .explanation_param() + with %-formatting placeholders and their corresponding + expressions to use in the building of an assertion message. + This is used by .pop_format_context() to build a message. + + :stack: A stack of the explanation_specifiers dicts maintained by + .push_format_context() and .pop_format_context() which allows + to build another %-formatted string while already building one. + + :scope: A tuple containing the current scope used for variables_overwrite. + + :variables_overwrite: A dict filled with references to variables + that change value within an assert. This happens when a variable is + reassigned with the walrus operator + + This state, except the variables_overwrite, is reset on every new assert + statement visited and used by the other visitors. + """ + + def __init__( + self, module_path: str | None, config: Config | None, source: bytes + ) -> None: + super().__init__() + self.module_path = module_path + self.config = config + if config is not None: + self.enable_assertion_pass_hook = config.getini( + "enable_assertion_pass_hook" + ) + else: + self.enable_assertion_pass_hook = False + self.source = source + self.scope: tuple[ast.AST, ...] = () + self.variables_overwrite: defaultdict[tuple[ast.AST, ...], dict[str, str]] = ( + defaultdict(dict) + ) + + def run(self, mod: ast.Module) -> None: + """Find all assert statements in *mod* and rewrite them.""" + if not mod.body: + # Nothing to do. + return + + # We'll insert some special imports at the top of the module, but after any + # docstrings and __future__ imports, so first figure out where that is. + doc = getattr(mod, "docstring", None) + expect_docstring = doc is None + if doc is not None and self.is_rewrite_disabled(doc): + return + pos = 0 + for item in mod.body: + match item: + case ast.Expr(value=ast.Constant(value=str() as doc)) if ( + expect_docstring + ): + if self.is_rewrite_disabled(doc): + return + expect_docstring = False + case ast.ImportFrom(level=0, module="__future__"): + pass + case _: + break + pos += 1 + # Special case: for a decorated function, set the lineno to that of the + # first decorator, not the `def`. Issue #4984. + if isinstance(item, ast.FunctionDef) and item.decorator_list: + lineno = item.decorator_list[0].lineno + else: + lineno = item.lineno + # Now actually insert the special imports. + aliases = [ + ast.alias("builtins", "@py_builtins", lineno=lineno, col_offset=0), + ast.alias( + "_pytest.assertion.rewrite", + "@pytest_ar", + lineno=lineno, + col_offset=0, + ), + ] + imports = [ + ast.Import([alias], lineno=lineno, col_offset=0) for alias in aliases + ] + mod.body[pos:pos] = imports + + # Collect asserts. + self.scope = (mod,) + nodes: list[ast.AST | Sentinel] = [mod] + while nodes: + node = nodes.pop() + if isinstance(node, ast.FunctionDef | ast.AsyncFunctionDef | ast.ClassDef): + self.scope = tuple((*self.scope, node)) + nodes.append(_SCOPE_END_MARKER) + if node == _SCOPE_END_MARKER: + self.scope = self.scope[:-1] + continue + assert isinstance(node, ast.AST) + for name, field in ast.iter_fields(node): + if isinstance(field, list): + new: list[ast.AST] = [] + for i, child in enumerate(field): + if isinstance(child, ast.Assert): + # Transform assert. + new.extend(self.visit(child)) + else: + new.append(child) + if isinstance(child, ast.AST): + nodes.append(child) + setattr(node, name, new) + elif ( + isinstance(field, ast.AST) + # Don't recurse into expressions as they can't contain + # asserts. + and not isinstance(field, ast.expr) + ): + nodes.append(field) + + @staticmethod + def is_rewrite_disabled(docstring: str) -> bool: + return "PYTEST_DONT_REWRITE" in docstring + + def variable(self) -> str: + """Get a new variable.""" + # Use a character invalid in python identifiers to avoid clashing. + name = "@py_assert" + str(next(self.variable_counter)) + self.variables.append(name) + return name + + def assign(self, expr: ast.expr) -> ast.Name: + """Give *expr* a name.""" + name = self.variable() + self.statements.append(ast.Assign([ast.Name(name, ast.Store())], expr)) + return ast.copy_location(ast.Name(name, ast.Load()), expr) + + def display(self, expr: ast.expr) -> ast.expr: + """Call saferepr on the expression.""" + return self.helper("_saferepr", expr) + + def helper(self, name: str, *args: ast.expr) -> ast.expr: + """Call a helper in this module.""" + py_name = ast.Name("@pytest_ar", ast.Load()) + attr = ast.Attribute(py_name, name, ast.Load()) + return ast.Call(attr, list(args), []) + + def builtin(self, name: str) -> ast.Attribute: + """Return the builtin called *name*.""" + builtin_name = ast.Name("@py_builtins", ast.Load()) + return ast.Attribute(builtin_name, name, ast.Load()) + + def explanation_param(self, expr: ast.expr) -> str: + """Return a new named %-formatting placeholder for expr. + + This creates a %-formatting placeholder for expr in the + current formatting context, e.g. ``%(py0)s``. The placeholder + and expr are placed in the current format context so that it + can be used on the next call to .pop_format_context(). + """ + specifier = "py" + str(next(self.variable_counter)) + self.explanation_specifiers[specifier] = expr + return "%(" + specifier + ")s" + + def push_format_context(self) -> None: + """Create a new formatting context. + + The format context is used for when an explanation wants to + have a variable value formatted in the assertion message. In + this case the value required can be added using + .explanation_param(). Finally .pop_format_context() is used + to format a string of %-formatted values as added by + .explanation_param(). + """ + self.explanation_specifiers: dict[str, ast.expr] = {} + self.stack.append(self.explanation_specifiers) + + def pop_format_context(self, expl_expr: ast.expr) -> ast.Name: + """Format the %-formatted string with current format context. + + The expl_expr should be an str ast.expr instance constructed from + the %-placeholders created by .explanation_param(). This will + add the required code to format said string to .expl_stmts and + return the ast.Name instance of the formatted string. + """ + current = self.stack.pop() + if self.stack: + self.explanation_specifiers = self.stack[-1] + keys: list[ast.expr | None] = [ast.Constant(key) for key in current.keys()] + format_dict = ast.Dict(keys, list(current.values())) + form = ast.BinOp(expl_expr, ast.Mod(), format_dict) + name = "@py_format" + str(next(self.variable_counter)) + if self.enable_assertion_pass_hook: + self.format_variables.append(name) + self.expl_stmts.append(ast.Assign([ast.Name(name, ast.Store())], form)) + return ast.Name(name, ast.Load()) + + def generic_visit(self, node: ast.AST) -> tuple[ast.Name, str]: + """Handle expressions we don't have custom code for.""" + assert isinstance(node, ast.expr) + res = self.assign(node) + return res, self.explanation_param(self.display(res)) + + def visit_Assert(self, assert_: ast.Assert) -> list[ast.stmt]: + """Return the AST statements to replace the ast.Assert instance. + + This rewrites the test of an assertion to provide + intermediate values and replace it with an if statement which + raises an assertion error with a detailed explanation in case + the expression is false. + """ + if isinstance(assert_.test, ast.Tuple) and len(assert_.test.elts) >= 1: + import warnings + + from _pytest.warning_types import PytestAssertRewriteWarning + + # TODO: This assert should not be needed. + assert self.module_path is not None + warnings.warn_explicit( + PytestAssertRewriteWarning( + "assertion is always true, perhaps remove parentheses?" + ), + category=None, + filename=self.module_path, + lineno=assert_.lineno, + ) + + self.statements: list[ast.stmt] = [] + self.variables: list[str] = [] + self.variable_counter = itertools.count() + + if self.enable_assertion_pass_hook: + self.format_variables: list[str] = [] + + self.stack: list[dict[str, ast.expr]] = [] + self.expl_stmts: list[ast.stmt] = [] + self.push_format_context() + # Rewrite assert into a bunch of statements. + top_condition, explanation = self.visit(assert_.test) + + negation = ast.UnaryOp(ast.Not(), top_condition) + + if self.enable_assertion_pass_hook: # Experimental pytest_assertion_pass hook + msg = self.pop_format_context(ast.Constant(explanation)) + + # Failed + if assert_.msg: + assertmsg = self.helper("_format_assertmsg", assert_.msg) + gluestr = "\n>assert " + else: + assertmsg = ast.Constant("") + gluestr = "assert " + err_explanation = ast.BinOp(ast.Constant(gluestr), ast.Add(), msg) + err_msg = ast.BinOp(assertmsg, ast.Add(), err_explanation) + err_name = ast.Name("AssertionError", ast.Load()) + fmt = self.helper("_format_explanation", err_msg) + exc = ast.Call(err_name, [fmt], []) + raise_ = ast.Raise(exc, None) + statements_fail = [] + statements_fail.extend(self.expl_stmts) + statements_fail.append(raise_) + + # Passed + fmt_pass = self.helper("_format_explanation", msg) + orig = _get_assertion_exprs(self.source)[assert_.lineno] + hook_call_pass = ast.Expr( + self.helper( + "_call_assertion_pass", + ast.Constant(assert_.lineno), + ast.Constant(orig), + fmt_pass, + ) + ) + # If any hooks implement assert_pass hook + hook_impl_test = ast.If( + self.helper("_check_if_assertion_pass_impl"), + [*self.expl_stmts, hook_call_pass], + [], + ) + statements_pass: list[ast.stmt] = [hook_impl_test] + + # Test for assertion condition + main_test = ast.If(negation, statements_fail, statements_pass) + self.statements.append(main_test) + if self.format_variables: + variables: list[ast.expr] = [ + ast.Name(name, ast.Store()) for name in self.format_variables + ] + clear_format = ast.Assign(variables, ast.Constant(None)) + self.statements.append(clear_format) + + else: # Original assertion rewriting + # Create failure message. + body = self.expl_stmts + self.statements.append(ast.If(negation, body, [])) + if assert_.msg: + assertmsg = self.helper("_format_assertmsg", assert_.msg) + explanation = "\n>assert " + explanation + else: + assertmsg = ast.Constant("") + explanation = "assert " + explanation + template = ast.BinOp(assertmsg, ast.Add(), ast.Constant(explanation)) + msg = self.pop_format_context(template) + fmt = self.helper("_format_explanation", msg) + err_name = ast.Name("AssertionError", ast.Load()) + exc = ast.Call(err_name, [fmt], []) + raise_ = ast.Raise(exc, None) + + body.append(raise_) + + # Clear temporary variables by setting them to None. + if self.variables: + variables = [ast.Name(name, ast.Store()) for name in self.variables] + clear = ast.Assign(variables, ast.Constant(None)) + self.statements.append(clear) + # Fix locations (line numbers/column offsets). + for stmt in self.statements: + for node in traverse_node(stmt): + if getattr(node, "lineno", None) is None: + # apply the assertion location to all generated ast nodes without source location + # and preserve the location of existing nodes or generated nodes with an correct location. + ast.copy_location(node, assert_) + return self.statements + + def visit_NamedExpr(self, name: ast.NamedExpr) -> tuple[ast.NamedExpr, str]: + # This method handles the 'walrus operator' repr of the target + # name if it's a local variable or _should_repr_global_name() + # thinks it's acceptable. + locs = ast.Call(self.builtin("locals"), [], []) + target_id = name.target.id + inlocs = ast.Compare(ast.Constant(target_id), [ast.In()], [locs]) + dorepr = self.helper("_should_repr_global_name", name) + test = ast.BoolOp(ast.Or(), [inlocs, dorepr]) + expr = ast.IfExp(test, self.display(name), ast.Constant(target_id)) + return name, self.explanation_param(expr) + + def visit_Name(self, name: ast.Name) -> tuple[ast.Name, str]: + # Display the repr of the name if it's a local variable or + # _should_repr_global_name() thinks it's acceptable. + locs = ast.Call(self.builtin("locals"), [], []) + inlocs = ast.Compare(ast.Constant(name.id), [ast.In()], [locs]) + dorepr = self.helper("_should_repr_global_name", name) + test = ast.BoolOp(ast.Or(), [inlocs, dorepr]) + expr = ast.IfExp(test, self.display(name), ast.Constant(name.id)) + return name, self.explanation_param(expr) + + def visit_BoolOp(self, boolop: ast.BoolOp) -> tuple[ast.Name, str]: + res_var = self.variable() + expl_list = self.assign(ast.List([], ast.Load())) + app = ast.Attribute(expl_list, "append", ast.Load()) + is_or = int(isinstance(boolop.op, ast.Or)) + body = save = self.statements + fail_save = self.expl_stmts + levels = len(boolop.values) - 1 + self.push_format_context() + # Process each operand, short-circuiting if needed. + for i, v in enumerate(boolop.values): + if i: + fail_inner: list[ast.stmt] = [] + # cond is set in a prior loop iteration below + self.expl_stmts.append(ast.If(cond, fail_inner, [])) # noqa: F821 + self.expl_stmts = fail_inner + match v: + # Check if the left operand is an ast.NamedExpr and the value has already been visited + case ast.Compare( + left=ast.NamedExpr(target=ast.Name(id=target_id)) + ) if target_id in [ + e.id for e in boolop.values[:i] if hasattr(e, "id") + ]: + pytest_temp = self.variable() + self.variables_overwrite[self.scope][target_id] = v.left # type:ignore[assignment] + # mypy's false positive, we're checking that the 'target' attribute exists. + v.left.target.id = pytest_temp # type:ignore[attr-defined] + self.push_format_context() + res, expl = self.visit(v) + body.append(ast.Assign([ast.Name(res_var, ast.Store())], res)) + expl_format = self.pop_format_context(ast.Constant(expl)) + call = ast.Call(app, [expl_format], []) + self.expl_stmts.append(ast.Expr(call)) + if i < levels: + cond: ast.expr = res + if is_or: + cond = ast.UnaryOp(ast.Not(), cond) + inner: list[ast.stmt] = [] + self.statements.append(ast.If(cond, inner, [])) + self.statements = body = inner + self.statements = save + self.expl_stmts = fail_save + expl_template = self.helper("_format_boolop", expl_list, ast.Constant(is_or)) + expl = self.pop_format_context(expl_template) + return ast.Name(res_var, ast.Load()), self.explanation_param(expl) + + def visit_UnaryOp(self, unary: ast.UnaryOp) -> tuple[ast.Name, str]: + pattern = UNARY_MAP[unary.op.__class__] + operand_res, operand_expl = self.visit(unary.operand) + res = self.assign(ast.copy_location(ast.UnaryOp(unary.op, operand_res), unary)) + return res, pattern % (operand_expl,) + + def visit_BinOp(self, binop: ast.BinOp) -> tuple[ast.Name, str]: + symbol = BINOP_MAP[binop.op.__class__] + left_expr, left_expl = self.visit(binop.left) + right_expr, right_expl = self.visit(binop.right) + explanation = f"({left_expl} {symbol} {right_expl})" + res = self.assign( + ast.copy_location(ast.BinOp(left_expr, binop.op, right_expr), binop) + ) + return res, explanation + + def visit_Call(self, call: ast.Call) -> tuple[ast.Name, str]: + new_func, func_expl = self.visit(call.func) + arg_expls = [] + new_args = [] + new_kwargs = [] + for arg in call.args: + if isinstance(arg, ast.Name) and arg.id in self.variables_overwrite.get( + self.scope, {} + ): + arg = self.variables_overwrite[self.scope][arg.id] # type:ignore[assignment] + res, expl = self.visit(arg) + arg_expls.append(expl) + new_args.append(res) + for keyword in call.keywords: + match keyword.value: + case ast.Name(id=id) if id in self.variables_overwrite.get( + self.scope, {} + ): + keyword.value = self.variables_overwrite[self.scope][id] # type:ignore[assignment] + res, expl = self.visit(keyword.value) + new_kwargs.append(ast.keyword(keyword.arg, res)) + if keyword.arg: + arg_expls.append(keyword.arg + "=" + expl) + else: # **args have `arg` keywords with an .arg of None + arg_expls.append("**" + expl) + + expl = "{}({})".format(func_expl, ", ".join(arg_expls)) + new_call = ast.copy_location(ast.Call(new_func, new_args, new_kwargs), call) + res = self.assign(new_call) + res_expl = self.explanation_param(self.display(res)) + outer_expl = f"{res_expl}\n{{{res_expl} = {expl}\n}}" + return res, outer_expl + + def visit_Starred(self, starred: ast.Starred) -> tuple[ast.Starred, str]: + # A Starred node can appear in a function call. + res, expl = self.visit(starred.value) + new_starred = ast.Starred(res, starred.ctx) + return new_starred, "*" + expl + + def visit_Attribute(self, attr: ast.Attribute) -> tuple[ast.Name, str]: + if not isinstance(attr.ctx, ast.Load): + return self.generic_visit(attr) + value, value_expl = self.visit(attr.value) + res = self.assign( + ast.copy_location(ast.Attribute(value, attr.attr, ast.Load()), attr) + ) + res_expl = self.explanation_param(self.display(res)) + pat = "%s\n{%s = %s.%s\n}" + expl = pat % (res_expl, res_expl, value_expl, attr.attr) + return res, expl + + def visit_Compare(self, comp: ast.Compare) -> tuple[ast.expr, str]: + self.push_format_context() + # We first check if we have overwritten a variable in the previous assert + match comp.left: + case ast.Name(id=name_id) if name_id in self.variables_overwrite.get( + self.scope, {} + ): + comp.left = self.variables_overwrite[self.scope][name_id] # type: ignore[assignment] + case ast.NamedExpr(target=ast.Name(id=target_id)): + self.variables_overwrite[self.scope][target_id] = comp.left # type: ignore[assignment] + left_res, left_expl = self.visit(comp.left) + if isinstance(comp.left, ast.Compare | ast.BoolOp): + left_expl = f"({left_expl})" + res_variables = [self.variable() for i in range(len(comp.ops))] + load_names: list[ast.expr] = [ast.Name(v, ast.Load()) for v in res_variables] + store_names = [ast.Name(v, ast.Store()) for v in res_variables] + it = zip(range(len(comp.ops)), comp.ops, comp.comparators, strict=True) + expls: list[ast.expr] = [] + syms: list[ast.expr] = [] + results = [left_res] + for i, op, next_operand in it: + match (next_operand, left_res): + case ( + ast.NamedExpr(target=ast.Name(id=target_id)), + ast.Name(id=name_id), + ) if target_id == name_id: + next_operand.target.id = self.variable() + self.variables_overwrite[self.scope][name_id] = next_operand # type: ignore[assignment] + + next_res, next_expl = self.visit(next_operand) + if isinstance(next_operand, ast.Compare | ast.BoolOp): + next_expl = f"({next_expl})" + results.append(next_res) + sym = BINOP_MAP[op.__class__] + syms.append(ast.Constant(sym)) + expl = f"{left_expl} {sym} {next_expl}" + expls.append(ast.Constant(expl)) + res_expr = ast.copy_location(ast.Compare(left_res, [op], [next_res]), comp) + self.statements.append(ast.Assign([store_names[i]], res_expr)) + left_res, left_expl = next_res, next_expl + # Use pytest.assertion.util._reprcompare if that's available. + expl_call = self.helper( + "_call_reprcompare", + ast.Tuple(syms, ast.Load()), + ast.Tuple(load_names, ast.Load()), + ast.Tuple(expls, ast.Load()), + ast.Tuple(results, ast.Load()), + ) + if len(comp.ops) > 1: + res: ast.expr = ast.BoolOp(ast.And(), load_names) + else: + res = load_names[0] + + return res, self.explanation_param(self.pop_format_context(expl_call)) + + +def try_makedirs(cache_dir: Path) -> bool: + """Attempt to create the given directory and sub-directories exist. + + Returns True if successful or if it already exists. + """ + try: + os.makedirs(cache_dir, exist_ok=True) + except (FileNotFoundError, NotADirectoryError, FileExistsError): + # One of the path components was not a directory: + # - we're in a zip file + # - it is a file + return False + except PermissionError: + return False + except OSError as e: + # as of now, EROFS doesn't have an equivalent OSError-subclass + # + # squashfuse_ll returns ENOSYS "OSError: [Errno 38] Function not + # implemented" for a read-only error + if e.errno in {errno.EROFS, errno.ENOSYS}: + return False + raise + return True + + +def get_cache_dir(file_path: Path) -> Path: + """Return the cache directory to write .pyc files for the given .py file path.""" + if sys.pycache_prefix: + # given: + # prefix = '/tmp/pycs' + # path = '/home/user/proj/test_app.py' + # we want: + # '/tmp/pycs/home/user/proj' + return Path(sys.pycache_prefix) / Path(*file_path.parts[1:-1]) + else: + # classic pycache directory + return file_path.parent / "__pycache__" diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/assertion/truncate.py b/Backend/venv/lib/python3.12/site-packages/_pytest/assertion/truncate.py new file mode 100644 index 00000000..5820e6e8 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/_pytest/assertion/truncate.py @@ -0,0 +1,137 @@ +"""Utilities for truncating assertion output. + +Current default behaviour is to truncate assertion explanations at +terminal lines, unless running with an assertions verbosity level of at least 2 or running on CI. +""" + +from __future__ import annotations + +from _pytest.compat import running_on_ci +from _pytest.config import Config +from _pytest.nodes import Item + + +DEFAULT_MAX_LINES = 8 +DEFAULT_MAX_CHARS = DEFAULT_MAX_LINES * 80 +USAGE_MSG = "use '-vv' to show" + + +def truncate_if_required(explanation: list[str], item: Item) -> list[str]: + """Truncate this assertion explanation if the given test item is eligible.""" + should_truncate, max_lines, max_chars = _get_truncation_parameters(item) + if should_truncate: + return _truncate_explanation( + explanation, + max_lines=max_lines, + max_chars=max_chars, + ) + return explanation + + +def _get_truncation_parameters(item: Item) -> tuple[bool, int, int]: + """Return the truncation parameters related to the given item, as (should truncate, max lines, max chars).""" + # We do not need to truncate if one of conditions is met: + # 1. Verbosity level is 2 or more; + # 2. Test is being run in CI environment; + # 3. Both truncation_limit_lines and truncation_limit_chars + # .ini parameters are set to 0 explicitly. + max_lines = item.config.getini("truncation_limit_lines") + max_lines = int(max_lines if max_lines is not None else DEFAULT_MAX_LINES) + + max_chars = item.config.getini("truncation_limit_chars") + max_chars = int(max_chars if max_chars is not None else DEFAULT_MAX_CHARS) + + verbose = item.config.get_verbosity(Config.VERBOSITY_ASSERTIONS) + + should_truncate = verbose < 2 and not running_on_ci() + should_truncate = should_truncate and (max_lines > 0 or max_chars > 0) + + return should_truncate, max_lines, max_chars + + +def _truncate_explanation( + input_lines: list[str], + max_lines: int, + max_chars: int, +) -> list[str]: + """Truncate given list of strings that makes up the assertion explanation. + + Truncates to either max_lines, or max_chars - whichever the input reaches + first, taking the truncation explanation into account. The remaining lines + will be replaced by a usage message. + """ + # Check if truncation required + input_char_count = len("".join(input_lines)) + # The length of the truncation explanation depends on the number of lines + # removed but is at least 68 characters: + # The real value is + # 64 (for the base message: + # '...\n...Full output truncated (1 line hidden), use '-vv' to show")' + # ) + # + 1 (for plural) + # + int(math.log10(len(input_lines) - max_lines)) (number of hidden line, at least 1) + # + 3 for the '...' added to the truncated line + # But if there's more than 100 lines it's very likely that we're going to + # truncate, so we don't need the exact value using log10. + tolerable_max_chars = ( + max_chars + 70 # 64 + 1 (for plural) + 2 (for '99') + 3 for '...' + ) + # The truncation explanation add two lines to the output + tolerable_max_lines = max_lines + 2 + if ( + len(input_lines) <= tolerable_max_lines + and input_char_count <= tolerable_max_chars + ): + return input_lines + # Truncate first to max_lines, and then truncate to max_chars if necessary + if max_lines > 0: + truncated_explanation = input_lines[:max_lines] + else: + truncated_explanation = input_lines + truncated_char = True + # We reevaluate the need to truncate chars following removal of some lines + if len("".join(truncated_explanation)) > tolerable_max_chars and max_chars > 0: + truncated_explanation = _truncate_by_char_count( + truncated_explanation, max_chars + ) + else: + truncated_char = False + + if truncated_explanation == input_lines: + # No truncation happened, so we do not need to add any explanations + return truncated_explanation + + truncated_line_count = len(input_lines) - len(truncated_explanation) + if truncated_explanation[-1]: + # Add ellipsis and take into account part-truncated final line + truncated_explanation[-1] = truncated_explanation[-1] + "..." + if truncated_char: + # It's possible that we did not remove any char from this line + truncated_line_count += 1 + else: + # Add proper ellipsis when we were able to fit a full line exactly + truncated_explanation[-1] = "..." + return [ + *truncated_explanation, + "", + f"...Full output truncated ({truncated_line_count} line" + f"{'' if truncated_line_count == 1 else 's'} hidden), {USAGE_MSG}", + ] + + +def _truncate_by_char_count(input_lines: list[str], max_chars: int) -> list[str]: + # Find point at which input length exceeds total allowed length + iterated_char_count = 0 + for iterated_index, input_line in enumerate(input_lines): + if iterated_char_count + len(input_line) > max_chars: + break + iterated_char_count += len(input_line) + + # Create truncated explanation with modified final line + truncated_result = input_lines[:iterated_index] + final_line = input_lines[iterated_index] + if final_line: + final_line_truncate_point = max_chars - iterated_char_count + final_line = final_line[:final_line_truncate_point] + truncated_result.append(final_line) + return truncated_result diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/assertion/util.py b/Backend/venv/lib/python3.12/site-packages/_pytest/assertion/util.py new file mode 100644 index 00000000..f35d83a6 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/_pytest/assertion/util.py @@ -0,0 +1,615 @@ +# mypy: allow-untyped-defs +"""Utilities for assertion debugging.""" + +from __future__ import annotations + +import collections.abc +from collections.abc import Callable +from collections.abc import Iterable +from collections.abc import Mapping +from collections.abc import Sequence +from collections.abc import Set as AbstractSet +import pprint +from typing import Any +from typing import Literal +from typing import Protocol +from unicodedata import normalize + +from _pytest import outcomes +import _pytest._code +from _pytest._io.pprint import PrettyPrinter +from _pytest._io.saferepr import saferepr +from _pytest._io.saferepr import saferepr_unlimited +from _pytest.compat import running_on_ci +from _pytest.config import Config + + +# The _reprcompare attribute on the util module is used by the new assertion +# interpretation code and assertion rewriter to detect this plugin was +# loaded and in turn call the hooks defined here as part of the +# DebugInterpreter. +_reprcompare: Callable[[str, object, object], str | None] | None = None + +# Works similarly as _reprcompare attribute. Is populated with the hook call +# when pytest_runtest_setup is called. +_assertion_pass: Callable[[int, str, str], None] | None = None + +# Config object which is assigned during pytest_runtest_protocol. +_config: Config | None = None + + +class _HighlightFunc(Protocol): + def __call__(self, source: str, lexer: Literal["diff", "python"] = "python") -> str: + """Apply highlighting to the given source.""" + + +def dummy_highlighter(source: str, lexer: Literal["diff", "python"] = "python") -> str: + """Dummy highlighter that returns the text unprocessed. + + Needed for _notin_text, as the diff gets post-processed to only show the "+" part. + """ + return source + + +def format_explanation(explanation: str) -> str: + r"""Format an explanation. + + Normally all embedded newlines are escaped, however there are + three exceptions: \n{, \n} and \n~. The first two are intended + cover nested explanations, see function and attribute explanations + for examples (.visit_Call(), visit_Attribute()). The last one is + for when one explanation needs to span multiple lines, e.g. when + displaying diffs. + """ + lines = _split_explanation(explanation) + result = _format_lines(lines) + return "\n".join(result) + + +def _split_explanation(explanation: str) -> list[str]: + r"""Return a list of individual lines in the explanation. + + This will return a list of lines split on '\n{', '\n}' and '\n~'. + Any other newlines will be escaped and appear in the line as the + literal '\n' characters. + """ + raw_lines = (explanation or "").split("\n") + lines = [raw_lines[0]] + for values in raw_lines[1:]: + if values and values[0] in ["{", "}", "~", ">"]: + lines.append(values) + else: + lines[-1] += "\\n" + values + return lines + + +def _format_lines(lines: Sequence[str]) -> list[str]: + """Format the individual lines. + + This will replace the '{', '}' and '~' characters of our mini formatting + language with the proper 'where ...', 'and ...' and ' + ...' text, taking + care of indentation along the way. + + Return a list of formatted lines. + """ + result = list(lines[:1]) + stack = [0] + stackcnt = [0] + for line in lines[1:]: + if line.startswith("{"): + if stackcnt[-1]: + s = "and " + else: + s = "where " + stack.append(len(result)) + stackcnt[-1] += 1 + stackcnt.append(0) + result.append(" +" + " " * (len(stack) - 1) + s + line[1:]) + elif line.startswith("}"): + stack.pop() + stackcnt.pop() + result[stack[-1]] += line[1:] + else: + assert line[0] in ["~", ">"] + stack[-1] += 1 + indent = len(stack) if line.startswith("~") else len(stack) - 1 + result.append(" " * indent + line[1:]) + assert len(stack) == 1 + return result + + +def issequence(x: Any) -> bool: + return isinstance(x, collections.abc.Sequence) and not isinstance(x, str) + + +def istext(x: Any) -> bool: + return isinstance(x, str) + + +def isdict(x: Any) -> bool: + return isinstance(x, dict) + + +def isset(x: Any) -> bool: + return isinstance(x, set | frozenset) + + +def isnamedtuple(obj: Any) -> bool: + return isinstance(obj, tuple) and getattr(obj, "_fields", None) is not None + + +def isdatacls(obj: Any) -> bool: + return getattr(obj, "__dataclass_fields__", None) is not None + + +def isattrs(obj: Any) -> bool: + return getattr(obj, "__attrs_attrs__", None) is not None + + +def isiterable(obj: Any) -> bool: + try: + iter(obj) + return not istext(obj) + except Exception: + return False + + +def has_default_eq( + obj: object, +) -> bool: + """Check if an instance of an object contains the default eq + + First, we check if the object's __eq__ attribute has __code__, + if so, we check the equally of the method code filename (__code__.co_filename) + to the default one generated by the dataclass and attr module + for dataclasses the default co_filename is , for attrs class, the __eq__ should contain "attrs eq generated" + """ + # inspired from https://github.com/willmcgugan/rich/blob/07d51ffc1aee6f16bd2e5a25b4e82850fb9ed778/rich/pretty.py#L68 + if hasattr(obj.__eq__, "__code__") and hasattr(obj.__eq__.__code__, "co_filename"): + code_filename = obj.__eq__.__code__.co_filename + + if isattrs(obj): + return "attrs generated " in code_filename + + return code_filename == "" # data class + return True + + +def assertrepr_compare( + config, op: str, left: Any, right: Any, use_ascii: bool = False +) -> list[str] | None: + """Return specialised explanations for some operators/operands.""" + verbose = config.get_verbosity(Config.VERBOSITY_ASSERTIONS) + + # Strings which normalize equal are often hard to distinguish when printed; use ascii() to make this easier. + # See issue #3246. + use_ascii = ( + isinstance(left, str) + and isinstance(right, str) + and normalize("NFD", left) == normalize("NFD", right) + ) + + if verbose > 1: + left_repr = saferepr_unlimited(left, use_ascii=use_ascii) + right_repr = saferepr_unlimited(right, use_ascii=use_ascii) + else: + # XXX: "15 chars indentation" is wrong + # ("E AssertionError: assert "); should use term width. + maxsize = ( + 80 - 15 - len(op) - 2 + ) // 2 # 15 chars indentation, 1 space around op + + left_repr = saferepr(left, maxsize=maxsize, use_ascii=use_ascii) + right_repr = saferepr(right, maxsize=maxsize, use_ascii=use_ascii) + + summary = f"{left_repr} {op} {right_repr}" + highlighter = config.get_terminal_writer()._highlight + + explanation = None + try: + if op == "==": + explanation = _compare_eq_any(left, right, highlighter, verbose) + elif op == "not in": + if istext(left) and istext(right): + explanation = _notin_text(left, right, verbose) + elif op == "!=": + if isset(left) and isset(right): + explanation = ["Both sets are equal"] + elif op == ">=": + if isset(left) and isset(right): + explanation = _compare_gte_set(left, right, highlighter, verbose) + elif op == "<=": + if isset(left) and isset(right): + explanation = _compare_lte_set(left, right, highlighter, verbose) + elif op == ">": + if isset(left) and isset(right): + explanation = _compare_gt_set(left, right, highlighter, verbose) + elif op == "<": + if isset(left) and isset(right): + explanation = _compare_lt_set(left, right, highlighter, verbose) + + except outcomes.Exit: + raise + except Exception: + repr_crash = _pytest._code.ExceptionInfo.from_current()._getreprcrash() + explanation = [ + f"(pytest_assertion plugin: representation of details failed: {repr_crash}.", + " Probably an object has a faulty __repr__.)", + ] + + if not explanation: + return None + + if explanation[0] != "": + explanation = ["", *explanation] + return [summary, *explanation] + + +def _compare_eq_any( + left: Any, right: Any, highlighter: _HighlightFunc, verbose: int = 0 +) -> list[str]: + explanation = [] + if istext(left) and istext(right): + explanation = _diff_text(left, right, highlighter, verbose) + else: + from _pytest.python_api import ApproxBase + + if isinstance(left, ApproxBase) or isinstance(right, ApproxBase): + # Although the common order should be obtained == expected, this ensures both ways + approx_side = left if isinstance(left, ApproxBase) else right + other_side = right if isinstance(left, ApproxBase) else left + + explanation = approx_side._repr_compare(other_side) + elif type(left) is type(right) and ( + isdatacls(left) or isattrs(left) or isnamedtuple(left) + ): + # Note: unlike dataclasses/attrs, namedtuples compare only the + # field values, not the type or field names. But this branch + # intentionally only handles the same-type case, which was often + # used in older code bases before dataclasses/attrs were available. + explanation = _compare_eq_cls(left, right, highlighter, verbose) + elif issequence(left) and issequence(right): + explanation = _compare_eq_sequence(left, right, highlighter, verbose) + elif isset(left) and isset(right): + explanation = _compare_eq_set(left, right, highlighter, verbose) + elif isdict(left) and isdict(right): + explanation = _compare_eq_dict(left, right, highlighter, verbose) + + if isiterable(left) and isiterable(right): + expl = _compare_eq_iterable(left, right, highlighter, verbose) + explanation.extend(expl) + + return explanation + + +def _diff_text( + left: str, right: str, highlighter: _HighlightFunc, verbose: int = 0 +) -> list[str]: + """Return the explanation for the diff between text. + + Unless --verbose is used this will skip leading and trailing + characters which are identical to keep the diff minimal. + """ + from difflib import ndiff + + explanation: list[str] = [] + + if verbose < 1: + i = 0 # just in case left or right has zero length + for i in range(min(len(left), len(right))): + if left[i] != right[i]: + break + if i > 42: + i -= 10 # Provide some context + explanation = [ + f"Skipping {i} identical leading characters in diff, use -v to show" + ] + left = left[i:] + right = right[i:] + if len(left) == len(right): + for i in range(len(left)): + if left[-i] != right[-i]: + break + if i > 42: + i -= 10 # Provide some context + explanation += [ + f"Skipping {i} identical trailing " + "characters in diff, use -v to show" + ] + left = left[:-i] + right = right[:-i] + keepends = True + if left.isspace() or right.isspace(): + left = repr(str(left)) + right = repr(str(right)) + explanation += ["Strings contain only whitespace, escaping them using repr()"] + # "right" is the expected base against which we compare "left", + # see https://github.com/pytest-dev/pytest/issues/3333 + explanation.extend( + highlighter( + "\n".join( + line.strip("\n") + for line in ndiff(right.splitlines(keepends), left.splitlines(keepends)) + ), + lexer="diff", + ).splitlines() + ) + return explanation + + +def _compare_eq_iterable( + left: Iterable[Any], + right: Iterable[Any], + highlighter: _HighlightFunc, + verbose: int = 0, +) -> list[str]: + if verbose <= 0 and not running_on_ci(): + return ["Use -v to get more diff"] + # dynamic import to speedup pytest + import difflib + + left_formatting = PrettyPrinter().pformat(left).splitlines() + right_formatting = PrettyPrinter().pformat(right).splitlines() + + explanation = ["", "Full diff:"] + # "right" is the expected base against which we compare "left", + # see https://github.com/pytest-dev/pytest/issues/3333 + explanation.extend( + highlighter( + "\n".join( + line.rstrip() + for line in difflib.ndiff(right_formatting, left_formatting) + ), + lexer="diff", + ).splitlines() + ) + return explanation + + +def _compare_eq_sequence( + left: Sequence[Any], + right: Sequence[Any], + highlighter: _HighlightFunc, + verbose: int = 0, +) -> list[str]: + comparing_bytes = isinstance(left, bytes) and isinstance(right, bytes) + explanation: list[str] = [] + len_left = len(left) + len_right = len(right) + for i in range(min(len_left, len_right)): + if left[i] != right[i]: + if comparing_bytes: + # when comparing bytes, we want to see their ascii representation + # instead of their numeric values (#5260) + # using a slice gives us the ascii representation: + # >>> s = b'foo' + # >>> s[0] + # 102 + # >>> s[0:1] + # b'f' + left_value = left[i : i + 1] + right_value = right[i : i + 1] + else: + left_value = left[i] + right_value = right[i] + + explanation.append( + f"At index {i} diff:" + f" {highlighter(repr(left_value))} != {highlighter(repr(right_value))}" + ) + break + + if comparing_bytes: + # when comparing bytes, it doesn't help to show the "sides contain one or more + # items" longer explanation, so skip it + + return explanation + + len_diff = len_left - len_right + if len_diff: + if len_diff > 0: + dir_with_more = "Left" + extra = saferepr(left[len_right]) + else: + len_diff = 0 - len_diff + dir_with_more = "Right" + extra = saferepr(right[len_left]) + + if len_diff == 1: + explanation += [ + f"{dir_with_more} contains one more item: {highlighter(extra)}" + ] + else: + explanation += [ + f"{dir_with_more} contains {len_diff} more items, first extra item: {highlighter(extra)}" + ] + return explanation + + +def _compare_eq_set( + left: AbstractSet[Any], + right: AbstractSet[Any], + highlighter: _HighlightFunc, + verbose: int = 0, +) -> list[str]: + explanation = [] + explanation.extend(_set_one_sided_diff("left", left, right, highlighter)) + explanation.extend(_set_one_sided_diff("right", right, left, highlighter)) + return explanation + + +def _compare_gt_set( + left: AbstractSet[Any], + right: AbstractSet[Any], + highlighter: _HighlightFunc, + verbose: int = 0, +) -> list[str]: + explanation = _compare_gte_set(left, right, highlighter) + if not explanation: + return ["Both sets are equal"] + return explanation + + +def _compare_lt_set( + left: AbstractSet[Any], + right: AbstractSet[Any], + highlighter: _HighlightFunc, + verbose: int = 0, +) -> list[str]: + explanation = _compare_lte_set(left, right, highlighter) + if not explanation: + return ["Both sets are equal"] + return explanation + + +def _compare_gte_set( + left: AbstractSet[Any], + right: AbstractSet[Any], + highlighter: _HighlightFunc, + verbose: int = 0, +) -> list[str]: + return _set_one_sided_diff("right", right, left, highlighter) + + +def _compare_lte_set( + left: AbstractSet[Any], + right: AbstractSet[Any], + highlighter: _HighlightFunc, + verbose: int = 0, +) -> list[str]: + return _set_one_sided_diff("left", left, right, highlighter) + + +def _set_one_sided_diff( + posn: str, + set1: AbstractSet[Any], + set2: AbstractSet[Any], + highlighter: _HighlightFunc, +) -> list[str]: + explanation = [] + diff = set1 - set2 + if diff: + explanation.append(f"Extra items in the {posn} set:") + for item in diff: + explanation.append(highlighter(saferepr(item))) + return explanation + + +def _compare_eq_dict( + left: Mapping[Any, Any], + right: Mapping[Any, Any], + highlighter: _HighlightFunc, + verbose: int = 0, +) -> list[str]: + explanation: list[str] = [] + set_left = set(left) + set_right = set(right) + common = set_left.intersection(set_right) + same = {k: left[k] for k in common if left[k] == right[k]} + if same and verbose < 2: + explanation += [f"Omitting {len(same)} identical items, use -vv to show"] + elif same: + explanation += ["Common items:"] + explanation += highlighter(pprint.pformat(same)).splitlines() + diff = {k for k in common if left[k] != right[k]} + if diff: + explanation += ["Differing items:"] + for k in diff: + explanation += [ + highlighter(saferepr({k: left[k]})) + + " != " + + highlighter(saferepr({k: right[k]})) + ] + extra_left = set_left - set_right + len_extra_left = len(extra_left) + if len_extra_left: + explanation.append( + f"Left contains {len_extra_left} more item{'' if len_extra_left == 1 else 's'}:" + ) + explanation.extend( + highlighter(pprint.pformat({k: left[k] for k in extra_left})).splitlines() + ) + extra_right = set_right - set_left + len_extra_right = len(extra_right) + if len_extra_right: + explanation.append( + f"Right contains {len_extra_right} more item{'' if len_extra_right == 1 else 's'}:" + ) + explanation.extend( + highlighter(pprint.pformat({k: right[k] for k in extra_right})).splitlines() + ) + return explanation + + +def _compare_eq_cls( + left: Any, right: Any, highlighter: _HighlightFunc, verbose: int +) -> list[str]: + if not has_default_eq(left): + return [] + if isdatacls(left): + import dataclasses + + all_fields = dataclasses.fields(left) + fields_to_check = [info.name for info in all_fields if info.compare] + elif isattrs(left): + all_fields = left.__attrs_attrs__ + fields_to_check = [field.name for field in all_fields if getattr(field, "eq")] + elif isnamedtuple(left): + fields_to_check = left._fields + else: + assert False + + indent = " " + same = [] + diff = [] + for field in fields_to_check: + if getattr(left, field) == getattr(right, field): + same.append(field) + else: + diff.append(field) + + explanation = [] + if same or diff: + explanation += [""] + if same and verbose < 2: + explanation.append(f"Omitting {len(same)} identical items, use -vv to show") + elif same: + explanation += ["Matching attributes:"] + explanation += highlighter(pprint.pformat(same)).splitlines() + if diff: + explanation += ["Differing attributes:"] + explanation += highlighter(pprint.pformat(diff)).splitlines() + for field in diff: + field_left = getattr(left, field) + field_right = getattr(right, field) + explanation += [ + "", + f"Drill down into differing attribute {field}:", + f"{indent}{field}: {highlighter(repr(field_left))} != {highlighter(repr(field_right))}", + ] + explanation += [ + indent + line + for line in _compare_eq_any( + field_left, field_right, highlighter, verbose + ) + ] + return explanation + + +def _notin_text(term: str, text: str, verbose: int = 0) -> list[str]: + index = text.find(term) + head = text[:index] + tail = text[index + len(term) :] + correct_text = head + tail + diff = _diff_text(text, correct_text, dummy_highlighter, verbose) + newdiff = [f"{saferepr(term, maxsize=42)} is contained here:"] + for line in diff: + if line.startswith("Skipping"): + continue + if line.startswith("- "): + continue + if line.startswith("+ "): + newdiff.append(" " + line[2:]) + else: + newdiff.append(line) + return newdiff diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/cacheprovider.py b/Backend/venv/lib/python3.12/site-packages/_pytest/cacheprovider.py new file mode 100644 index 00000000..4383f105 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/_pytest/cacheprovider.py @@ -0,0 +1,646 @@ +# mypy: allow-untyped-defs +"""Implementation of the cache provider.""" + +# This plugin was not named "cache" to avoid conflicts with the external +# pytest-cache version. +from __future__ import annotations + +from collections.abc import Generator +from collections.abc import Iterable +import dataclasses +import errno +import json +import os +from pathlib import Path +import tempfile +from typing import final + +from .pathlib import resolve_from_str +from .pathlib import rm_rf +from .reports import CollectReport +from _pytest import nodes +from _pytest._io import TerminalWriter +from _pytest.config import Config +from _pytest.config import ExitCode +from _pytest.config import hookimpl +from _pytest.config.argparsing import Parser +from _pytest.deprecated import check_ispytest +from _pytest.fixtures import fixture +from _pytest.fixtures import FixtureRequest +from _pytest.main import Session +from _pytest.nodes import Directory +from _pytest.nodes import File +from _pytest.reports import TestReport + + +README_CONTENT = """\ +# pytest cache directory # + +This directory contains data from the pytest's cache plugin, +which provides the `--lf` and `--ff` options, as well as the `cache` fixture. + +**Do not** commit this to version control. + +See [the docs](https://docs.pytest.org/en/stable/how-to/cache.html) for more information. +""" + +CACHEDIR_TAG_CONTENT = b"""\ +Signature: 8a477f597d28d172789f06886806bc55 +# This file is a cache directory tag created by pytest. +# For information about cache directory tags, see: +# https://bford.info/cachedir/spec.html +""" + + +@final +@dataclasses.dataclass +class Cache: + """Instance of the `cache` fixture.""" + + _cachedir: Path = dataclasses.field(repr=False) + _config: Config = dataclasses.field(repr=False) + + # Sub-directory under cache-dir for directories created by `mkdir()`. + _CACHE_PREFIX_DIRS = "d" + + # Sub-directory under cache-dir for values created by `set()`. + _CACHE_PREFIX_VALUES = "v" + + def __init__( + self, cachedir: Path, config: Config, *, _ispytest: bool = False + ) -> None: + check_ispytest(_ispytest) + self._cachedir = cachedir + self._config = config + + @classmethod + def for_config(cls, config: Config, *, _ispytest: bool = False) -> Cache: + """Create the Cache instance for a Config. + + :meta private: + """ + check_ispytest(_ispytest) + cachedir = cls.cache_dir_from_config(config, _ispytest=True) + if config.getoption("cacheclear") and cachedir.is_dir(): + cls.clear_cache(cachedir, _ispytest=True) + return cls(cachedir, config, _ispytest=True) + + @classmethod + def clear_cache(cls, cachedir: Path, _ispytest: bool = False) -> None: + """Clear the sub-directories used to hold cached directories and values. + + :meta private: + """ + check_ispytest(_ispytest) + for prefix in (cls._CACHE_PREFIX_DIRS, cls._CACHE_PREFIX_VALUES): + d = cachedir / prefix + if d.is_dir(): + rm_rf(d) + + @staticmethod + def cache_dir_from_config(config: Config, *, _ispytest: bool = False) -> Path: + """Get the path to the cache directory for a Config. + + :meta private: + """ + check_ispytest(_ispytest) + return resolve_from_str(config.getini("cache_dir"), config.rootpath) + + def warn(self, fmt: str, *, _ispytest: bool = False, **args: object) -> None: + """Issue a cache warning. + + :meta private: + """ + check_ispytest(_ispytest) + import warnings + + from _pytest.warning_types import PytestCacheWarning + + warnings.warn( + PytestCacheWarning(fmt.format(**args) if args else fmt), + self._config.hook, + stacklevel=3, + ) + + def _mkdir(self, path: Path) -> None: + self._ensure_cache_dir_and_supporting_files() + path.mkdir(exist_ok=True, parents=True) + + def mkdir(self, name: str) -> Path: + """Return a directory path object with the given name. + + If the directory does not yet exist, it will be created. You can use + it to manage files to e.g. store/retrieve database dumps across test + sessions. + + .. versionadded:: 7.0 + + :param name: + Must be a string not containing a ``/`` separator. + Make sure the name contains your plugin or application + identifiers to prevent clashes with other cache users. + """ + path = Path(name) + if len(path.parts) > 1: + raise ValueError("name is not allowed to contain path separators") + res = self._cachedir.joinpath(self._CACHE_PREFIX_DIRS, path) + self._mkdir(res) + return res + + def _getvaluepath(self, key: str) -> Path: + return self._cachedir.joinpath(self._CACHE_PREFIX_VALUES, Path(key)) + + def get(self, key: str, default): + """Return the cached value for the given key. + + If no value was yet cached or the value cannot be read, the specified + default is returned. + + :param key: + Must be a ``/`` separated value. Usually the first + name is the name of your plugin or your application. + :param default: + The value to return in case of a cache-miss or invalid cache value. + """ + path = self._getvaluepath(key) + try: + with path.open("r", encoding="UTF-8") as f: + return json.load(f) + except (ValueError, OSError): + return default + + def set(self, key: str, value: object) -> None: + """Save value for the given key. + + :param key: + Must be a ``/`` separated value. Usually the first + name is the name of your plugin or your application. + :param value: + Must be of any combination of basic python types, + including nested types like lists of dictionaries. + """ + path = self._getvaluepath(key) + try: + self._mkdir(path.parent) + except OSError as exc: + self.warn( + f"could not create cache path {path}: {exc}", + _ispytest=True, + ) + return + data = json.dumps(value, ensure_ascii=False, indent=2) + try: + f = path.open("w", encoding="UTF-8") + except OSError as exc: + self.warn( + f"cache could not write path {path}: {exc}", + _ispytest=True, + ) + else: + with f: + f.write(data) + + def _ensure_cache_dir_and_supporting_files(self) -> None: + """Create the cache dir and its supporting files.""" + if self._cachedir.is_dir(): + return + + self._cachedir.parent.mkdir(parents=True, exist_ok=True) + with tempfile.TemporaryDirectory( + prefix="pytest-cache-files-", + dir=self._cachedir.parent, + ) as newpath: + path = Path(newpath) + + # Reset permissions to the default, see #12308. + # Note: there's no way to get the current umask atomically, eek. + umask = os.umask(0o022) + os.umask(umask) + path.chmod(0o777 - umask) + + with open(path.joinpath("README.md"), "x", encoding="UTF-8") as f: + f.write(README_CONTENT) + with open(path.joinpath(".gitignore"), "x", encoding="UTF-8") as f: + f.write("# Created by pytest automatically.\n*\n") + with open(path.joinpath("CACHEDIR.TAG"), "xb") as f: + f.write(CACHEDIR_TAG_CONTENT) + + try: + path.rename(self._cachedir) + except OSError as e: + # If 2 concurrent pytests both race to the rename, the loser + # gets "Directory not empty" from the rename. In this case, + # everything is handled so just continue (while letting the + # temporary directory be cleaned up). + # On Windows, the error is a FileExistsError which translates to EEXIST. + if e.errno not in (errno.ENOTEMPTY, errno.EEXIST): + raise + else: + # Create a directory in place of the one we just moved so that + # `TemporaryDirectory`'s cleanup doesn't complain. + # + # TODO: pass ignore_cleanup_errors=True when we no longer support python < 3.10. + # See https://github.com/python/cpython/issues/74168. Note that passing + # delete=False would do the wrong thing in case of errors and isn't supported + # until python 3.12. + path.mkdir() + + +class LFPluginCollWrapper: + def __init__(self, lfplugin: LFPlugin) -> None: + self.lfplugin = lfplugin + self._collected_at_least_one_failure = False + + @hookimpl(wrapper=True) + def pytest_make_collect_report( + self, collector: nodes.Collector + ) -> Generator[None, CollectReport, CollectReport]: + res = yield + if isinstance(collector, Session | Directory): + # Sort any lf-paths to the beginning. + lf_paths = self.lfplugin._last_failed_paths + + # Use stable sort to prioritize last failed. + def sort_key(node: nodes.Item | nodes.Collector) -> bool: + return node.path in lf_paths + + res.result = sorted( + res.result, + key=sort_key, + reverse=True, + ) + + elif isinstance(collector, File): + if collector.path in self.lfplugin._last_failed_paths: + result = res.result + lastfailed = self.lfplugin.lastfailed + + # Only filter with known failures. + if not self._collected_at_least_one_failure: + if not any(x.nodeid in lastfailed for x in result): + return res + self.lfplugin.config.pluginmanager.register( + LFPluginCollSkipfiles(self.lfplugin), "lfplugin-collskip" + ) + self._collected_at_least_one_failure = True + + session = collector.session + result[:] = [ + x + for x in result + if x.nodeid in lastfailed + # Include any passed arguments (not trivial to filter). + or session.isinitpath(x.path) + # Keep all sub-collectors. + or isinstance(x, nodes.Collector) + ] + + return res + + +class LFPluginCollSkipfiles: + def __init__(self, lfplugin: LFPlugin) -> None: + self.lfplugin = lfplugin + + @hookimpl + def pytest_make_collect_report( + self, collector: nodes.Collector + ) -> CollectReport | None: + if isinstance(collector, File): + if collector.path not in self.lfplugin._last_failed_paths: + self.lfplugin._skipped_files += 1 + + return CollectReport( + collector.nodeid, "passed", longrepr=None, result=[] + ) + return None + + +class LFPlugin: + """Plugin which implements the --lf (run last-failing) option.""" + + def __init__(self, config: Config) -> None: + self.config = config + active_keys = "lf", "failedfirst" + self.active = any(config.getoption(key) for key in active_keys) + assert config.cache + self.lastfailed: dict[str, bool] = config.cache.get("cache/lastfailed", {}) + self._previously_failed_count: int | None = None + self._report_status: str | None = None + self._skipped_files = 0 # count skipped files during collection due to --lf + + if config.getoption("lf"): + self._last_failed_paths = self.get_last_failed_paths() + config.pluginmanager.register( + LFPluginCollWrapper(self), "lfplugin-collwrapper" + ) + + def get_last_failed_paths(self) -> set[Path]: + """Return a set with all Paths of the previously failed nodeids and + their parents.""" + rootpath = self.config.rootpath + result = set() + for nodeid in self.lastfailed: + path = rootpath / nodeid.split("::")[0] + result.add(path) + result.update(path.parents) + return {x for x in result if x.exists()} + + def pytest_report_collectionfinish(self) -> str | None: + if self.active and self.config.get_verbosity() >= 0: + return f"run-last-failure: {self._report_status}" + return None + + def pytest_runtest_logreport(self, report: TestReport) -> None: + if (report.when == "call" and report.passed) or report.skipped: + self.lastfailed.pop(report.nodeid, None) + elif report.failed: + self.lastfailed[report.nodeid] = True + + def pytest_collectreport(self, report: CollectReport) -> None: + passed = report.outcome in ("passed", "skipped") + if passed: + if report.nodeid in self.lastfailed: + self.lastfailed.pop(report.nodeid) + self.lastfailed.update((item.nodeid, True) for item in report.result) + else: + self.lastfailed[report.nodeid] = True + + @hookimpl(wrapper=True, tryfirst=True) + def pytest_collection_modifyitems( + self, config: Config, items: list[nodes.Item] + ) -> Generator[None]: + res = yield + + if not self.active: + return res + + if self.lastfailed: + previously_failed = [] + previously_passed = [] + for item in items: + if item.nodeid in self.lastfailed: + previously_failed.append(item) + else: + previously_passed.append(item) + self._previously_failed_count = len(previously_failed) + + if not previously_failed: + # Running a subset of all tests with recorded failures + # only outside of it. + self._report_status = ( + f"{len(self.lastfailed)} known failures not in selected tests" + ) + else: + if self.config.getoption("lf"): + items[:] = previously_failed + config.hook.pytest_deselected(items=previously_passed) + else: # --failedfirst + items[:] = previously_failed + previously_passed + + noun = "failure" if self._previously_failed_count == 1 else "failures" + suffix = " first" if self.config.getoption("failedfirst") else "" + self._report_status = ( + f"rerun previous {self._previously_failed_count} {noun}{suffix}" + ) + + if self._skipped_files > 0: + files_noun = "file" if self._skipped_files == 1 else "files" + self._report_status += f" (skipped {self._skipped_files} {files_noun})" + else: + self._report_status = "no previously failed tests, " + if self.config.getoption("last_failed_no_failures") == "none": + self._report_status += "deselecting all items." + config.hook.pytest_deselected(items=items[:]) + items[:] = [] + else: + self._report_status += "not deselecting items." + + return res + + def pytest_sessionfinish(self, session: Session) -> None: + config = self.config + if config.getoption("cacheshow") or hasattr(config, "workerinput"): + return + + assert config.cache is not None + saved_lastfailed = config.cache.get("cache/lastfailed", {}) + if saved_lastfailed != self.lastfailed: + config.cache.set("cache/lastfailed", self.lastfailed) + + +class NFPlugin: + """Plugin which implements the --nf (run new-first) option.""" + + def __init__(self, config: Config) -> None: + self.config = config + self.active = config.option.newfirst + assert config.cache is not None + self.cached_nodeids = set(config.cache.get("cache/nodeids", [])) + + @hookimpl(wrapper=True, tryfirst=True) + def pytest_collection_modifyitems(self, items: list[nodes.Item]) -> Generator[None]: + res = yield + + if self.active: + new_items: dict[str, nodes.Item] = {} + other_items: dict[str, nodes.Item] = {} + for item in items: + if item.nodeid not in self.cached_nodeids: + new_items[item.nodeid] = item + else: + other_items[item.nodeid] = item + + items[:] = self._get_increasing_order( + new_items.values() + ) + self._get_increasing_order(other_items.values()) + self.cached_nodeids.update(new_items) + else: + self.cached_nodeids.update(item.nodeid for item in items) + + return res + + def _get_increasing_order(self, items: Iterable[nodes.Item]) -> list[nodes.Item]: + return sorted(items, key=lambda item: item.path.stat().st_mtime, reverse=True) + + def pytest_sessionfinish(self) -> None: + config = self.config + if config.getoption("cacheshow") or hasattr(config, "workerinput"): + return + + if config.getoption("collectonly"): + return + + assert config.cache is not None + config.cache.set("cache/nodeids", sorted(self.cached_nodeids)) + + +def pytest_addoption(parser: Parser) -> None: + """Add command-line options for cache functionality. + + :param parser: Parser object to add command-line options to. + """ + group = parser.getgroup("general") + group.addoption( + "--lf", + "--last-failed", + action="store_true", + dest="lf", + help="Rerun only the tests that failed at the last run (or all if none failed)", + ) + group.addoption( + "--ff", + "--failed-first", + action="store_true", + dest="failedfirst", + help="Run all tests, but run the last failures first. " + "This may re-order tests and thus lead to " + "repeated fixture setup/teardown.", + ) + group.addoption( + "--nf", + "--new-first", + action="store_true", + dest="newfirst", + help="Run tests from new files first, then the rest of the tests " + "sorted by file mtime", + ) + group.addoption( + "--cache-show", + action="append", + nargs="?", + dest="cacheshow", + help=( + "Show cache contents, don't perform collection or tests. " + "Optional argument: glob (default: '*')." + ), + ) + group.addoption( + "--cache-clear", + action="store_true", + dest="cacheclear", + help="Remove all cache contents at start of test run", + ) + cache_dir_default = ".pytest_cache" + if "TOX_ENV_DIR" in os.environ: + cache_dir_default = os.path.join(os.environ["TOX_ENV_DIR"], cache_dir_default) + parser.addini("cache_dir", default=cache_dir_default, help="Cache directory path") + group.addoption( + "--lfnf", + "--last-failed-no-failures", + action="store", + dest="last_failed_no_failures", + choices=("all", "none"), + default="all", + help="With ``--lf``, determines whether to execute tests when there " + "are no previously (known) failures or when no " + "cached ``lastfailed`` data was found. " + "``all`` (the default) runs the full test suite again. " + "``none`` just emits a message about no known failures and exits successfully.", + ) + + +def pytest_cmdline_main(config: Config) -> int | ExitCode | None: + if config.option.cacheshow and not config.option.help: + from _pytest.main import wrap_session + + return wrap_session(config, cacheshow) + return None + + +@hookimpl(tryfirst=True) +def pytest_configure(config: Config) -> None: + """Configure cache system and register related plugins. + + Creates the Cache instance and registers the last-failed (LFPlugin) + and new-first (NFPlugin) plugins with the plugin manager. + + :param config: pytest configuration object. + """ + config.cache = Cache.for_config(config, _ispytest=True) + config.pluginmanager.register(LFPlugin(config), "lfplugin") + config.pluginmanager.register(NFPlugin(config), "nfplugin") + + +@fixture +def cache(request: FixtureRequest) -> Cache: + """Return a cache object that can persist state between testing sessions. + + cache.get(key, default) + cache.set(key, value) + + Keys must be ``/`` separated strings, where the first part is usually the + name of your plugin or application to avoid clashes with other cache users. + + Values can be any object handled by the json stdlib module. + """ + assert request.config.cache is not None + return request.config.cache + + +def pytest_report_header(config: Config) -> str | None: + """Display cachedir with --cache-show and if non-default.""" + if config.option.verbose > 0 or config.getini("cache_dir") != ".pytest_cache": + assert config.cache is not None + cachedir = config.cache._cachedir + # TODO: evaluate generating upward relative paths + # starting with .., ../.. if sensible + + try: + displaypath = cachedir.relative_to(config.rootpath) + except ValueError: + displaypath = cachedir + return f"cachedir: {displaypath}" + return None + + +def cacheshow(config: Config, session: Session) -> int: + """Display cache contents when --cache-show is used. + + Shows cached values and directories matching the specified glob pattern + (default: '*'). Displays cache location, cached test results, and + any cached directories created by plugins. + + :param config: pytest configuration object. + :param session: pytest session object. + :returns: Exit code (0 for success). + """ + from pprint import pformat + + assert config.cache is not None + + tw = TerminalWriter() + tw.line("cachedir: " + str(config.cache._cachedir)) + if not config.cache._cachedir.is_dir(): + tw.line("cache is empty") + return 0 + + glob = config.option.cacheshow[0] + if glob is None: + glob = "*" + + dummy = object() + basedir = config.cache._cachedir + vdir = basedir / Cache._CACHE_PREFIX_VALUES + tw.sep("-", f"cache values for {glob!r}") + for valpath in sorted(x for x in vdir.rglob(glob) if x.is_file()): + key = str(valpath.relative_to(vdir)) + val = config.cache.get(key, dummy) + if val is dummy: + tw.line(f"{key} contains unreadable content, will be ignored") + else: + tw.line(f"{key} contains:") + for line in pformat(val).splitlines(): + tw.line(" " + line) + + ddir = basedir / Cache._CACHE_PREFIX_DIRS + if ddir.is_dir(): + contents = sorted(ddir.rglob(glob)) + tw.sep("-", f"cache directories for {glob!r}") + for p in contents: + # if p.is_dir(): + # print("%s/" % p.relative_to(basedir)) + if p.is_file(): + key = str(p.relative_to(basedir)) + tw.line(f"{key} is a file of length {p.stat().st_size}") + return 0 diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/capture.py b/Backend/venv/lib/python3.12/site-packages/_pytest/capture.py new file mode 100644 index 00000000..6d98676b --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/_pytest/capture.py @@ -0,0 +1,1144 @@ +# mypy: allow-untyped-defs +"""Per-test stdout/stderr capturing mechanism.""" + +from __future__ import annotations + +import abc +import collections +from collections.abc import Generator +from collections.abc import Iterable +from collections.abc import Iterator +import contextlib +import io +from io import UnsupportedOperation +import os +import sys +from tempfile import TemporaryFile +from types import TracebackType +from typing import Any +from typing import AnyStr +from typing import BinaryIO +from typing import cast +from typing import Final +from typing import final +from typing import Generic +from typing import Literal +from typing import NamedTuple +from typing import TextIO +from typing import TYPE_CHECKING + + +if TYPE_CHECKING: + from typing_extensions import Self + +from _pytest.config import Config +from _pytest.config import hookimpl +from _pytest.config.argparsing import Parser +from _pytest.deprecated import check_ispytest +from _pytest.fixtures import fixture +from _pytest.fixtures import SubRequest +from _pytest.nodes import Collector +from _pytest.nodes import File +from _pytest.nodes import Item +from _pytest.reports import CollectReport + + +_CaptureMethod = Literal["fd", "sys", "no", "tee-sys"] + + +def pytest_addoption(parser: Parser) -> None: + group = parser.getgroup("general") + group.addoption( + "--capture", + action="store", + default="fd", + metavar="method", + choices=["fd", "sys", "no", "tee-sys"], + help="Per-test capturing method: one of fd|sys|no|tee-sys", + ) + group._addoption( # private to use reserved lower-case short option + "-s", + action="store_const", + const="no", + dest="capture", + help="Shortcut for --capture=no", + ) + + +def _colorama_workaround() -> None: + """Ensure colorama is imported so that it attaches to the correct stdio + handles on Windows. + + colorama uses the terminal on import time. So if something does the + first import of colorama while I/O capture is active, colorama will + fail in various ways. + """ + if sys.platform.startswith("win32"): + try: + import colorama # noqa: F401 + except ImportError: + pass + + +def _readline_workaround() -> None: + """Ensure readline is imported early so it attaches to the correct stdio handles. + + This isn't a problem with the default GNU readline implementation, but in + some configurations, Python uses libedit instead (on macOS, and for prebuilt + binaries such as used by uv). + + In theory this is only needed if readline.backend == "libedit", but the + workaround consists of importing readline here, so we already worked around + the issue by the time we could check if we need to. + """ + try: + import readline # noqa: F401 + except ImportError: + pass + + +def _windowsconsoleio_workaround(stream: TextIO) -> None: + """Workaround for Windows Unicode console handling. + + Python 3.6 implemented Unicode console handling for Windows. This works + by reading/writing to the raw console handle using + ``{Read,Write}ConsoleW``. + + The problem is that we are going to ``dup2`` over the stdio file + descriptors when doing ``FDCapture`` and this will ``CloseHandle`` the + handles used by Python to write to the console. Though there is still some + weirdness and the console handle seems to only be closed randomly and not + on the first call to ``CloseHandle``, or maybe it gets reopened with the + same handle value when we suspend capturing. + + The workaround in this case will reopen stdio with a different fd which + also means a different handle by replicating the logic in + "Py_lifecycle.c:initstdio/create_stdio". + + :param stream: + In practice ``sys.stdout`` or ``sys.stderr``, but given + here as parameter for unittesting purposes. + + See https://github.com/pytest-dev/py/issues/103. + """ + if not sys.platform.startswith("win32") or hasattr(sys, "pypy_version_info"): + return + + # Bail out if ``stream`` doesn't seem like a proper ``io`` stream (#2666). + if not hasattr(stream, "buffer"): # type: ignore[unreachable,unused-ignore] + return + + raw_stdout = stream.buffer.raw if hasattr(stream.buffer, "raw") else stream.buffer + + if not isinstance(raw_stdout, io._WindowsConsoleIO): # type: ignore[attr-defined,unused-ignore] + return + + def _reopen_stdio(f, mode): + if not hasattr(stream.buffer, "raw") and mode[0] == "w": + buffering = 0 + else: + buffering = -1 + + return io.TextIOWrapper( + open(os.dup(f.fileno()), mode, buffering), + f.encoding, + f.errors, + f.newlines, + f.line_buffering, + ) + + sys.stdin = _reopen_stdio(sys.stdin, "rb") + sys.stdout = _reopen_stdio(sys.stdout, "wb") + sys.stderr = _reopen_stdio(sys.stderr, "wb") + + +@hookimpl(wrapper=True) +def pytest_load_initial_conftests(early_config: Config) -> Generator[None]: + ns = early_config.known_args_namespace + if ns.capture == "fd": + _windowsconsoleio_workaround(sys.stdout) + _colorama_workaround() + _readline_workaround() + pluginmanager = early_config.pluginmanager + capman = CaptureManager(ns.capture) + pluginmanager.register(capman, "capturemanager") + + # Make sure that capturemanager is properly reset at final shutdown. + early_config.add_cleanup(capman.stop_global_capturing) + + # Finally trigger conftest loading but while capturing (issue #93). + capman.start_global_capturing() + try: + try: + yield + finally: + capman.suspend_global_capture() + except BaseException: + out, err = capman.read_global_capture() + sys.stdout.write(out) + sys.stderr.write(err) + raise + + +# IO Helpers. + + +class EncodedFile(io.TextIOWrapper): + __slots__ = () + + @property + def name(self) -> str: + # Ensure that file.name is a string. Workaround for a Python bug + # fixed in >=3.7.4: https://bugs.python.org/issue36015 + return repr(self.buffer) + + @property + def mode(self) -> str: + # TextIOWrapper doesn't expose a mode, but at least some of our + # tests check it. + assert hasattr(self.buffer, "mode") + return cast(str, self.buffer.mode.replace("b", "")) + + +class CaptureIO(io.TextIOWrapper): + def __init__(self) -> None: + super().__init__(io.BytesIO(), encoding="UTF-8", newline="", write_through=True) + + def getvalue(self) -> str: + assert isinstance(self.buffer, io.BytesIO) + return self.buffer.getvalue().decode("UTF-8") + + +class TeeCaptureIO(CaptureIO): + def __init__(self, other: TextIO) -> None: + self._other = other + super().__init__() + + def write(self, s: str) -> int: + super().write(s) + return self._other.write(s) + + +class DontReadFromInput(TextIO): + @property + def encoding(self) -> str: + assert sys.__stdin__ is not None + return sys.__stdin__.encoding + + def read(self, size: int = -1) -> str: + raise OSError( + "pytest: reading from stdin while output is captured! Consider using `-s`." + ) + + readline = read + + def __next__(self) -> str: + return self.readline() + + def readlines(self, hint: int | None = -1) -> list[str]: + raise OSError( + "pytest: reading from stdin while output is captured! Consider using `-s`." + ) + + def __iter__(self) -> Iterator[str]: + return self + + def fileno(self) -> int: + raise UnsupportedOperation("redirected stdin is pseudofile, has no fileno()") + + def flush(self) -> None: + raise UnsupportedOperation("redirected stdin is pseudofile, has no flush()") + + def isatty(self) -> bool: + return False + + def close(self) -> None: + pass + + def readable(self) -> bool: + return False + + def seek(self, offset: int, whence: int = 0) -> int: + raise UnsupportedOperation("redirected stdin is pseudofile, has no seek(int)") + + def seekable(self) -> bool: + return False + + def tell(self) -> int: + raise UnsupportedOperation("redirected stdin is pseudofile, has no tell()") + + def truncate(self, size: int | None = None) -> int: + raise UnsupportedOperation("cannot truncate stdin") + + def write(self, data: str) -> int: + raise UnsupportedOperation("cannot write to stdin") + + def writelines(self, lines: Iterable[str]) -> None: + raise UnsupportedOperation("Cannot write to stdin") + + def writable(self) -> bool: + return False + + def __enter__(self) -> Self: + return self + + def __exit__( + self, + type: type[BaseException] | None, + value: BaseException | None, + traceback: TracebackType | None, + ) -> None: + pass + + @property + def buffer(self) -> BinaryIO: + # The str/bytes doesn't actually matter in this type, so OK to fake. + return self # type: ignore[return-value] + + +# Capture classes. + + +class CaptureBase(abc.ABC, Generic[AnyStr]): + EMPTY_BUFFER: AnyStr + + @abc.abstractmethod + def __init__(self, fd: int) -> None: + raise NotImplementedError() + + @abc.abstractmethod + def start(self) -> None: + raise NotImplementedError() + + @abc.abstractmethod + def done(self) -> None: + raise NotImplementedError() + + @abc.abstractmethod + def suspend(self) -> None: + raise NotImplementedError() + + @abc.abstractmethod + def resume(self) -> None: + raise NotImplementedError() + + @abc.abstractmethod + def writeorg(self, data: AnyStr) -> None: + raise NotImplementedError() + + @abc.abstractmethod + def snap(self) -> AnyStr: + raise NotImplementedError() + + +patchsysdict = {0: "stdin", 1: "stdout", 2: "stderr"} + + +class NoCapture(CaptureBase[str]): + EMPTY_BUFFER = "" + + def __init__(self, fd: int) -> None: + pass + + def start(self) -> None: + pass + + def done(self) -> None: + pass + + def suspend(self) -> None: + pass + + def resume(self) -> None: + pass + + def snap(self) -> str: + return "" + + def writeorg(self, data: str) -> None: + pass + + +class SysCaptureBase(CaptureBase[AnyStr]): + def __init__( + self, fd: int, tmpfile: TextIO | None = None, *, tee: bool = False + ) -> None: + name = patchsysdict[fd] + self._old: TextIO = getattr(sys, name) + self.name = name + if tmpfile is None: + if name == "stdin": + tmpfile = DontReadFromInput() + else: + tmpfile = CaptureIO() if not tee else TeeCaptureIO(self._old) + self.tmpfile = tmpfile + self._state = "initialized" + + def repr(self, class_name: str) -> str: + return "<{} {} _old={} _state={!r} tmpfile={!r}>".format( + class_name, + self.name, + (hasattr(self, "_old") and repr(self._old)) or "", + self._state, + self.tmpfile, + ) + + def __repr__(self) -> str: + return "<{} {} _old={} _state={!r} tmpfile={!r}>".format( + self.__class__.__name__, + self.name, + (hasattr(self, "_old") and repr(self._old)) or "", + self._state, + self.tmpfile, + ) + + def _assert_state(self, op: str, states: tuple[str, ...]) -> None: + assert self._state in states, ( + "cannot {} in state {!r}: expected one of {}".format( + op, self._state, ", ".join(states) + ) + ) + + def start(self) -> None: + self._assert_state("start", ("initialized",)) + setattr(sys, self.name, self.tmpfile) + self._state = "started" + + def done(self) -> None: + self._assert_state("done", ("initialized", "started", "suspended", "done")) + if self._state == "done": + return + setattr(sys, self.name, self._old) + del self._old + self.tmpfile.close() + self._state = "done" + + def suspend(self) -> None: + self._assert_state("suspend", ("started", "suspended")) + setattr(sys, self.name, self._old) + self._state = "suspended" + + def resume(self) -> None: + self._assert_state("resume", ("started", "suspended")) + if self._state == "started": + return + setattr(sys, self.name, self.tmpfile) + self._state = "started" + + +class SysCaptureBinary(SysCaptureBase[bytes]): + EMPTY_BUFFER = b"" + + def snap(self) -> bytes: + self._assert_state("snap", ("started", "suspended")) + self.tmpfile.seek(0) + res = self.tmpfile.buffer.read() + self.tmpfile.seek(0) + self.tmpfile.truncate() + return res + + def writeorg(self, data: bytes) -> None: + self._assert_state("writeorg", ("started", "suspended")) + self._old.flush() + self._old.buffer.write(data) + self._old.buffer.flush() + + +class SysCapture(SysCaptureBase[str]): + EMPTY_BUFFER = "" + + def snap(self) -> str: + self._assert_state("snap", ("started", "suspended")) + assert isinstance(self.tmpfile, CaptureIO) + res = self.tmpfile.getvalue() + self.tmpfile.seek(0) + self.tmpfile.truncate() + return res + + def writeorg(self, data: str) -> None: + self._assert_state("writeorg", ("started", "suspended")) + self._old.write(data) + self._old.flush() + + +class FDCaptureBase(CaptureBase[AnyStr]): + def __init__(self, targetfd: int) -> None: + self.targetfd = targetfd + + try: + os.fstat(targetfd) + except OSError: + # FD capturing is conceptually simple -- create a temporary file, + # redirect the FD to it, redirect back when done. But when the + # target FD is invalid it throws a wrench into this lovely scheme. + # + # Tests themselves shouldn't care if the FD is valid, FD capturing + # should work regardless of external circumstances. So falling back + # to just sys capturing is not a good option. + # + # Further complications are the need to support suspend() and the + # possibility of FD reuse (e.g. the tmpfile getting the very same + # target FD). The following approach is robust, I believe. + self.targetfd_invalid: int | None = os.open(os.devnull, os.O_RDWR) + os.dup2(self.targetfd_invalid, targetfd) + else: + self.targetfd_invalid = None + self.targetfd_save = os.dup(targetfd) + + if targetfd == 0: + self.tmpfile = open(os.devnull, encoding="utf-8") + self.syscapture: CaptureBase[str] = SysCapture(targetfd) + else: + self.tmpfile = EncodedFile( + TemporaryFile(buffering=0), + encoding="utf-8", + errors="replace", + newline="", + write_through=True, + ) + if targetfd in patchsysdict: + self.syscapture = SysCapture(targetfd, self.tmpfile) + else: + self.syscapture = NoCapture(targetfd) + + self._state = "initialized" + + def __repr__(self) -> str: + return ( + f"<{self.__class__.__name__} {self.targetfd} oldfd={self.targetfd_save} " + f"_state={self._state!r} tmpfile={self.tmpfile!r}>" + ) + + def _assert_state(self, op: str, states: tuple[str, ...]) -> None: + assert self._state in states, ( + "cannot {} in state {!r}: expected one of {}".format( + op, self._state, ", ".join(states) + ) + ) + + def start(self) -> None: + """Start capturing on targetfd using memorized tmpfile.""" + self._assert_state("start", ("initialized",)) + os.dup2(self.tmpfile.fileno(), self.targetfd) + self.syscapture.start() + self._state = "started" + + def done(self) -> None: + """Stop capturing, restore streams, return original capture file, + seeked to position zero.""" + self._assert_state("done", ("initialized", "started", "suspended", "done")) + if self._state == "done": + return + os.dup2(self.targetfd_save, self.targetfd) + os.close(self.targetfd_save) + if self.targetfd_invalid is not None: + if self.targetfd_invalid != self.targetfd: + os.close(self.targetfd) + os.close(self.targetfd_invalid) + self.syscapture.done() + self.tmpfile.close() + self._state = "done" + + def suspend(self) -> None: + self._assert_state("suspend", ("started", "suspended")) + if self._state == "suspended": + return + self.syscapture.suspend() + os.dup2(self.targetfd_save, self.targetfd) + self._state = "suspended" + + def resume(self) -> None: + self._assert_state("resume", ("started", "suspended")) + if self._state == "started": + return + self.syscapture.resume() + os.dup2(self.tmpfile.fileno(), self.targetfd) + self._state = "started" + + +class FDCaptureBinary(FDCaptureBase[bytes]): + """Capture IO to/from a given OS-level file descriptor. + + snap() produces `bytes`. + """ + + EMPTY_BUFFER = b"" + + def snap(self) -> bytes: + self._assert_state("snap", ("started", "suspended")) + self.tmpfile.seek(0) + res = self.tmpfile.buffer.read() + self.tmpfile.seek(0) + self.tmpfile.truncate() + return res # type: ignore[return-value] + + def writeorg(self, data: bytes) -> None: + """Write to original file descriptor.""" + self._assert_state("writeorg", ("started", "suspended")) + os.write(self.targetfd_save, data) + + +class FDCapture(FDCaptureBase[str]): + """Capture IO to/from a given OS-level file descriptor. + + snap() produces text. + """ + + EMPTY_BUFFER = "" + + def snap(self) -> str: + self._assert_state("snap", ("started", "suspended")) + self.tmpfile.seek(0) + res = self.tmpfile.read() + self.tmpfile.seek(0) + self.tmpfile.truncate() + return res + + def writeorg(self, data: str) -> None: + """Write to original file descriptor.""" + self._assert_state("writeorg", ("started", "suspended")) + # XXX use encoding of original stream + os.write(self.targetfd_save, data.encode("utf-8")) + + +# MultiCapture + + +# Generic NamedTuple only supported since Python 3.11. +if sys.version_info >= (3, 11) or TYPE_CHECKING: + + @final + class CaptureResult(NamedTuple, Generic[AnyStr]): + """The result of :method:`caplog.readouterr() `.""" + + out: AnyStr + err: AnyStr + +else: + + class CaptureResult( + collections.namedtuple("CaptureResult", ["out", "err"]), # noqa: PYI024 + Generic[AnyStr], + ): + """The result of :method:`caplog.readouterr() `.""" + + __slots__ = () + + +class MultiCapture(Generic[AnyStr]): + _state = None + _in_suspended = False + + def __init__( + self, + in_: CaptureBase[AnyStr] | None, + out: CaptureBase[AnyStr] | None, + err: CaptureBase[AnyStr] | None, + ) -> None: + self.in_: CaptureBase[AnyStr] | None = in_ + self.out: CaptureBase[AnyStr] | None = out + self.err: CaptureBase[AnyStr] | None = err + + def __repr__(self) -> str: + return ( + f"" + ) + + def start_capturing(self) -> None: + self._state = "started" + if self.in_: + self.in_.start() + if self.out: + self.out.start() + if self.err: + self.err.start() + + def pop_outerr_to_orig(self) -> tuple[AnyStr, AnyStr]: + """Pop current snapshot out/err capture and flush to orig streams.""" + out, err = self.readouterr() + if out: + assert self.out is not None + self.out.writeorg(out) + if err: + assert self.err is not None + self.err.writeorg(err) + return out, err + + def suspend_capturing(self, in_: bool = False) -> None: + self._state = "suspended" + if self.out: + self.out.suspend() + if self.err: + self.err.suspend() + if in_ and self.in_: + self.in_.suspend() + self._in_suspended = True + + def resume_capturing(self) -> None: + self._state = "started" + if self.out: + self.out.resume() + if self.err: + self.err.resume() + if self._in_suspended: + assert self.in_ is not None + self.in_.resume() + self._in_suspended = False + + def stop_capturing(self) -> None: + """Stop capturing and reset capturing streams.""" + if self._state == "stopped": + raise ValueError("was already stopped") + self._state = "stopped" + if self.out: + self.out.done() + if self.err: + self.err.done() + if self.in_: + self.in_.done() + + def is_started(self) -> bool: + """Whether actively capturing -- not suspended or stopped.""" + return self._state == "started" + + def readouterr(self) -> CaptureResult[AnyStr]: + out = self.out.snap() if self.out else "" + err = self.err.snap() if self.err else "" + # TODO: This type error is real, need to fix. + return CaptureResult(out, err) # type: ignore[arg-type] + + +def _get_multicapture(method: _CaptureMethod) -> MultiCapture[str]: + if method == "fd": + return MultiCapture(in_=FDCapture(0), out=FDCapture(1), err=FDCapture(2)) + elif method == "sys": + return MultiCapture(in_=SysCapture(0), out=SysCapture(1), err=SysCapture(2)) + elif method == "no": + return MultiCapture(in_=None, out=None, err=None) + elif method == "tee-sys": + return MultiCapture( + in_=None, out=SysCapture(1, tee=True), err=SysCapture(2, tee=True) + ) + raise ValueError(f"unknown capturing method: {method!r}") + + +# CaptureManager and CaptureFixture + + +class CaptureManager: + """The capture plugin. + + Manages that the appropriate capture method is enabled/disabled during + collection and each test phase (setup, call, teardown). After each of + those points, the captured output is obtained and attached to the + collection/runtest report. + + There are two levels of capture: + + * global: enabled by default and can be suppressed by the ``-s`` + option. This is always enabled/disabled during collection and each test + phase. + + * fixture: when a test function or one of its fixture depend on the + ``capsys`` or ``capfd`` fixtures. In this case special handling is + needed to ensure the fixtures take precedence over the global capture. + """ + + def __init__(self, method: _CaptureMethod) -> None: + self._method: Final = method + self._global_capturing: MultiCapture[str] | None = None + self._capture_fixture: CaptureFixture[Any] | None = None + + def __repr__(self) -> str: + return ( + f"" + ) + + def is_capturing(self) -> str | bool: + if self.is_globally_capturing(): + return "global" + if self._capture_fixture: + return f"fixture {self._capture_fixture.request.fixturename}" + return False + + # Global capturing control + + def is_globally_capturing(self) -> bool: + return self._method != "no" + + def start_global_capturing(self) -> None: + assert self._global_capturing is None + self._global_capturing = _get_multicapture(self._method) + self._global_capturing.start_capturing() + + def stop_global_capturing(self) -> None: + if self._global_capturing is not None: + self._global_capturing.pop_outerr_to_orig() + self._global_capturing.stop_capturing() + self._global_capturing = None + + def resume_global_capture(self) -> None: + # During teardown of the python process, and on rare occasions, capture + # attributes can be `None` while trying to resume global capture. + if self._global_capturing is not None: + self._global_capturing.resume_capturing() + + def suspend_global_capture(self, in_: bool = False) -> None: + if self._global_capturing is not None: + self._global_capturing.suspend_capturing(in_=in_) + + def suspend(self, in_: bool = False) -> None: + # Need to undo local capsys-et-al if it exists before disabling global capture. + self.suspend_fixture() + self.suspend_global_capture(in_) + + def resume(self) -> None: + self.resume_global_capture() + self.resume_fixture() + + def read_global_capture(self) -> CaptureResult[str]: + assert self._global_capturing is not None + return self._global_capturing.readouterr() + + # Fixture Control + + def set_fixture(self, capture_fixture: CaptureFixture[Any]) -> None: + if self._capture_fixture: + current_fixture = self._capture_fixture.request.fixturename + requested_fixture = capture_fixture.request.fixturename + capture_fixture.request.raiseerror( + f"cannot use {requested_fixture} and {current_fixture} at the same time" + ) + self._capture_fixture = capture_fixture + + def unset_fixture(self) -> None: + self._capture_fixture = None + + def activate_fixture(self) -> None: + """If the current item is using ``capsys`` or ``capfd``, activate + them so they take precedence over the global capture.""" + if self._capture_fixture: + self._capture_fixture._start() + + def deactivate_fixture(self) -> None: + """Deactivate the ``capsys`` or ``capfd`` fixture of this item, if any.""" + if self._capture_fixture: + self._capture_fixture.close() + + def suspend_fixture(self) -> None: + if self._capture_fixture: + self._capture_fixture._suspend() + + def resume_fixture(self) -> None: + if self._capture_fixture: + self._capture_fixture._resume() + + # Helper context managers + + @contextlib.contextmanager + def global_and_fixture_disabled(self) -> Generator[None]: + """Context manager to temporarily disable global and current fixture capturing.""" + do_fixture = self._capture_fixture and self._capture_fixture._is_started() + if do_fixture: + self.suspend_fixture() + do_global = self._global_capturing and self._global_capturing.is_started() + if do_global: + self.suspend_global_capture() + try: + yield + finally: + if do_global: + self.resume_global_capture() + if do_fixture: + self.resume_fixture() + + @contextlib.contextmanager + def item_capture(self, when: str, item: Item) -> Generator[None]: + self.resume_global_capture() + self.activate_fixture() + try: + yield + finally: + self.deactivate_fixture() + self.suspend_global_capture(in_=False) + + out, err = self.read_global_capture() + item.add_report_section(when, "stdout", out) + item.add_report_section(when, "stderr", err) + + # Hooks + + @hookimpl(wrapper=True) + def pytest_make_collect_report( + self, collector: Collector + ) -> Generator[None, CollectReport, CollectReport]: + if isinstance(collector, File): + self.resume_global_capture() + try: + rep = yield + finally: + self.suspend_global_capture() + out, err = self.read_global_capture() + if out: + rep.sections.append(("Captured stdout", out)) + if err: + rep.sections.append(("Captured stderr", err)) + else: + rep = yield + return rep + + @hookimpl(wrapper=True) + def pytest_runtest_setup(self, item: Item) -> Generator[None]: + with self.item_capture("setup", item): + return (yield) + + @hookimpl(wrapper=True) + def pytest_runtest_call(self, item: Item) -> Generator[None]: + with self.item_capture("call", item): + return (yield) + + @hookimpl(wrapper=True) + def pytest_runtest_teardown(self, item: Item) -> Generator[None]: + with self.item_capture("teardown", item): + return (yield) + + @hookimpl(tryfirst=True) + def pytest_keyboard_interrupt(self) -> None: + self.stop_global_capturing() + + @hookimpl(tryfirst=True) + def pytest_internalerror(self) -> None: + self.stop_global_capturing() + + +class CaptureFixture(Generic[AnyStr]): + """Object returned by the :fixture:`capsys`, :fixture:`capsysbinary`, + :fixture:`capfd` and :fixture:`capfdbinary` fixtures.""" + + def __init__( + self, + captureclass: type[CaptureBase[AnyStr]], + request: SubRequest, + *, + config: dict[str, Any] | None = None, + _ispytest: bool = False, + ) -> None: + check_ispytest(_ispytest) + self.captureclass: type[CaptureBase[AnyStr]] = captureclass + self.request = request + self._config = config if config else {} + self._capture: MultiCapture[AnyStr] | None = None + self._captured_out: AnyStr = self.captureclass.EMPTY_BUFFER + self._captured_err: AnyStr = self.captureclass.EMPTY_BUFFER + + def _start(self) -> None: + if self._capture is None: + self._capture = MultiCapture( + in_=None, + out=self.captureclass(1, **self._config), + err=self.captureclass(2, **self._config), + ) + self._capture.start_capturing() + + def close(self) -> None: + if self._capture is not None: + out, err = self._capture.pop_outerr_to_orig() + self._captured_out += out + self._captured_err += err + self._capture.stop_capturing() + self._capture = None + + def readouterr(self) -> CaptureResult[AnyStr]: + """Read and return the captured output so far, resetting the internal + buffer. + + :returns: + The captured content as a namedtuple with ``out`` and ``err`` + string attributes. + """ + captured_out, captured_err = self._captured_out, self._captured_err + if self._capture is not None: + out, err = self._capture.readouterr() + captured_out += out + captured_err += err + self._captured_out = self.captureclass.EMPTY_BUFFER + self._captured_err = self.captureclass.EMPTY_BUFFER + return CaptureResult(captured_out, captured_err) + + def _suspend(self) -> None: + """Suspend this fixture's own capturing temporarily.""" + if self._capture is not None: + self._capture.suspend_capturing() + + def _resume(self) -> None: + """Resume this fixture's own capturing temporarily.""" + if self._capture is not None: + self._capture.resume_capturing() + + def _is_started(self) -> bool: + """Whether actively capturing -- not disabled or closed.""" + if self._capture is not None: + return self._capture.is_started() + return False + + @contextlib.contextmanager + def disabled(self) -> Generator[None]: + """Temporarily disable capturing while inside the ``with`` block.""" + capmanager: CaptureManager = self.request.config.pluginmanager.getplugin( + "capturemanager" + ) + with capmanager.global_and_fixture_disabled(): + yield + + +# The fixtures. + + +@fixture +def capsys(request: SubRequest) -> Generator[CaptureFixture[str]]: + r"""Enable text capturing of writes to ``sys.stdout`` and ``sys.stderr``. + + The captured output is made available via ``capsys.readouterr()`` method + calls, which return a ``(out, err)`` namedtuple. + ``out`` and ``err`` will be ``text`` objects. + + Returns an instance of :class:`CaptureFixture[str] `. + + Example: + + .. code-block:: python + + def test_output(capsys): + print("hello") + captured = capsys.readouterr() + assert captured.out == "hello\n" + """ + capman: CaptureManager = request.config.pluginmanager.getplugin("capturemanager") + capture_fixture = CaptureFixture(SysCapture, request, _ispytest=True) + capman.set_fixture(capture_fixture) + capture_fixture._start() + yield capture_fixture + capture_fixture.close() + capman.unset_fixture() + + +@fixture +def capteesys(request: SubRequest) -> Generator[CaptureFixture[str]]: + r"""Enable simultaneous text capturing and pass-through of writes + to ``sys.stdout`` and ``sys.stderr`` as defined by ``--capture=``. + + + The captured output is made available via ``capteesys.readouterr()`` method + calls, which return a ``(out, err)`` namedtuple. + ``out`` and ``err`` will be ``text`` objects. + + The output is also passed-through, allowing it to be "live-printed", + reported, or both as defined by ``--capture=``. + + Returns an instance of :class:`CaptureFixture[str] `. + + Example: + + .. code-block:: python + + def test_output(capteesys): + print("hello") + captured = capteesys.readouterr() + assert captured.out == "hello\n" + """ + capman: CaptureManager = request.config.pluginmanager.getplugin("capturemanager") + capture_fixture = CaptureFixture( + SysCapture, request, config=dict(tee=True), _ispytest=True + ) + capman.set_fixture(capture_fixture) + capture_fixture._start() + yield capture_fixture + capture_fixture.close() + capman.unset_fixture() + + +@fixture +def capsysbinary(request: SubRequest) -> Generator[CaptureFixture[bytes]]: + r"""Enable bytes capturing of writes to ``sys.stdout`` and ``sys.stderr``. + + The captured output is made available via ``capsysbinary.readouterr()`` + method calls, which return a ``(out, err)`` namedtuple. + ``out`` and ``err`` will be ``bytes`` objects. + + Returns an instance of :class:`CaptureFixture[bytes] `. + + Example: + + .. code-block:: python + + def test_output(capsysbinary): + print("hello") + captured = capsysbinary.readouterr() + assert captured.out == b"hello\n" + """ + capman: CaptureManager = request.config.pluginmanager.getplugin("capturemanager") + capture_fixture = CaptureFixture(SysCaptureBinary, request, _ispytest=True) + capman.set_fixture(capture_fixture) + capture_fixture._start() + yield capture_fixture + capture_fixture.close() + capman.unset_fixture() + + +@fixture +def capfd(request: SubRequest) -> Generator[CaptureFixture[str]]: + r"""Enable text capturing of writes to file descriptors ``1`` and ``2``. + + The captured output is made available via ``capfd.readouterr()`` method + calls, which return a ``(out, err)`` namedtuple. + ``out`` and ``err`` will be ``text`` objects. + + Returns an instance of :class:`CaptureFixture[str] `. + + Example: + + .. code-block:: python + + def test_system_echo(capfd): + os.system('echo "hello"') + captured = capfd.readouterr() + assert captured.out == "hello\n" + """ + capman: CaptureManager = request.config.pluginmanager.getplugin("capturemanager") + capture_fixture = CaptureFixture(FDCapture, request, _ispytest=True) + capman.set_fixture(capture_fixture) + capture_fixture._start() + yield capture_fixture + capture_fixture.close() + capman.unset_fixture() + + +@fixture +def capfdbinary(request: SubRequest) -> Generator[CaptureFixture[bytes]]: + r"""Enable bytes capturing of writes to file descriptors ``1`` and ``2``. + + The captured output is made available via ``capfd.readouterr()`` method + calls, which return a ``(out, err)`` namedtuple. + ``out`` and ``err`` will be ``byte`` objects. + + Returns an instance of :class:`CaptureFixture[bytes] `. + + Example: + + .. code-block:: python + + def test_system_echo(capfdbinary): + os.system('echo "hello"') + captured = capfdbinary.readouterr() + assert captured.out == b"hello\n" + + """ + capman: CaptureManager = request.config.pluginmanager.getplugin("capturemanager") + capture_fixture = CaptureFixture(FDCaptureBinary, request, _ispytest=True) + capman.set_fixture(capture_fixture) + capture_fixture._start() + yield capture_fixture + capture_fixture.close() + capman.unset_fixture() diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/compat.py b/Backend/venv/lib/python3.12/site-packages/_pytest/compat.py new file mode 100644 index 00000000..72c3d091 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/_pytest/compat.py @@ -0,0 +1,314 @@ +# mypy: allow-untyped-defs +"""Python version compatibility code and random general utilities.""" + +from __future__ import annotations + +from collections.abc import Callable +import enum +import functools +import inspect +from inspect import Parameter +from inspect import Signature +import os +from pathlib import Path +import sys +from typing import Any +from typing import Final +from typing import NoReturn + +import py + + +if sys.version_info >= (3, 14): + from annotationlib import Format + + +#: constant to prepare valuing pylib path replacements/lazy proxies later on +# intended for removal in pytest 8.0 or 9.0 + +# fmt: off +# intentional space to create a fake difference for the verification +LEGACY_PATH = py.path. local +# fmt: on + + +def legacy_path(path: str | os.PathLike[str]) -> LEGACY_PATH: + """Internal wrapper to prepare lazy proxies for legacy_path instances""" + return LEGACY_PATH(path) + + +# fmt: off +# Singleton type for NOTSET, as described in: +# https://www.python.org/dev/peps/pep-0484/#support-for-singleton-types-in-unions +class NotSetType(enum.Enum): + token = 0 +NOTSET: Final = NotSetType.token +# fmt: on + + +def iscoroutinefunction(func: object) -> bool: + """Return True if func is a coroutine function (a function defined with async + def syntax, and doesn't contain yield), or a function decorated with + @asyncio.coroutine. + + Note: copied and modified from Python 3.5's builtin coroutines.py to avoid + importing asyncio directly, which in turns also initializes the "logging" + module as a side-effect (see issue #8). + """ + return inspect.iscoroutinefunction(func) or getattr(func, "_is_coroutine", False) + + +def is_async_function(func: object) -> bool: + """Return True if the given function seems to be an async function or + an async generator.""" + return iscoroutinefunction(func) or inspect.isasyncgenfunction(func) + + +def signature(obj: Callable[..., Any]) -> Signature: + """Return signature without evaluating annotations.""" + if sys.version_info >= (3, 14): + return inspect.signature(obj, annotation_format=Format.STRING) + return inspect.signature(obj) + + +def getlocation(function, curdir: str | os.PathLike[str] | None = None) -> str: + function = get_real_func(function) + fn = Path(inspect.getfile(function)) + lineno = function.__code__.co_firstlineno + if curdir is not None: + try: + relfn = fn.relative_to(curdir) + except ValueError: + pass + else: + return f"{relfn}:{lineno + 1}" + return f"{fn}:{lineno + 1}" + + +def num_mock_patch_args(function) -> int: + """Return number of arguments used up by mock arguments (if any).""" + patchings = getattr(function, "patchings", None) + if not patchings: + return 0 + + mock_sentinel = getattr(sys.modules.get("mock"), "DEFAULT", object()) + ut_mock_sentinel = getattr(sys.modules.get("unittest.mock"), "DEFAULT", object()) + + return len( + [ + p + for p in patchings + if not p.attribute_name + and (p.new is mock_sentinel or p.new is ut_mock_sentinel) + ] + ) + + +def getfuncargnames( + function: Callable[..., object], + *, + name: str = "", + cls: type | None = None, +) -> tuple[str, ...]: + """Return the names of a function's mandatory arguments. + + Should return the names of all function arguments that: + * Aren't bound to an instance or type as in instance or class methods. + * Don't have default values. + * Aren't bound with functools.partial. + * Aren't replaced with mocks. + + The cls arguments indicate that the function should be treated as a bound + method even though it's not unless the function is a static method. + + The name parameter should be the original name in which the function was collected. + """ + # TODO(RonnyPfannschmidt): This function should be refactored when we + # revisit fixtures. The fixture mechanism should ask the node for + # the fixture names, and not try to obtain directly from the + # function object well after collection has occurred. + + # The parameters attribute of a Signature object contains an + # ordered mapping of parameter names to Parameter instances. This + # creates a tuple of the names of the parameters that don't have + # defaults. + try: + parameters = signature(function).parameters.values() + except (ValueError, TypeError) as e: + from _pytest.outcomes import fail + + fail( + f"Could not determine arguments of {function!r}: {e}", + pytrace=False, + ) + + arg_names = tuple( + p.name + for p in parameters + if ( + p.kind is Parameter.POSITIONAL_OR_KEYWORD + or p.kind is Parameter.KEYWORD_ONLY + ) + and p.default is Parameter.empty + ) + if not name: + name = function.__name__ + + # If this function should be treated as a bound method even though + # it's passed as an unbound method or function, and its first parameter + # wasn't defined as positional only, remove the first parameter name. + if not any(p.kind is Parameter.POSITIONAL_ONLY for p in parameters) and ( + # Not using `getattr` because we don't want to resolve the staticmethod. + # Not using `cls.__dict__` because we want to check the entire MRO. + cls + and not isinstance( + inspect.getattr_static(cls, name, default=None), staticmethod + ) + ): + arg_names = arg_names[1:] + # Remove any names that will be replaced with mocks. + if hasattr(function, "__wrapped__"): + arg_names = arg_names[num_mock_patch_args(function) :] + return arg_names + + +def get_default_arg_names(function: Callable[..., Any]) -> tuple[str, ...]: + # Note: this code intentionally mirrors the code at the beginning of + # getfuncargnames, to get the arguments which were excluded from its result + # because they had default values. + return tuple( + p.name + for p in signature(function).parameters.values() + if p.kind in (Parameter.POSITIONAL_OR_KEYWORD, Parameter.KEYWORD_ONLY) + and p.default is not Parameter.empty + ) + + +_non_printable_ascii_translate_table = { + i: f"\\x{i:02x}" for i in range(128) if i not in range(32, 127) +} +_non_printable_ascii_translate_table.update( + {ord("\t"): "\\t", ord("\r"): "\\r", ord("\n"): "\\n"} +) + + +def ascii_escaped(val: bytes | str) -> str: + r"""If val is pure ASCII, return it as an str, otherwise, escape + bytes objects into a sequence of escaped bytes: + + b'\xc3\xb4\xc5\xd6' -> r'\xc3\xb4\xc5\xd6' + + and escapes strings into a sequence of escaped unicode ids, e.g.: + + r'4\nV\U00043efa\x0eMXWB\x1e\u3028\u15fd\xcd\U0007d944' + + Note: + The obvious "v.decode('unicode-escape')" will return + valid UTF-8 unicode if it finds them in bytes, but we + want to return escaped bytes for any byte, even if they match + a UTF-8 string. + """ + if isinstance(val, bytes): + ret = val.decode("ascii", "backslashreplace") + else: + ret = val.encode("unicode_escape").decode("ascii") + return ret.translate(_non_printable_ascii_translate_table) + + +def get_real_func(obj): + """Get the real function object of the (possibly) wrapped object by + :func:`functools.wraps`, or :func:`functools.partial`.""" + obj = inspect.unwrap(obj) + + if isinstance(obj, functools.partial): + obj = obj.func + return obj + + +def getimfunc(func): + try: + return func.__func__ + except AttributeError: + return func + + +def safe_getattr(object: Any, name: str, default: Any) -> Any: + """Like getattr but return default upon any Exception or any OutcomeException. + + Attribute access can potentially fail for 'evil' Python objects. + See issue #214. + It catches OutcomeException because of #2490 (issue #580), new outcomes + are derived from BaseException instead of Exception (for more details + check #2707). + """ + from _pytest.outcomes import TEST_OUTCOME + + try: + return getattr(object, name, default) + except TEST_OUTCOME: + return default + + +def safe_isclass(obj: object) -> bool: + """Ignore any exception via isinstance on Python 3.""" + try: + return inspect.isclass(obj) + except Exception: + return False + + +def get_user_id() -> int | None: + """Return the current process's real user id or None if it could not be + determined. + + :return: The user id or None if it could not be determined. + """ + # mypy follows the version and platform checking expectation of PEP 484: + # https://mypy.readthedocs.io/en/stable/common_issues.html?highlight=platform#python-version-and-system-platform-checks + # Containment checks are too complex for mypy v1.5.0 and cause failure. + if sys.platform == "win32" or sys.platform == "emscripten": + # win32 does not have a getuid() function. + # Emscripten has a return 0 stub. + return None + else: + # On other platforms, a return value of -1 is assumed to indicate that + # the current process's real user id could not be determined. + ERROR = -1 + uid = os.getuid() + return uid if uid != ERROR else None + + +if sys.version_info >= (3, 11): + from typing import assert_never +else: + + def assert_never(value: NoReturn) -> NoReturn: + assert False, f"Unhandled value: {value} ({type(value).__name__})" + + +class CallableBool: + """ + A bool-like object that can also be called, returning its true/false value. + + Used for backwards compatibility in cases where something was supposed to be a method + but was implemented as a simple attribute by mistake (see `TerminalReporter.isatty`). + + Do not use in new code. + """ + + def __init__(self, value: bool) -> None: + self._value = value + + def __bool__(self) -> bool: + return self._value + + def __call__(self) -> bool: + return self._value + + +def running_on_ci() -> bool: + """Check if we're currently running on a CI system.""" + # Only enable CI mode if one of these env variables is defined and non-empty. + # Note: review `regendoc` tox env in case this list is changed. + env_vars = ["CI", "BUILD_NUMBER"] + return any(os.environ.get(var) for var in env_vars) diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/config/__init__.py b/Backend/venv/lib/python3.12/site-packages/_pytest/config/__init__.py new file mode 100644 index 00000000..9b2afe3e --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/_pytest/config/__init__.py @@ -0,0 +1,2166 @@ +# mypy: allow-untyped-defs +"""Command line options, config-file and conftest.py processing.""" + +from __future__ import annotations + +import argparse +import builtins +import collections.abc +from collections.abc import Callable +from collections.abc import Generator +from collections.abc import Iterable +from collections.abc import Iterator +from collections.abc import Mapping +from collections.abc import Sequence +import contextlib +import copy +import dataclasses +import enum +from functools import lru_cache +import glob +import importlib.metadata +import inspect +import os +import pathlib +import re +import shlex +import sys +from textwrap import dedent +import types +from types import FunctionType +from typing import Any +from typing import cast +from typing import Final +from typing import final +from typing import IO +from typing import TextIO +from typing import TYPE_CHECKING +import warnings + +import pluggy +from pluggy import HookimplMarker +from pluggy import HookimplOpts +from pluggy import HookspecMarker +from pluggy import HookspecOpts +from pluggy import PluginManager + +from .compat import PathAwareHookProxy +from .exceptions import PrintHelp as PrintHelp +from .exceptions import UsageError as UsageError +from .findpaths import determine_setup +from _pytest import __version__ +import _pytest._code +from _pytest._code import ExceptionInfo +from _pytest._code import filter_traceback +from _pytest._code.code import TracebackStyle +from _pytest._io import TerminalWriter +from _pytest.compat import assert_never +from _pytest.config.argparsing import Argument +from _pytest.config.argparsing import FILE_OR_DIR +from _pytest.config.argparsing import Parser +import _pytest.deprecated +import _pytest.hookspec +from _pytest.outcomes import fail +from _pytest.outcomes import Skipped +from _pytest.pathlib import absolutepath +from _pytest.pathlib import bestrelpath +from _pytest.pathlib import import_path +from _pytest.pathlib import ImportMode +from _pytest.pathlib import resolve_package_path +from _pytest.pathlib import safe_exists +from _pytest.stash import Stash +from _pytest.warning_types import PytestConfigWarning +from _pytest.warning_types import warn_explicit_for + + +if TYPE_CHECKING: + from _pytest.assertion.rewrite import AssertionRewritingHook + from _pytest.cacheprovider import Cache + from _pytest.terminal import TerminalReporter + +_PluggyPlugin = object +"""A type to represent plugin objects. + +Plugins can be any namespace, so we can't narrow it down much, but we use an +alias to make the intent clear. + +Ideally this type would be provided by pluggy itself. +""" + + +hookimpl = HookimplMarker("pytest") +hookspec = HookspecMarker("pytest") + + +@final +class ExitCode(enum.IntEnum): + """Encodes the valid exit codes by pytest. + + Currently users and plugins may supply other exit codes as well. + + .. versionadded:: 5.0 + """ + + #: Tests passed. + OK = 0 + #: Tests failed. + TESTS_FAILED = 1 + #: pytest was interrupted. + INTERRUPTED = 2 + #: An internal error got in the way. + INTERNAL_ERROR = 3 + #: pytest was misused. + USAGE_ERROR = 4 + #: pytest couldn't find tests. + NO_TESTS_COLLECTED = 5 + + __module__ = "pytest" + + +class ConftestImportFailure(Exception): + def __init__( + self, + path: pathlib.Path, + *, + cause: Exception, + ) -> None: + self.path = path + self.cause = cause + + def __str__(self) -> str: + return f"{type(self.cause).__name__}: {self.cause} (from {self.path})" + + +def filter_traceback_for_conftest_import_failure( + entry: _pytest._code.TracebackEntry, +) -> bool: + """Filter tracebacks entries which point to pytest internals or importlib. + + Make a special case for importlib because we use it to import test modules and conftest files + in _pytest.pathlib.import_path. + """ + return filter_traceback(entry) and "importlib" not in str(entry.path).split(os.sep) + + +def print_conftest_import_error(e: ConftestImportFailure, file: TextIO) -> None: + exc_info = ExceptionInfo.from_exception(e.cause) + tw = TerminalWriter(file) + tw.line(f"ImportError while loading conftest '{e.path}'.", red=True) + exc_info.traceback = exc_info.traceback.filter( + filter_traceback_for_conftest_import_failure + ) + exc_repr = ( + exc_info.getrepr(style="short", chain=False) + if exc_info.traceback + else exc_info.exconly() + ) + formatted_tb = str(exc_repr) + for line in formatted_tb.splitlines(): + tw.line(line.rstrip(), red=True) + + +def print_usage_error(e: UsageError, file: TextIO) -> None: + tw = TerminalWriter(file) + for msg in e.args: + tw.line(f"ERROR: {msg}\n", red=True) + + +def main( + args: list[str] | os.PathLike[str] | None = None, + plugins: Sequence[str | _PluggyPlugin] | None = None, +) -> int | ExitCode: + """Perform an in-process test run. + + :param args: + List of command line arguments. If `None` or not given, defaults to reading + arguments directly from the process command line (:data:`sys.argv`). + :param plugins: List of plugin objects to be auto-registered during initialization. + + :returns: An exit code. + """ + # Handle a single `--version` argument early to avoid starting up the entire pytest infrastructure. + new_args = sys.argv[1:] if args is None else args + if isinstance(new_args, Sequence) and new_args.count("--version") == 1: + sys.stdout.write(f"pytest {__version__}\n") + return ExitCode.OK + + old_pytest_version = os.environ.get("PYTEST_VERSION") + try: + os.environ["PYTEST_VERSION"] = __version__ + try: + config = _prepareconfig(new_args, plugins) + except ConftestImportFailure as e: + print_conftest_import_error(e, file=sys.stderr) + return ExitCode.USAGE_ERROR + + try: + ret: ExitCode | int = config.hook.pytest_cmdline_main(config=config) + try: + return ExitCode(ret) + except ValueError: + return ret + finally: + config._ensure_unconfigure() + except UsageError as e: + print_usage_error(e, file=sys.stderr) + return ExitCode.USAGE_ERROR + finally: + if old_pytest_version is None: + os.environ.pop("PYTEST_VERSION", None) + else: + os.environ["PYTEST_VERSION"] = old_pytest_version + + +def console_main() -> int: + """The CLI entry point of pytest. + + This function is not meant for programmable use; use `main()` instead. + """ + # https://docs.python.org/3/library/signal.html#note-on-sigpipe + try: + code = main() + sys.stdout.flush() + return code + except BrokenPipeError: + # Python flushes standard streams on exit; redirect remaining output + # to devnull to avoid another BrokenPipeError at shutdown + devnull = os.open(os.devnull, os.O_WRONLY) + os.dup2(devnull, sys.stdout.fileno()) + return 1 # Python exits with error code 1 on EPIPE + + +class cmdline: # compatibility namespace + main = staticmethod(main) + + +def filename_arg(path: str, optname: str) -> str: + """Argparse type validator for filename arguments. + + :path: Path of filename. + :optname: Name of the option. + """ + if os.path.isdir(path): + raise UsageError(f"{optname} must be a filename, given: {path}") + return path + + +def directory_arg(path: str, optname: str) -> str: + """Argparse type validator for directory arguments. + + :path: Path of directory. + :optname: Name of the option. + """ + if not os.path.isdir(path): + raise UsageError(f"{optname} must be a directory, given: {path}") + return path + + +# Plugins that cannot be disabled via "-p no:X" currently. +essential_plugins = ( + "mark", + "main", + "runner", + "fixtures", + "helpconfig", # Provides -p. +) + +default_plugins = ( + *essential_plugins, + "python", + "terminal", + "debugging", + "unittest", + "capture", + "skipping", + "legacypath", + "tmpdir", + "monkeypatch", + "recwarn", + "pastebin", + "assertion", + "junitxml", + "doctest", + "cacheprovider", + "setuponly", + "setupplan", + "stepwise", + "unraisableexception", + "threadexception", + "warnings", + "logging", + "reports", + "faulthandler", + "subtests", +) + +builtin_plugins = { + *default_plugins, + "pytester", + "pytester_assertions", +} + + +def get_config( + args: Iterable[str] | None = None, + plugins: Sequence[str | _PluggyPlugin] | None = None, +) -> Config: + # Subsequent calls to main will create a fresh instance. + pluginmanager = PytestPluginManager() + invocation_params = Config.InvocationParams( + args=args or (), + plugins=plugins, + dir=pathlib.Path.cwd(), + ) + config = Config(pluginmanager, invocation_params=invocation_params) + + if invocation_params.args: + # Handle any "-p no:plugin" args. + pluginmanager.consider_preparse(invocation_params.args, exclude_only=True) + + for spec in default_plugins: + pluginmanager.import_plugin(spec) + + return config + + +def get_plugin_manager() -> PytestPluginManager: + """Obtain a new instance of the + :py:class:`pytest.PytestPluginManager`, with default plugins + already loaded. + + This function can be used by integration with other tools, like hooking + into pytest to run tests into an IDE. + """ + return get_config().pluginmanager + + +def _prepareconfig( + args: list[str] | os.PathLike[str], + plugins: Sequence[str | _PluggyPlugin] | None = None, +) -> Config: + if isinstance(args, os.PathLike): + args = [os.fspath(args)] + elif not isinstance(args, list): + msg = ( # type:ignore[unreachable] + "`args` parameter expected to be a list of strings, got: {!r} (type: {})" + ) + raise TypeError(msg.format(args, type(args))) + + initial_config = get_config(args, plugins) + pluginmanager = initial_config.pluginmanager + try: + if plugins: + for plugin in plugins: + if isinstance(plugin, str): + pluginmanager.consider_pluginarg(plugin) + else: + pluginmanager.register(plugin) + config: Config = pluginmanager.hook.pytest_cmdline_parse( + pluginmanager=pluginmanager, args=args + ) + return config + except BaseException: + initial_config._ensure_unconfigure() + raise + + +def _get_directory(path: pathlib.Path) -> pathlib.Path: + """Get the directory of a path - itself if already a directory.""" + if path.is_file(): + return path.parent + else: + return path + + +def _get_legacy_hook_marks( + method: Any, + hook_type: str, + opt_names: tuple[str, ...], +) -> dict[str, bool]: + if TYPE_CHECKING: + # abuse typeguard from importlib to avoid massive method type union that's lacking an alias + assert inspect.isroutine(method) + known_marks: set[str] = {m.name for m in getattr(method, "pytestmark", [])} + must_warn: list[str] = [] + opts: dict[str, bool] = {} + for opt_name in opt_names: + opt_attr = getattr(method, opt_name, AttributeError) + if opt_attr is not AttributeError: + must_warn.append(f"{opt_name}={opt_attr}") + opts[opt_name] = True + elif opt_name in known_marks: + must_warn.append(f"{opt_name}=True") + opts[opt_name] = True + else: + opts[opt_name] = False + if must_warn: + hook_opts = ", ".join(must_warn) + message = _pytest.deprecated.HOOK_LEGACY_MARKING.format( + type=hook_type, + fullname=method.__qualname__, + hook_opts=hook_opts, + ) + warn_explicit_for(cast(FunctionType, method), message) + return opts + + +@final +class PytestPluginManager(PluginManager): + """A :py:class:`pluggy.PluginManager ` with + additional pytest-specific functionality: + + * Loading plugins from the command line, ``PYTEST_PLUGINS`` env variable and + ``pytest_plugins`` global variables found in plugins being loaded. + * ``conftest.py`` loading during start-up. + """ + + def __init__(self) -> None: + from _pytest.assertion import DummyRewriteHook + from _pytest.assertion import RewriteHook + + super().__init__("pytest") + + # -- State related to local conftest plugins. + # All loaded conftest modules. + self._conftest_plugins: set[types.ModuleType] = set() + # All conftest modules applicable for a directory. + # This includes the directory's own conftest modules as well + # as those of its parent directories. + self._dirpath2confmods: dict[pathlib.Path, list[types.ModuleType]] = {} + # Cutoff directory above which conftests are no longer discovered. + self._confcutdir: pathlib.Path | None = None + # If set, conftest loading is skipped. + self._noconftest = False + + # _getconftestmodules()'s call to _get_directory() causes a stat + # storm when it's called potentially thousands of times in a test + # session (#9478), often with the same path, so cache it. + self._get_directory = lru_cache(256)(_get_directory) + + # plugins that were explicitly skipped with pytest.skip + # list of (module name, skip reason) + # previously we would issue a warning when a plugin was skipped, but + # since we refactored warnings as first citizens of Config, they are + # just stored here to be used later. + self.skipped_plugins: list[tuple[str, str]] = [] + + self.add_hookspecs(_pytest.hookspec) + self.register(self) + if os.environ.get("PYTEST_DEBUG"): + err: IO[str] = sys.stderr + encoding: str = getattr(err, "encoding", "utf8") + try: + err = open( + os.dup(err.fileno()), + mode=err.mode, + buffering=1, + encoding=encoding, + ) + except Exception: + pass + self.trace.root.setwriter(err.write) + self.enable_tracing() + + # Config._consider_importhook will set a real object if required. + self.rewrite_hook: RewriteHook = DummyRewriteHook() + # Used to know when we are importing conftests after the pytest_configure stage. + self._configured = False + + def parse_hookimpl_opts( + self, plugin: _PluggyPlugin, name: str + ) -> HookimplOpts | None: + """:meta private:""" + # pytest hooks are always prefixed with "pytest_", + # so we avoid accessing possibly non-readable attributes + # (see issue #1073). + if not name.startswith("pytest_"): + return None + # Ignore names which cannot be hooks. + if name == "pytest_plugins": + return None + + opts = super().parse_hookimpl_opts(plugin, name) + if opts is not None: + return opts + + method = getattr(plugin, name) + # Consider only actual functions for hooks (#3775). + if not inspect.isroutine(method): + return None + # Collect unmarked hooks as long as they have the `pytest_' prefix. + legacy = _get_legacy_hook_marks( + method, "impl", ("tryfirst", "trylast", "optionalhook", "hookwrapper") + ) + return cast(HookimplOpts, legacy) + + def parse_hookspec_opts(self, module_or_class, name: str) -> HookspecOpts | None: + """:meta private:""" + opts = super().parse_hookspec_opts(module_or_class, name) + if opts is None: + method = getattr(module_or_class, name) + if name.startswith("pytest_"): + legacy = _get_legacy_hook_marks( + method, "spec", ("firstresult", "historic") + ) + opts = cast(HookspecOpts, legacy) + return opts + + def register(self, plugin: _PluggyPlugin, name: str | None = None) -> str | None: + if name in _pytest.deprecated.DEPRECATED_EXTERNAL_PLUGINS: + warnings.warn( + PytestConfigWarning( + "{} plugin has been merged into the core, " + "please remove it from your requirements.".format( + name.replace("_", "-") + ) + ) + ) + return None + plugin_name = super().register(plugin, name) + if plugin_name is not None: + self.hook.pytest_plugin_registered.call_historic( + kwargs=dict( + plugin=plugin, + plugin_name=plugin_name, + manager=self, + ) + ) + + if isinstance(plugin, types.ModuleType): + self.consider_module(plugin) + return plugin_name + + def getplugin(self, name: str): + # Support deprecated naming because plugins (xdist e.g.) use it. + plugin: _PluggyPlugin | None = self.get_plugin(name) + return plugin + + def hasplugin(self, name: str) -> bool: + """Return whether a plugin with the given name is registered.""" + return bool(self.get_plugin(name)) + + def pytest_configure(self, config: Config) -> None: + """:meta private:""" + # XXX now that the pluginmanager exposes hookimpl(tryfirst...) + # we should remove tryfirst/trylast as markers. + config.addinivalue_line( + "markers", + "tryfirst: mark a hook implementation function such that the " + "plugin machinery will try to call it first/as early as possible. " + "DEPRECATED, use @pytest.hookimpl(tryfirst=True) instead.", + ) + config.addinivalue_line( + "markers", + "trylast: mark a hook implementation function such that the " + "plugin machinery will try to call it last/as late as possible. " + "DEPRECATED, use @pytest.hookimpl(trylast=True) instead.", + ) + self._configured = True + + # + # Internal API for local conftest plugin handling. + # + def _set_initial_conftests( + self, + args: Sequence[str | pathlib.Path], + pyargs: bool, + noconftest: bool, + rootpath: pathlib.Path, + confcutdir: pathlib.Path | None, + invocation_dir: pathlib.Path, + importmode: ImportMode | str, + *, + consider_namespace_packages: bool, + ) -> None: + """Load initial conftest files given a preparsed "namespace". + + As conftest files may add their own command line options which have + arguments ('--my-opt somepath') we might get some false positives. + All builtin and 3rd party plugins will have been loaded, however, so + common options will not confuse our logic here. + """ + self._confcutdir = ( + absolutepath(invocation_dir / confcutdir) if confcutdir else None + ) + self._noconftest = noconftest + self._using_pyargs = pyargs + foundanchor = False + for initial_path in args: + path = str(initial_path) + # remove node-id syntax + i = path.find("::") + if i != -1: + path = path[:i] + anchor = absolutepath(invocation_dir / path) + + # Ensure we do not break if what appears to be an anchor + # is in fact a very long option (#10169, #11394). + if safe_exists(anchor): + self._try_load_conftest( + anchor, + importmode, + rootpath, + consider_namespace_packages=consider_namespace_packages, + ) + foundanchor = True + if not foundanchor: + self._try_load_conftest( + invocation_dir, + importmode, + rootpath, + consider_namespace_packages=consider_namespace_packages, + ) + + def _is_in_confcutdir(self, path: pathlib.Path) -> bool: + """Whether to consider the given path to load conftests from.""" + if self._confcutdir is None: + return True + # The semantics here are literally: + # Do not load a conftest if it is found upwards from confcut dir. + # But this is *not* the same as: + # Load only conftests from confcutdir or below. + # At first glance they might seem the same thing, however we do support use cases where + # we want to load conftests that are not found in confcutdir or below, but are found + # in completely different directory hierarchies like packages installed + # in out-of-source trees. + # (see #9767 for a regression where the logic was inverted). + return path not in self._confcutdir.parents + + def _try_load_conftest( + self, + anchor: pathlib.Path, + importmode: str | ImportMode, + rootpath: pathlib.Path, + *, + consider_namespace_packages: bool, + ) -> None: + self._loadconftestmodules( + anchor, + importmode, + rootpath, + consider_namespace_packages=consider_namespace_packages, + ) + # let's also consider test* subdirs + if anchor.is_dir(): + for x in anchor.glob("test*"): + if x.is_dir(): + self._loadconftestmodules( + x, + importmode, + rootpath, + consider_namespace_packages=consider_namespace_packages, + ) + + def _loadconftestmodules( + self, + path: pathlib.Path, + importmode: str | ImportMode, + rootpath: pathlib.Path, + *, + consider_namespace_packages: bool, + ) -> None: + if self._noconftest: + return + + directory = self._get_directory(path) + + # Optimization: avoid repeated searches in the same directory. + # Assumes always called with same importmode and rootpath. + if directory in self._dirpath2confmods: + return + + clist = [] + for parent in reversed((directory, *directory.parents)): + if self._is_in_confcutdir(parent): + conftestpath = parent / "conftest.py" + if conftestpath.is_file(): + mod = self._importconftest( + conftestpath, + importmode, + rootpath, + consider_namespace_packages=consider_namespace_packages, + ) + clist.append(mod) + self._dirpath2confmods[directory] = clist + + def _getconftestmodules(self, path: pathlib.Path) -> Sequence[types.ModuleType]: + directory = self._get_directory(path) + return self._dirpath2confmods.get(directory, ()) + + def _rget_with_confmod( + self, + name: str, + path: pathlib.Path, + ) -> tuple[types.ModuleType, Any]: + modules = self._getconftestmodules(path) + for mod in reversed(modules): + try: + return mod, getattr(mod, name) + except AttributeError: + continue + raise KeyError(name) + + def _importconftest( + self, + conftestpath: pathlib.Path, + importmode: str | ImportMode, + rootpath: pathlib.Path, + *, + consider_namespace_packages: bool, + ) -> types.ModuleType: + conftestpath_plugin_name = str(conftestpath) + existing = self.get_plugin(conftestpath_plugin_name) + if existing is not None: + return cast(types.ModuleType, existing) + + # conftest.py files there are not in a Python package all have module + # name "conftest", and thus conflict with each other. Clear the existing + # before loading the new one, otherwise the existing one will be + # returned from the module cache. + pkgpath = resolve_package_path(conftestpath) + if pkgpath is None: + try: + del sys.modules[conftestpath.stem] + except KeyError: + pass + + try: + mod = import_path( + conftestpath, + mode=importmode, + root=rootpath, + consider_namespace_packages=consider_namespace_packages, + ) + except Exception as e: + assert e.__traceback__ is not None + raise ConftestImportFailure(conftestpath, cause=e) from e + + self._check_non_top_pytest_plugins(mod, conftestpath) + + self._conftest_plugins.add(mod) + dirpath = conftestpath.parent + if dirpath in self._dirpath2confmods: + for path, mods in self._dirpath2confmods.items(): + if dirpath in path.parents or path == dirpath: + if mod in mods: + raise AssertionError( + f"While trying to load conftest path {conftestpath!s}, " + f"found that the module {mod} is already loaded with path {mod.__file__}. " + "This is not supposed to happen. Please report this issue to pytest." + ) + mods.append(mod) + self.trace(f"loading conftestmodule {mod!r}") + self.consider_conftest(mod, registration_name=conftestpath_plugin_name) + return mod + + def _check_non_top_pytest_plugins( + self, + mod: types.ModuleType, + conftestpath: pathlib.Path, + ) -> None: + if ( + hasattr(mod, "pytest_plugins") + and self._configured + and not self._using_pyargs + ): + msg = ( + "Defining 'pytest_plugins' in a non-top-level conftest is no longer supported:\n" + "It affects the entire test suite instead of just below the conftest as expected.\n" + " {}\n" + "Please move it to a top level conftest file at the rootdir:\n" + " {}\n" + "For more information, visit:\n" + " https://docs.pytest.org/en/stable/deprecations.html#pytest-plugins-in-non-top-level-conftest-files" + ) + fail(msg.format(conftestpath, self._confcutdir), pytrace=False) + + # + # API for bootstrapping plugin loading + # + # + + def consider_preparse( + self, args: Sequence[str], *, exclude_only: bool = False + ) -> None: + """:meta private:""" + i = 0 + n = len(args) + while i < n: + opt = args[i] + i += 1 + if isinstance(opt, str): + if opt == "-p": + try: + parg = args[i] + except IndexError: + return + i += 1 + elif opt.startswith("-p"): + parg = opt[2:] + else: + continue + parg = parg.strip() + if exclude_only and not parg.startswith("no:"): + continue + self.consider_pluginarg(parg) + + def consider_pluginarg(self, arg: str) -> None: + """:meta private:""" + if arg.startswith("no:"): + name = arg[3:] + if name in essential_plugins: + raise UsageError(f"plugin {name} cannot be disabled") + + # PR #4304: remove stepwise if cacheprovider is blocked. + if name == "cacheprovider": + self.set_blocked("stepwise") + self.set_blocked("pytest_stepwise") + + self.set_blocked(name) + if not name.startswith("pytest_"): + self.set_blocked("pytest_" + name) + else: + name = arg + # Unblock the plugin. + self.unblock(name) + if not name.startswith("pytest_"): + self.unblock("pytest_" + name) + self.import_plugin(arg, consider_entry_points=True) + + def consider_conftest( + self, conftestmodule: types.ModuleType, registration_name: str + ) -> None: + """:meta private:""" + self.register(conftestmodule, name=registration_name) + + def consider_env(self) -> None: + """:meta private:""" + self._import_plugin_specs(os.environ.get("PYTEST_PLUGINS")) + + def consider_module(self, mod: types.ModuleType) -> None: + """:meta private:""" + self._import_plugin_specs(getattr(mod, "pytest_plugins", [])) + + def _import_plugin_specs( + self, spec: None | types.ModuleType | str | Sequence[str] + ) -> None: + plugins = _get_plugin_specs_as_list(spec) + for import_spec in plugins: + self.import_plugin(import_spec) + + def import_plugin(self, modname: str, consider_entry_points: bool = False) -> None: + """Import a plugin with ``modname``. + + If ``consider_entry_points`` is True, entry point names are also + considered to find a plugin. + """ + # Most often modname refers to builtin modules, e.g. "pytester", + # "terminal" or "capture". Those plugins are registered under their + # basename for historic purposes but must be imported with the + # _pytest prefix. + assert isinstance(modname, str), ( + f"module name as text required, got {modname!r}" + ) + if self.is_blocked(modname) or self.get_plugin(modname) is not None: + return + + importspec = "_pytest." + modname if modname in builtin_plugins else modname + self.rewrite_hook.mark_rewrite(importspec) + + if consider_entry_points: + loaded = self.load_setuptools_entrypoints("pytest11", name=modname) + if loaded: + return + + try: + __import__(importspec) + except ImportError as e: + raise ImportError( + f'Error importing plugin "{modname}": {e.args[0]}' + ).with_traceback(e.__traceback__) from e + + except Skipped as e: + self.skipped_plugins.append((modname, e.msg or "")) + else: + mod = sys.modules[importspec] + self.register(mod, modname) + + +def _get_plugin_specs_as_list( + specs: None | types.ModuleType | str | Sequence[str], +) -> list[str]: + """Parse a plugins specification into a list of plugin names.""" + # None means empty. + if specs is None: + return [] + # Workaround for #3899 - a submodule which happens to be called "pytest_plugins". + if isinstance(specs, types.ModuleType): + return [] + # Comma-separated list. + if isinstance(specs, str): + return specs.split(",") if specs else [] + # Direct specification. + if isinstance(specs, collections.abc.Sequence): + return list(specs) + raise UsageError( + f"Plugins may be specified as a sequence or a ','-separated string of plugin names. Got: {specs!r}" + ) + + +class Notset: + def __repr__(self): + return "" + + +notset = Notset() + + +def _iter_rewritable_modules(package_files: Iterable[str]) -> Iterator[str]: + """Given an iterable of file names in a source distribution, return the "names" that should + be marked for assertion rewrite. + + For example the package "pytest_mock/__init__.py" should be added as "pytest_mock" in + the assertion rewrite mechanism. + + This function has to deal with dist-info based distributions and egg based distributions + (which are still very much in use for "editable" installs). + + Here are the file names as seen in a dist-info based distribution: + + pytest_mock/__init__.py + pytest_mock/_version.py + pytest_mock/plugin.py + pytest_mock.egg-info/PKG-INFO + + Here are the file names as seen in an egg based distribution: + + src/pytest_mock/__init__.py + src/pytest_mock/_version.py + src/pytest_mock/plugin.py + src/pytest_mock.egg-info/PKG-INFO + LICENSE + setup.py + + We have to take in account those two distribution flavors in order to determine which + names should be considered for assertion rewriting. + + More information: + https://github.com/pytest-dev/pytest-mock/issues/167 + """ + package_files = list(package_files) + seen_some = False + for fn in package_files: + is_simple_module = "/" not in fn and fn.endswith(".py") + is_package = fn.count("/") == 1 and fn.endswith("__init__.py") + if is_simple_module: + module_name, _ = os.path.splitext(fn) + # we ignore "setup.py" at the root of the distribution + # as well as editable installation finder modules made by setuptools + if module_name != "setup" and not module_name.startswith("__editable__"): + seen_some = True + yield module_name + elif is_package: + package_name = os.path.dirname(fn) + seen_some = True + yield package_name + + if not seen_some: + # At this point we did not find any packages or modules suitable for assertion + # rewriting, so we try again by stripping the first path component (to account for + # "src" based source trees for example). + # This approach lets us have the common case continue to be fast, as egg-distributions + # are rarer. + new_package_files = [] + for fn in package_files: + parts = fn.split("/") + new_fn = "/".join(parts[1:]) + if new_fn: + new_package_files.append(new_fn) + if new_package_files: + yield from _iter_rewritable_modules(new_package_files) + + +@final +class Config: + """Access to configuration values, pluginmanager and plugin hooks. + + :param PytestPluginManager pluginmanager: + A pytest PluginManager. + + :param InvocationParams invocation_params: + Object containing parameters regarding the :func:`pytest.main` + invocation. + """ + + @final + @dataclasses.dataclass(frozen=True) + class InvocationParams: + """Holds parameters passed during :func:`pytest.main`. + + The object attributes are read-only. + + .. versionadded:: 5.1 + + .. note:: + + Note that the environment variable ``PYTEST_ADDOPTS`` and the ``addopts`` + configuration option are handled by pytest, not being included in the ``args`` attribute. + + Plugins accessing ``InvocationParams`` must be aware of that. + """ + + args: tuple[str, ...] + """The command-line arguments as passed to :func:`pytest.main`.""" + plugins: Sequence[str | _PluggyPlugin] | None + """Extra plugins, might be `None`.""" + dir: pathlib.Path + """The directory from which :func:`pytest.main` was invoked.""" + + def __init__( + self, + *, + args: Iterable[str], + plugins: Sequence[str | _PluggyPlugin] | None, + dir: pathlib.Path, + ) -> None: + object.__setattr__(self, "args", tuple(args)) + object.__setattr__(self, "plugins", plugins) + object.__setattr__(self, "dir", dir) + + class ArgsSource(enum.Enum): + """Indicates the source of the test arguments. + + .. versionadded:: 7.2 + """ + + #: Command line arguments. + ARGS = enum.auto() + #: Invocation directory. + INVOCATION_DIR = enum.auto() + INCOVATION_DIR = INVOCATION_DIR # backwards compatibility alias + #: 'testpaths' configuration value. + TESTPATHS = enum.auto() + + # Set by cacheprovider plugin. + cache: Cache + + def __init__( + self, + pluginmanager: PytestPluginManager, + *, + invocation_params: InvocationParams | None = None, + ) -> None: + if invocation_params is None: + invocation_params = self.InvocationParams( + args=(), plugins=None, dir=pathlib.Path.cwd() + ) + + self.option = argparse.Namespace() + """Access to command line option as attributes. + + :type: argparse.Namespace + """ + + self.invocation_params = invocation_params + """The parameters with which pytest was invoked. + + :type: InvocationParams + """ + + self._parser = Parser( + usage=f"%(prog)s [options] [{FILE_OR_DIR}] [{FILE_OR_DIR}] [...]", + processopt=self._processopt, + _ispytest=True, + ) + self.pluginmanager = pluginmanager + """The plugin manager handles plugin registration and hook invocation. + + :type: PytestPluginManager + """ + + self.stash = Stash() + """A place where plugins can store information on the config for their + own use. + + :type: Stash + """ + # Deprecated alias. Was never public. Can be removed in a few releases. + self._store = self.stash + + self.trace = self.pluginmanager.trace.root.get("config") + self.hook: pluggy.HookRelay = PathAwareHookProxy(self.pluginmanager.hook) # type: ignore[assignment] + self._inicache: dict[str, Any] = {} + self._opt2dest: dict[str, str] = {} + self._cleanup_stack = contextlib.ExitStack() + self.pluginmanager.register(self, "pytestconfig") + self._configured = False + self.hook.pytest_addoption.call_historic( + kwargs=dict(parser=self._parser, pluginmanager=self.pluginmanager) + ) + self.args_source = Config.ArgsSource.ARGS + self.args: list[str] = [] + + @property + def rootpath(self) -> pathlib.Path: + """The path to the :ref:`rootdir `. + + .. versionadded:: 6.1 + """ + return self._rootpath + + @property + def inipath(self) -> pathlib.Path | None: + """The path to the :ref:`configfile `. + + .. versionadded:: 6.1 + """ + return self._inipath + + def add_cleanup(self, func: Callable[[], None]) -> None: + """Add a function to be called when the config object gets out of + use (usually coinciding with pytest_unconfigure). + """ + self._cleanup_stack.callback(func) + + def _do_configure(self) -> None: + assert not self._configured + self._configured = True + self.hook.pytest_configure.call_historic(kwargs=dict(config=self)) + + def _ensure_unconfigure(self) -> None: + try: + if self._configured: + self._configured = False + try: + self.hook.pytest_unconfigure(config=self) + finally: + self.hook.pytest_configure._call_history = [] + finally: + try: + self._cleanup_stack.close() + finally: + self._cleanup_stack = contextlib.ExitStack() + + def get_terminal_writer(self) -> TerminalWriter: + terminalreporter: TerminalReporter | None = self.pluginmanager.get_plugin( + "terminalreporter" + ) + assert terminalreporter is not None + return terminalreporter._tw + + def pytest_cmdline_parse( + self, pluginmanager: PytestPluginManager, args: list[str] + ) -> Config: + try: + self.parse(args) + except UsageError: + # Handle `--version --version` and `--help` here in a minimal fashion. + # This gets done via helpconfig normally, but its + # pytest_cmdline_main is not called in case of errors. + if getattr(self.option, "version", False) or "--version" in args: + from _pytest.helpconfig import show_version_verbose + + # Note that `--version` (single argument) is handled early by `Config.main()`, so the only + # way we are reaching this point is via `--version --version`. + show_version_verbose(self) + elif ( + getattr(self.option, "help", False) or "--help" in args or "-h" in args + ): + self._parser.optparser.print_help() + sys.stdout.write( + "\nNOTE: displaying only minimal help due to UsageError.\n\n" + ) + + raise + + return self + + def notify_exception( + self, + excinfo: ExceptionInfo[BaseException], + option: argparse.Namespace | None = None, + ) -> None: + if option and getattr(option, "fulltrace", False): + style: TracebackStyle = "long" + else: + style = "native" + excrepr = excinfo.getrepr( + funcargs=True, showlocals=getattr(option, "showlocals", False), style=style + ) + res = self.hook.pytest_internalerror(excrepr=excrepr, excinfo=excinfo) + if not any(res): + for line in str(excrepr).split("\n"): + sys.stderr.write(f"INTERNALERROR> {line}\n") + sys.stderr.flush() + + def cwd_relative_nodeid(self, nodeid: str) -> str: + # nodeid's are relative to the rootpath, compute relative to cwd. + if self.invocation_params.dir != self.rootpath: + base_path_part, *nodeid_part = nodeid.split("::") + # Only process path part + fullpath = self.rootpath / base_path_part + relative_path = bestrelpath(self.invocation_params.dir, fullpath) + + nodeid = "::".join([relative_path, *nodeid_part]) + return nodeid + + @classmethod + def fromdictargs(cls, option_dict: Mapping[str, Any], args: list[str]) -> Config: + """Constructor usable for subprocesses.""" + config = get_config(args) + config.option.__dict__.update(option_dict) + config.parse(args, addopts=False) + for x in config.option.plugins: + config.pluginmanager.consider_pluginarg(x) + return config + + def _processopt(self, opt: Argument) -> None: + for name in opt._short_opts + opt._long_opts: + self._opt2dest[name] = opt.dest + + if hasattr(opt, "default"): + if not hasattr(self.option, opt.dest): + setattr(self.option, opt.dest, opt.default) + + @hookimpl(trylast=True) + def pytest_load_initial_conftests(self, early_config: Config) -> None: + # We haven't fully parsed the command line arguments yet, so + # early_config.args it not set yet. But we need it for + # discovering the initial conftests. So "pre-run" the logic here. + # It will be done for real in `parse()`. + args, _args_source = early_config._decide_args( + args=early_config.known_args_namespace.file_or_dir, + pyargs=early_config.known_args_namespace.pyargs, + testpaths=early_config.getini("testpaths"), + invocation_dir=early_config.invocation_params.dir, + rootpath=early_config.rootpath, + warn=False, + ) + self.pluginmanager._set_initial_conftests( + args=args, + pyargs=early_config.known_args_namespace.pyargs, + noconftest=early_config.known_args_namespace.noconftest, + rootpath=early_config.rootpath, + confcutdir=early_config.known_args_namespace.confcutdir, + invocation_dir=early_config.invocation_params.dir, + importmode=early_config.known_args_namespace.importmode, + consider_namespace_packages=early_config.getini( + "consider_namespace_packages" + ), + ) + + def _consider_importhook(self) -> None: + """Install the PEP 302 import hook if using assertion rewriting. + + Needs to parse the --assert= option from the commandline + and find all the installed plugins to mark them for rewriting + by the importhook. + """ + mode = getattr(self.known_args_namespace, "assertmode", "plain") + + disable_autoload = getattr( + self.known_args_namespace, "disable_plugin_autoload", False + ) or bool(os.environ.get("PYTEST_DISABLE_PLUGIN_AUTOLOAD")) + if mode == "rewrite": + import _pytest.assertion + + try: + hook = _pytest.assertion.install_importhook(self) + except SystemError: + mode = "plain" + else: + self._mark_plugins_for_rewrite(hook, disable_autoload) + self._warn_about_missing_assertion(mode) + + def _mark_plugins_for_rewrite( + self, hook: AssertionRewritingHook, disable_autoload: bool + ) -> None: + """Given an importhook, mark for rewrite any top-level + modules or packages in the distribution package for + all pytest plugins.""" + self.pluginmanager.rewrite_hook = hook + + if disable_autoload: + # We don't autoload from distribution package entry points, + # no need to continue. + return + + package_files = ( + str(file) + for dist in importlib.metadata.distributions() + if any(ep.group == "pytest11" for ep in dist.entry_points) + for file in dist.files or [] + ) + + for name in _iter_rewritable_modules(package_files): + hook.mark_rewrite(name) + + def _configure_python_path(self) -> None: + # `pythonpath = a b` will set `sys.path` to `[a, b, x, y, z, ...]` + for path in reversed(self.getini("pythonpath")): + sys.path.insert(0, str(path)) + self.add_cleanup(self._unconfigure_python_path) + + def _unconfigure_python_path(self) -> None: + for path in self.getini("pythonpath"): + path_str = str(path) + if path_str in sys.path: + sys.path.remove(path_str) + + def _validate_args(self, args: list[str], via: str) -> list[str]: + """Validate known args.""" + self._parser.extra_info["config source"] = via + try: + self._parser.parse_known_and_unknown_args( + args, namespace=copy.copy(self.option) + ) + finally: + self._parser.extra_info.pop("config source", None) + + return args + + def _decide_args( + self, + *, + args: list[str], + pyargs: bool, + testpaths: list[str], + invocation_dir: pathlib.Path, + rootpath: pathlib.Path, + warn: bool, + ) -> tuple[list[str], ArgsSource]: + """Decide the args (initial paths/nodeids) to use given the relevant inputs. + + :param warn: Whether can issue warnings. + + :returns: The args and the args source. Guaranteed to be non-empty. + """ + if args: + source = Config.ArgsSource.ARGS + result = args + else: + if invocation_dir == rootpath: + source = Config.ArgsSource.TESTPATHS + if pyargs: + result = testpaths + else: + result = [] + for path in testpaths: + result.extend(sorted(glob.iglob(path, recursive=True))) + if testpaths and not result: + if warn: + warning_text = ( + "No files were found in testpaths; " + "consider removing or adjusting your testpaths configuration. " + "Searching recursively from the current directory instead." + ) + self.issue_config_time_warning( + PytestConfigWarning(warning_text), stacklevel=3 + ) + else: + result = [] + if not result: + source = Config.ArgsSource.INVOCATION_DIR + result = [str(invocation_dir)] + return result, source + + @hookimpl(wrapper=True) + def pytest_collection(self) -> Generator[None, object, object]: + # Validate invalid configuration keys after collection is done so we + # take in account options added by late-loading conftest files. + try: + return (yield) + finally: + self._validate_config_options() + + def _checkversion(self) -> None: + import pytest + + minver_ini_value = self.inicfg.get("minversion", None) + minver = minver_ini_value.value if minver_ini_value is not None else None + if minver: + # Imported lazily to improve start-up time. + from packaging.version import Version + + if not isinstance(minver, str): + raise pytest.UsageError( + f"{self.inipath}: 'minversion' must be a single value" + ) + + if Version(minver) > Version(pytest.__version__): + raise pytest.UsageError( + f"{self.inipath}: 'minversion' requires pytest-{minver}, actual pytest-{pytest.__version__}'" + ) + + def _validate_config_options(self) -> None: + for key in sorted(self._get_unknown_ini_keys()): + self._warn_or_fail_if_strict(f"Unknown config option: {key}\n") + + def _validate_plugins(self) -> None: + required_plugins = sorted(self.getini("required_plugins")) + if not required_plugins: + return + + # Imported lazily to improve start-up time. + from packaging.requirements import InvalidRequirement + from packaging.requirements import Requirement + from packaging.version import Version + + plugin_info = self.pluginmanager.list_plugin_distinfo() + plugin_dist_info = {dist.project_name: dist.version for _, dist in plugin_info} + + missing_plugins = [] + for required_plugin in required_plugins: + try: + req = Requirement(required_plugin) + except InvalidRequirement: + missing_plugins.append(required_plugin) + continue + + if req.name not in plugin_dist_info: + missing_plugins.append(required_plugin) + elif not req.specifier.contains( + Version(plugin_dist_info[req.name]), prereleases=True + ): + missing_plugins.append(required_plugin) + + if missing_plugins: + raise UsageError( + "Missing required plugins: {}".format(", ".join(missing_plugins)), + ) + + def _warn_or_fail_if_strict(self, message: str) -> None: + strict_config = self.getini("strict_config") + if strict_config is None: + strict_config = self.getini("strict") + if strict_config: + raise UsageError(message) + + self.issue_config_time_warning(PytestConfigWarning(message), stacklevel=3) + + def _get_unknown_ini_keys(self) -> set[str]: + known_keys = self._parser._inidict.keys() | self._parser._ini_aliases.keys() + return self.inicfg.keys() - known_keys + + def parse(self, args: list[str], addopts: bool = True) -> None: + # Parse given cmdline arguments into this config object. + assert self.args == [], ( + "can only parse cmdline args at most once per Config object" + ) + + self.hook.pytest_addhooks.call_historic( + kwargs=dict(pluginmanager=self.pluginmanager) + ) + + if addopts: + env_addopts = os.environ.get("PYTEST_ADDOPTS", "") + if len(env_addopts): + args[:] = ( + self._validate_args(shlex.split(env_addopts), "via PYTEST_ADDOPTS") + + args + ) + + ns = self._parser.parse_known_args(args, namespace=copy.copy(self.option)) + rootpath, inipath, inicfg, ignored_config_files = determine_setup( + inifile=ns.inifilename, + override_ini=ns.override_ini, + args=ns.file_or_dir, + rootdir_cmd_arg=ns.rootdir or None, + invocation_dir=self.invocation_params.dir, + ) + self._rootpath = rootpath + self._inipath = inipath + self._ignored_config_files = ignored_config_files + self.inicfg = inicfg + self._parser.extra_info["rootdir"] = str(self.rootpath) + self._parser.extra_info["inifile"] = str(self.inipath) + + self._parser.addini("addopts", "Extra command line options", "args") + self._parser.addini("minversion", "Minimally required pytest version") + self._parser.addini( + "pythonpath", type="paths", help="Add paths to sys.path", default=[] + ) + self._parser.addini( + "required_plugins", + "Plugins that must be present for pytest to run", + type="args", + default=[], + ) + + if addopts: + args[:] = ( + self._validate_args(self.getini("addopts"), "via addopts config") + args + ) + + self.known_args_namespace = self._parser.parse_known_args( + args, namespace=copy.copy(self.option) + ) + self._checkversion() + self._consider_importhook() + self._configure_python_path() + self.pluginmanager.consider_preparse(args, exclude_only=False) + if ( + not os.environ.get("PYTEST_DISABLE_PLUGIN_AUTOLOAD") + and not self.known_args_namespace.disable_plugin_autoload + ): + # Autoloading from distribution package entry point has + # not been disabled. + self.pluginmanager.load_setuptools_entrypoints("pytest11") + # Otherwise only plugins explicitly specified in PYTEST_PLUGINS + # are going to be loaded. + self.pluginmanager.consider_env() + + self._parser.parse_known_args(args, namespace=self.known_args_namespace) + + self._validate_plugins() + self._warn_about_skipped_plugins() + + if self.known_args_namespace.confcutdir is None: + if self.inipath is not None: + confcutdir = str(self.inipath.parent) + else: + confcutdir = str(self.rootpath) + self.known_args_namespace.confcutdir = confcutdir + try: + self.hook.pytest_load_initial_conftests( + early_config=self, args=args, parser=self._parser + ) + except ConftestImportFailure as e: + if self.known_args_namespace.help or self.known_args_namespace.version: + # we don't want to prevent --help/--version to work + # so just let it pass and print a warning at the end + self.issue_config_time_warning( + PytestConfigWarning(f"could not load initial conftests: {e.path}"), + stacklevel=2, + ) + else: + raise + + try: + self._parser.parse(args, namespace=self.option) + except PrintHelp: + return + + self.args, self.args_source = self._decide_args( + args=getattr(self.option, FILE_OR_DIR), + pyargs=self.option.pyargs, + testpaths=self.getini("testpaths"), + invocation_dir=self.invocation_params.dir, + rootpath=self.rootpath, + warn=True, + ) + + def issue_config_time_warning(self, warning: Warning, stacklevel: int) -> None: + """Issue and handle a warning during the "configure" stage. + + During ``pytest_configure`` we can't capture warnings using the ``catch_warnings_for_item`` + function because it is not possible to have hook wrappers around ``pytest_configure``. + + This function is mainly intended for plugins that need to issue warnings during + ``pytest_configure`` (or similar stages). + + :param warning: The warning instance. + :param stacklevel: stacklevel forwarded to warnings.warn. + """ + if self.pluginmanager.is_blocked("warnings"): + return + + cmdline_filters = self.known_args_namespace.pythonwarnings or [] + config_filters = self.getini("filterwarnings") + + with warnings.catch_warnings(record=True) as records: + warnings.simplefilter("always", type(warning)) + apply_warning_filters(config_filters, cmdline_filters) + warnings.warn(warning, stacklevel=stacklevel) + + if records: + frame = sys._getframe(stacklevel - 1) + location = frame.f_code.co_filename, frame.f_lineno, frame.f_code.co_name + self.hook.pytest_warning_recorded.call_historic( + kwargs=dict( + warning_message=records[0], + when="config", + nodeid="", + location=location, + ) + ) + + def addinivalue_line(self, name: str, line: str) -> None: + """Add a line to a configuration option. The option must have been + declared but might not yet be set in which case the line becomes + the first line in its value.""" + x = self.getini(name) + assert isinstance(x, list) + x.append(line) # modifies the cached list inline + + def getini(self, name: str) -> Any: + """Return configuration value the an :ref:`configuration file `. + + If a configuration value is not defined in a + :ref:`configuration file `, then the ``default`` value + provided while registering the configuration through + :func:`parser.addini ` will be returned. + Please note that you can even provide ``None`` as a valid + default value. + + If ``default`` is not provided while registering using + :func:`parser.addini `, then a default value + based on the ``type`` parameter passed to + :func:`parser.addini ` will be returned. + The default values based on ``type`` are: + ``paths``, ``pathlist``, ``args`` and ``linelist`` : empty list ``[]`` + ``bool`` : ``False`` + ``string`` : empty string ``""`` + ``int`` : ``0`` + ``float`` : ``0.0`` + + If neither the ``default`` nor the ``type`` parameter is passed + while registering the configuration through + :func:`parser.addini `, then the configuration + is treated as a string and a default empty string '' is returned. + + If the specified name hasn't been registered through a prior + :func:`parser.addini ` call (usually from a + plugin), a ValueError is raised. + """ + canonical_name = self._parser._ini_aliases.get(name, name) + try: + return self._inicache[canonical_name] + except KeyError: + pass + self._inicache[canonical_name] = val = self._getini(canonical_name) + return val + + # Meant for easy monkeypatching by legacypath plugin. + # Can be inlined back (with no cover removed) once legacypath is gone. + def _getini_unknown_type(self, name: str, type: str, value: object): + msg = ( + f"Option {name} has unknown configuration type {type} with value {value!r}" + ) + raise ValueError(msg) # pragma: no cover + + def _getini(self, name: str): + # If this is an alias, resolve to canonical name. + canonical_name = self._parser._ini_aliases.get(name, name) + + try: + _description, type, default = self._parser._inidict[canonical_name] + except KeyError as e: + raise ValueError(f"unknown configuration value: {name!r}") from e + + # Collect all possible values (canonical name + aliases) from inicfg. + # Each candidate is (ConfigValue, is_canonical). + candidates = [] + if canonical_name in self.inicfg: + candidates.append((self.inicfg[canonical_name], True)) + for alias, target in self._parser._ini_aliases.items(): + if target == canonical_name and alias in self.inicfg: + candidates.append((self.inicfg[alias], False)) + + if not candidates: + return default + + # Pick the best candidate based on precedence: + # 1. CLI override takes precedence over file, then + # 2. Canonical name takes precedence over alias. + selected = max(candidates, key=lambda x: (x[0].origin == "override", x[1]))[0] + value = selected.value + mode = selected.mode + + if mode == "ini": + # In ini mode, values are always str | list[str]. + assert isinstance(value, (str, list)) + return self._getini_ini(name, canonical_name, type, value, default) + elif mode == "toml": + return self._getini_toml(name, canonical_name, type, value, default) + else: + assert_never(mode) + + def _getini_ini( + self, + name: str, + canonical_name: str, + type: str, + value: str | list[str], + default: Any, + ): + """Handle config values read in INI mode. + + In INI mode, values are stored as str or list[str] only, and coerced + from string based on the registered type. + """ + # Note: some coercions are only required if we are reading from .ini + # files, because the file format doesn't contain type information, but + # when reading from toml (in ini mode) we will get either str or list of + # str values (see load_config_dict_from_file). For example: + # + # ini: + # a_line_list = "tests acceptance" + # + # in this case, we need to split the string to obtain a list of strings. + # + # toml (ini mode): + # a_line_list = ["tests", "acceptance"] + # + # in this case, we already have a list ready to use. + if type == "paths": + dp = ( + self.inipath.parent + if self.inipath is not None + else self.invocation_params.dir + ) + input_values = shlex.split(value) if isinstance(value, str) else value + return [dp / x for x in input_values] + elif type == "args": + return shlex.split(value) if isinstance(value, str) else value + elif type == "linelist": + if isinstance(value, str): + return [t for t in map(lambda x: x.strip(), value.split("\n")) if t] + else: + return value + elif type == "bool": + return _strtobool(str(value).strip()) + elif type == "string": + return value + elif type == "int": + if not isinstance(value, str): + raise TypeError( + f"Expected an int string for option {name} of type integer, but got: {value!r}" + ) from None + return int(value) + elif type == "float": + if not isinstance(value, str): + raise TypeError( + f"Expected a float string for option {name} of type float, but got: {value!r}" + ) from None + return float(value) + else: + return self._getini_unknown_type(name, type, value) + + def _getini_toml( + self, + name: str, + canonical_name: str, + type: str, + value: object, + default: Any, + ): + """Handle TOML config values with strict type validation and no coercion. + + In TOML mode, values already have native types from TOML parsing. + We validate types match expectations exactly, including list items. + """ + value_type = builtins.type(value).__name__ + if type == "paths": + # Expect a list of strings. + if not isinstance(value, list): + raise TypeError( + f"{self.inipath}: config option '{name}' expects a list for type 'paths', " + f"got {value_type}: {value!r}" + ) + for i, item in enumerate(value): + if not isinstance(item, str): + item_type = builtins.type(item).__name__ + raise TypeError( + f"{self.inipath}: config option '{name}' expects a list of strings, " + f"but item at index {i} is {item_type}: {item!r}" + ) + dp = ( + self.inipath.parent + if self.inipath is not None + else self.invocation_params.dir + ) + return [dp / x for x in value] + elif type in {"args", "linelist"}: + # Expect a list of strings. + if not isinstance(value, list): + raise TypeError( + f"{self.inipath}: config option '{name}' expects a list for type '{type}', " + f"got {value_type}: {value!r}" + ) + for i, item in enumerate(value): + if not isinstance(item, str): + item_type = builtins.type(item).__name__ + raise TypeError( + f"{self.inipath}: config option '{name}' expects a list of strings, " + f"but item at index {i} is {item_type}: {item!r}" + ) + return list(value) + elif type == "bool": + # Expect a boolean. + if not isinstance(value, bool): + raise TypeError( + f"{self.inipath}: config option '{name}' expects a bool, " + f"got {value_type}: {value!r}" + ) + return value + elif type == "int": + # Expect an integer (but not bool, which is a subclass of int). + if not isinstance(value, int) or isinstance(value, bool): + raise TypeError( + f"{self.inipath}: config option '{name}' expects an int, " + f"got {value_type}: {value!r}" + ) + return value + elif type == "float": + # Expect a float or integer only. + if not isinstance(value, (float, int)) or isinstance(value, bool): + raise TypeError( + f"{self.inipath}: config option '{name}' expects a float, " + f"got {value_type}: {value!r}" + ) + return value + elif type == "string": + # Expect a string. + if not isinstance(value, str): + raise TypeError( + f"{self.inipath}: config option '{name}' expects a string, " + f"got {value_type}: {value!r}" + ) + return value + else: + return self._getini_unknown_type(name, type, value) + + def _getconftest_pathlist( + self, name: str, path: pathlib.Path + ) -> list[pathlib.Path] | None: + try: + mod, relroots = self.pluginmanager._rget_with_confmod(name, path) + except KeyError: + return None + assert mod.__file__ is not None + modpath = pathlib.Path(mod.__file__).parent + values: list[pathlib.Path] = [] + for relroot in relroots: + if isinstance(relroot, os.PathLike): + relroot = pathlib.Path(relroot) + else: + relroot = relroot.replace("/", os.sep) + relroot = absolutepath(modpath / relroot) + values.append(relroot) + return values + + def getoption(self, name: str, default: Any = notset, skip: bool = False): + """Return command line option value. + + :param name: Name of the option. You may also specify + the literal ``--OPT`` option instead of the "dest" option name. + :param default: Fallback value if no option of that name is **declared** via :hook:`pytest_addoption`. + Note this parameter will be ignored when the option is **declared** even if the option's value is ``None``. + :param skip: If ``True``, raise :func:`pytest.skip` if option is undeclared or has a ``None`` value. + Note that even if ``True``, if a default was specified it will be returned instead of a skip. + """ + name = self._opt2dest.get(name, name) + try: + val = getattr(self.option, name) + if val is None and skip: + raise AttributeError(name) + return val + except AttributeError as e: + if default is not notset: + return default + if skip: + import pytest + + pytest.skip(f"no {name!r} option found") + raise ValueError(f"no option named {name!r}") from e + + def getvalue(self, name: str, path=None): + """Deprecated, use getoption() instead.""" + return self.getoption(name) + + def getvalueorskip(self, name: str, path=None): + """Deprecated, use getoption(skip=True) instead.""" + return self.getoption(name, skip=True) + + #: Verbosity type for failed assertions (see :confval:`verbosity_assertions`). + VERBOSITY_ASSERTIONS: Final = "assertions" + #: Verbosity type for test case execution (see :confval:`verbosity_test_cases`). + VERBOSITY_TEST_CASES: Final = "test_cases" + #: Verbosity type for failed subtests (see :confval:`verbosity_subtests`). + VERBOSITY_SUBTESTS: Final = "subtests" + + _VERBOSITY_INI_DEFAULT: Final = "auto" + + def get_verbosity(self, verbosity_type: str | None = None) -> int: + r"""Retrieve the verbosity level for a fine-grained verbosity type. + + :param verbosity_type: Verbosity type to get level for. If a level is + configured for the given type, that value will be returned. If the + given type is not a known verbosity type, the global verbosity + level will be returned. If the given type is None (default), the + global verbosity level will be returned. + + To configure a level for a fine-grained verbosity type, the + configuration file should have a setting for the configuration name + and a numeric value for the verbosity level. A special value of "auto" + can be used to explicitly use the global verbosity level. + + Example: + + .. tab:: toml + + .. code-block:: toml + + [tool.pytest] + verbosity_assertions = 2 + + .. tab:: ini + + .. code-block:: ini + + [pytest] + verbosity_assertions = 2 + + .. code-block:: console + + pytest -v + + .. code-block:: python + + print(config.get_verbosity()) # 1 + print(config.get_verbosity(Config.VERBOSITY_ASSERTIONS)) # 2 + """ + global_level = self.getoption("verbose", default=0) + assert isinstance(global_level, int) + if verbosity_type is None: + return global_level + + ini_name = Config._verbosity_ini_name(verbosity_type) + if ini_name not in self._parser._inidict: + return global_level + + level = self.getini(ini_name) + if level == Config._VERBOSITY_INI_DEFAULT: + return global_level + + return int(level) + + @staticmethod + def _verbosity_ini_name(verbosity_type: str) -> str: + return f"verbosity_{verbosity_type}" + + @staticmethod + def _add_verbosity_ini(parser: Parser, verbosity_type: str, help: str) -> None: + """Add a output verbosity configuration option for the given output type. + + :param parser: Parser for command line arguments and config-file values. + :param verbosity_type: Fine-grained verbosity category. + :param help: Description of the output this type controls. + + The value should be retrieved via a call to + :py:func:`config.get_verbosity(type) `. + """ + parser.addini( + Config._verbosity_ini_name(verbosity_type), + help=help, + type="string", + default=Config._VERBOSITY_INI_DEFAULT, + ) + + def _warn_about_missing_assertion(self, mode: str) -> None: + if not _assertion_supported(): + if mode == "plain": + warning_text = ( + "ASSERTIONS ARE NOT EXECUTED" + " and FAILING TESTS WILL PASS. Are you" + " using python -O?" + ) + else: + warning_text = ( + "assertions not in test modules or" + " plugins will be ignored" + " because assert statements are not executed " + "by the underlying Python interpreter " + "(are you using python -O?)\n" + ) + self.issue_config_time_warning( + PytestConfigWarning(warning_text), + stacklevel=3, + ) + + def _warn_about_skipped_plugins(self) -> None: + for module_name, msg in self.pluginmanager.skipped_plugins: + self.issue_config_time_warning( + PytestConfigWarning(f"skipped plugin {module_name!r}: {msg}"), + stacklevel=2, + ) + + +def _assertion_supported() -> bool: + try: + assert False + except AssertionError: + return True + else: + return False # type: ignore[unreachable] + + +def create_terminal_writer( + config: Config, file: TextIO | None = None +) -> TerminalWriter: + """Create a TerminalWriter instance configured according to the options + in the config object. + + Every code which requires a TerminalWriter object and has access to a + config object should use this function. + """ + tw = TerminalWriter(file=file) + + if config.option.color == "yes": + tw.hasmarkup = True + elif config.option.color == "no": + tw.hasmarkup = False + + if config.option.code_highlight == "yes": + tw.code_highlight = True + elif config.option.code_highlight == "no": + tw.code_highlight = False + + return tw + + +def _strtobool(val: str) -> bool: + """Convert a string representation of truth to True or False. + + True values are 'y', 'yes', 't', 'true', 'on', and '1'; false values + are 'n', 'no', 'f', 'false', 'off', and '0'. Raises ValueError if + 'val' is anything else. + + .. note:: Copied from distutils.util. + """ + val = val.lower() + if val in ("y", "yes", "t", "true", "on", "1"): + return True + elif val in ("n", "no", "f", "false", "off", "0"): + return False + else: + raise ValueError(f"invalid truth value {val!r}") + + +@lru_cache(maxsize=50) +def parse_warning_filter( + arg: str, *, escape: bool +) -> tuple[warnings._ActionKind, str, type[Warning], str, int]: + """Parse a warnings filter string. + + This is copied from warnings._setoption with the following changes: + + * Does not apply the filter. + * Escaping is optional. + * Raises UsageError so we get nice error messages on failure. + """ + __tracebackhide__ = True + error_template = dedent( + f"""\ + while parsing the following warning configuration: + + {arg} + + This error occurred: + + {{error}} + """ + ) + + parts = arg.split(":") + if len(parts) > 5: + doc_url = ( + "https://docs.python.org/3/library/warnings.html#describing-warning-filters" + ) + error = dedent( + f"""\ + Too many fields ({len(parts)}), expected at most 5 separated by colons: + + action:message:category:module:line + + For more information please consult: {doc_url} + """ + ) + raise UsageError(error_template.format(error=error)) + + while len(parts) < 5: + parts.append("") + action_, message, category_, module, lineno_ = (s.strip() for s in parts) + try: + action: warnings._ActionKind = warnings._getaction(action_) # type: ignore[attr-defined] + except warnings._OptionError as e: + raise UsageError(error_template.format(error=str(e))) from None + try: + category: type[Warning] = _resolve_warning_category(category_) + except ImportError: + raise + except Exception: + exc_info = ExceptionInfo.from_current() + exception_text = exc_info.getrepr(style="native") + raise UsageError(error_template.format(error=exception_text)) from None + if message and escape: + message = re.escape(message) + if module and escape: + module = re.escape(module) + r"\Z" + if lineno_: + try: + lineno = int(lineno_) + if lineno < 0: + raise ValueError("number is negative") + except ValueError as e: + raise UsageError( + error_template.format(error=f"invalid lineno {lineno_!r}: {e}") + ) from None + else: + lineno = 0 + try: + re.compile(message) + re.compile(module) + except re.error as e: + raise UsageError( + error_template.format(error=f"Invalid regex {e.pattern!r}: {e}") + ) from None + return action, message, category, module, lineno + + +def _resolve_warning_category(category: str) -> type[Warning]: + """ + Copied from warnings._getcategory, but changed so it lets exceptions (specially ImportErrors) + propagate so we can get access to their tracebacks (#9218). + """ + __tracebackhide__ = True + if not category: + return Warning + + if "." not in category: + import builtins as m + + klass = category + else: + module, _, klass = category.rpartition(".") + m = __import__(module, None, None, [klass]) + cat = getattr(m, klass) + if not issubclass(cat, Warning): + raise UsageError(f"{cat} is not a Warning subclass") + return cast(type[Warning], cat) + + +def apply_warning_filters( + config_filters: Iterable[str], cmdline_filters: Iterable[str] +) -> None: + """Applies pytest-configured filters to the warnings module""" + # Filters should have this precedence: cmdline options, config. + # Filters should be applied in the inverse order of precedence. + for arg in config_filters: + try: + warnings.filterwarnings(*parse_warning_filter(arg, escape=False)) + except ImportError as e: + warnings.warn( + f"Failed to import filter module '{e.name}': {arg}", PytestConfigWarning + ) + continue + + for arg in cmdline_filters: + try: + warnings.filterwarnings(*parse_warning_filter(arg, escape=True)) + except ImportError as e: + warnings.warn( + f"Failed to import filter module '{e.name}': {arg}", PytestConfigWarning + ) + continue diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/config/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/config/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..b8e739c4 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/config/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/config/__pycache__/argparsing.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/config/__pycache__/argparsing.cpython-312.pyc new file mode 100644 index 00000000..76c419ce Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/config/__pycache__/argparsing.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/config/__pycache__/compat.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/config/__pycache__/compat.cpython-312.pyc new file mode 100644 index 00000000..cd6ba647 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/config/__pycache__/compat.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/config/__pycache__/exceptions.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/config/__pycache__/exceptions.cpython-312.pyc new file mode 100644 index 00000000..5bbcabfb Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/config/__pycache__/exceptions.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/config/__pycache__/findpaths.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/config/__pycache__/findpaths.cpython-312.pyc new file mode 100644 index 00000000..d216d1a8 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/config/__pycache__/findpaths.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/config/argparsing.py b/Backend/venv/lib/python3.12/site-packages/_pytest/config/argparsing.py new file mode 100644 index 00000000..8216ad8b --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/_pytest/config/argparsing.py @@ -0,0 +1,578 @@ +# mypy: allow-untyped-defs +from __future__ import annotations + +import argparse +from collections.abc import Callable +from collections.abc import Mapping +from collections.abc import Sequence +import os +import sys +from typing import Any +from typing import final +from typing import Literal +from typing import NoReturn + +from .exceptions import UsageError +import _pytest._io +from _pytest.deprecated import check_ispytest + + +FILE_OR_DIR = "file_or_dir" + + +class NotSet: + def __repr__(self) -> str: + return "" + + +NOT_SET = NotSet() + + +@final +class Parser: + """Parser for command line arguments and config-file values. + + :ivar extra_info: Dict of generic param -> value to display in case + there's an error processing the command line arguments. + """ + + def __init__( + self, + usage: str | None = None, + processopt: Callable[[Argument], None] | None = None, + *, + _ispytest: bool = False, + ) -> None: + check_ispytest(_ispytest) + + from _pytest._argcomplete import filescompleter + + self._processopt = processopt + self.extra_info: dict[str, Any] = {} + self.optparser = PytestArgumentParser(self, usage, self.extra_info) + anonymous_arggroup = self.optparser.add_argument_group("Custom options") + self._anonymous = OptionGroup( + anonymous_arggroup, "_anonymous", self, _ispytest=True + ) + self._groups = [self._anonymous] + file_or_dir_arg = self.optparser.add_argument(FILE_OR_DIR, nargs="*") + file_or_dir_arg.completer = filescompleter # type: ignore + + self._inidict: dict[str, tuple[str, str, Any]] = {} + # Maps alias -> canonical name. + self._ini_aliases: dict[str, str] = {} + + @property + def prog(self) -> str: + return self.optparser.prog + + @prog.setter + def prog(self, value: str) -> None: + self.optparser.prog = value + + def processoption(self, option: Argument) -> None: + if self._processopt: + if option.dest: + self._processopt(option) + + def getgroup( + self, name: str, description: str = "", after: str | None = None + ) -> OptionGroup: + """Get (or create) a named option Group. + + :param name: Name of the option group. + :param description: Long description for --help output. + :param after: Name of another group, used for ordering --help output. + :returns: The option group. + + The returned group object has an ``addoption`` method with the same + signature as :func:`parser.addoption ` but + will be shown in the respective group in the output of + ``pytest --help``. + """ + for group in self._groups: + if group.name == name: + return group + + arggroup = self.optparser.add_argument_group(description or name) + group = OptionGroup(arggroup, name, self, _ispytest=True) + i = 0 + for i, grp in enumerate(self._groups): + if grp.name == after: + break + self._groups.insert(i + 1, group) + # argparse doesn't provide a way to control `--help` order, so must + # access its internals ☹. + self.optparser._action_groups.insert(i + 1, self.optparser._action_groups.pop()) + return group + + def addoption(self, *opts: str, **attrs: Any) -> None: + """Register a command line option. + + :param opts: + Option names, can be short or long options. + :param attrs: + Same attributes as the argparse library's :meth:`add_argument() + ` function accepts. + + After command line parsing, options are available on the pytest config + object via ``config.option.NAME`` where ``NAME`` is usually set + by passing a ``dest`` attribute, for example + ``addoption("--long", dest="NAME", ...)``. + """ + self._anonymous.addoption(*opts, **attrs) + + def parse( + self, + args: Sequence[str | os.PathLike[str]], + namespace: argparse.Namespace | None = None, + ) -> argparse.Namespace: + """Parse the arguments. + + Unlike ``parse_known_args`` and ``parse_known_and_unknown_args``, + raises PrintHelp on `--help` and UsageError on unknown flags + + :meta private: + """ + from _pytest._argcomplete import try_argcomplete + + try_argcomplete(self.optparser) + strargs = [os.fspath(x) for x in args] + if namespace is None: + namespace = argparse.Namespace() + try: + namespace._raise_print_help = True + return self.optparser.parse_intermixed_args(strargs, namespace=namespace) + finally: + del namespace._raise_print_help + + def parse_known_args( + self, + args: Sequence[str | os.PathLike[str]], + namespace: argparse.Namespace | None = None, + ) -> argparse.Namespace: + """Parse the known arguments at this point. + + :returns: An argparse namespace object. + """ + return self.parse_known_and_unknown_args(args, namespace=namespace)[0] + + def parse_known_and_unknown_args( + self, + args: Sequence[str | os.PathLike[str]], + namespace: argparse.Namespace | None = None, + ) -> tuple[argparse.Namespace, list[str]]: + """Parse the known arguments at this point, and also return the + remaining unknown flag arguments. + + :returns: + A tuple containing an argparse namespace object for the known + arguments, and a list of unknown flag arguments. + """ + strargs = [os.fspath(x) for x in args] + if sys.version_info < (3, 12, 8) or (3, 13) <= sys.version_info < (3, 13, 1): + # Older argparse have a bugged parse_known_intermixed_args. + namespace, unknown = self.optparser.parse_known_args(strargs, namespace) + assert namespace is not None + file_or_dir = getattr(namespace, FILE_OR_DIR) + unknown_flags: list[str] = [] + for arg in unknown: + (unknown_flags if arg.startswith("-") else file_or_dir).append(arg) + return namespace, unknown_flags + else: + return self.optparser.parse_known_intermixed_args(strargs, namespace) + + def addini( + self, + name: str, + help: str, + type: Literal[ + "string", "paths", "pathlist", "args", "linelist", "bool", "int", "float" + ] + | None = None, + default: Any = NOT_SET, + *, + aliases: Sequence[str] = (), + ) -> None: + """Register a configuration file option. + + :param name: + Name of the configuration. + :param type: + Type of the configuration. Can be: + + * ``string``: a string + * ``bool``: a boolean + * ``args``: a list of strings, separated as in a shell + * ``linelist``: a list of strings, separated by line breaks + * ``paths``: a list of :class:`pathlib.Path`, separated as in a shell + * ``pathlist``: a list of ``py.path``, separated as in a shell + * ``int``: an integer + * ``float``: a floating-point number + + .. versionadded:: 8.4 + + The ``float`` and ``int`` types. + + For ``paths`` and ``pathlist`` types, they are considered relative to the config-file. + In case the execution is happening without a config-file defined, + they will be considered relative to the current working directory (for example with ``--override-ini``). + + .. versionadded:: 7.0 + The ``paths`` variable type. + + .. versionadded:: 8.1 + Use the current working directory to resolve ``paths`` and ``pathlist`` in the absence of a config-file. + + Defaults to ``string`` if ``None`` or not passed. + :param default: + Default value if no config-file option exists but is queried. + :param aliases: + Additional names by which this option can be referenced. + Aliases resolve to the canonical name. + + .. versionadded:: 9.0 + The ``aliases`` parameter. + + The value of configuration keys can be retrieved via a call to + :py:func:`config.getini(name) `. + """ + assert type in ( + None, + "string", + "paths", + "pathlist", + "args", + "linelist", + "bool", + "int", + "float", + ) + if type is None: + type = "string" + if default is NOT_SET: + default = get_ini_default_for_type(type) + + self._inidict[name] = (help, type, default) + + for alias in aliases: + if alias in self._inidict: + raise ValueError( + f"alias {alias!r} conflicts with existing configuration option" + ) + if (already := self._ini_aliases.get(alias)) is not None: + raise ValueError(f"{alias!r} is already an alias of {already!r}") + self._ini_aliases[alias] = name + + +def get_ini_default_for_type( + type: Literal[ + "string", "paths", "pathlist", "args", "linelist", "bool", "int", "float" + ], +) -> Any: + """ + Used by addini to get the default value for a given config option type, when + default is not supplied. + """ + if type in ("paths", "pathlist", "args", "linelist"): + return [] + elif type == "bool": + return False + elif type == "int": + return 0 + elif type == "float": + return 0.0 + else: + return "" + + +class ArgumentError(Exception): + """Raised if an Argument instance is created with invalid or + inconsistent arguments.""" + + def __init__(self, msg: str, option: Argument | str) -> None: + self.msg = msg + self.option_id = str(option) + + def __str__(self) -> str: + if self.option_id: + return f"option {self.option_id}: {self.msg}" + else: + return self.msg + + +class Argument: + """Class that mimics the necessary behaviour of optparse.Option. + + It's currently a least effort implementation and ignoring choices + and integer prefixes. + + https://docs.python.org/3/library/optparse.html#optparse-standard-option-types + """ + + def __init__(self, *names: str, **attrs: Any) -> None: + """Store params in private vars for use in add_argument.""" + self._attrs = attrs + self._short_opts: list[str] = [] + self._long_opts: list[str] = [] + try: + self.type = attrs["type"] + except KeyError: + pass + try: + # Attribute existence is tested in Config._processopt. + self.default = attrs["default"] + except KeyError: + pass + self._set_opt_strings(names) + dest: str | None = attrs.get("dest") + if dest: + self.dest = dest + elif self._long_opts: + self.dest = self._long_opts[0][2:].replace("-", "_") + else: + try: + self.dest = self._short_opts[0][1:] + except IndexError as e: + self.dest = "???" # Needed for the error repr. + raise ArgumentError("need a long or short option", self) from e + + def names(self) -> list[str]: + return self._short_opts + self._long_opts + + def attrs(self) -> Mapping[str, Any]: + # Update any attributes set by processopt. + for attr in ("default", "dest", "help", self.dest): + try: + self._attrs[attr] = getattr(self, attr) + except AttributeError: + pass + return self._attrs + + def _set_opt_strings(self, opts: Sequence[str]) -> None: + """Directly from optparse. + + Might not be necessary as this is passed to argparse later on. + """ + for opt in opts: + if len(opt) < 2: + raise ArgumentError( + f"invalid option string {opt!r}: " + "must be at least two characters long", + self, + ) + elif len(opt) == 2: + if not (opt[0] == "-" and opt[1] != "-"): + raise ArgumentError( + f"invalid short option string {opt!r}: " + "must be of the form -x, (x any non-dash char)", + self, + ) + self._short_opts.append(opt) + else: + if not (opt[0:2] == "--" and opt[2] != "-"): + raise ArgumentError( + f"invalid long option string {opt!r}: " + "must start with --, followed by non-dash", + self, + ) + self._long_opts.append(opt) + + def __repr__(self) -> str: + args: list[str] = [] + if self._short_opts: + args += ["_short_opts: " + repr(self._short_opts)] + if self._long_opts: + args += ["_long_opts: " + repr(self._long_opts)] + args += ["dest: " + repr(self.dest)] + if hasattr(self, "type"): + args += ["type: " + repr(self.type)] + if hasattr(self, "default"): + args += ["default: " + repr(self.default)] + return "Argument({})".format(", ".join(args)) + + +class OptionGroup: + """A group of options shown in its own section.""" + + def __init__( + self, + arggroup: argparse._ArgumentGroup, + name: str, + parser: Parser | None, + _ispytest: bool = False, + ) -> None: + check_ispytest(_ispytest) + self._arggroup = arggroup + self.name = name + self.options: list[Argument] = [] + self.parser = parser + + def addoption(self, *opts: str, **attrs: Any) -> None: + """Add an option to this group. + + If a shortened version of a long option is specified, it will + be suppressed in the help. ``addoption('--twowords', '--two-words')`` + results in help showing ``--two-words`` only, but ``--twowords`` gets + accepted **and** the automatic destination is in ``args.twowords``. + + :param opts: + Option names, can be short or long options. + :param attrs: + Same attributes as the argparse library's :meth:`add_argument() + ` function accepts. + """ + conflict = set(opts).intersection( + name for opt in self.options for name in opt.names() + ) + if conflict: + raise ValueError(f"option names {conflict} already added") + option = Argument(*opts, **attrs) + self._addoption_instance(option, shortupper=False) + + def _addoption(self, *opts: str, **attrs: Any) -> None: + option = Argument(*opts, **attrs) + self._addoption_instance(option, shortupper=True) + + def _addoption_instance(self, option: Argument, shortupper: bool = False) -> None: + if not shortupper: + for opt in option._short_opts: + if opt[0] == "-" and opt[1].islower(): + raise ValueError("lowercase shortoptions reserved") + + if self.parser: + self.parser.processoption(option) + + self._arggroup.add_argument(*option.names(), **option.attrs()) + self.options.append(option) + + +class PytestArgumentParser(argparse.ArgumentParser): + def __init__( + self, + parser: Parser, + usage: str | None, + extra_info: dict[str, str], + ) -> None: + self._parser = parser + super().__init__( + usage=usage, + add_help=False, + formatter_class=DropShorterLongHelpFormatter, + allow_abbrev=False, + fromfile_prefix_chars="@", + ) + # extra_info is a dict of (param -> value) to display if there's + # an usage error to provide more contextual information to the user. + self.extra_info = extra_info + + def error(self, message: str) -> NoReturn: + """Transform argparse error message into UsageError.""" + msg = f"{self.prog}: error: {message}" + if self.extra_info: + msg += "\n" + "\n".join( + f" {k}: {v}" for k, v in sorted(self.extra_info.items()) + ) + raise UsageError(self.format_usage() + msg) + + +class DropShorterLongHelpFormatter(argparse.HelpFormatter): + """Shorten help for long options that differ only in extra hyphens. + + - Collapse **long** options that are the same except for extra hyphens. + - Shortcut if there are only two options and one of them is a short one. + - Cache result on the action object as this is called at least 2 times. + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + # Use more accurate terminal width. + if "width" not in kwargs: + kwargs["width"] = _pytest._io.get_terminal_width() + super().__init__(*args, **kwargs) + + def _format_action_invocation(self, action: argparse.Action) -> str: + orgstr = super()._format_action_invocation(action) + if orgstr and orgstr[0] != "-": # only optional arguments + return orgstr + res: str | None = getattr(action, "_formatted_action_invocation", None) + if res: + return res + options = orgstr.split(", ") + if len(options) == 2 and (len(options[0]) == 2 or len(options[1]) == 2): + # a shortcut for '-h, --help' or '--abc', '-a' + action._formatted_action_invocation = orgstr # type: ignore + return orgstr + return_list = [] + short_long: dict[str, str] = {} + for option in options: + if len(option) == 2 or option[2] == " ": + continue + if not option.startswith("--"): + raise ArgumentError( + f'long optional argument without "--": [{option}]', option + ) + xxoption = option[2:] + shortened = xxoption.replace("-", "") + if shortened not in short_long or len(short_long[shortened]) < len( + xxoption + ): + short_long[shortened] = xxoption + # now short_long has been filled out to the longest with dashes + # **and** we keep the right option ordering from add_argument + for option in options: + if len(option) == 2 or option[2] == " ": + return_list.append(option) + if option[2:] == short_long.get(option.replace("-", "")): + return_list.append(option.replace(" ", "=", 1)) + formatted_action_invocation = ", ".join(return_list) + action._formatted_action_invocation = formatted_action_invocation # type: ignore + return formatted_action_invocation + + def _split_lines(self, text, width): + """Wrap lines after splitting on original newlines. + + This allows to have explicit line breaks in the help text. + """ + import textwrap + + lines = [] + for line in text.splitlines(): + lines.extend(textwrap.wrap(line.strip(), width)) + return lines + + +class OverrideIniAction(argparse.Action): + """Custom argparse action that makes a CLI flag equivalent to overriding an + option, in addition to behaving like `store_true`. + + This can simplify things since code only needs to inspect the config option + and not consider the CLI flag. + """ + + def __init__( + self, + option_strings: Sequence[str], + dest: str, + nargs: int | str | None = None, + *args, + ini_option: str, + ini_value: str, + **kwargs, + ) -> None: + super().__init__(option_strings, dest, 0, *args, **kwargs) + self.ini_option = ini_option + self.ini_value = ini_value + + def __call__( + self, + parser: argparse.ArgumentParser, + namespace: argparse.Namespace, + *args, + **kwargs, + ) -> None: + setattr(namespace, self.dest, True) + current_overrides = getattr(namespace, "override_ini", None) + if current_overrides is None: + current_overrides = [] + current_overrides.append(f"{self.ini_option}={self.ini_value}") + setattr(namespace, "override_ini", current_overrides) diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/config/compat.py b/Backend/venv/lib/python3.12/site-packages/_pytest/config/compat.py new file mode 100644 index 00000000..21eab4c7 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/_pytest/config/compat.py @@ -0,0 +1,85 @@ +from __future__ import annotations + +from collections.abc import Mapping +import functools +from pathlib import Path +from typing import Any +import warnings + +import pluggy + +from ..compat import LEGACY_PATH +from ..compat import legacy_path +from ..deprecated import HOOK_LEGACY_PATH_ARG + + +# hookname: (Path, LEGACY_PATH) +imply_paths_hooks: Mapping[str, tuple[str, str]] = { + "pytest_ignore_collect": ("collection_path", "path"), + "pytest_collect_file": ("file_path", "path"), + "pytest_pycollect_makemodule": ("module_path", "path"), + "pytest_report_header": ("start_path", "startdir"), + "pytest_report_collectionfinish": ("start_path", "startdir"), +} + + +def _check_path(path: Path, fspath: LEGACY_PATH) -> None: + if Path(fspath) != path: + raise ValueError( + f"Path({fspath!r}) != {path!r}\n" + "if both path and fspath are given they need to be equal" + ) + + +class PathAwareHookProxy: + """ + this helper wraps around hook callers + until pluggy supports fixingcalls, this one will do + + it currently doesn't return full hook caller proxies for fixed hooks, + this may have to be changed later depending on bugs + """ + + def __init__(self, hook_relay: pluggy.HookRelay) -> None: + self._hook_relay = hook_relay + + def __dir__(self) -> list[str]: + return dir(self._hook_relay) + + def __getattr__(self, key: str) -> pluggy.HookCaller: + hook: pluggy.HookCaller = getattr(self._hook_relay, key) + if key not in imply_paths_hooks: + self.__dict__[key] = hook + return hook + else: + path_var, fspath_var = imply_paths_hooks[key] + + @functools.wraps(hook) + def fixed_hook(**kw: Any) -> Any: + path_value: Path | None = kw.pop(path_var, None) + fspath_value: LEGACY_PATH | None = kw.pop(fspath_var, None) + if fspath_value is not None: + warnings.warn( + HOOK_LEGACY_PATH_ARG.format( + pylib_path_arg=fspath_var, pathlib_path_arg=path_var + ), + stacklevel=2, + ) + if path_value is not None: + if fspath_value is not None: + _check_path(path_value, fspath_value) + else: + fspath_value = legacy_path(path_value) + else: + assert fspath_value is not None + path_value = Path(fspath_value) + + kw[path_var] = path_value + kw[fspath_var] = fspath_value + return hook(**kw) + + fixed_hook.name = hook.name # type: ignore[attr-defined] + fixed_hook.spec = hook.spec # type: ignore[attr-defined] + fixed_hook.__name__ = key + self.__dict__[key] = fixed_hook + return fixed_hook # type: ignore[return-value] diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/config/exceptions.py b/Backend/venv/lib/python3.12/site-packages/_pytest/config/exceptions.py new file mode 100644 index 00000000..d84a9ea6 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/_pytest/config/exceptions.py @@ -0,0 +1,15 @@ +from __future__ import annotations + +from typing import final + + +@final +class UsageError(Exception): + """Error in pytest usage or invocation.""" + + __module__ = "pytest" + + +class PrintHelp(Exception): + """Raised when pytest should print its help to skip the rest of the + argument parsing and validation.""" diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/config/findpaths.py b/Backend/venv/lib/python3.12/site-packages/_pytest/config/findpaths.py new file mode 100644 index 00000000..3c628a09 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/_pytest/config/findpaths.py @@ -0,0 +1,350 @@ +from __future__ import annotations + +from collections.abc import Iterable +from collections.abc import Sequence +from dataclasses import dataclass +from dataclasses import KW_ONLY +import os +from pathlib import Path +import sys +from typing import Literal +from typing import TypeAlias + +import iniconfig + +from .exceptions import UsageError +from _pytest.outcomes import fail +from _pytest.pathlib import absolutepath +from _pytest.pathlib import commonpath +from _pytest.pathlib import safe_exists + + +@dataclass(frozen=True) +class ConfigValue: + """Represents a configuration value with its origin and parsing mode. + + This allows tracking whether a value came from a configuration file + or from a CLI override (--override-ini), which is important for + determining precedence when dealing with ini option aliases. + + The mode tracks the parsing mode/data model used for the value: + - "ini": from INI files or [tool.pytest.ini_options], where the only + supported value types are `str` or `list[str]`. + - "toml": from TOML files (not in INI mode), where native TOML types + are preserved. + """ + + value: object + _: KW_ONLY + origin: Literal["file", "override"] + mode: Literal["ini", "toml"] + + +ConfigDict: TypeAlias = dict[str, ConfigValue] + + +def _parse_ini_config(path: Path) -> iniconfig.IniConfig: + """Parse the given generic '.ini' file using legacy IniConfig parser, returning + the parsed object. + + Raise UsageError if the file cannot be parsed. + """ + try: + return iniconfig.IniConfig(str(path)) + except iniconfig.ParseError as exc: + raise UsageError(str(exc)) from exc + + +def load_config_dict_from_file( + filepath: Path, +) -> ConfigDict | None: + """Load pytest configuration from the given file path, if supported. + + Return None if the file does not contain valid pytest configuration. + """ + # Configuration from ini files are obtained from the [pytest] section, if present. + if filepath.suffix == ".ini": + iniconfig = _parse_ini_config(filepath) + + if "pytest" in iniconfig: + return { + k: ConfigValue(v, origin="file", mode="ini") + for k, v in iniconfig["pytest"].items() + } + else: + # "pytest.ini" files are always the source of configuration, even if empty. + if filepath.name in {"pytest.ini", ".pytest.ini"}: + return {} + + # '.cfg' files are considered if they contain a "[tool:pytest]" section. + elif filepath.suffix == ".cfg": + iniconfig = _parse_ini_config(filepath) + + if "tool:pytest" in iniconfig.sections: + return { + k: ConfigValue(v, origin="file", mode="ini") + for k, v in iniconfig["tool:pytest"].items() + } + elif "pytest" in iniconfig.sections: + # If a setup.cfg contains a "[pytest]" section, we raise a failure to indicate users that + # plain "[pytest]" sections in setup.cfg files is no longer supported (#3086). + fail(CFG_PYTEST_SECTION.format(filename="setup.cfg"), pytrace=False) + + # '.toml' files are considered if they contain a [tool.pytest] table (toml mode) + # or [tool.pytest.ini_options] table (ini mode) for pyproject.toml, + # or [pytest] table (toml mode) for pytest.toml/.pytest.toml. + elif filepath.suffix == ".toml": + if sys.version_info >= (3, 11): + import tomllib + else: + import tomli as tomllib + + toml_text = filepath.read_text(encoding="utf-8") + try: + config = tomllib.loads(toml_text) + except tomllib.TOMLDecodeError as exc: + raise UsageError(f"{filepath}: {exc}") from exc + + # pytest.toml and .pytest.toml use [pytest] table directly. + if filepath.name in ("pytest.toml", ".pytest.toml"): + pytest_config = config.get("pytest", {}) + if pytest_config: + # TOML mode - preserve native TOML types. + return { + k: ConfigValue(v, origin="file", mode="toml") + for k, v in pytest_config.items() + } + # "pytest.toml" files are always the source of configuration, even if empty. + return {} + + # pyproject.toml uses [tool.pytest] or [tool.pytest.ini_options]. + else: + tool_pytest = config.get("tool", {}).get("pytest", {}) + + # Check for toml mode config: [tool.pytest] with content outside of ini_options. + toml_config = {k: v for k, v in tool_pytest.items() if k != "ini_options"} + # Check for ini mode config: [tool.pytest.ini_options]. + ini_config = tool_pytest.get("ini_options", None) + + if toml_config and ini_config: + raise UsageError( + f"{filepath}: Cannot use both [tool.pytest] (native TOML types) and " + "[tool.pytest.ini_options] (string-based INI format) simultaneously. " + "Please use [tool.pytest] with native TOML types (recommended) " + "or [tool.pytest.ini_options] for backwards compatibility." + ) + + if toml_config: + # TOML mode - preserve native TOML types. + return { + k: ConfigValue(v, origin="file", mode="toml") + for k, v in toml_config.items() + } + + elif ini_config is not None: + # INI mode - TOML supports richer data types than INI files, but we need to + # convert all scalar values to str for compatibility with the INI system. + def make_scalar(v: object) -> str | list[str]: + return v if isinstance(v, list) else str(v) + + return { + k: ConfigValue(make_scalar(v), origin="file", mode="ini") + for k, v in ini_config.items() + } + + return None + + +def locate_config( + invocation_dir: Path, + args: Iterable[Path], +) -> tuple[Path | None, Path | None, ConfigDict, Sequence[str]]: + """Search in the list of arguments for a valid ini-file for pytest, + and return a tuple of (rootdir, inifile, cfg-dict, ignored-config-files), where + ignored-config-files is a list of config basenames found that contain + pytest configuration but were ignored.""" + config_names = [ + "pytest.toml", + ".pytest.toml", + "pytest.ini", + ".pytest.ini", + "pyproject.toml", + "tox.ini", + "setup.cfg", + ] + args = [x for x in args if not str(x).startswith("-")] + if not args: + args = [invocation_dir] + found_pyproject_toml: Path | None = None + ignored_config_files: list[str] = [] + + for arg in args: + argpath = absolutepath(arg) + for base in (argpath, *argpath.parents): + for config_name in config_names: + p = base / config_name + if p.is_file(): + if p.name == "pyproject.toml" and found_pyproject_toml is None: + found_pyproject_toml = p + ini_config = load_config_dict_from_file(p) + if ini_config is not None: + index = config_names.index(config_name) + for remainder in config_names[index + 1 :]: + p2 = base / remainder + if ( + p2.is_file() + and load_config_dict_from_file(p2) is not None + ): + ignored_config_files.append(remainder) + return base, p, ini_config, ignored_config_files + if found_pyproject_toml is not None: + return found_pyproject_toml.parent, found_pyproject_toml, {}, [] + return None, None, {}, [] + + +def get_common_ancestor( + invocation_dir: Path, + paths: Iterable[Path], +) -> Path: + common_ancestor: Path | None = None + for path in paths: + if not path.exists(): + continue + if common_ancestor is None: + common_ancestor = path + else: + if common_ancestor in path.parents or path == common_ancestor: + continue + elif path in common_ancestor.parents: + common_ancestor = path + else: + shared = commonpath(path, common_ancestor) + if shared is not None: + common_ancestor = shared + if common_ancestor is None: + common_ancestor = invocation_dir + elif common_ancestor.is_file(): + common_ancestor = common_ancestor.parent + return common_ancestor + + +def get_dirs_from_args(args: Iterable[str]) -> list[Path]: + def is_option(x: str) -> bool: + return x.startswith("-") + + def get_file_part_from_node_id(x: str) -> str: + return x.split("::")[0] + + def get_dir_from_path(path: Path) -> Path: + if path.is_dir(): + return path + return path.parent + + # These look like paths but may not exist + possible_paths = ( + absolutepath(get_file_part_from_node_id(arg)) + for arg in args + if not is_option(arg) + ) + + return [get_dir_from_path(path) for path in possible_paths if safe_exists(path)] + + +def parse_override_ini(override_ini: Sequence[str] | None) -> ConfigDict: + """Parse the -o/--override-ini command line arguments and return the overrides. + + :raises UsageError: + If one of the values is malformed. + """ + overrides = {} + # override_ini is a list of "ini=value" options. + # Always use the last item if multiple values are set for same ini-name, + # e.g. -o foo=bar1 -o foo=bar2 will set foo to bar2. + for ini_config in override_ini or (): + try: + key, user_ini_value = ini_config.split("=", 1) + except ValueError as e: + raise UsageError( + f"-o/--override-ini expects option=value style (got: {ini_config!r})." + ) from e + else: + overrides[key] = ConfigValue(user_ini_value, origin="override", mode="ini") + return overrides + + +CFG_PYTEST_SECTION = "[pytest] section in {filename} files is no longer supported, change to [tool:pytest] instead." + + +def determine_setup( + *, + inifile: str | None, + override_ini: Sequence[str] | None, + args: Sequence[str], + rootdir_cmd_arg: str | None, + invocation_dir: Path, +) -> tuple[Path, Path | None, ConfigDict, Sequence[str]]: + """Determine the rootdir, inifile and ini configuration values from the + command line arguments. + + :param inifile: + The `--inifile` command line argument, if given. + :param override_ini: + The -o/--override-ini command line arguments, if given. + :param args: + The free command line arguments. + :param rootdir_cmd_arg: + The `--rootdir` command line argument, if given. + :param invocation_dir: + The working directory when pytest was invoked. + + :raises UsageError: + """ + rootdir = None + dirs = get_dirs_from_args(args) + ignored_config_files: Sequence[str] = [] + + if inifile: + inipath_ = absolutepath(inifile) + inipath: Path | None = inipath_ + inicfg = load_config_dict_from_file(inipath_) or {} + if rootdir_cmd_arg is None: + rootdir = inipath_.parent + else: + ancestor = get_common_ancestor(invocation_dir, dirs) + rootdir, inipath, inicfg, ignored_config_files = locate_config( + invocation_dir, [ancestor] + ) + if rootdir is None and rootdir_cmd_arg is None: + for possible_rootdir in (ancestor, *ancestor.parents): + if (possible_rootdir / "setup.py").is_file(): + rootdir = possible_rootdir + break + else: + if dirs != [ancestor]: + rootdir, inipath, inicfg, _ = locate_config(invocation_dir, dirs) + if rootdir is None: + rootdir = get_common_ancestor( + invocation_dir, [invocation_dir, ancestor] + ) + if is_fs_root(rootdir): + rootdir = ancestor + if rootdir_cmd_arg: + rootdir = absolutepath(os.path.expandvars(rootdir_cmd_arg)) + if not rootdir.is_dir(): + raise UsageError( + f"Directory '{rootdir}' not found. Check your '--rootdir' option." + ) + + ini_overrides = parse_override_ini(override_ini) + inicfg.update(ini_overrides) + + assert rootdir is not None + return rootdir, inipath, inicfg, ignored_config_files + + +def is_fs_root(p: Path) -> bool: + r""" + Return True if the given path is pointing to the root of the + file system ("/" on Unix and "C:\\" on Windows for example). + """ + return os.path.splitdrive(str(p))[1] == os.sep diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/debugging.py b/Backend/venv/lib/python3.12/site-packages/_pytest/debugging.py new file mode 100644 index 00000000..de1b2688 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/_pytest/debugging.py @@ -0,0 +1,407 @@ +# mypy: allow-untyped-defs +# ruff: noqa: T100 +"""Interactive debugging with PDB, the Python Debugger.""" + +from __future__ import annotations + +import argparse +from collections.abc import Callable +from collections.abc import Generator +import functools +import sys +import types +from typing import Any +import unittest + +from _pytest import outcomes +from _pytest._code import ExceptionInfo +from _pytest.capture import CaptureManager +from _pytest.config import Config +from _pytest.config import ConftestImportFailure +from _pytest.config import hookimpl +from _pytest.config import PytestPluginManager +from _pytest.config.argparsing import Parser +from _pytest.config.exceptions import UsageError +from _pytest.nodes import Node +from _pytest.reports import BaseReport +from _pytest.runner import CallInfo + + +def _validate_usepdb_cls(value: str) -> tuple[str, str]: + """Validate syntax of --pdbcls option.""" + try: + modname, classname = value.split(":") + except ValueError as e: + raise argparse.ArgumentTypeError( + f"{value!r} is not in the format 'modname:classname'" + ) from e + return (modname, classname) + + +def pytest_addoption(parser: Parser) -> None: + group = parser.getgroup("general") + group.addoption( + "--pdb", + dest="usepdb", + action="store_true", + help="Start the interactive Python debugger on errors or KeyboardInterrupt", + ) + group.addoption( + "--pdbcls", + dest="usepdb_cls", + metavar="modulename:classname", + type=_validate_usepdb_cls, + help="Specify a custom interactive Python debugger for use with --pdb." + "For example: --pdbcls=IPython.terminal.debugger:TerminalPdb", + ) + group.addoption( + "--trace", + dest="trace", + action="store_true", + help="Immediately break when running each test", + ) + + +def pytest_configure(config: Config) -> None: + import pdb + + if config.getvalue("trace"): + config.pluginmanager.register(PdbTrace(), "pdbtrace") + if config.getvalue("usepdb"): + config.pluginmanager.register(PdbInvoke(), "pdbinvoke") + + pytestPDB._saved.append( + (pdb.set_trace, pytestPDB._pluginmanager, pytestPDB._config) + ) + pdb.set_trace = pytestPDB.set_trace + pytestPDB._pluginmanager = config.pluginmanager + pytestPDB._config = config + + # NOTE: not using pytest_unconfigure, since it might get called although + # pytest_configure was not (if another plugin raises UsageError). + def fin() -> None: + ( + pdb.set_trace, + pytestPDB._pluginmanager, + pytestPDB._config, + ) = pytestPDB._saved.pop() + + config.add_cleanup(fin) + + +class pytestPDB: + """Pseudo PDB that defers to the real pdb.""" + + _pluginmanager: PytestPluginManager | None = None + _config: Config | None = None + _saved: list[ + tuple[Callable[..., None], PytestPluginManager | None, Config | None] + ] = [] + _recursive_debug = 0 + _wrapped_pdb_cls: tuple[type[Any], type[Any]] | None = None + + @classmethod + def _is_capturing(cls, capman: CaptureManager | None) -> str | bool: + if capman: + return capman.is_capturing() + return False + + @classmethod + def _import_pdb_cls(cls, capman: CaptureManager | None): + if not cls._config: + import pdb + + # Happens when using pytest.set_trace outside of a test. + return pdb.Pdb + + usepdb_cls = cls._config.getvalue("usepdb_cls") + + if cls._wrapped_pdb_cls and cls._wrapped_pdb_cls[0] == usepdb_cls: + return cls._wrapped_pdb_cls[1] + + if usepdb_cls: + modname, classname = usepdb_cls + + try: + __import__(modname) + mod = sys.modules[modname] + + # Handle --pdbcls=pdb:pdb.Pdb (useful e.g. with pdbpp). + parts = classname.split(".") + pdb_cls = getattr(mod, parts[0]) + for part in parts[1:]: + pdb_cls = getattr(pdb_cls, part) + except Exception as exc: + value = ":".join((modname, classname)) + raise UsageError( + f"--pdbcls: could not import {value!r}: {exc}" + ) from exc + else: + import pdb + + pdb_cls = pdb.Pdb + + wrapped_cls = cls._get_pdb_wrapper_class(pdb_cls, capman) + cls._wrapped_pdb_cls = (usepdb_cls, wrapped_cls) + return wrapped_cls + + @classmethod + def _get_pdb_wrapper_class(cls, pdb_cls, capman: CaptureManager | None): + import _pytest.config + + class PytestPdbWrapper(pdb_cls): + _pytest_capman = capman + _continued = False + + def do_debug(self, arg): + cls._recursive_debug += 1 + ret = super().do_debug(arg) + cls._recursive_debug -= 1 + return ret + + if hasattr(pdb_cls, "do_debug"): + do_debug.__doc__ = pdb_cls.do_debug.__doc__ + + def do_continue(self, arg): + ret = super().do_continue(arg) + if cls._recursive_debug == 0: + assert cls._config is not None + tw = _pytest.config.create_terminal_writer(cls._config) + tw.line() + + capman = self._pytest_capman + capturing = pytestPDB._is_capturing(capman) + if capturing: + if capturing == "global": + tw.sep(">", "PDB continue (IO-capturing resumed)") + else: + tw.sep( + ">", + f"PDB continue (IO-capturing resumed for {capturing})", + ) + assert capman is not None + capman.resume() + else: + tw.sep(">", "PDB continue") + assert cls._pluginmanager is not None + cls._pluginmanager.hook.pytest_leave_pdb(config=cls._config, pdb=self) + self._continued = True + return ret + + if hasattr(pdb_cls, "do_continue"): + do_continue.__doc__ = pdb_cls.do_continue.__doc__ + + do_c = do_cont = do_continue + + def do_quit(self, arg): + # Raise Exit outcome when quit command is used in pdb. + # + # This is a bit of a hack - it would be better if BdbQuit + # could be handled, but this would require to wrap the + # whole pytest run, and adjust the report etc. + ret = super().do_quit(arg) + + if cls._recursive_debug == 0: + outcomes.exit("Quitting debugger") + + return ret + + if hasattr(pdb_cls, "do_quit"): + do_quit.__doc__ = pdb_cls.do_quit.__doc__ + + do_q = do_quit + do_exit = do_quit + + def setup(self, f, tb): + """Suspend on setup(). + + Needed after do_continue resumed, and entering another + breakpoint again. + """ + ret = super().setup(f, tb) + if not ret and self._continued: + # pdb.setup() returns True if the command wants to exit + # from the interaction: do not suspend capturing then. + if self._pytest_capman: + self._pytest_capman.suspend_global_capture(in_=True) + return ret + + def get_stack(self, f, t): + stack, i = super().get_stack(f, t) + if f is None: + # Find last non-hidden frame. + i = max(0, len(stack) - 1) + while i and stack[i][0].f_locals.get("__tracebackhide__", False): + i -= 1 + return stack, i + + return PytestPdbWrapper + + @classmethod + def _init_pdb(cls, method, *args, **kwargs): + """Initialize PDB debugging, dropping any IO capturing.""" + import _pytest.config + + if cls._pluginmanager is None: + capman: CaptureManager | None = None + else: + capman = cls._pluginmanager.getplugin("capturemanager") + if capman: + capman.suspend(in_=True) + + if cls._config: + tw = _pytest.config.create_terminal_writer(cls._config) + tw.line() + + if cls._recursive_debug == 0: + # Handle header similar to pdb.set_trace in py37+. + header = kwargs.pop("header", None) + if header is not None: + tw.sep(">", header) + else: + capturing = cls._is_capturing(capman) + if capturing == "global": + tw.sep(">", f"PDB {method} (IO-capturing turned off)") + elif capturing: + tw.sep( + ">", + f"PDB {method} (IO-capturing turned off for {capturing})", + ) + else: + tw.sep(">", f"PDB {method}") + + _pdb = cls._import_pdb_cls(capman)(**kwargs) + + if cls._pluginmanager: + cls._pluginmanager.hook.pytest_enter_pdb(config=cls._config, pdb=_pdb) + return _pdb + + @classmethod + def set_trace(cls, *args, **kwargs) -> None: + """Invoke debugging via ``Pdb.set_trace``, dropping any IO capturing.""" + frame = sys._getframe().f_back + _pdb = cls._init_pdb("set_trace", *args, **kwargs) + _pdb.set_trace(frame) + + +class PdbInvoke: + def pytest_exception_interact( + self, node: Node, call: CallInfo[Any], report: BaseReport + ) -> None: + capman = node.config.pluginmanager.getplugin("capturemanager") + if capman: + capman.suspend_global_capture(in_=True) + out, err = capman.read_global_capture() + sys.stdout.write(out) + sys.stdout.write(err) + assert call.excinfo is not None + + if not isinstance(call.excinfo.value, unittest.SkipTest): + _enter_pdb(node, call.excinfo, report) + + def pytest_internalerror(self, excinfo: ExceptionInfo[BaseException]) -> None: + exc_or_tb = _postmortem_exc_or_tb(excinfo) + post_mortem(exc_or_tb) + + +class PdbTrace: + @hookimpl(wrapper=True) + def pytest_pyfunc_call(self, pyfuncitem) -> Generator[None, object, object]: + wrap_pytest_function_for_tracing(pyfuncitem) + return (yield) + + +def wrap_pytest_function_for_tracing(pyfuncitem) -> None: + """Change the Python function object of the given Function item by a + wrapper which actually enters pdb before calling the python function + itself, effectively leaving the user in the pdb prompt in the first + statement of the function.""" + _pdb = pytestPDB._init_pdb("runcall") + testfunction = pyfuncitem.obj + + # we can't just return `partial(pdb.runcall, testfunction)` because (on + # python < 3.7.4) runcall's first param is `func`, which means we'd get + # an exception if one of the kwargs to testfunction was called `func`. + @functools.wraps(testfunction) + def wrapper(*args, **kwargs) -> None: + func = functools.partial(testfunction, *args, **kwargs) + _pdb.runcall(func) + + pyfuncitem.obj = wrapper + + +def maybe_wrap_pytest_function_for_tracing(pyfuncitem) -> None: + """Wrap the given pytestfunct item for tracing support if --trace was given in + the command line.""" + if pyfuncitem.config.getvalue("trace"): + wrap_pytest_function_for_tracing(pyfuncitem) + + +def _enter_pdb( + node: Node, excinfo: ExceptionInfo[BaseException], rep: BaseReport +) -> BaseReport: + # XXX we reuse the TerminalReporter's terminalwriter + # because this seems to avoid some encoding related troubles + # for not completely clear reasons. + tw = node.config.pluginmanager.getplugin("terminalreporter")._tw + tw.line() + + showcapture = node.config.option.showcapture + + for sectionname, content in ( + ("stdout", rep.capstdout), + ("stderr", rep.capstderr), + ("log", rep.caplog), + ): + if showcapture in (sectionname, "all") and content: + tw.sep(">", "captured " + sectionname) + if content[-1:] == "\n": + content = content[:-1] + tw.line(content) + + tw.sep(">", "traceback") + rep.toterminal(tw) + tw.sep(">", "entering PDB") + tb_or_exc = _postmortem_exc_or_tb(excinfo) + rep._pdbshown = True # type: ignore[attr-defined] + post_mortem(tb_or_exc) + return rep + + +def _postmortem_exc_or_tb( + excinfo: ExceptionInfo[BaseException], +) -> types.TracebackType | BaseException: + from doctest import UnexpectedException + + get_exc = sys.version_info >= (3, 13) + if isinstance(excinfo.value, UnexpectedException): + # A doctest.UnexpectedException is not useful for post_mortem. + # Use the underlying exception instead: + underlying_exc = excinfo.value + if get_exc: + return underlying_exc.exc_info[1] + + return underlying_exc.exc_info[2] + elif isinstance(excinfo.value, ConftestImportFailure): + # A config.ConftestImportFailure is not useful for post_mortem. + # Use the underlying exception instead: + cause = excinfo.value.cause + if get_exc: + return cause + + assert cause.__traceback__ is not None + return cause.__traceback__ + else: + assert excinfo._excinfo is not None + if get_exc: + return excinfo._excinfo[1] + + return excinfo._excinfo[2] + + +def post_mortem(tb_or_exc: types.TracebackType | BaseException) -> None: + p = pytestPDB._init_pdb("post_mortem") + p.reset() + p.interaction(None, tb_or_exc) + if p.quitting: + outcomes.exit("Quitting debugger") diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/deprecated.py b/Backend/venv/lib/python3.12/site-packages/_pytest/deprecated.py new file mode 100644 index 00000000..cb5d2e93 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/_pytest/deprecated.py @@ -0,0 +1,99 @@ +"""Deprecation messages and bits of code used elsewhere in the codebase that +is planned to be removed in the next pytest release. + +Keeping it in a central location makes it easy to track what is deprecated and should +be removed when the time comes. + +All constants defined in this module should be either instances of +:class:`PytestWarning`, or :class:`UnformattedWarning` +in case of warnings which need to format their messages. +""" + +from __future__ import annotations + +from warnings import warn + +from _pytest.warning_types import PytestDeprecationWarning +from _pytest.warning_types import PytestRemovedIn9Warning +from _pytest.warning_types import PytestRemovedIn10Warning +from _pytest.warning_types import UnformattedWarning + + +# set of plugins which have been integrated into the core; we use this list to ignore +# them during registration to avoid conflicts +DEPRECATED_EXTERNAL_PLUGINS = { + "pytest_catchlog", + "pytest_capturelog", + "pytest_faulthandler", + "pytest_subtests", +} + + +# This could have been removed pytest 8, but it's harmless and common, so no rush to remove. +YIELD_FIXTURE = PytestDeprecationWarning( + "@pytest.yield_fixture is deprecated.\n" + "Use @pytest.fixture instead; they are the same." +) + +# This deprecation is never really meant to be removed. +PRIVATE = PytestDeprecationWarning("A private pytest class or function was used.") + + +HOOK_LEGACY_PATH_ARG = UnformattedWarning( + PytestRemovedIn9Warning, + "The ({pylib_path_arg}: py.path.local) argument is deprecated, please use ({pathlib_path_arg}: pathlib.Path)\n" + "see https://docs.pytest.org/en/latest/deprecations.html" + "#py-path-local-arguments-for-hooks-replaced-with-pathlib-path", +) + +NODE_CTOR_FSPATH_ARG = UnformattedWarning( + PytestRemovedIn9Warning, + "The (fspath: py.path.local) argument to {node_type_name} is deprecated. " + "Please use the (path: pathlib.Path) argument instead.\n" + "See https://docs.pytest.org/en/latest/deprecations.html" + "#fspath-argument-for-node-constructors-replaced-with-pathlib-path", +) + +HOOK_LEGACY_MARKING = UnformattedWarning( + PytestDeprecationWarning, + "The hook{type} {fullname} uses old-style configuration options (marks or attributes).\n" + "Please use the pytest.hook{type}({hook_opts}) decorator instead\n" + " to configure the hooks.\n" + " See https://docs.pytest.org/en/latest/deprecations.html" + "#configuring-hook-specs-impls-using-markers", +) + +MARKED_FIXTURE = PytestRemovedIn9Warning( + "Marks applied to fixtures have no effect\n" + "See docs: https://docs.pytest.org/en/stable/deprecations.html#applying-a-mark-to-a-fixture-function" +) + +MONKEYPATCH_LEGACY_NAMESPACE_PACKAGES = PytestRemovedIn10Warning( + "monkeypatch.syspath_prepend() called with pkg_resources legacy namespace packages detected.\n" + "Legacy namespace packages (using pkg_resources.declare_namespace) are deprecated.\n" + "Please use native namespace packages (PEP 420) instead.\n" + "See https://docs.pytest.org/en/stable/deprecations.html#monkeypatch-fixup-namespace-packages" +) + +# You want to make some `__init__` or function "private". +# +# def my_private_function(some, args): +# ... +# +# Do this: +# +# def my_private_function(some, args, *, _ispytest: bool = False): +# check_ispytest(_ispytest) +# ... +# +# Change all internal/allowed calls to +# +# my_private_function(some, args, _ispytest=True) +# +# All other calls will get the default _ispytest=False and trigger +# the warning (possibly error in the future). + + +def check_ispytest(ispytest: bool) -> None: + if not ispytest: + warn(PRIVATE, stacklevel=3) diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/doctest.py b/Backend/venv/lib/python3.12/site-packages/_pytest/doctest.py new file mode 100644 index 00000000..cd255f5e --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/_pytest/doctest.py @@ -0,0 +1,736 @@ +# mypy: allow-untyped-defs +"""Discover and run doctests in modules and test files.""" + +from __future__ import annotations + +import bdb +from collections.abc import Callable +from collections.abc import Generator +from collections.abc import Iterable +from collections.abc import Sequence +from contextlib import contextmanager +import functools +import inspect +import os +from pathlib import Path +import platform +import re +import sys +import traceback +import types +from typing import Any +from typing import TYPE_CHECKING +import warnings + +from _pytest import outcomes +from _pytest._code.code import ExceptionInfo +from _pytest._code.code import ReprFileLocation +from _pytest._code.code import TerminalRepr +from _pytest._io import TerminalWriter +from _pytest.compat import safe_getattr +from _pytest.config import Config +from _pytest.config.argparsing import Parser +from _pytest.fixtures import fixture +from _pytest.fixtures import TopRequest +from _pytest.nodes import Collector +from _pytest.nodes import Item +from _pytest.outcomes import OutcomeException +from _pytest.outcomes import skip +from _pytest.pathlib import fnmatch_ex +from _pytest.python import Module +from _pytest.python_api import approx +from _pytest.warning_types import PytestWarning + + +if TYPE_CHECKING: + import doctest + + from typing_extensions import Self + +DOCTEST_REPORT_CHOICE_NONE = "none" +DOCTEST_REPORT_CHOICE_CDIFF = "cdiff" +DOCTEST_REPORT_CHOICE_NDIFF = "ndiff" +DOCTEST_REPORT_CHOICE_UDIFF = "udiff" +DOCTEST_REPORT_CHOICE_ONLY_FIRST_FAILURE = "only_first_failure" + +DOCTEST_REPORT_CHOICES = ( + DOCTEST_REPORT_CHOICE_NONE, + DOCTEST_REPORT_CHOICE_CDIFF, + DOCTEST_REPORT_CHOICE_NDIFF, + DOCTEST_REPORT_CHOICE_UDIFF, + DOCTEST_REPORT_CHOICE_ONLY_FIRST_FAILURE, +) + +# Lazy definition of runner class +RUNNER_CLASS = None +# Lazy definition of output checker class +CHECKER_CLASS: type[doctest.OutputChecker] | None = None + + +def pytest_addoption(parser: Parser) -> None: + parser.addini( + "doctest_optionflags", + "Option flags for doctests", + type="args", + default=["ELLIPSIS"], + ) + parser.addini( + "doctest_encoding", "Encoding used for doctest files", default="utf-8" + ) + group = parser.getgroup("collect") + group.addoption( + "--doctest-modules", + action="store_true", + default=False, + help="Run doctests in all .py modules", + dest="doctestmodules", + ) + group.addoption( + "--doctest-report", + type=str.lower, + default="udiff", + help="Choose another output format for diffs on doctest failure", + choices=DOCTEST_REPORT_CHOICES, + dest="doctestreport", + ) + group.addoption( + "--doctest-glob", + action="append", + default=[], + metavar="pat", + help="Doctests file matching pattern, default: test*.txt", + dest="doctestglob", + ) + group.addoption( + "--doctest-ignore-import-errors", + action="store_true", + default=False, + help="Ignore doctest collection errors", + dest="doctest_ignore_import_errors", + ) + group.addoption( + "--doctest-continue-on-failure", + action="store_true", + default=False, + help="For a given doctest, continue to run after the first failure", + dest="doctest_continue_on_failure", + ) + + +def pytest_unconfigure() -> None: + global RUNNER_CLASS + + RUNNER_CLASS = None + + +def pytest_collect_file( + file_path: Path, + parent: Collector, +) -> DoctestModule | DoctestTextfile | None: + config = parent.config + if file_path.suffix == ".py": + if config.option.doctestmodules and not any( + (_is_setup_py(file_path), _is_main_py(file_path)) + ): + return DoctestModule.from_parent(parent, path=file_path) + elif _is_doctest(config, file_path, parent): + return DoctestTextfile.from_parent(parent, path=file_path) + return None + + +def _is_setup_py(path: Path) -> bool: + if path.name != "setup.py": + return False + contents = path.read_bytes() + return b"setuptools" in contents or b"distutils" in contents + + +def _is_doctest(config: Config, path: Path, parent: Collector) -> bool: + if path.suffix in (".txt", ".rst") and parent.session.isinitpath(path): + return True + globs = config.getoption("doctestglob") or ["test*.txt"] + return any(fnmatch_ex(glob, path) for glob in globs) + + +def _is_main_py(path: Path) -> bool: + return path.name == "__main__.py" + + +class ReprFailDoctest(TerminalRepr): + def __init__( + self, reprlocation_lines: Sequence[tuple[ReprFileLocation, Sequence[str]]] + ) -> None: + self.reprlocation_lines = reprlocation_lines + + def toterminal(self, tw: TerminalWriter) -> None: + for reprlocation, lines in self.reprlocation_lines: + for line in lines: + tw.line(line) + reprlocation.toterminal(tw) + + +class MultipleDoctestFailures(Exception): + def __init__(self, failures: Sequence[doctest.DocTestFailure]) -> None: + super().__init__() + self.failures = failures + + +def _init_runner_class() -> type[doctest.DocTestRunner]: + import doctest + + class PytestDoctestRunner(doctest.DebugRunner): + """Runner to collect failures. + + Note that the out variable in this case is a list instead of a + stdout-like object. + """ + + def __init__( + self, + checker: doctest.OutputChecker | None = None, + verbose: bool | None = None, + optionflags: int = 0, + continue_on_failure: bool = True, + ) -> None: + super().__init__(checker=checker, verbose=verbose, optionflags=optionflags) + self.continue_on_failure = continue_on_failure + + def report_failure( + self, + out, + test: doctest.DocTest, + example: doctest.Example, + got: str, + ) -> None: + failure = doctest.DocTestFailure(test, example, got) + if self.continue_on_failure: + out.append(failure) + else: + raise failure + + def report_unexpected_exception( + self, + out, + test: doctest.DocTest, + example: doctest.Example, + exc_info: tuple[type[BaseException], BaseException, types.TracebackType], + ) -> None: + if isinstance(exc_info[1], OutcomeException): + raise exc_info[1] + if isinstance(exc_info[1], bdb.BdbQuit): + outcomes.exit("Quitting debugger") + failure = doctest.UnexpectedException(test, example, exc_info) + if self.continue_on_failure: + out.append(failure) + else: + raise failure + + return PytestDoctestRunner + + +def _get_runner( + checker: doctest.OutputChecker | None = None, + verbose: bool | None = None, + optionflags: int = 0, + continue_on_failure: bool = True, +) -> doctest.DocTestRunner: + # We need this in order to do a lazy import on doctest + global RUNNER_CLASS + if RUNNER_CLASS is None: + RUNNER_CLASS = _init_runner_class() + # Type ignored because the continue_on_failure argument is only defined on + # PytestDoctestRunner, which is lazily defined so can't be used as a type. + return RUNNER_CLASS( # type: ignore + checker=checker, + verbose=verbose, + optionflags=optionflags, + continue_on_failure=continue_on_failure, + ) + + +class DoctestItem(Item): + def __init__( + self, + name: str, + parent: DoctestTextfile | DoctestModule, + runner: doctest.DocTestRunner, + dtest: doctest.DocTest, + ) -> None: + super().__init__(name, parent) + self.runner = runner + self.dtest = dtest + + # Stuff needed for fixture support. + self.obj = None + fm = self.session._fixturemanager + fixtureinfo = fm.getfixtureinfo(node=self, func=None, cls=None) + self._fixtureinfo = fixtureinfo + self.fixturenames = fixtureinfo.names_closure + self._initrequest() + + @classmethod + def from_parent( # type: ignore[override] + cls, + parent: DoctestTextfile | DoctestModule, + *, + name: str, + runner: doctest.DocTestRunner, + dtest: doctest.DocTest, + ) -> Self: + # incompatible signature due to imposed limits on subclass + """The public named constructor.""" + return super().from_parent(name=name, parent=parent, runner=runner, dtest=dtest) + + def _initrequest(self) -> None: + self.funcargs: dict[str, object] = {} + self._request = TopRequest(self, _ispytest=True) # type: ignore[arg-type] + + def setup(self) -> None: + self._request._fillfixtures() + globs = dict(getfixture=self._request.getfixturevalue) + for name, value in self._request.getfixturevalue("doctest_namespace").items(): + globs[name] = value + self.dtest.globs.update(globs) + + def runtest(self) -> None: + _check_all_skipped(self.dtest) + self._disable_output_capturing_for_darwin() + failures: list[doctest.DocTestFailure] = [] + # Type ignored because we change the type of `out` from what + # doctest expects. + self.runner.run(self.dtest, out=failures) # type: ignore[arg-type] + if failures: + raise MultipleDoctestFailures(failures) + + def _disable_output_capturing_for_darwin(self) -> None: + """Disable output capturing. Otherwise, stdout is lost to doctest (#985).""" + if platform.system() != "Darwin": + return + capman = self.config.pluginmanager.getplugin("capturemanager") + if capman: + capman.suspend_global_capture(in_=True) + out, err = capman.read_global_capture() + sys.stdout.write(out) + sys.stderr.write(err) + + # TODO: Type ignored -- breaks Liskov Substitution. + def repr_failure( # type: ignore[override] + self, + excinfo: ExceptionInfo[BaseException], + ) -> str | TerminalRepr: + import doctest + + failures: ( + Sequence[doctest.DocTestFailure | doctest.UnexpectedException] | None + ) = None + if isinstance( + excinfo.value, doctest.DocTestFailure | doctest.UnexpectedException + ): + failures = [excinfo.value] + elif isinstance(excinfo.value, MultipleDoctestFailures): + failures = excinfo.value.failures + + if failures is None: + return super().repr_failure(excinfo) + + reprlocation_lines = [] + for failure in failures: + example = failure.example + test = failure.test + filename = test.filename + if test.lineno is None: + lineno = None + else: + lineno = test.lineno + example.lineno + 1 + message = type(failure).__name__ + # TODO: ReprFileLocation doesn't expect a None lineno. + reprlocation = ReprFileLocation(filename, lineno, message) # type: ignore[arg-type] + checker = _get_checker() + report_choice = _get_report_choice(self.config.getoption("doctestreport")) + if lineno is not None: + assert failure.test.docstring is not None + lines = failure.test.docstring.splitlines(False) + # add line numbers to the left of the error message + assert test.lineno is not None + lines = [ + f"{i + test.lineno + 1:03d} {x}" for (i, x) in enumerate(lines) + ] + # trim docstring error lines to 10 + lines = lines[max(example.lineno - 9, 0) : example.lineno + 1] + else: + lines = [ + "EXAMPLE LOCATION UNKNOWN, not showing all tests of that example" + ] + indent = ">>>" + for line in example.source.splitlines(): + lines.append(f"??? {indent} {line}") + indent = "..." + if isinstance(failure, doctest.DocTestFailure): + lines += checker.output_difference( + example, failure.got, report_choice + ).split("\n") + else: + inner_excinfo = ExceptionInfo.from_exc_info(failure.exc_info) + lines += [f"UNEXPECTED EXCEPTION: {inner_excinfo.value!r}"] + lines += [ + x.strip("\n") for x in traceback.format_exception(*failure.exc_info) + ] + reprlocation_lines.append((reprlocation, lines)) + return ReprFailDoctest(reprlocation_lines) + + def reportinfo(self) -> tuple[os.PathLike[str] | str, int | None, str]: + return self.path, self.dtest.lineno, f"[doctest] {self.name}" + + +def _get_flag_lookup() -> dict[str, int]: + import doctest + + return dict( + DONT_ACCEPT_TRUE_FOR_1=doctest.DONT_ACCEPT_TRUE_FOR_1, + DONT_ACCEPT_BLANKLINE=doctest.DONT_ACCEPT_BLANKLINE, + NORMALIZE_WHITESPACE=doctest.NORMALIZE_WHITESPACE, + ELLIPSIS=doctest.ELLIPSIS, + IGNORE_EXCEPTION_DETAIL=doctest.IGNORE_EXCEPTION_DETAIL, + COMPARISON_FLAGS=doctest.COMPARISON_FLAGS, + ALLOW_UNICODE=_get_allow_unicode_flag(), + ALLOW_BYTES=_get_allow_bytes_flag(), + NUMBER=_get_number_flag(), + ) + + +def get_optionflags(config: Config) -> int: + optionflags_str = config.getini("doctest_optionflags") + flag_lookup_table = _get_flag_lookup() + flag_acc = 0 + for flag in optionflags_str: + flag_acc |= flag_lookup_table[flag] + return flag_acc + + +def _get_continue_on_failure(config: Config) -> bool: + continue_on_failure: bool = config.getvalue("doctest_continue_on_failure") + if continue_on_failure: + # We need to turn off this if we use pdb since we should stop at + # the first failure. + if config.getvalue("usepdb"): + continue_on_failure = False + return continue_on_failure + + +class DoctestTextfile(Module): + obj = None + + def collect(self) -> Iterable[DoctestItem]: + import doctest + + # Inspired by doctest.testfile; ideally we would use it directly, + # but it doesn't support passing a custom checker. + encoding = self.config.getini("doctest_encoding") + text = self.path.read_text(encoding) + filename = str(self.path) + name = self.path.name + globs = {"__name__": "__main__"} + + optionflags = get_optionflags(self.config) + + runner = _get_runner( + verbose=False, + optionflags=optionflags, + checker=_get_checker(), + continue_on_failure=_get_continue_on_failure(self.config), + ) + + parser = doctest.DocTestParser() + test = parser.get_doctest(text, globs, name, filename, 0) + if test.examples: + yield DoctestItem.from_parent( + self, name=test.name, runner=runner, dtest=test + ) + + +def _check_all_skipped(test: doctest.DocTest) -> None: + """Raise pytest.skip() if all examples in the given DocTest have the SKIP + option set.""" + import doctest + + all_skipped = all(x.options.get(doctest.SKIP, False) for x in test.examples) + if all_skipped: + skip("all tests skipped by +SKIP option") + + +def _is_mocked(obj: object) -> bool: + """Return if an object is possibly a mock object by checking the + existence of a highly improbable attribute.""" + return ( + safe_getattr(obj, "pytest_mock_example_attribute_that_shouldnt_exist", None) + is not None + ) + + +@contextmanager +def _patch_unwrap_mock_aware() -> Generator[None]: + """Context manager which replaces ``inspect.unwrap`` with a version + that's aware of mock objects and doesn't recurse into them.""" + real_unwrap = inspect.unwrap + + def _mock_aware_unwrap( + func: Callable[..., Any], *, stop: Callable[[Any], Any] | None = None + ) -> Any: + try: + if stop is None or stop is _is_mocked: + return real_unwrap(func, stop=_is_mocked) + _stop = stop + return real_unwrap(func, stop=lambda obj: _is_mocked(obj) or _stop(func)) + except Exception as e: + warnings.warn( + f"Got {e!r} when unwrapping {func!r}. This is usually caused " + "by a violation of Python's object protocol; see e.g. " + "https://github.com/pytest-dev/pytest/issues/5080", + PytestWarning, + ) + raise + + inspect.unwrap = _mock_aware_unwrap + try: + yield + finally: + inspect.unwrap = real_unwrap + + +class DoctestModule(Module): + def collect(self) -> Iterable[DoctestItem]: + import doctest + + class MockAwareDocTestFinder(doctest.DocTestFinder): + py_ver_info_minor = sys.version_info[:2] + is_find_lineno_broken = ( + py_ver_info_minor < (3, 11) + or (py_ver_info_minor == (3, 11) and sys.version_info.micro < 9) + or (py_ver_info_minor == (3, 12) and sys.version_info.micro < 3) + ) + if is_find_lineno_broken: + + def _find_lineno(self, obj, source_lines): + """On older Pythons, doctest code does not take into account + `@property`. https://github.com/python/cpython/issues/61648 + + Moreover, wrapped Doctests need to be unwrapped so the correct + line number is returned. #8796 + """ + if isinstance(obj, property): + obj = getattr(obj, "fget", obj) + + if hasattr(obj, "__wrapped__"): + # Get the main obj in case of it being wrapped + obj = inspect.unwrap(obj) + + # Type ignored because this is a private function. + return super()._find_lineno( # type:ignore[misc] + obj, + source_lines, + ) + + if sys.version_info < (3, 13): + + def _from_module(self, module, object): + """`cached_property` objects are never considered a part + of the 'current module'. As such they are skipped by doctest. + Here we override `_from_module` to check the underlying + function instead. https://github.com/python/cpython/issues/107995 + """ + if isinstance(object, functools.cached_property): + object = object.func + + # Type ignored because this is a private function. + return super()._from_module(module, object) # type: ignore[misc] + + try: + module = self.obj + except Collector.CollectError: + if self.config.getvalue("doctest_ignore_import_errors"): + skip(f"unable to import module {self.path!r}") + else: + raise + + # While doctests currently don't support fixtures directly, we still + # need to pick up autouse fixtures. + self.session._fixturemanager.parsefactories(self) + + # Uses internal doctest module parsing mechanism. + finder = MockAwareDocTestFinder() + optionflags = get_optionflags(self.config) + runner = _get_runner( + verbose=False, + optionflags=optionflags, + checker=_get_checker(), + continue_on_failure=_get_continue_on_failure(self.config), + ) + + for test in finder.find(module, module.__name__): + if test.examples: # skip empty doctests + yield DoctestItem.from_parent( + self, name=test.name, runner=runner, dtest=test + ) + + +def _init_checker_class() -> type[doctest.OutputChecker]: + import doctest + + class LiteralsOutputChecker(doctest.OutputChecker): + # Based on doctest_nose_plugin.py from the nltk project + # (https://github.com/nltk/nltk) and on the "numtest" doctest extension + # by Sebastien Boisgerault (https://github.com/boisgera/numtest). + + _unicode_literal_re = re.compile(r"(\W|^)[uU]([rR]?[\'\"])", re.UNICODE) + _bytes_literal_re = re.compile(r"(\W|^)[bB]([rR]?[\'\"])", re.UNICODE) + _number_re = re.compile( + r""" + (?P + (?P + (?P [+-]?\d*)\.(?P\d+) + | + (?P [+-]?\d+)\. + ) + (?: + [Ee] + (?P [+-]?\d+) + )? + | + (?P [+-]?\d+) + (?: + [Ee] + (?P [+-]?\d+) + ) + ) + """, + re.VERBOSE, + ) + + def check_output(self, want: str, got: str, optionflags: int) -> bool: + if super().check_output(want, got, optionflags): + return True + + allow_unicode = optionflags & _get_allow_unicode_flag() + allow_bytes = optionflags & _get_allow_bytes_flag() + allow_number = optionflags & _get_number_flag() + + if not allow_unicode and not allow_bytes and not allow_number: + return False + + def remove_prefixes(regex: re.Pattern[str], txt: str) -> str: + return re.sub(regex, r"\1\2", txt) + + if allow_unicode: + want = remove_prefixes(self._unicode_literal_re, want) + got = remove_prefixes(self._unicode_literal_re, got) + + if allow_bytes: + want = remove_prefixes(self._bytes_literal_re, want) + got = remove_prefixes(self._bytes_literal_re, got) + + if allow_number: + got = self._remove_unwanted_precision(want, got) + + return super().check_output(want, got, optionflags) + + def _remove_unwanted_precision(self, want: str, got: str) -> str: + wants = list(self._number_re.finditer(want)) + gots = list(self._number_re.finditer(got)) + if len(wants) != len(gots): + return got + offset = 0 + for w, g in zip(wants, gots, strict=True): + fraction: str | None = w.group("fraction") + exponent: str | None = w.group("exponent1") + if exponent is None: + exponent = w.group("exponent2") + precision = 0 if fraction is None else len(fraction) + if exponent is not None: + precision -= int(exponent) + if float(w.group()) == approx(float(g.group()), abs=10**-precision): + # They're close enough. Replace the text we actually + # got with the text we want, so that it will match when we + # check the string literally. + got = ( + got[: g.start() + offset] + w.group() + got[g.end() + offset :] + ) + offset += w.end() - w.start() - (g.end() - g.start()) + return got + + return LiteralsOutputChecker + + +def _get_checker() -> doctest.OutputChecker: + """Return a doctest.OutputChecker subclass that supports some + additional options: + + * ALLOW_UNICODE and ALLOW_BYTES options to ignore u'' and b'' + prefixes (respectively) in string literals. Useful when the same + doctest should run in Python 2 and Python 3. + + * NUMBER to ignore floating-point differences smaller than the + precision of the literal number in the doctest. + + An inner class is used to avoid importing "doctest" at the module + level. + """ + global CHECKER_CLASS + if CHECKER_CLASS is None: + CHECKER_CLASS = _init_checker_class() + return CHECKER_CLASS() + + +def _get_allow_unicode_flag() -> int: + """Register and return the ALLOW_UNICODE flag.""" + import doctest + + return doctest.register_optionflag("ALLOW_UNICODE") + + +def _get_allow_bytes_flag() -> int: + """Register and return the ALLOW_BYTES flag.""" + import doctest + + return doctest.register_optionflag("ALLOW_BYTES") + + +def _get_number_flag() -> int: + """Register and return the NUMBER flag.""" + import doctest + + return doctest.register_optionflag("NUMBER") + + +def _get_report_choice(key: str) -> int: + """Return the actual `doctest` module flag value. + + We want to do it as late as possible to avoid importing `doctest` and all + its dependencies when parsing options, as it adds overhead and breaks tests. + """ + import doctest + + return { + DOCTEST_REPORT_CHOICE_UDIFF: doctest.REPORT_UDIFF, + DOCTEST_REPORT_CHOICE_CDIFF: doctest.REPORT_CDIFF, + DOCTEST_REPORT_CHOICE_NDIFF: doctest.REPORT_NDIFF, + DOCTEST_REPORT_CHOICE_ONLY_FIRST_FAILURE: doctest.REPORT_ONLY_FIRST_FAILURE, + DOCTEST_REPORT_CHOICE_NONE: 0, + }[key] + + +@fixture(scope="session") +def doctest_namespace() -> dict[str, Any]: + """Fixture that returns a :py:class:`dict` that will be injected into the + namespace of doctests. + + Usually this fixture is used in conjunction with another ``autouse`` fixture: + + .. code-block:: python + + @pytest.fixture(autouse=True) + def add_np(doctest_namespace): + doctest_namespace["np"] = numpy + + For more details: :ref:`doctest_namespace`. + """ + return dict() diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/faulthandler.py b/Backend/venv/lib/python3.12/site-packages/_pytest/faulthandler.py new file mode 100644 index 00000000..080cf583 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/_pytest/faulthandler.py @@ -0,0 +1,119 @@ +from __future__ import annotations + +from collections.abc import Generator +import os +import sys + +from _pytest.config import Config +from _pytest.config.argparsing import Parser +from _pytest.nodes import Item +from _pytest.stash import StashKey +import pytest + + +fault_handler_original_stderr_fd_key = StashKey[int]() +fault_handler_stderr_fd_key = StashKey[int]() + + +def pytest_addoption(parser: Parser) -> None: + help_timeout = ( + "Dump the traceback of all threads if a test takes " + "more than TIMEOUT seconds to finish" + ) + help_exit_on_timeout = ( + "Exit the test process if a test takes more than " + "faulthandler_timeout seconds to finish" + ) + parser.addini("faulthandler_timeout", help_timeout, default=0.0) + parser.addini( + "faulthandler_exit_on_timeout", help_exit_on_timeout, type="bool", default=False + ) + + +def pytest_configure(config: Config) -> None: + import faulthandler + + # at teardown we want to restore the original faulthandler fileno + # but faulthandler has no api to return the original fileno + # so here we stash the stderr fileno to be used at teardown + # sys.stderr and sys.__stderr__ may be closed or patched during the session + # so we can't rely on their values being good at that point (#11572). + stderr_fileno = get_stderr_fileno() + if faulthandler.is_enabled(): + config.stash[fault_handler_original_stderr_fd_key] = stderr_fileno + config.stash[fault_handler_stderr_fd_key] = os.dup(stderr_fileno) + faulthandler.enable(file=config.stash[fault_handler_stderr_fd_key]) + + +def pytest_unconfigure(config: Config) -> None: + import faulthandler + + faulthandler.disable() + # Close the dup file installed during pytest_configure. + if fault_handler_stderr_fd_key in config.stash: + os.close(config.stash[fault_handler_stderr_fd_key]) + del config.stash[fault_handler_stderr_fd_key] + # Re-enable the faulthandler if it was originally enabled. + if fault_handler_original_stderr_fd_key in config.stash: + faulthandler.enable(config.stash[fault_handler_original_stderr_fd_key]) + del config.stash[fault_handler_original_stderr_fd_key] + + +def get_stderr_fileno() -> int: + try: + fileno = sys.stderr.fileno() + # The Twisted Logger will return an invalid file descriptor since it is not backed + # by an FD. So, let's also forward this to the same code path as with pytest-xdist. + if fileno == -1: + raise AttributeError() + return fileno + except (AttributeError, ValueError): + # pytest-xdist monkeypatches sys.stderr with an object that is not an actual file. + # https://docs.python.org/3/library/faulthandler.html#issue-with-file-descriptors + # This is potentially dangerous, but the best we can do. + assert sys.__stderr__ is not None + return sys.__stderr__.fileno() + + +def get_timeout_config_value(config: Config) -> float: + return float(config.getini("faulthandler_timeout") or 0.0) + + +def get_exit_on_timeout_config_value(config: Config) -> bool: + exit_on_timeout = config.getini("faulthandler_exit_on_timeout") + assert isinstance(exit_on_timeout, bool) + return exit_on_timeout + + +@pytest.hookimpl(wrapper=True, trylast=True) +def pytest_runtest_protocol(item: Item) -> Generator[None, object, object]: + timeout = get_timeout_config_value(item.config) + exit_on_timeout = get_exit_on_timeout_config_value(item.config) + if timeout > 0: + import faulthandler + + stderr = item.config.stash[fault_handler_stderr_fd_key] + faulthandler.dump_traceback_later(timeout, file=stderr, exit=exit_on_timeout) + try: + return (yield) + finally: + faulthandler.cancel_dump_traceback_later() + else: + return (yield) + + +@pytest.hookimpl(tryfirst=True) +def pytest_enter_pdb() -> None: + """Cancel any traceback dumping due to timeout before entering pdb.""" + import faulthandler + + faulthandler.cancel_dump_traceback_later() + + +@pytest.hookimpl(tryfirst=True) +def pytest_exception_interact() -> None: + """Cancel any traceback dumping due to an interactive exception being + raised.""" + import faulthandler + + faulthandler.cancel_dump_traceback_later() diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/fixtures.py b/Backend/venv/lib/python3.12/site-packages/_pytest/fixtures.py new file mode 100644 index 00000000..27846db1 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/_pytest/fixtures.py @@ -0,0 +1,2047 @@ +# mypy: allow-untyped-defs +from __future__ import annotations + +import abc +from collections import defaultdict +from collections import deque +from collections import OrderedDict +from collections.abc import Callable +from collections.abc import Generator +from collections.abc import Iterable +from collections.abc import Iterator +from collections.abc import Mapping +from collections.abc import MutableMapping +from collections.abc import Sequence +from collections.abc import Set as AbstractSet +import dataclasses +import functools +import inspect +import os +from pathlib import Path +import sys +import types +from typing import Any +from typing import cast +from typing import Final +from typing import final +from typing import Generic +from typing import NoReturn +from typing import overload +from typing import TYPE_CHECKING +from typing import TypeVar +import warnings + +import _pytest +from _pytest import nodes +from _pytest._code import getfslineno +from _pytest._code import Source +from _pytest._code.code import FormattedExcinfo +from _pytest._code.code import TerminalRepr +from _pytest._io import TerminalWriter +from _pytest.compat import assert_never +from _pytest.compat import get_real_func +from _pytest.compat import getfuncargnames +from _pytest.compat import getimfunc +from _pytest.compat import getlocation +from _pytest.compat import NOTSET +from _pytest.compat import NotSetType +from _pytest.compat import safe_getattr +from _pytest.compat import safe_isclass +from _pytest.compat import signature +from _pytest.config import _PluggyPlugin +from _pytest.config import Config +from _pytest.config import ExitCode +from _pytest.config.argparsing import Parser +from _pytest.deprecated import check_ispytest +from _pytest.deprecated import MARKED_FIXTURE +from _pytest.deprecated import YIELD_FIXTURE +from _pytest.main import Session +from _pytest.mark import Mark +from _pytest.mark import ParameterSet +from _pytest.mark.structures import MarkDecorator +from _pytest.outcomes import fail +from _pytest.outcomes import skip +from _pytest.outcomes import TEST_OUTCOME +from _pytest.pathlib import absolutepath +from _pytest.pathlib import bestrelpath +from _pytest.scope import _ScopeName +from _pytest.scope import HIGH_SCOPES +from _pytest.scope import Scope +from _pytest.warning_types import PytestRemovedIn9Warning +from _pytest.warning_types import PytestWarning + + +if sys.version_info < (3, 11): + from exceptiongroup import BaseExceptionGroup + + +if TYPE_CHECKING: + from _pytest.python import CallSpec2 + from _pytest.python import Function + from _pytest.python import Metafunc + + +# The value of the fixture -- return/yield of the fixture function (type variable). +FixtureValue = TypeVar("FixtureValue", covariant=True) +# The type of the fixture function (type variable). +FixtureFunction = TypeVar("FixtureFunction", bound=Callable[..., object]) +# The type of a fixture function (type alias generic in fixture value). +_FixtureFunc = Callable[..., FixtureValue] | Callable[..., Generator[FixtureValue]] +# The type of FixtureDef.cached_result (type alias generic in fixture value). +_FixtureCachedResult = ( + tuple[ + # The result. + FixtureValue, + # Cache key. + object, + None, + ] + | tuple[ + None, + # Cache key. + object, + # The exception and the original traceback. + tuple[BaseException, types.TracebackType | None], + ] +) + + +def pytest_sessionstart(session: Session) -> None: + session._fixturemanager = FixtureManager(session) + + +def get_scope_package( + node: nodes.Item, + fixturedef: FixtureDef[object], +) -> nodes.Node | None: + from _pytest.python import Package + + for parent in node.iter_parents(): + if isinstance(parent, Package) and parent.nodeid == fixturedef.baseid: + return parent + return node.session + + +def get_scope_node(node: nodes.Node, scope: Scope) -> nodes.Node | None: + """Get the closest parent node (including self) which matches the given + scope. + + If there is no parent node for the scope (e.g. asking for class scope on a + Module, or on a Function when not defined in a class), returns None. + """ + import _pytest.python + + if scope is Scope.Function: + # Type ignored because this is actually safe, see: + # https://github.com/python/mypy/issues/4717 + return node.getparent(nodes.Item) # type: ignore[type-abstract] + elif scope is Scope.Class: + return node.getparent(_pytest.python.Class) + elif scope is Scope.Module: + return node.getparent(_pytest.python.Module) + elif scope is Scope.Package: + return node.getparent(_pytest.python.Package) + elif scope is Scope.Session: + return node.getparent(_pytest.main.Session) + else: + assert_never(scope) + + +# TODO: Try to use FixtureFunctionDefinition instead of the marker +def getfixturemarker(obj: object) -> FixtureFunctionMarker | None: + """Return fixturemarker or None if it doesn't exist""" + if isinstance(obj, FixtureFunctionDefinition): + return obj._fixture_function_marker + return None + + +# Algorithm for sorting on a per-parametrized resource setup basis. +# It is called for Session scope first and performs sorting +# down to the lower scopes such as to minimize number of "high scope" +# setups and teardowns. + + +@dataclasses.dataclass(frozen=True) +class ParamArgKey: + """A key for a high-scoped parameter used by an item. + + For use as a hashable key in `reorder_items`. The combination of fields + is meant to uniquely identify a particular "instance" of a param, + potentially shared by multiple items in a scope. + """ + + #: The param name. + argname: str + param_index: int + #: For scopes Package, Module, Class, the path to the file (directory in + #: Package's case) of the package/module/class where the item is defined. + scoped_item_path: Path | None + #: For Class scope, the class where the item is defined. + item_cls: type | None + + +_V = TypeVar("_V") +OrderedSet = dict[_V, None] + + +def get_param_argkeys(item: nodes.Item, scope: Scope) -> Iterator[ParamArgKey]: + """Return all ParamArgKeys for item matching the specified high scope.""" + assert scope is not Scope.Function + + try: + callspec: CallSpec2 = item.callspec # type: ignore[attr-defined] + except AttributeError: + return + + item_cls = None + if scope is Scope.Session: + scoped_item_path = None + elif scope is Scope.Package: + # Package key = module's directory. + scoped_item_path = item.path.parent + elif scope is Scope.Module: + scoped_item_path = item.path + elif scope is Scope.Class: + scoped_item_path = item.path + item_cls = item.cls # type: ignore[attr-defined] + else: + assert_never(scope) + + for argname in callspec.indices: + if callspec._arg2scope[argname] != scope: + continue + param_index = callspec.indices[argname] + yield ParamArgKey(argname, param_index, scoped_item_path, item_cls) + + +def reorder_items(items: Sequence[nodes.Item]) -> list[nodes.Item]: + argkeys_by_item: dict[Scope, dict[nodes.Item, OrderedSet[ParamArgKey]]] = {} + items_by_argkey: dict[Scope, dict[ParamArgKey, OrderedDict[nodes.Item, None]]] = {} + for scope in HIGH_SCOPES: + scoped_argkeys_by_item = argkeys_by_item[scope] = {} + scoped_items_by_argkey = items_by_argkey[scope] = defaultdict(OrderedDict) + for item in items: + argkeys = dict.fromkeys(get_param_argkeys(item, scope)) + if argkeys: + scoped_argkeys_by_item[item] = argkeys + for argkey in argkeys: + scoped_items_by_argkey[argkey][item] = None + + items_set = dict.fromkeys(items) + return list( + reorder_items_atscope( + items_set, argkeys_by_item, items_by_argkey, Scope.Session + ) + ) + + +def reorder_items_atscope( + items: OrderedSet[nodes.Item], + argkeys_by_item: Mapping[Scope, Mapping[nodes.Item, OrderedSet[ParamArgKey]]], + items_by_argkey: Mapping[ + Scope, Mapping[ParamArgKey, OrderedDict[nodes.Item, None]] + ], + scope: Scope, +) -> OrderedSet[nodes.Item]: + if scope is Scope.Function or len(items) < 3: + return items + + scoped_items_by_argkey = items_by_argkey[scope] + scoped_argkeys_by_item = argkeys_by_item[scope] + + ignore: set[ParamArgKey] = set() + items_deque = deque(items) + items_done: OrderedSet[nodes.Item] = {} + while items_deque: + no_argkey_items: OrderedSet[nodes.Item] = {} + slicing_argkey = None + while items_deque: + item = items_deque.popleft() + if item in items_done or item in no_argkey_items: + continue + argkeys = dict.fromkeys( + k for k in scoped_argkeys_by_item.get(item, ()) if k not in ignore + ) + if not argkeys: + no_argkey_items[item] = None + else: + slicing_argkey, _ = argkeys.popitem() + # We don't have to remove relevant items from later in the + # deque because they'll just be ignored. + matching_items = [ + i for i in scoped_items_by_argkey[slicing_argkey] if i in items + ] + for i in reversed(matching_items): + items_deque.appendleft(i) + # Fix items_by_argkey order. + for other_scope in HIGH_SCOPES: + other_scoped_items_by_argkey = items_by_argkey[other_scope] + for argkey in argkeys_by_item[other_scope].get(i, ()): + argkey_dict = other_scoped_items_by_argkey[argkey] + if not hasattr(sys, "pypy_version_info"): + argkey_dict[i] = None + argkey_dict.move_to_end(i, last=False) + else: + # Work around a bug in PyPy: + # https://github.com/pypy/pypy/issues/5257 + # https://github.com/pytest-dev/pytest/issues/13312 + bkp = argkey_dict.copy() + argkey_dict.clear() + argkey_dict[i] = None + argkey_dict.update(bkp) + break + if no_argkey_items: + reordered_no_argkey_items = reorder_items_atscope( + no_argkey_items, argkeys_by_item, items_by_argkey, scope.next_lower() + ) + items_done.update(reordered_no_argkey_items) + if slicing_argkey is not None: + ignore.add(slicing_argkey) + return items_done + + +@dataclasses.dataclass(frozen=True) +class FuncFixtureInfo: + """Fixture-related information for a fixture-requesting item (e.g. test + function). + + This is used to examine the fixtures which an item requests statically + (known during collection). This includes autouse fixtures, fixtures + requested by the `usefixtures` marker, fixtures requested in the function + parameters, and the transitive closure of these. + + An item may also request fixtures dynamically (using `request.getfixturevalue`); + these are not reflected here. + """ + + __slots__ = ("argnames", "initialnames", "name2fixturedefs", "names_closure") + + # Fixture names that the item requests directly by function parameters. + argnames: tuple[str, ...] + # Fixture names that the item immediately requires. These include + # argnames + fixture names specified via usefixtures and via autouse=True in + # fixture definitions. + initialnames: tuple[str, ...] + # The transitive closure of the fixture names that the item requires. + # Note: can't include dynamic dependencies (`request.getfixturevalue` calls). + names_closure: list[str] + # A map from a fixture name in the transitive closure to the FixtureDefs + # matching the name which are applicable to this function. + # There may be multiple overriding fixtures with the same name. The + # sequence is ordered from furthest to closes to the function. + name2fixturedefs: dict[str, Sequence[FixtureDef[Any]]] + + def prune_dependency_tree(self) -> None: + """Recompute names_closure from initialnames and name2fixturedefs. + + Can only reduce names_closure, which means that the new closure will + always be a subset of the old one. The order is preserved. + + This method is needed because direct parametrization may shadow some + of the fixtures that were included in the originally built dependency + tree. In this way the dependency tree can get pruned, and the closure + of argnames may get reduced. + """ + closure: set[str] = set() + working_set = set(self.initialnames) + while working_set: + argname = working_set.pop() + # Argname may be something not included in the original names_closure, + # in which case we ignore it. This currently happens with pseudo + # FixtureDefs which wrap 'get_direct_param_fixture_func(request)'. + # So they introduce the new dependency 'request' which might have + # been missing in the original tree (closure). + if argname not in closure and argname in self.names_closure: + closure.add(argname) + if argname in self.name2fixturedefs: + working_set.update(self.name2fixturedefs[argname][-1].argnames) + + self.names_closure[:] = sorted(closure, key=self.names_closure.index) + + +class FixtureRequest(abc.ABC): + """The type of the ``request`` fixture. + + A request object gives access to the requesting test context and has a + ``param`` attribute in case the fixture is parametrized. + """ + + def __init__( + self, + pyfuncitem: Function, + fixturename: str | None, + arg2fixturedefs: dict[str, Sequence[FixtureDef[Any]]], + fixture_defs: dict[str, FixtureDef[Any]], + *, + _ispytest: bool = False, + ) -> None: + check_ispytest(_ispytest) + #: Fixture for which this request is being performed. + self.fixturename: Final = fixturename + self._pyfuncitem: Final = pyfuncitem + # The FixtureDefs for each fixture name requested by this item. + # Starts from the statically-known fixturedefs resolved during + # collection. Dynamically requested fixtures (using + # `request.getfixturevalue("foo")`) are added dynamically. + self._arg2fixturedefs: Final = arg2fixturedefs + # The evaluated argnames so far, mapping to the FixtureDef they resolved + # to. + self._fixture_defs: Final = fixture_defs + # Notes on the type of `param`: + # -`request.param` is only defined in parametrized fixtures, and will raise + # AttributeError otherwise. Python typing has no notion of "undefined", so + # this cannot be reflected in the type. + # - Technically `param` is only (possibly) defined on SubRequest, not + # FixtureRequest, but the typing of that is still in flux so this cheats. + # - In the future we might consider using a generic for the param type, but + # for now just using Any. + self.param: Any + + @property + def _fixturemanager(self) -> FixtureManager: + return self._pyfuncitem.session._fixturemanager + + @property + @abc.abstractmethod + def _scope(self) -> Scope: + raise NotImplementedError() + + @property + def scope(self) -> _ScopeName: + """Scope string, one of "function", "class", "module", "package", "session".""" + return self._scope.value + + @abc.abstractmethod + def _check_scope( + self, + requested_fixturedef: FixtureDef[object], + requested_scope: Scope, + ) -> None: + raise NotImplementedError() + + @property + def fixturenames(self) -> list[str]: + """Names of all active fixtures in this request.""" + result = list(self._pyfuncitem.fixturenames) + result.extend(set(self._fixture_defs).difference(result)) + return result + + @property + @abc.abstractmethod + def node(self): + """Underlying collection node (depends on current request scope).""" + raise NotImplementedError() + + @property + def config(self) -> Config: + """The pytest config object associated with this request.""" + return self._pyfuncitem.config + + @property + def function(self): + """Test function object if the request has a per-function scope.""" + if self.scope != "function": + raise AttributeError( + f"function not available in {self.scope}-scoped context" + ) + return self._pyfuncitem.obj + + @property + def cls(self): + """Class (can be None) where the test function was collected.""" + if self.scope not in ("class", "function"): + raise AttributeError(f"cls not available in {self.scope}-scoped context") + clscol = self._pyfuncitem.getparent(_pytest.python.Class) + if clscol: + return clscol.obj + + @property + def instance(self): + """Instance (can be None) on which test function was collected.""" + if self.scope != "function": + return None + return getattr(self._pyfuncitem, "instance", None) + + @property + def module(self): + """Python module object where the test function was collected.""" + if self.scope not in ("function", "class", "module"): + raise AttributeError(f"module not available in {self.scope}-scoped context") + mod = self._pyfuncitem.getparent(_pytest.python.Module) + assert mod is not None + return mod.obj + + @property + def path(self) -> Path: + """Path where the test function was collected.""" + if self.scope not in ("function", "class", "module", "package"): + raise AttributeError(f"path not available in {self.scope}-scoped context") + return self._pyfuncitem.path + + @property + def keywords(self) -> MutableMapping[str, Any]: + """Keywords/markers dictionary for the underlying node.""" + node: nodes.Node = self.node + return node.keywords + + @property + def session(self) -> Session: + """Pytest session object.""" + return self._pyfuncitem.session + + @abc.abstractmethod + def addfinalizer(self, finalizer: Callable[[], object]) -> None: + """Add finalizer/teardown function to be called without arguments after + the last test within the requesting test context finished execution.""" + raise NotImplementedError() + + def applymarker(self, marker: str | MarkDecorator) -> None: + """Apply a marker to a single test function invocation. + + This method is useful if you don't want to have a keyword/marker + on all function invocations. + + :param marker: + An object created by a call to ``pytest.mark.NAME(...)``. + """ + self.node.add_marker(marker) + + def raiseerror(self, msg: str | None) -> NoReturn: + """Raise a FixtureLookupError exception. + + :param msg: + An optional custom error message. + """ + raise FixtureLookupError(None, self, msg) + + def getfixturevalue(self, argname: str) -> Any: + """Dynamically run a named fixture function. + + Declaring fixtures via function argument is recommended where possible. + But if you can only decide whether to use another fixture at test + setup time, you may use this function to retrieve it inside a fixture + or test function body. + + This method can be used during the test setup phase or the test run + phase, but during the test teardown phase a fixture's value may not + be available. + + :param argname: + The fixture name. + :raises pytest.FixtureLookupError: + If the given fixture could not be found. + """ + # Note that in addition to the use case described in the docstring, + # getfixturevalue() is also called by pytest itself during item and fixture + # setup to evaluate the fixtures that are requested statically + # (using function parameters, autouse, etc). + + fixturedef = self._get_active_fixturedef(argname) + assert fixturedef.cached_result is not None, ( + f'The fixture value for "{argname}" is not available. ' + "This can happen when the fixture has already been torn down." + ) + return fixturedef.cached_result[0] + + def _iter_chain(self) -> Iterator[SubRequest]: + """Yield all SubRequests in the chain, from self up. + + Note: does *not* yield the TopRequest. + """ + current = self + while isinstance(current, SubRequest): + yield current + current = current._parent_request + + def _get_active_fixturedef(self, argname: str) -> FixtureDef[object]: + if argname == "request": + return RequestFixtureDef(self) + + # If we already finished computing a fixture by this name in this item, + # return it. + fixturedef = self._fixture_defs.get(argname) + if fixturedef is not None: + self._check_scope(fixturedef, fixturedef._scope) + return fixturedef + + # Find the appropriate fixturedef. + fixturedefs = self._arg2fixturedefs.get(argname, None) + if fixturedefs is None: + # We arrive here because of a dynamic call to + # getfixturevalue(argname) which was naturally + # not known at parsing/collection time. + fixturedefs = self._fixturemanager.getfixturedefs(argname, self._pyfuncitem) + if fixturedefs is not None: + self._arg2fixturedefs[argname] = fixturedefs + # No fixtures defined with this name. + if fixturedefs is None: + raise FixtureLookupError(argname, self) + # The are no fixtures with this name applicable for the function. + if not fixturedefs: + raise FixtureLookupError(argname, self) + + # A fixture may override another fixture with the same name, e.g. a + # fixture in a module can override a fixture in a conftest, a fixture in + # a class can override a fixture in the module, and so on. + # An overriding fixture can request its own name (possibly indirectly); + # in this case it gets the value of the fixture it overrides, one level + # up. + # Check how many `argname`s deep we are, and take the next one. + # `fixturedefs` is sorted from furthest to closest, so use negative + # indexing to go in reverse. + index = -1 + for request in self._iter_chain(): + if request.fixturename == argname: + index -= 1 + # If already consumed all of the available levels, fail. + if -index > len(fixturedefs): + raise FixtureLookupError(argname, self) + fixturedef = fixturedefs[index] + + # Prepare a SubRequest object for calling the fixture. + try: + callspec = self._pyfuncitem.callspec + except AttributeError: + callspec = None + if callspec is not None and argname in callspec.params: + param = callspec.params[argname] + param_index = callspec.indices[argname] + # The parametrize invocation scope overrides the fixture's scope. + scope = callspec._arg2scope[argname] + else: + param = NOTSET + param_index = 0 + scope = fixturedef._scope + self._check_fixturedef_without_param(fixturedef) + # The parametrize invocation scope only controls caching behavior while + # allowing wider-scoped fixtures to keep depending on the parametrized + # fixture. Scope control is enforced for parametrized fixtures + # by recreating the whole fixture tree on parameter change. + # Hence `fixturedef._scope`, not `scope`. + self._check_scope(fixturedef, fixturedef._scope) + subrequest = SubRequest( + self, scope, param, param_index, fixturedef, _ispytest=True + ) + + # Make sure the fixture value is cached, running it if it isn't + fixturedef.execute(request=subrequest) + + self._fixture_defs[argname] = fixturedef + return fixturedef + + def _check_fixturedef_without_param(self, fixturedef: FixtureDef[object]) -> None: + """Check that this request is allowed to execute this fixturedef without + a param.""" + funcitem = self._pyfuncitem + has_params = fixturedef.params is not None + fixtures_not_supported = getattr(funcitem, "nofuncargs", False) + if has_params and fixtures_not_supported: + msg = ( + f"{funcitem.name} does not support fixtures, maybe unittest.TestCase subclass?\n" + f"Node id: {funcitem.nodeid}\n" + f"Function type: {type(funcitem).__name__}" + ) + fail(msg, pytrace=False) + if has_params: + frame = inspect.stack()[3] + frameinfo = inspect.getframeinfo(frame[0]) + source_path = absolutepath(frameinfo.filename) + source_lineno = frameinfo.lineno + try: + source_path_str = str(source_path.relative_to(funcitem.config.rootpath)) + except ValueError: + source_path_str = str(source_path) + location = getlocation(fixturedef.func, funcitem.config.rootpath) + msg = ( + "The requested fixture has no parameter defined for test:\n" + f" {funcitem.nodeid}\n\n" + f"Requested fixture '{fixturedef.argname}' defined in:\n" + f"{location}\n\n" + f"Requested here:\n" + f"{source_path_str}:{source_lineno}" + ) + fail(msg, pytrace=False) + + def _get_fixturestack(self) -> list[FixtureDef[Any]]: + values = [request._fixturedef for request in self._iter_chain()] + values.reverse() + return values + + +@final +class TopRequest(FixtureRequest): + """The type of the ``request`` fixture in a test function.""" + + def __init__(self, pyfuncitem: Function, *, _ispytest: bool = False) -> None: + super().__init__( + fixturename=None, + pyfuncitem=pyfuncitem, + arg2fixturedefs=pyfuncitem._fixtureinfo.name2fixturedefs.copy(), + fixture_defs={}, + _ispytest=_ispytest, + ) + + @property + def _scope(self) -> Scope: + return Scope.Function + + def _check_scope( + self, + requested_fixturedef: FixtureDef[object], + requested_scope: Scope, + ) -> None: + # TopRequest always has function scope so always valid. + pass + + @property + def node(self): + return self._pyfuncitem + + def __repr__(self) -> str: + return f"" + + def _fillfixtures(self) -> None: + item = self._pyfuncitem + for argname in item.fixturenames: + if argname not in item.funcargs: + item.funcargs[argname] = self.getfixturevalue(argname) + + def addfinalizer(self, finalizer: Callable[[], object]) -> None: + self.node.addfinalizer(finalizer) + + +@final +class SubRequest(FixtureRequest): + """The type of the ``request`` fixture in a fixture function requested + (transitively) by a test function.""" + + def __init__( + self, + request: FixtureRequest, + scope: Scope, + param: Any, + param_index: int, + fixturedef: FixtureDef[object], + *, + _ispytest: bool = False, + ) -> None: + super().__init__( + pyfuncitem=request._pyfuncitem, + fixturename=fixturedef.argname, + fixture_defs=request._fixture_defs, + arg2fixturedefs=request._arg2fixturedefs, + _ispytest=_ispytest, + ) + self._parent_request: Final[FixtureRequest] = request + self._scope_field: Final = scope + self._fixturedef: Final[FixtureDef[object]] = fixturedef + if param is not NOTSET: + self.param = param + self.param_index: Final = param_index + + def __repr__(self) -> str: + return f"" + + @property + def _scope(self) -> Scope: + return self._scope_field + + @property + def node(self): + scope = self._scope + if scope is Scope.Function: + # This might also be a non-function Item despite its attribute name. + node: nodes.Node | None = self._pyfuncitem + elif scope is Scope.Package: + node = get_scope_package(self._pyfuncitem, self._fixturedef) + else: + node = get_scope_node(self._pyfuncitem, scope) + if node is None and scope is Scope.Class: + # Fallback to function item itself. + node = self._pyfuncitem + assert node, ( + f'Could not obtain a node for scope "{scope}" for function {self._pyfuncitem!r}' + ) + return node + + def _check_scope( + self, + requested_fixturedef: FixtureDef[object], + requested_scope: Scope, + ) -> None: + if self._scope > requested_scope: + # Try to report something helpful. + argname = requested_fixturedef.argname + fixture_stack = "\n".join( + self._format_fixturedef_line(fixturedef) + for fixturedef in self._get_fixturestack() + ) + requested_fixture = self._format_fixturedef_line(requested_fixturedef) + fail( + f"ScopeMismatch: You tried to access the {requested_scope.value} scoped " + f"fixture {argname} with a {self._scope.value} scoped request object. " + f"Requesting fixture stack:\n{fixture_stack}\n" + f"Requested fixture:\n{requested_fixture}", + pytrace=False, + ) + + def _format_fixturedef_line(self, fixturedef: FixtureDef[object]) -> str: + factory = fixturedef.func + path, lineno = getfslineno(factory) + if isinstance(path, Path): + path = bestrelpath(self._pyfuncitem.session.path, path) + sig = signature(factory) + return f"{path}:{lineno + 1}: def {factory.__name__}{sig}" + + def addfinalizer(self, finalizer: Callable[[], object]) -> None: + self._fixturedef.addfinalizer(finalizer) + + +@final +class FixtureLookupError(LookupError): + """Could not return a requested fixture (missing or invalid).""" + + def __init__( + self, argname: str | None, request: FixtureRequest, msg: str | None = None + ) -> None: + self.argname = argname + self.request = request + self.fixturestack = request._get_fixturestack() + self.msg = msg + + def formatrepr(self) -> FixtureLookupErrorRepr: + tblines: list[str] = [] + addline = tblines.append + stack = [self.request._pyfuncitem.obj] + stack.extend(map(lambda x: x.func, self.fixturestack)) + msg = self.msg + # This function currently makes an assumption that a non-None msg means we + # have a non-empty `self.fixturestack`. This is currently true, but if + # somebody at some point want to extend the use of FixtureLookupError to + # new cases it might break. + # Add the assert to make it clearer to developer that this will fail, otherwise + # it crashes because `fspath` does not get set due to `stack` being empty. + assert self.msg is None or self.fixturestack, ( + "formatrepr assumptions broken, rewrite it to handle it" + ) + if msg is not None: + # The last fixture raise an error, let's present + # it at the requesting side. + stack = stack[:-1] + for function in stack: + fspath, lineno = getfslineno(function) + try: + lines, _ = inspect.getsourcelines(get_real_func(function)) + except (OSError, IndexError, TypeError): + error_msg = "file %s, line %s: source code not available" + addline(error_msg % (fspath, lineno + 1)) + else: + addline(f"file {fspath}, line {lineno + 1}") + for i, line in enumerate(lines): + line = line.rstrip() + addline(" " + line) + if line.lstrip().startswith("def"): + break + + if msg is None: + fm = self.request._fixturemanager + available = set() + parent = self.request._pyfuncitem.parent + assert parent is not None + for name, fixturedefs in fm._arg2fixturedefs.items(): + faclist = list(fm._matchfactories(fixturedefs, parent)) + if faclist: + available.add(name) + if self.argname in available: + msg = ( + f" recursive dependency involving fixture '{self.argname}' detected" + ) + else: + msg = f"fixture '{self.argname}' not found" + msg += "\n available fixtures: {}".format(", ".join(sorted(available))) + msg += "\n use 'pytest --fixtures [testpath]' for help on them." + + return FixtureLookupErrorRepr(fspath, lineno, tblines, msg, self.argname) + + +class FixtureLookupErrorRepr(TerminalRepr): + def __init__( + self, + filename: str | os.PathLike[str], + firstlineno: int, + tblines: Sequence[str], + errorstring: str, + argname: str | None, + ) -> None: + self.tblines = tblines + self.errorstring = errorstring + self.filename = filename + self.firstlineno = firstlineno + self.argname = argname + + def toterminal(self, tw: TerminalWriter) -> None: + # tw.line("FixtureLookupError: %s" %(self.argname), red=True) + for tbline in self.tblines: + tw.line(tbline.rstrip()) + lines = self.errorstring.split("\n") + if lines: + tw.line( + f"{FormattedExcinfo.fail_marker} {lines[0].strip()}", + red=True, + ) + for line in lines[1:]: + tw.line( + f"{FormattedExcinfo.flow_marker} {line.strip()}", + red=True, + ) + tw.line() + tw.line(f"{os.fspath(self.filename)}:{self.firstlineno + 1}") + + +def call_fixture_func( + fixturefunc: _FixtureFunc[FixtureValue], request: FixtureRequest, kwargs +) -> FixtureValue: + if inspect.isgeneratorfunction(fixturefunc): + fixturefunc = cast(Callable[..., Generator[FixtureValue]], fixturefunc) + generator = fixturefunc(**kwargs) + try: + fixture_result = next(generator) + except StopIteration: + raise ValueError(f"{request.fixturename} did not yield a value") from None + finalizer = functools.partial(_teardown_yield_fixture, fixturefunc, generator) + request.addfinalizer(finalizer) + else: + fixturefunc = cast(Callable[..., FixtureValue], fixturefunc) + fixture_result = fixturefunc(**kwargs) + return fixture_result + + +def _teardown_yield_fixture(fixturefunc, it) -> None: + """Execute the teardown of a fixture function by advancing the iterator + after the yield and ensure the iteration ends (if not it means there is + more than one yield in the function).""" + try: + next(it) + except StopIteration: + pass + else: + fs, lineno = getfslineno(fixturefunc) + fail( + f"fixture function has more than one 'yield':\n\n" + f"{Source(fixturefunc).indent()}\n" + f"{fs}:{lineno + 1}", + pytrace=False, + ) + + +def _eval_scope_callable( + scope_callable: Callable[[str, Config], _ScopeName], + fixture_name: str, + config: Config, +) -> _ScopeName: + try: + # Type ignored because there is no typing mechanism to specify + # keyword arguments, currently. + result = scope_callable(fixture_name=fixture_name, config=config) # type: ignore[call-arg] + except Exception as e: + raise TypeError( + f"Error evaluating {scope_callable} while defining fixture '{fixture_name}'.\n" + "Expected a function with the signature (*, fixture_name, config)" + ) from e + if not isinstance(result, str): + fail( + f"Expected {scope_callable} to return a 'str' while defining fixture '{fixture_name}', but it returned:\n" + f"{result!r}", + pytrace=False, + ) + return result + + +class FixtureDef(Generic[FixtureValue]): + """A container for a fixture definition. + + Note: At this time, only explicitly documented fields and methods are + considered public stable API. + """ + + def __init__( + self, + config: Config, + baseid: str | None, + argname: str, + func: _FixtureFunc[FixtureValue], + scope: Scope | _ScopeName | Callable[[str, Config], _ScopeName] | None, + params: Sequence[object] | None, + ids: tuple[object | None, ...] | Callable[[Any], object | None] | None = None, + *, + _ispytest: bool = False, + # only used in a deprecationwarning msg, can be removed in pytest9 + _autouse: bool = False, + ) -> None: + check_ispytest(_ispytest) + # The "base" node ID for the fixture. + # + # This is a node ID prefix. A fixture is only available to a node (e.g. + # a `Function` item) if the fixture's baseid is a nodeid of a parent of + # node. + # + # For a fixture found in a Collector's object (e.g. a `Module`s module, + # a `Class`'s class), the baseid is the Collector's nodeid. + # + # For a fixture found in a conftest plugin, the baseid is the conftest's + # directory path relative to the rootdir. + # + # For other plugins, the baseid is the empty string (always matches). + self.baseid: Final = baseid or "" + # Whether the fixture was found from a node or a conftest in the + # collection tree. Will be false for fixtures defined in non-conftest + # plugins. + self.has_location: Final = baseid is not None + # The fixture factory function. + self.func: Final = func + # The name by which the fixture may be requested. + self.argname: Final = argname + if scope is None: + scope = Scope.Function + elif callable(scope): + scope = _eval_scope_callable(scope, argname, config) + if isinstance(scope, str): + scope = Scope.from_user( + scope, descr=f"Fixture '{func.__name__}'", where=baseid + ) + self._scope: Final = scope + # If the fixture is directly parametrized, the parameter values. + self.params: Final = params + # If the fixture is directly parametrized, a tuple of explicit IDs to + # assign to the parameter values, or a callable to generate an ID given + # a parameter value. + self.ids: Final = ids + # The names requested by the fixtures. + self.argnames: Final = getfuncargnames(func, name=argname) + # If the fixture was executed, the current value of the fixture. + # Can change if the fixture is executed with different parameters. + self.cached_result: _FixtureCachedResult[FixtureValue] | None = None + self._finalizers: Final[list[Callable[[], object]]] = [] + + # only used to emit a deprecationwarning, can be removed in pytest9 + self._autouse = _autouse + + @property + def scope(self) -> _ScopeName: + """Scope string, one of "function", "class", "module", "package", "session".""" + return self._scope.value + + def addfinalizer(self, finalizer: Callable[[], object]) -> None: + self._finalizers.append(finalizer) + + def finish(self, request: SubRequest) -> None: + exceptions: list[BaseException] = [] + while self._finalizers: + fin = self._finalizers.pop() + try: + fin() + except BaseException as e: + exceptions.append(e) + node = request.node + node.ihook.pytest_fixture_post_finalizer(fixturedef=self, request=request) + # Even if finalization fails, we invalidate the cached fixture + # value and remove all finalizers because they may be bound methods + # which will keep instances alive. + self.cached_result = None + self._finalizers.clear() + if len(exceptions) == 1: + raise exceptions[0] + elif len(exceptions) > 1: + msg = f'errors while tearing down fixture "{self.argname}" of {node}' + raise BaseExceptionGroup(msg, exceptions[::-1]) + + def execute(self, request: SubRequest) -> FixtureValue: + """Return the value of this fixture, executing it if not cached.""" + # Ensure that the dependent fixtures requested by this fixture are loaded. + # This needs to be done before checking if we have a cached value, since + # if a dependent fixture has their cache invalidated, e.g. due to + # parametrization, they finalize themselves and fixtures depending on it + # (which will likely include this fixture) setting `self.cached_result = None`. + # See #4871 + requested_fixtures_that_should_finalize_us = [] + for argname in self.argnames: + fixturedef = request._get_active_fixturedef(argname) + # Saves requested fixtures in a list so we later can add our finalizer + # to them, ensuring that if a requested fixture gets torn down we get torn + # down first. This is generally handled by SetupState, but still currently + # needed when this fixture is not parametrized but depends on a parametrized + # fixture. + requested_fixtures_that_should_finalize_us.append(fixturedef) + + # Check for (and return) cached value/exception. + if self.cached_result is not None: + request_cache_key = self.cache_key(request) + cache_key = self.cached_result[1] + try: + # Attempt to make a normal == check: this might fail for objects + # which do not implement the standard comparison (like numpy arrays -- #6497). + cache_hit = bool(request_cache_key == cache_key) + except (ValueError, RuntimeError): + # If the comparison raises, use 'is' as fallback. + cache_hit = request_cache_key is cache_key + + if cache_hit: + if self.cached_result[2] is not None: + exc, exc_tb = self.cached_result[2] + raise exc.with_traceback(exc_tb) + else: + return self.cached_result[0] + # We have a previous but differently parametrized fixture instance + # so we need to tear it down before creating a new one. + self.finish(request) + assert self.cached_result is None + + # Add finalizer to requested fixtures we saved previously. + # We make sure to do this after checking for cached value to avoid + # adding our finalizer multiple times. (#12135) + finalizer = functools.partial(self.finish, request=request) + for parent_fixture in requested_fixtures_that_should_finalize_us: + parent_fixture.addfinalizer(finalizer) + + ihook = request.node.ihook + try: + # Setup the fixture, run the code in it, and cache the value + # in self.cached_result. + result: FixtureValue = ihook.pytest_fixture_setup( + fixturedef=self, request=request + ) + finally: + # Schedule our finalizer, even if the setup failed. + request.node.addfinalizer(finalizer) + + return result + + def cache_key(self, request: SubRequest) -> object: + return getattr(request, "param", None) + + def __repr__(self) -> str: + return f"" + + +class RequestFixtureDef(FixtureDef[FixtureRequest]): + """A custom FixtureDef for the special "request" fixture. + + A new one is generated on-demand whenever "request" is requested. + """ + + def __init__(self, request: FixtureRequest) -> None: + super().__init__( + config=request.config, + baseid=None, + argname="request", + func=lambda: request, + scope=Scope.Function, + params=None, + _ispytest=True, + ) + self.cached_result = (request, [0], None) + + def addfinalizer(self, finalizer: Callable[[], object]) -> None: + pass + + +def resolve_fixture_function( + fixturedef: FixtureDef[FixtureValue], request: FixtureRequest +) -> _FixtureFunc[FixtureValue]: + """Get the actual callable that can be called to obtain the fixture + value.""" + fixturefunc = fixturedef.func + # The fixture function needs to be bound to the actual + # request.instance so that code working with "fixturedef" behaves + # as expected. + instance = request.instance + if instance is not None: + # Handle the case where fixture is defined not in a test class, but some other class + # (for example a plugin class with a fixture), see #2270. + if hasattr(fixturefunc, "__self__") and not isinstance( + instance, + fixturefunc.__self__.__class__, + ): + return fixturefunc + fixturefunc = getimfunc(fixturedef.func) + if fixturefunc != fixturedef.func: + fixturefunc = fixturefunc.__get__(instance) + return fixturefunc + + +def pytest_fixture_setup( + fixturedef: FixtureDef[FixtureValue], request: SubRequest +) -> FixtureValue: + """Execution of fixture setup.""" + kwargs = {} + for argname in fixturedef.argnames: + kwargs[argname] = request.getfixturevalue(argname) + + fixturefunc = resolve_fixture_function(fixturedef, request) + my_cache_key = fixturedef.cache_key(request) + + if inspect.isasyncgenfunction(fixturefunc) or inspect.iscoroutinefunction( + fixturefunc + ): + auto_str = " with autouse=True" if fixturedef._autouse else "" + + warnings.warn( + PytestRemovedIn9Warning( + f"{request.node.name!r} requested an async fixture " + f"{request.fixturename!r}{auto_str}, with no plugin or hook that " + "handled it. This is usually an error, as pytest does not natively " + "support it. " + "This will turn into an error in pytest 9.\n" + "See: https://docs.pytest.org/en/stable/deprecations.html#sync-test-depending-on-async-fixture" + ), + # no stacklevel will point at users code, so we just point here + stacklevel=1, + ) + + try: + result = call_fixture_func(fixturefunc, request, kwargs) + except TEST_OUTCOME as e: + if isinstance(e, skip.Exception): + # The test requested a fixture which caused a skip. + # Don't show the fixture as the skip location, as then the user + # wouldn't know which test skipped. + e._use_item_location = True + fixturedef.cached_result = (None, my_cache_key, (e, e.__traceback__)) + raise + fixturedef.cached_result = (result, my_cache_key, None) + return result + + +@final +@dataclasses.dataclass(frozen=True) +class FixtureFunctionMarker: + scope: _ScopeName | Callable[[str, Config], _ScopeName] + params: tuple[object, ...] | None + autouse: bool = False + ids: tuple[object | None, ...] | Callable[[Any], object | None] | None = None + name: str | None = None + + _ispytest: dataclasses.InitVar[bool] = False + + def __post_init__(self, _ispytest: bool) -> None: + check_ispytest(_ispytest) + + def __call__(self, function: FixtureFunction) -> FixtureFunctionDefinition: + if inspect.isclass(function): + raise ValueError("class fixtures not supported (maybe in the future)") + + if isinstance(function, FixtureFunctionDefinition): + raise ValueError( + f"@pytest.fixture is being applied more than once to the same function {function.__name__!r}" + ) + + if hasattr(function, "pytestmark"): + warnings.warn(MARKED_FIXTURE, stacklevel=2) + + fixture_definition = FixtureFunctionDefinition( + function=function, fixture_function_marker=self, _ispytest=True + ) + + name = self.name or function.__name__ + if name == "request": + location = getlocation(function) + fail( + f"'request' is a reserved word for fixtures, use another name:\n {location}", + pytrace=False, + ) + + return fixture_definition + + +# TODO: paramspec/return type annotation tracking and storing +class FixtureFunctionDefinition: + def __init__( + self, + *, + function: Callable[..., Any], + fixture_function_marker: FixtureFunctionMarker, + instance: object | None = None, + _ispytest: bool = False, + ) -> None: + check_ispytest(_ispytest) + self.name = fixture_function_marker.name or function.__name__ + # In order to show the function that this fixture contains in messages. + # Set the __name__ to be same as the function __name__ or the given fixture name. + self.__name__ = self.name + self._fixture_function_marker = fixture_function_marker + if instance is not None: + self._fixture_function = cast( + Callable[..., Any], function.__get__(instance) + ) + else: + self._fixture_function = function + functools.update_wrapper(self, function) + + def __repr__(self) -> str: + return f"" + + def __get__(self, instance, owner=None): + """Behave like a method if the function it was applied to was a method.""" + return FixtureFunctionDefinition( + function=self._fixture_function, + fixture_function_marker=self._fixture_function_marker, + instance=instance, + _ispytest=True, + ) + + def __call__(self, *args: Any, **kwds: Any) -> Any: + message = ( + f'Fixture "{self.name}" called directly. Fixtures are not meant to be called directly,\n' + "but are created automatically when test functions request them as parameters.\n" + "See https://docs.pytest.org/en/stable/explanation/fixtures.html for more information about fixtures, and\n" + "https://docs.pytest.org/en/stable/deprecations.html#calling-fixtures-directly" + ) + fail(message, pytrace=False) + + def _get_wrapped_function(self) -> Callable[..., Any]: + return self._fixture_function + + +@overload +def fixture( + fixture_function: Callable[..., object], + *, + scope: _ScopeName | Callable[[str, Config], _ScopeName] = ..., + params: Iterable[object] | None = ..., + autouse: bool = ..., + ids: Sequence[object | None] | Callable[[Any], object | None] | None = ..., + name: str | None = ..., +) -> FixtureFunctionDefinition: ... + + +@overload +def fixture( + fixture_function: None = ..., + *, + scope: _ScopeName | Callable[[str, Config], _ScopeName] = ..., + params: Iterable[object] | None = ..., + autouse: bool = ..., + ids: Sequence[object | None] | Callable[[Any], object | None] | None = ..., + name: str | None = None, +) -> FixtureFunctionMarker: ... + + +def fixture( + fixture_function: FixtureFunction | None = None, + *, + scope: _ScopeName | Callable[[str, Config], _ScopeName] = "function", + params: Iterable[object] | None = None, + autouse: bool = False, + ids: Sequence[object | None] | Callable[[Any], object | None] | None = None, + name: str | None = None, +) -> FixtureFunctionMarker | FixtureFunctionDefinition: + """Decorator to mark a fixture factory function. + + This decorator can be used, with or without parameters, to define a + fixture function. + + The name of the fixture function can later be referenced to cause its + invocation ahead of running tests: test modules or classes can use the + ``pytest.mark.usefixtures(fixturename)`` marker. + + Test functions can directly use fixture names as input arguments in which + case the fixture instance returned from the fixture function will be + injected. + + Fixtures can provide their values to test functions using ``return`` or + ``yield`` statements. When using ``yield`` the code block after the + ``yield`` statement is executed as teardown code regardless of the test + outcome, and must yield exactly once. + + :param scope: + The scope for which this fixture is shared; one of ``"function"`` + (default), ``"class"``, ``"module"``, ``"package"`` or ``"session"``. + + This parameter may also be a callable which receives ``(fixture_name, config)`` + as parameters, and must return a ``str`` with one of the values mentioned above. + + See :ref:`dynamic scope` in the docs for more information. + + :param params: + An optional list of parameters which will cause multiple invocations + of the fixture function and all of the tests using it. The current + parameter is available in ``request.param``. + + :param autouse: + If True, the fixture func is activated for all tests that can see it. + If False (the default), an explicit reference is needed to activate + the fixture. + + :param ids: + Sequence of ids each corresponding to the params so that they are + part of the test id. If no ids are provided they will be generated + automatically from the params. + + :param name: + The name of the fixture. This defaults to the name of the decorated + function. If a fixture is used in the same module in which it is + defined, the function name of the fixture will be shadowed by the + function arg that requests the fixture; one way to resolve this is to + name the decorated function ``fixture_`` and then use + ``@pytest.fixture(name='')``. + """ + fixture_marker = FixtureFunctionMarker( + scope=scope, + params=tuple(params) if params is not None else None, + autouse=autouse, + ids=None if ids is None else ids if callable(ids) else tuple(ids), + name=name, + _ispytest=True, + ) + + # Direct decoration. + if fixture_function: + return fixture_marker(fixture_function) + + return fixture_marker + + +def yield_fixture( + fixture_function=None, + *args, + scope="function", + params=None, + autouse=False, + ids=None, + name=None, +): + """(Return a) decorator to mark a yield-fixture factory function. + + .. deprecated:: 3.0 + Use :py:func:`pytest.fixture` directly instead. + """ + warnings.warn(YIELD_FIXTURE, stacklevel=2) + return fixture( + fixture_function, + *args, + scope=scope, + params=params, + autouse=autouse, + ids=ids, + name=name, + ) + + +@fixture(scope="session") +def pytestconfig(request: FixtureRequest) -> Config: + """Session-scoped fixture that returns the session's :class:`pytest.Config` + object. + + Example:: + + def test_foo(pytestconfig): + if pytestconfig.get_verbosity() > 0: + ... + + """ + return request.config + + +def pytest_addoption(parser: Parser) -> None: + parser.addini( + "usefixtures", + type="args", + default=[], + help="List of default fixtures to be used with this project", + ) + group = parser.getgroup("general") + group.addoption( + "--fixtures", + "--funcargs", + action="store_true", + dest="showfixtures", + default=False, + help="Show available fixtures, sorted by plugin appearance " + "(fixtures with leading '_' are only shown with '-v')", + ) + group.addoption( + "--fixtures-per-test", + action="store_true", + dest="show_fixtures_per_test", + default=False, + help="Show fixtures per test", + ) + + +def pytest_cmdline_main(config: Config) -> int | ExitCode | None: + if config.option.showfixtures: + showfixtures(config) + return 0 + if config.option.show_fixtures_per_test: + show_fixtures_per_test(config) + return 0 + return None + + +def _get_direct_parametrize_args(node: nodes.Node) -> set[str]: + """Return all direct parametrization arguments of a node, so we don't + mistake them for fixtures. + + Check https://github.com/pytest-dev/pytest/issues/5036. + + These things are done later as well when dealing with parametrization + so this could be improved. + """ + parametrize_argnames: set[str] = set() + for marker in node.iter_markers(name="parametrize"): + if not marker.kwargs.get("indirect", False): + p_argnames, _ = ParameterSet._parse_parametrize_args( + *marker.args, **marker.kwargs + ) + parametrize_argnames.update(p_argnames) + return parametrize_argnames + + +def deduplicate_names(*seqs: Iterable[str]) -> tuple[str, ...]: + """De-duplicate the sequence of names while keeping the original order.""" + # Ideally we would use a set, but it does not preserve insertion order. + return tuple(dict.fromkeys(name for seq in seqs for name in seq)) + + +class FixtureManager: + """pytest fixture definitions and information is stored and managed + from this class. + + During collection fm.parsefactories() is called multiple times to parse + fixture function definitions into FixtureDef objects and internal + data structures. + + During collection of test functions, metafunc-mechanics instantiate + a FuncFixtureInfo object which is cached per node/func-name. + This FuncFixtureInfo object is later retrieved by Function nodes + which themselves offer a fixturenames attribute. + + The FuncFixtureInfo object holds information about fixtures and FixtureDefs + relevant for a particular function. An initial list of fixtures is + assembled like this: + + - config-defined usefixtures + - autouse-marked fixtures along the collection chain up from the function + - usefixtures markers at module/class/function level + - test function funcargs + + Subsequently the funcfixtureinfo.fixturenames attribute is computed + as the closure of the fixtures needed to setup the initial fixtures, + i.e. fixtures needed by fixture functions themselves are appended + to the fixturenames list. + + Upon the test-setup phases all fixturenames are instantiated, retrieved + by a lookup of their FuncFixtureInfo. + """ + + def __init__(self, session: Session) -> None: + self.session = session + self.config: Config = session.config + # Maps a fixture name (argname) to all of the FixtureDefs in the test + # suite/plugins defined with this name. Populated by parsefactories(). + # TODO: The order of the FixtureDefs list of each arg is significant, + # explain. + self._arg2fixturedefs: Final[dict[str, list[FixtureDef[Any]]]] = {} + self._holderobjseen: Final[set[object]] = set() + # A mapping from a nodeid to a list of autouse fixtures it defines. + self._nodeid_autousenames: Final[dict[str, list[str]]] = { + "": self.config.getini("usefixtures"), + } + session.config.pluginmanager.register(self, "funcmanage") + + def getfixtureinfo( + self, + node: nodes.Item, + func: Callable[..., object] | None, + cls: type | None, + ) -> FuncFixtureInfo: + """Calculate the :class:`FuncFixtureInfo` for an item. + + If ``func`` is None, or if the item sets an attribute + ``nofuncargs = True``, then ``func`` is not examined at all. + + :param node: + The item requesting the fixtures. + :param func: + The item's function. + :param cls: + If the function is a method, the method's class. + """ + if func is not None and not getattr(node, "nofuncargs", False): + argnames = getfuncargnames(func, name=node.name, cls=cls) + else: + argnames = () + usefixturesnames = self._getusefixturesnames(node) + autousenames = self._getautousenames(node) + initialnames = deduplicate_names(autousenames, usefixturesnames, argnames) + + direct_parametrize_args = _get_direct_parametrize_args(node) + + names_closure, arg2fixturedefs = self.getfixtureclosure( + parentnode=node, + initialnames=initialnames, + ignore_args=direct_parametrize_args, + ) + + return FuncFixtureInfo(argnames, initialnames, names_closure, arg2fixturedefs) + + def pytest_plugin_registered(self, plugin: _PluggyPlugin, plugin_name: str) -> None: + # Fixtures defined in conftest plugins are only visible to within the + # conftest's directory. This is unlike fixtures in non-conftest plugins + # which have global visibility. So for conftests, construct the base + # nodeid from the plugin name (which is the conftest path). + if plugin_name and plugin_name.endswith("conftest.py"): + # Note: we explicitly do *not* use `plugin.__file__` here -- The + # difference is that plugin_name has the correct capitalization on + # case-insensitive systems (Windows) and other normalization issues + # (issue #11816). + conftestpath = absolutepath(plugin_name) + try: + nodeid = str(conftestpath.parent.relative_to(self.config.rootpath)) + except ValueError: + nodeid = "" + if nodeid == ".": + nodeid = "" + if os.sep != nodes.SEP: + nodeid = nodeid.replace(os.sep, nodes.SEP) + else: + nodeid = None + + self.parsefactories(plugin, nodeid) + + def _getautousenames(self, node: nodes.Node) -> Iterator[str]: + """Return the names of autouse fixtures applicable to node.""" + for parentnode in node.listchain(): + basenames = self._nodeid_autousenames.get(parentnode.nodeid) + if basenames: + yield from basenames + + def _getusefixturesnames(self, node: nodes.Item) -> Iterator[str]: + """Return the names of usefixtures fixtures applicable to node.""" + for marker_node, mark in node.iter_markers_with_node(name="usefixtures"): + if not mark.args: + marker_node.warn( + PytestWarning( + f"usefixtures() in {node.nodeid} without arguments has no effect" + ) + ) + yield from mark.args + + def getfixtureclosure( + self, + parentnode: nodes.Node, + initialnames: tuple[str, ...], + ignore_args: AbstractSet[str], + ) -> tuple[list[str], dict[str, Sequence[FixtureDef[Any]]]]: + # Collect the closure of all fixtures, starting with the given + # fixturenames as the initial set. As we have to visit all + # factory definitions anyway, we also return an arg2fixturedefs + # mapping so that the caller can reuse it and does not have + # to re-discover fixturedefs again for each fixturename + # (discovering matching fixtures for a given name/node is expensive). + + fixturenames_closure = list(initialnames) + + arg2fixturedefs: dict[str, Sequence[FixtureDef[Any]]] = {} + + # Track the index for each fixture name in the simulated stack. + # Needed for handling override chains correctly, similar to _get_active_fixturedef. + # Using negative indices: -1 is the most specific (last), -2 is second to last, etc. + current_indices: dict[str, int] = {} + + def process_argname(argname: str) -> None: + # Optimization: already processed this argname. + if current_indices.get(argname) == -1: + return + + if argname not in fixturenames_closure: + fixturenames_closure.append(argname) + + if argname in ignore_args: + return + + fixturedefs = arg2fixturedefs.get(argname) + if not fixturedefs: + fixturedefs = self.getfixturedefs(argname, parentnode) + if not fixturedefs: + # Fixture not defined or not visible (will error during runtest). + return + arg2fixturedefs[argname] = fixturedefs + + index = current_indices.get(argname, -1) + if -index > len(fixturedefs): + # Exhausted the override chain (will error during runtest). + return + fixturedef = fixturedefs[index] + + current_indices[argname] = index - 1 + for dep in fixturedef.argnames: + process_argname(dep) + current_indices[argname] = index + + for name in initialnames: + process_argname(name) + + def sort_by_scope(arg_name: str) -> Scope: + try: + fixturedefs = arg2fixturedefs[arg_name] + except KeyError: + return Scope.Function + else: + return fixturedefs[-1]._scope + + fixturenames_closure.sort(key=sort_by_scope, reverse=True) + return fixturenames_closure, arg2fixturedefs + + def pytest_generate_tests(self, metafunc: Metafunc) -> None: + """Generate new tests based on parametrized fixtures used by the given metafunc""" + + def get_parametrize_mark_argnames(mark: Mark) -> Sequence[str]: + args, _ = ParameterSet._parse_parametrize_args(*mark.args, **mark.kwargs) + return args + + for argname in metafunc.fixturenames: + # Get the FixtureDefs for the argname. + fixture_defs = metafunc._arg2fixturedefs.get(argname) + if not fixture_defs: + # Will raise FixtureLookupError at setup time if not parametrized somewhere + # else (e.g @pytest.mark.parametrize) + continue + + # If the test itself parametrizes using this argname, give it + # precedence. + if any( + argname in get_parametrize_mark_argnames(mark) + for mark in metafunc.definition.iter_markers("parametrize") + ): + continue + + # In the common case we only look at the fixture def with the + # closest scope (last in the list). But if the fixture overrides + # another fixture, while requesting the super fixture, keep going + # in case the super fixture is parametrized (#1953). + for fixturedef in reversed(fixture_defs): + # Fixture is parametrized, apply it and stop. + if fixturedef.params is not None: + metafunc.parametrize( + argname, + fixturedef.params, + indirect=True, + scope=fixturedef.scope, + ids=fixturedef.ids, + ) + break + + # Not requesting the overridden super fixture, stop. + if argname not in fixturedef.argnames: + break + + # Try next super fixture, if any. + + def pytest_collection_modifyitems(self, items: list[nodes.Item]) -> None: + # Separate parametrized setups. + items[:] = reorder_items(items) + + def _register_fixture( + self, + *, + name: str, + func: _FixtureFunc[object], + nodeid: str | None, + scope: Scope | _ScopeName | Callable[[str, Config], _ScopeName] = "function", + params: Sequence[object] | None = None, + ids: tuple[object | None, ...] | Callable[[Any], object | None] | None = None, + autouse: bool = False, + ) -> None: + """Register a fixture + + :param name: + The fixture's name. + :param func: + The fixture's implementation function. + :param nodeid: + The visibility of the fixture. The fixture will be available to the + node with this nodeid and its children in the collection tree. + None means that the fixture is visible to the entire collection tree, + e.g. a fixture defined for general use in a plugin. + :param scope: + The fixture's scope. + :param params: + The fixture's parametrization params. + :param ids: + The fixture's IDs. + :param autouse: + Whether this is an autouse fixture. + """ + fixture_def = FixtureDef( + config=self.config, + baseid=nodeid, + argname=name, + func=func, + scope=scope, + params=params, + ids=ids, + _ispytest=True, + _autouse=autouse, + ) + + faclist = self._arg2fixturedefs.setdefault(name, []) + if fixture_def.has_location: + faclist.append(fixture_def) + else: + # fixturedefs with no location are at the front + # so this inserts the current fixturedef after the + # existing fixturedefs from external plugins but + # before the fixturedefs provided in conftests. + i = len([f for f in faclist if not f.has_location]) + faclist.insert(i, fixture_def) + if autouse: + self._nodeid_autousenames.setdefault(nodeid or "", []).append(name) + + @overload + def parsefactories( + self, + node_or_obj: nodes.Node, + ) -> None: + raise NotImplementedError() + + @overload + def parsefactories( + self, + node_or_obj: object, + nodeid: str | None, + ) -> None: + raise NotImplementedError() + + def parsefactories( + self, + node_or_obj: nodes.Node | object, + nodeid: str | NotSetType | None = NOTSET, + ) -> None: + """Collect fixtures from a collection node or object. + + Found fixtures are parsed into `FixtureDef`s and saved. + + If `node_or_object` is a collection node (with an underlying Python + object), the node's object is traversed and the node's nodeid is used to + determine the fixtures' visibility. `nodeid` must not be specified in + this case. + + If `node_or_object` is an object (e.g. a plugin), the object is + traversed and the given `nodeid` is used to determine the fixtures' + visibility. `nodeid` must be specified in this case; None and "" mean + total visibility. + """ + if nodeid is not NOTSET: + holderobj = node_or_obj + else: + assert isinstance(node_or_obj, nodes.Node) + holderobj = cast(object, node_or_obj.obj) # type: ignore[attr-defined] + assert isinstance(node_or_obj.nodeid, str) + nodeid = node_or_obj.nodeid + if holderobj in self._holderobjseen: + return + + # Avoid accessing `@property` (and other descriptors) when iterating fixtures. + if not safe_isclass(holderobj) and not isinstance(holderobj, types.ModuleType): + holderobj_tp: object = type(holderobj) + else: + holderobj_tp = holderobj + + self._holderobjseen.add(holderobj) + for name in dir(holderobj): + # The attribute can be an arbitrary descriptor, so the attribute + # access below can raise. safe_getattr() ignores such exceptions. + obj_ub = safe_getattr(holderobj_tp, name, None) + if type(obj_ub) is FixtureFunctionDefinition: + marker = obj_ub._fixture_function_marker + if marker.name: + fixture_name = marker.name + else: + fixture_name = name + + # OK we know it is a fixture -- now safe to look up on the _instance_. + try: + obj = getattr(holderobj, name) + # if the fixture is named in the decorator we cannot find it in the module + except AttributeError: + obj = obj_ub + + func = obj._get_wrapped_function() + + self._register_fixture( + name=fixture_name, + nodeid=nodeid, + func=func, + scope=marker.scope, + params=marker.params, + ids=marker.ids, + autouse=marker.autouse, + ) + + def getfixturedefs( + self, argname: str, node: nodes.Node + ) -> Sequence[FixtureDef[Any]] | None: + """Get FixtureDefs for a fixture name which are applicable + to a given node. + + Returns None if there are no fixtures at all defined with the given + name. (This is different from the case in which there are fixtures + with the given name, but none applicable to the node. In this case, + an empty result is returned). + + :param argname: Name of the fixture to search for. + :param node: The requesting Node. + """ + try: + fixturedefs = self._arg2fixturedefs[argname] + except KeyError: + return None + return tuple(self._matchfactories(fixturedefs, node)) + + def _matchfactories( + self, fixturedefs: Iterable[FixtureDef[Any]], node: nodes.Node + ) -> Iterator[FixtureDef[Any]]: + parentnodeids = {n.nodeid for n in node.iter_parents()} + for fixturedef in fixturedefs: + if fixturedef.baseid in parentnodeids: + yield fixturedef + + +def show_fixtures_per_test(config: Config) -> int | ExitCode: + from _pytest.main import wrap_session + + return wrap_session(config, _show_fixtures_per_test) + + +_PYTEST_DIR = Path(_pytest.__file__).parent + + +def _pretty_fixture_path(invocation_dir: Path, func) -> str: + loc = Path(getlocation(func, invocation_dir)) + prefix = Path("...", "_pytest") + try: + return str(prefix / loc.relative_to(_PYTEST_DIR)) + except ValueError: + return bestrelpath(invocation_dir, loc) + + +def _show_fixtures_per_test(config: Config, session: Session) -> None: + import _pytest.config + + session.perform_collect() + invocation_dir = config.invocation_params.dir + tw = _pytest.config.create_terminal_writer(config) + verbose = config.get_verbosity() + + def get_best_relpath(func) -> str: + loc = getlocation(func, invocation_dir) + return bestrelpath(invocation_dir, Path(loc)) + + def write_fixture(fixture_def: FixtureDef[object]) -> None: + argname = fixture_def.argname + if verbose <= 0 and argname.startswith("_"): + return + prettypath = _pretty_fixture_path(invocation_dir, fixture_def.func) + tw.write(f"{argname}", green=True) + tw.write(f" -- {prettypath}", yellow=True) + tw.write("\n") + fixture_doc = inspect.getdoc(fixture_def.func) + if fixture_doc: + write_docstring( + tw, + fixture_doc.split("\n\n", maxsplit=1)[0] + if verbose <= 0 + else fixture_doc, + ) + else: + tw.line(" no docstring available", red=True) + + def write_item(item: nodes.Item) -> None: + # Not all items have _fixtureinfo attribute. + info: FuncFixtureInfo | None = getattr(item, "_fixtureinfo", None) + if info is None or not info.name2fixturedefs: + # This test item does not use any fixtures. + return + tw.line() + tw.sep("-", f"fixtures used by {item.name}") + # TODO: Fix this type ignore. + tw.sep("-", f"({get_best_relpath(item.function)})") # type: ignore[attr-defined] + # dict key not used in loop but needed for sorting. + for _, fixturedefs in sorted(info.name2fixturedefs.items()): + assert fixturedefs is not None + if not fixturedefs: + continue + # Last item is expected to be the one used by the test item. + write_fixture(fixturedefs[-1]) + + for session_item in session.items: + write_item(session_item) + + +def showfixtures(config: Config) -> int | ExitCode: + from _pytest.main import wrap_session + + return wrap_session(config, _showfixtures_main) + + +def _showfixtures_main(config: Config, session: Session) -> None: + import _pytest.config + + session.perform_collect() + invocation_dir = config.invocation_params.dir + tw = _pytest.config.create_terminal_writer(config) + verbose = config.get_verbosity() + + fm = session._fixturemanager + + available = [] + seen: set[tuple[str, str]] = set() + + for argname, fixturedefs in fm._arg2fixturedefs.items(): + assert fixturedefs is not None + if not fixturedefs: + continue + for fixturedef in fixturedefs: + loc = getlocation(fixturedef.func, invocation_dir) + if (fixturedef.argname, loc) in seen: + continue + seen.add((fixturedef.argname, loc)) + available.append( + ( + len(fixturedef.baseid), + fixturedef.func.__module__, + _pretty_fixture_path(invocation_dir, fixturedef.func), + fixturedef.argname, + fixturedef, + ) + ) + + available.sort() + currentmodule = None + for baseid, module, prettypath, argname, fixturedef in available: + if currentmodule != module: + if not module.startswith("_pytest."): + tw.line() + tw.sep("-", f"fixtures defined from {module}") + currentmodule = module + if verbose <= 0 and argname.startswith("_"): + continue + tw.write(f"{argname}", green=True) + if fixturedef.scope != "function": + tw.write(f" [{fixturedef.scope} scope]", cyan=True) + tw.write(f" -- {prettypath}", yellow=True) + tw.write("\n") + doc = inspect.getdoc(fixturedef.func) + if doc: + write_docstring( + tw, doc.split("\n\n", maxsplit=1)[0] if verbose <= 0 else doc + ) + else: + tw.line(" no docstring available", red=True) + tw.line() + + +def write_docstring(tw: TerminalWriter, doc: str, indent: str = " ") -> None: + for line in doc.split("\n"): + tw.line(indent + line) diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/freeze_support.py b/Backend/venv/lib/python3.12/site-packages/_pytest/freeze_support.py new file mode 100644 index 00000000..959ff071 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/_pytest/freeze_support.py @@ -0,0 +1,45 @@ +"""Provides a function to report all internal modules for using freezing +tools.""" + +from __future__ import annotations + +from collections.abc import Iterator +import types + + +def freeze_includes() -> list[str]: + """Return a list of module names used by pytest that should be + included by cx_freeze.""" + import _pytest + + result = list(_iter_all_modules(_pytest)) + return result + + +def _iter_all_modules( + package: str | types.ModuleType, + prefix: str = "", +) -> Iterator[str]: + """Iterate over the names of all modules that can be found in the given + package, recursively. + + >>> import _pytest + >>> list(_iter_all_modules(_pytest)) + ['_pytest._argcomplete', '_pytest._code.code', ...] + """ + import os + import pkgutil + + if isinstance(package, str): + path = package + else: + # Type ignored because typeshed doesn't define ModuleType.__path__ + # (only defined on packages). + package_path = package.__path__ + path, prefix = package_path[0], package.__name__ + "." + for _, name, is_package in pkgutil.iter_modules([path]): + if is_package: + for m in _iter_all_modules(os.path.join(path, name), prefix=name + "."): + yield prefix + m + else: + yield prefix + name diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/helpconfig.py b/Backend/venv/lib/python3.12/site-packages/_pytest/helpconfig.py new file mode 100644 index 00000000..6a22c9f5 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/_pytest/helpconfig.py @@ -0,0 +1,293 @@ +# mypy: allow-untyped-defs +"""Version info, help messages, tracing configuration.""" + +from __future__ import annotations + +import argparse +from collections.abc import Generator +from collections.abc import Sequence +import os +import sys +from typing import Any + +from _pytest.config import Config +from _pytest.config import ExitCode +from _pytest.config import PrintHelp +from _pytest.config.argparsing import Parser +from _pytest.terminal import TerminalReporter +import pytest + + +class HelpAction(argparse.Action): + """An argparse Action that will raise a PrintHelp exception in order to skip + the rest of the argument parsing when --help is passed. + + This prevents argparse from raising UsageError when `--help` is used along + with missing required arguments when any are defined, for example by + ``pytest_addoption``. This is similar to the way that the builtin argparse + --help option is implemented by raising SystemExit. + + To opt in to this behavior, the parse caller must set + `namespace._raise_print_help = True`. Otherwise it just sets the option. + """ + + def __init__( + self, option_strings: Sequence[str], dest: str, *, help: str | None = None + ) -> None: + super().__init__( + option_strings=option_strings, + dest=dest, + nargs=0, + const=True, + default=False, + help=help, + ) + + def __call__( + self, + parser: argparse.ArgumentParser, + namespace: argparse.Namespace, + values: str | Sequence[Any] | None, + option_string: str | None = None, + ) -> None: + setattr(namespace, self.dest, self.const) + + if getattr(namespace, "_raise_print_help", False): + raise PrintHelp + + +def pytest_addoption(parser: Parser) -> None: + group = parser.getgroup("debugconfig") + group.addoption( + "--version", + "-V", + action="count", + default=0, + dest="version", + help="Display pytest version and information about plugins. " + "When given twice, also display information about plugins.", + ) + group._addoption( # private to use reserved lower-case short option + "-h", + "--help", + action=HelpAction, + dest="help", + help="Show help message and configuration info", + ) + group._addoption( # private to use reserved lower-case short option + "-p", + action="append", + dest="plugins", + default=[], + metavar="name", + help="Early-load given plugin module name or entry point (multi-allowed). " + "To avoid loading of plugins, use the `no:` prefix, e.g. " + "`no:doctest`. See also --disable-plugin-autoload.", + ) + group.addoption( + "--disable-plugin-autoload", + action="store_true", + default=False, + help="Disable plugin auto-loading through entry point packaging metadata. " + "Only plugins explicitly specified in -p or env var PYTEST_PLUGINS will be loaded.", + ) + group.addoption( + "--traceconfig", + "--trace-config", + action="store_true", + default=False, + help="Trace considerations of conftest.py files", + ) + group.addoption( + "--debug", + action="store", + nargs="?", + const="pytestdebug.log", + dest="debug", + metavar="DEBUG_FILE_NAME", + help="Store internal tracing debug information in this log file. " + "This file is opened with 'w' and truncated as a result, care advised. " + "Default: pytestdebug.log.", + ) + group._addoption( # private to use reserved lower-case short option + "-o", + "--override-ini", + dest="override_ini", + action="append", + help='Override configuration option with "option=value" style, ' + "e.g. `-o strict_xfail=True -o cache_dir=cache`.", + ) + + +@pytest.hookimpl(wrapper=True) +def pytest_cmdline_parse() -> Generator[None, Config, Config]: + config = yield + + if config.option.debug: + # --debug | --debug was provided. + path = config.option.debug + debugfile = open(path, "w", encoding="utf-8") + debugfile.write( + "versions pytest-{}, " + "python-{}\ninvocation_dir={}\ncwd={}\nargs={}\n\n".format( + pytest.__version__, + ".".join(map(str, sys.version_info)), + config.invocation_params.dir, + os.getcwd(), + config.invocation_params.args, + ) + ) + config.trace.root.setwriter(debugfile.write) + undo_tracing = config.pluginmanager.enable_tracing() + sys.stderr.write(f"writing pytest debug information to {path}\n") + + def unset_tracing() -> None: + debugfile.close() + sys.stderr.write(f"wrote pytest debug information to {debugfile.name}\n") + config.trace.root.setwriter(None) + undo_tracing() + + config.add_cleanup(unset_tracing) + + return config + + +def show_version_verbose(config: Config) -> None: + """Show verbose pytest version installation, including plugins.""" + sys.stdout.write( + f"This is pytest version {pytest.__version__}, imported from {pytest.__file__}\n" + ) + plugininfo = getpluginversioninfo(config) + if plugininfo: + for line in plugininfo: + sys.stdout.write(line + "\n") + + +def pytest_cmdline_main(config: Config) -> int | ExitCode | None: + # Note: a single `--version` argument is handled directly by `Config.main()` to avoid starting up the entire + # pytest infrastructure just to display the version (#13574). + if config.option.version > 1: + show_version_verbose(config) + return ExitCode.OK + elif config.option.help: + config._do_configure() + showhelp(config) + config._ensure_unconfigure() + return ExitCode.OK + return None + + +def showhelp(config: Config) -> None: + import textwrap + + reporter: TerminalReporter | None = config.pluginmanager.get_plugin( + "terminalreporter" + ) + assert reporter is not None + tw = reporter._tw + tw.write(config._parser.optparser.format_help()) + tw.line() + tw.line( + "[pytest] configuration options in the first " + "pytest.toml|pytest.ini|tox.ini|setup.cfg|pyproject.toml file found:" + ) + tw.line() + + columns = tw.fullwidth # costly call + indent_len = 24 # based on argparse's max_help_position=24 + indent = " " * indent_len + for name in config._parser._inidict: + help, type, _default = config._parser._inidict[name] + if help is None: + raise TypeError(f"help argument cannot be None for {name}") + spec = f"{name} ({type}):" + tw.write(f" {spec}") + spec_len = len(spec) + if spec_len > (indent_len - 3): + # Display help starting at a new line. + tw.line() + helplines = textwrap.wrap( + help, + columns, + initial_indent=indent, + subsequent_indent=indent, + break_on_hyphens=False, + ) + + for line in helplines: + tw.line(line) + else: + # Display help starting after the spec, following lines indented. + tw.write(" " * (indent_len - spec_len - 2)) + wrapped = textwrap.wrap(help, columns - indent_len, break_on_hyphens=False) + + if wrapped: + tw.line(wrapped[0]) + for line in wrapped[1:]: + tw.line(indent + line) + + tw.line() + tw.line("Environment variables:") + vars = [ + ( + "CI", + "When set to a non-empty value, pytest knows it is running in a " + "CI process and does not truncate summary info", + ), + ("BUILD_NUMBER", "Equivalent to CI"), + ("PYTEST_ADDOPTS", "Extra command line options"), + ("PYTEST_PLUGINS", "Comma-separated plugins to load during startup"), + ("PYTEST_DISABLE_PLUGIN_AUTOLOAD", "Set to disable plugin auto-loading"), + ("PYTEST_DEBUG", "Set to enable debug tracing of pytest's internals"), + ("PYTEST_DEBUG_TEMPROOT", "Override the system temporary directory"), + ("PYTEST_THEME", "The Pygments style to use for code output"), + ("PYTEST_THEME_MODE", "Set the PYTEST_THEME to be either 'dark' or 'light'"), + ] + for name, help in vars: + tw.line(f" {name:<24} {help}") + tw.line() + tw.line() + + tw.line("to see available markers type: pytest --markers") + tw.line("to see available fixtures type: pytest --fixtures") + tw.line( + "(shown according to specified file_or_dir or current dir " + "if not specified; fixtures with leading '_' are only shown " + "with the '-v' option" + ) + + for warningreport in reporter.stats.get("warnings", []): + tw.line("warning : " + warningreport.message, red=True) + + +def getpluginversioninfo(config: Config) -> list[str]: + lines = [] + plugininfo = config.pluginmanager.list_plugin_distinfo() + if plugininfo: + lines.append("registered third-party plugins:") + for plugin, dist in plugininfo: + loc = getattr(plugin, "__file__", repr(plugin)) + content = f"{dist.project_name}-{dist.version} at {loc}" + lines.append(" " + content) + return lines + + +def pytest_report_header(config: Config) -> list[str]: + lines = [] + if config.option.debug or config.option.traceconfig: + lines.append(f"using: pytest-{pytest.__version__}") + + verinfo = getpluginversioninfo(config) + if verinfo: + lines.extend(verinfo) + + if config.option.traceconfig: + lines.append("active plugins:") + items = config.pluginmanager.list_name_plugin() + for name, plugin in items: + if hasattr(plugin, "__file__"): + r = plugin.__file__ + else: + r = repr(plugin) + lines.append(f" {name:<20}: {r}") + return lines diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/hookspec.py b/Backend/venv/lib/python3.12/site-packages/_pytest/hookspec.py new file mode 100644 index 00000000..c5bcc36a --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/_pytest/hookspec.py @@ -0,0 +1,1342 @@ +# mypy: allow-untyped-defs +# ruff: noqa: T100 +"""Hook specifications for pytest plugins which are invoked by pytest itself +and by builtin plugins.""" + +from __future__ import annotations + +from collections.abc import Mapping +from collections.abc import Sequence +from pathlib import Path +from typing import Any +from typing import TYPE_CHECKING + +from pluggy import HookspecMarker + +from .deprecated import HOOK_LEGACY_PATH_ARG + + +if TYPE_CHECKING: + import pdb + from typing import Literal + import warnings + + from _pytest._code.code import ExceptionInfo + from _pytest._code.code import ExceptionRepr + from _pytest.compat import LEGACY_PATH + from _pytest.config import _PluggyPlugin + from _pytest.config import Config + from _pytest.config import ExitCode + from _pytest.config import PytestPluginManager + from _pytest.config.argparsing import Parser + from _pytest.fixtures import FixtureDef + from _pytest.fixtures import SubRequest + from _pytest.main import Session + from _pytest.nodes import Collector + from _pytest.nodes import Item + from _pytest.outcomes import Exit + from _pytest.python import Class + from _pytest.python import Function + from _pytest.python import Metafunc + from _pytest.python import Module + from _pytest.reports import CollectReport + from _pytest.reports import TestReport + from _pytest.runner import CallInfo + from _pytest.terminal import TerminalReporter + from _pytest.terminal import TestShortLogReport + + +hookspec = HookspecMarker("pytest") + +# ------------------------------------------------------------------------- +# Initialization hooks called for every plugin +# ------------------------------------------------------------------------- + + +@hookspec(historic=True) +def pytest_addhooks(pluginmanager: PytestPluginManager) -> None: + """Called at plugin registration time to allow adding new hooks via a call to + :func:`pluginmanager.add_hookspecs(module_or_class, prefix) `. + + :param pluginmanager: The pytest plugin manager. + + .. note:: + This hook is incompatible with hook wrappers. + + Use in conftest plugins + ======================= + + If a conftest plugin implements this hook, it will be called immediately + when the conftest is registered. + """ + + +@hookspec(historic=True) +def pytest_plugin_registered( + plugin: _PluggyPlugin, + plugin_name: str, + manager: PytestPluginManager, +) -> None: + """A new pytest plugin got registered. + + :param plugin: The plugin module or instance. + :param plugin_name: The name by which the plugin is registered. + :param manager: The pytest plugin manager. + + .. note:: + This hook is incompatible with hook wrappers. + + Use in conftest plugins + ======================= + + If a conftest plugin implements this hook, it will be called immediately + when the conftest is registered, once for each plugin registered thus far + (including itself!), and for all plugins thereafter when they are + registered. + """ + + +@hookspec(historic=True) +def pytest_addoption(parser: Parser, pluginmanager: PytestPluginManager) -> None: + """Register argparse-style options and config-style config values, + called once at the beginning of a test run. + + :param parser: + To add command line options, call + :py:func:`parser.addoption(...) `. + To add config-file values call :py:func:`parser.addini(...) + `. + + :param pluginmanager: + The pytest plugin manager, which can be used to install :py:func:`~pytest.hookspec`'s + or :py:func:`~pytest.hookimpl`'s and allow one plugin to call another plugin's hooks + to change how command line options are added. + + Options can later be accessed through the + :py:class:`config ` object, respectively: + + - :py:func:`config.getoption(name) ` to + retrieve the value of a command line option. + + - :py:func:`config.getini(name) ` to retrieve + a value read from a configuration file. + + The config object is passed around on many internal objects via the ``.config`` + attribute or can be retrieved as the ``pytestconfig`` fixture. + + .. note:: + This hook is incompatible with hook wrappers. + + Use in conftest plugins + ======================= + + If a conftest plugin implements this hook, it will be called immediately + when the conftest is registered. + + This hook is only called for :ref:`initial conftests `. + """ + + +@hookspec(historic=True) +def pytest_configure(config: Config) -> None: + """Allow plugins and conftest files to perform initial configuration. + + .. note:: + This hook is incompatible with hook wrappers. + + :param config: The pytest config object. + + Use in conftest plugins + ======================= + + This hook is called for every :ref:`initial conftest ` file + after command line options have been parsed. After that, the hook is called + for other conftest files as they are registered. + """ + + +# ------------------------------------------------------------------------- +# Bootstrapping hooks called for plugins registered early enough: +# internal and 3rd party plugins. +# ------------------------------------------------------------------------- + + +@hookspec(firstresult=True) +def pytest_cmdline_parse( + pluginmanager: PytestPluginManager, args: list[str] +) -> Config | None: + """Return an initialized :class:`~pytest.Config`, parsing the specified args. + + Stops at first non-None result, see :ref:`firstresult`. + + .. note:: + This hook is only called for plugin classes passed to the + ``plugins`` arg when using `pytest.main`_ to perform an in-process + test run. + + :param pluginmanager: The pytest plugin manager. + :param args: List of arguments passed on the command line. + :returns: A pytest config object. + + Use in conftest plugins + ======================= + + This hook is not called for conftest files. + """ + + +def pytest_load_initial_conftests( + early_config: Config, parser: Parser, args: list[str] +) -> None: + """Called to implement the loading of :ref:`initial conftest files + ` ahead of command line option parsing. + + :param early_config: The pytest config object. + :param args: Arguments passed on the command line. + :param parser: To add command line options. + + Use in conftest plugins + ======================= + + This hook is not called for conftest files. + """ + + +@hookspec(firstresult=True) +def pytest_cmdline_main(config: Config) -> ExitCode | int | None: + """Called for performing the main command line action. + + The default implementation will invoke the configure hooks and + :hook:`pytest_runtestloop`. + + Stops at first non-None result, see :ref:`firstresult`. + + :param config: The pytest config object. + :returns: The exit code. + + Use in conftest plugins + ======================= + + This hook is only called for :ref:`initial conftests `. + """ + + +# ------------------------------------------------------------------------- +# collection hooks +# ------------------------------------------------------------------------- + + +@hookspec(firstresult=True) +def pytest_collection(session: Session) -> object | None: + """Perform the collection phase for the given session. + + Stops at first non-None result, see :ref:`firstresult`. + The return value is not used, but only stops further processing. + + The default collection phase is this (see individual hooks for full details): + + 1. Starting from ``session`` as the initial collector: + + 1. ``pytest_collectstart(collector)`` + 2. ``report = pytest_make_collect_report(collector)`` + 3. ``pytest_exception_interact(collector, call, report)`` if an interactive exception occurred + 4. For each collected node: + + 1. If an item, ``pytest_itemcollected(item)`` + 2. If a collector, recurse into it. + + 5. ``pytest_collectreport(report)`` + + 2. ``pytest_collection_modifyitems(session, config, items)`` + + 1. ``pytest_deselected(items)`` for any deselected items (may be called multiple times) + + 3. ``pytest_collection_finish(session)`` + 4. Set ``session.items`` to the list of collected items + 5. Set ``session.testscollected`` to the number of collected items + + You can implement this hook to only perform some action before collection, + for example the terminal plugin uses it to start displaying the collection + counter (and returns `None`). + + :param session: The pytest session object. + + Use in conftest plugins + ======================= + + This hook is only called for :ref:`initial conftests `. + """ + + +def pytest_collection_modifyitems( + session: Session, config: Config, items: list[Item] +) -> None: + """Called after collection has been performed. May filter or re-order + the items in-place. + + When items are deselected (filtered out from ``items``), + the hook :hook:`pytest_deselected` must be called explicitly + with the deselected items to properly notify other plugins, + e.g. with ``config.hook.pytest_deselected(items=deselected_items)``. + + :param session: The pytest session object. + :param config: The pytest config object. + :param items: List of item objects. + + Use in conftest plugins + ======================= + + Any conftest plugin can implement this hook. + """ + + +def pytest_collection_finish(session: Session) -> None: + """Called after collection has been performed and modified. + + :param session: The pytest session object. + + Use in conftest plugins + ======================= + + Any conftest plugin can implement this hook. + """ + + +@hookspec( + firstresult=True, + warn_on_impl_args={ + "path": HOOK_LEGACY_PATH_ARG.format( + pylib_path_arg="path", pathlib_path_arg="collection_path" + ), + }, +) +def pytest_ignore_collect( + collection_path: Path, path: LEGACY_PATH, config: Config +) -> bool | None: + """Return ``True`` to ignore this path for collection. + + Return ``None`` to let other plugins ignore the path for collection. + + Returning ``False`` will forcefully *not* ignore this path for collection, + without giving a chance for other plugins to ignore this path. + + This hook is consulted for all files and directories prior to calling + more specific hooks. + + Stops at first non-None result, see :ref:`firstresult`. + + :param collection_path: The path to analyze. + :type collection_path: pathlib.Path + :param path: The path to analyze (deprecated). + :param config: The pytest config object. + + .. versionchanged:: 7.0.0 + The ``collection_path`` parameter was added as a :class:`pathlib.Path` + equivalent of the ``path`` parameter. The ``path`` parameter + has been deprecated. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given collection path, only + conftest files in parent directories of the collection path are consulted + (if the path is a directory, its own conftest file is *not* consulted - a + directory cannot ignore itself!). + """ + + +@hookspec(firstresult=True) +def pytest_collect_directory(path: Path, parent: Collector) -> Collector | None: + """Create a :class:`~pytest.Collector` for the given directory, or None if + not relevant. + + .. versionadded:: 8.0 + + For best results, the returned collector should be a subclass of + :class:`~pytest.Directory`, but this is not required. + + The new node needs to have the specified ``parent`` as a parent. + + Stops at first non-None result, see :ref:`firstresult`. + + :param path: The path to analyze. + :type path: pathlib.Path + + See :ref:`custom directory collectors` for a simple example of use of this + hook. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given collection path, only + conftest files in parent directories of the collection path are consulted + (if the path is a directory, its own conftest file is *not* consulted - a + directory cannot collect itself!). + """ + + +@hookspec( + warn_on_impl_args={ + "path": HOOK_LEGACY_PATH_ARG.format( + pylib_path_arg="path", pathlib_path_arg="file_path" + ), + }, +) +def pytest_collect_file( + file_path: Path, path: LEGACY_PATH, parent: Collector +) -> Collector | None: + """Create a :class:`~pytest.Collector` for the given path, or None if not relevant. + + For best results, the returned collector should be a subclass of + :class:`~pytest.File`, but this is not required. + + The new node needs to have the specified ``parent`` as a parent. + + :param file_path: The path to analyze. + :type file_path: pathlib.Path + :param path: The path to collect (deprecated). + + .. versionchanged:: 7.0.0 + The ``file_path`` parameter was added as a :class:`pathlib.Path` + equivalent of the ``path`` parameter. The ``path`` parameter + has been deprecated. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given file path, only + conftest files in parent directories of the file path are consulted. + """ + + +# logging hooks for collection + + +def pytest_collectstart(collector: Collector) -> None: + """Collector starts collecting. + + :param collector: + The collector. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given collector, only + conftest files in the collector's directory and its parent directories are + consulted. + """ + + +def pytest_itemcollected(item: Item) -> None: + """We just collected a test item. + + :param item: + The item. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given item, only conftest + files in the item's directory and its parent directories are consulted. + """ + + +def pytest_collectreport(report: CollectReport) -> None: + """Collector finished collecting. + + :param report: + The collect report. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given collector, only + conftest files in the collector's directory and its parent directories are + consulted. + """ + + +def pytest_deselected(items: Sequence[Item]) -> None: + """Called for deselected test items, e.g. by keyword. + + Note that this hook has two integration aspects for plugins: + + - it can be *implemented* to be notified of deselected items + - it must be *called* from :hook:`pytest_collection_modifyitems` + implementations when items are deselected (to properly notify other plugins). + + May be called multiple times. + + :param items: + The items. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. + """ + + +@hookspec(firstresult=True) +def pytest_make_collect_report(collector: Collector) -> CollectReport | None: + """Perform :func:`collector.collect() ` and return + a :class:`~pytest.CollectReport`. + + Stops at first non-None result, see :ref:`firstresult`. + + :param collector: + The collector. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given collector, only + conftest files in the collector's directory and its parent directories are + consulted. + """ + + +# ------------------------------------------------------------------------- +# Python test function related hooks +# ------------------------------------------------------------------------- + + +@hookspec( + firstresult=True, + warn_on_impl_args={ + "path": HOOK_LEGACY_PATH_ARG.format( + pylib_path_arg="path", pathlib_path_arg="module_path" + ), + }, +) +def pytest_pycollect_makemodule( + module_path: Path, path: LEGACY_PATH, parent +) -> Module | None: + """Return a :class:`pytest.Module` collector or None for the given path. + + This hook will be called for each matching test module path. + The :hook:`pytest_collect_file` hook needs to be used if you want to + create test modules for files that do not match as a test module. + + Stops at first non-None result, see :ref:`firstresult`. + + :param module_path: The path of the module to collect. + :type module_path: pathlib.Path + :param path: The path of the module to collect (deprecated). + + .. versionchanged:: 7.0.0 + The ``module_path`` parameter was added as a :class:`pathlib.Path` + equivalent of the ``path`` parameter. + + The ``path`` parameter has been deprecated in favor of ``fspath``. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given parent collector, + only conftest files in the collector's directory and its parent directories + are consulted. + """ + + +@hookspec(firstresult=True) +def pytest_pycollect_makeitem( + collector: Module | Class, name: str, obj: object +) -> None | Item | Collector | list[Item | Collector]: + """Return a custom item/collector for a Python object in a module, or None. + + Stops at first non-None result, see :ref:`firstresult`. + + :param collector: + The module/class collector. + :param name: + The name of the object in the module/class. + :param obj: + The object. + :returns: + The created items/collectors. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given collector, only + conftest files in the collector's directory and its parent directories + are consulted. + """ + + +@hookspec(firstresult=True) +def pytest_pyfunc_call(pyfuncitem: Function) -> object | None: + """Call underlying test function. + + Stops at first non-None result, see :ref:`firstresult`. + + :param pyfuncitem: + The function item. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given item, only + conftest files in the item's directory and its parent directories + are consulted. + """ + + +def pytest_generate_tests(metafunc: Metafunc) -> None: + """Generate (multiple) parametrized calls to a test function. + + :param metafunc: + The :class:`~pytest.Metafunc` helper for the test function. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given function definition, + only conftest files in the functions's directory and its parent directories + are consulted. + """ + + +@hookspec(firstresult=True) +def pytest_make_parametrize_id(config: Config, val: object, argname: str) -> str | None: + """Return a user-friendly string representation of the given ``val`` + that will be used by @pytest.mark.parametrize calls, or None if the hook + doesn't know about ``val``. + + The parameter name is available as ``argname``, if required. + + Stops at first non-None result, see :ref:`firstresult`. + + :param config: The pytest config object. + :param val: The parametrized value. + :param argname: The automatic parameter name produced by pytest. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. + """ + + +# ------------------------------------------------------------------------- +# runtest related hooks +# ------------------------------------------------------------------------- + + +@hookspec(firstresult=True) +def pytest_runtestloop(session: Session) -> object | None: + """Perform the main runtest loop (after collection finished). + + The default hook implementation performs the runtest protocol for all items + collected in the session (``session.items``), unless the collection failed + or the ``collectonly`` pytest option is set. + + If at any point :py:func:`pytest.exit` is called, the loop is + terminated immediately. + + If at any point ``session.shouldfail`` or ``session.shouldstop`` are set, the + loop is terminated after the runtest protocol for the current item is finished. + + :param session: The pytest session object. + + Stops at first non-None result, see :ref:`firstresult`. + The return value is not used, but only stops further processing. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. + """ + + +@hookspec(firstresult=True) +def pytest_runtest_protocol(item: Item, nextitem: Item | None) -> object | None: + """Perform the runtest protocol for a single test item. + + The default runtest protocol is this (see individual hooks for full details): + + - ``pytest_runtest_logstart(nodeid, location)`` + + - Setup phase: + - ``call = pytest_runtest_setup(item)`` (wrapped in ``CallInfo(when="setup")``) + - ``report = pytest_runtest_makereport(item, call)`` + - ``pytest_runtest_logreport(report)`` + - ``pytest_exception_interact(call, report)`` if an interactive exception occurred + + - Call phase, if the setup passed and the ``setuponly`` pytest option is not set: + - ``call = pytest_runtest_call(item)`` (wrapped in ``CallInfo(when="call")``) + - ``report = pytest_runtest_makereport(item, call)`` + - ``pytest_runtest_logreport(report)`` + - ``pytest_exception_interact(call, report)`` if an interactive exception occurred + + - Teardown phase: + - ``call = pytest_runtest_teardown(item, nextitem)`` (wrapped in ``CallInfo(when="teardown")``) + - ``report = pytest_runtest_makereport(item, call)`` + - ``pytest_runtest_logreport(report)`` + - ``pytest_exception_interact(call, report)`` if an interactive exception occurred + + - ``pytest_runtest_logfinish(nodeid, location)`` + + :param item: Test item for which the runtest protocol is performed. + :param nextitem: The scheduled-to-be-next test item (or None if this is the end my friend). + + Stops at first non-None result, see :ref:`firstresult`. + The return value is not used, but only stops further processing. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. + """ + + +def pytest_runtest_logstart(nodeid: str, location: tuple[str, int | None, str]) -> None: + """Called at the start of running the runtest protocol for a single item. + + See :hook:`pytest_runtest_protocol` for a description of the runtest protocol. + + :param nodeid: Full node ID of the item. + :param location: A tuple of ``(filename, lineno, testname)`` + where ``filename`` is a file path relative to ``config.rootpath`` + and ``lineno`` is 0-based. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given item, only conftest + files in the item's directory and its parent directories are consulted. + """ + + +def pytest_runtest_logfinish( + nodeid: str, location: tuple[str, int | None, str] +) -> None: + """Called at the end of running the runtest protocol for a single item. + + See :hook:`pytest_runtest_protocol` for a description of the runtest protocol. + + :param nodeid: Full node ID of the item. + :param location: A tuple of ``(filename, lineno, testname)`` + where ``filename`` is a file path relative to ``config.rootpath`` + and ``lineno`` is 0-based. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given item, only conftest + files in the item's directory and its parent directories are consulted. + """ + + +def pytest_runtest_setup(item: Item) -> None: + """Called to perform the setup phase for a test item. + + The default implementation runs ``setup()`` on ``item`` and all of its + parents (which haven't been setup yet). This includes obtaining the + values of fixtures required by the item (which haven't been obtained + yet). + + :param item: + The item. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given item, only conftest + files in the item's directory and its parent directories are consulted. + """ + + +def pytest_runtest_call(item: Item) -> None: + """Called to run the test for test item (the call phase). + + The default implementation calls ``item.runtest()``. + + :param item: + The item. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given item, only conftest + files in the item's directory and its parent directories are consulted. + """ + + +def pytest_runtest_teardown(item: Item, nextitem: Item | None) -> None: + """Called to perform the teardown phase for a test item. + + The default implementation runs the finalizers and calls ``teardown()`` + on ``item`` and all of its parents (which need to be torn down). This + includes running the teardown phase of fixtures required by the item (if + they go out of scope). + + :param item: + The item. + :param nextitem: + The scheduled-to-be-next test item (None if no further test item is + scheduled). This argument is used to perform exact teardowns, i.e. + calling just enough finalizers so that nextitem only needs to call + setup functions. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given item, only conftest + files in the item's directory and its parent directories are consulted. + """ + + +@hookspec(firstresult=True) +def pytest_runtest_makereport(item: Item, call: CallInfo[None]) -> TestReport | None: + """Called to create a :class:`~pytest.TestReport` for each of + the setup, call and teardown runtest phases of a test item. + + See :hook:`pytest_runtest_protocol` for a description of the runtest protocol. + + :param item: The item. + :param call: The :class:`~pytest.CallInfo` for the phase. + + Stops at first non-None result, see :ref:`firstresult`. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given item, only conftest + files in the item's directory and its parent directories are consulted. + """ + + +def pytest_runtest_logreport(report: TestReport) -> None: + """Process the :class:`~pytest.TestReport` produced for each + of the setup, call and teardown runtest phases of an item. + + See :hook:`pytest_runtest_protocol` for a description of the runtest protocol. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given item, only conftest + files in the item's directory and its parent directories are consulted. + """ + + +@hookspec(firstresult=True) +def pytest_report_to_serializable( + config: Config, + report: CollectReport | TestReport, +) -> dict[str, Any] | None: + """Serialize the given report object into a data structure suitable for + sending over the wire, e.g. converted to JSON. + + :param config: The pytest config object. + :param report: The report. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. The exact details may depend + on the plugin which calls the hook. + """ + + +@hookspec(firstresult=True) +def pytest_report_from_serializable( + config: Config, + data: dict[str, Any], +) -> CollectReport | TestReport | None: + """Restore a report object previously serialized with + :hook:`pytest_report_to_serializable`. + + :param config: The pytest config object. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. The exact details may depend + on the plugin which calls the hook. + """ + + +# ------------------------------------------------------------------------- +# Fixture related hooks +# ------------------------------------------------------------------------- + + +@hookspec(firstresult=True) +def pytest_fixture_setup( + fixturedef: FixtureDef[Any], request: SubRequest +) -> object | None: + """Perform fixture setup execution. + + :param fixturedef: + The fixture definition object. + :param request: + The fixture request object. + :returns: + The return value of the call to the fixture function. + + Stops at first non-None result, see :ref:`firstresult`. + + .. note:: + If the fixture function returns None, other implementations of + this hook function will continue to be called, according to the + behavior of the :ref:`firstresult` option. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given fixture, only + conftest files in the fixture scope's directory and its parent directories + are consulted. + """ + + +def pytest_fixture_post_finalizer( + fixturedef: FixtureDef[Any], request: SubRequest +) -> None: + """Called after fixture teardown, but before the cache is cleared, so + the fixture result ``fixturedef.cached_result`` is still available (not + ``None``). + + :param fixturedef: + The fixture definition object. + :param request: + The fixture request object. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given fixture, only + conftest files in the fixture scope's directory and its parent directories + are consulted. + """ + + +# ------------------------------------------------------------------------- +# test session related hooks +# ------------------------------------------------------------------------- + + +def pytest_sessionstart(session: Session) -> None: + """Called after the ``Session`` object has been created and before performing collection + and entering the run test loop. + + :param session: The pytest session object. + + Use in conftest plugins + ======================= + + This hook is only called for :ref:`initial conftests `. + """ + + +def pytest_sessionfinish( + session: Session, + exitstatus: int | ExitCode, +) -> None: + """Called after whole test run finished, right before returning the exit status to the system. + + :param session: The pytest session object. + :param exitstatus: The status which pytest will return to the system. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. + """ + + +def pytest_unconfigure(config: Config) -> None: + """Called before test process is exited. + + :param config: The pytest config object. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. + """ + + +# ------------------------------------------------------------------------- +# hooks for customizing the assert methods +# ------------------------------------------------------------------------- + + +def pytest_assertrepr_compare( + config: Config, op: str, left: object, right: object +) -> list[str] | None: + """Return explanation for comparisons in failing assert expressions. + + Return None for no custom explanation, otherwise return a list + of strings. The strings will be joined by newlines but any newlines + *in* a string will be escaped. Note that all but the first line will + be indented slightly, the intention is for the first line to be a summary. + + :param config: The pytest config object. + :param op: The operator, e.g. `"=="`, `"!="`, `"not in"`. + :param left: The left operand. + :param right: The right operand. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given item, only conftest + files in the item's directory and its parent directories are consulted. + """ + + +def pytest_assertion_pass(item: Item, lineno: int, orig: str, expl: str) -> None: + """Called whenever an assertion passes. + + .. versionadded:: 5.0 + + Use this hook to do some processing after a passing assertion. + The original assertion information is available in the `orig` string + and the pytest introspected assertion information is available in the + `expl` string. + + This hook must be explicitly enabled by the :confval:`enable_assertion_pass_hook` + configuration option: + + .. tab:: toml + + .. code-block:: toml + + [pytest] + enable_assertion_pass_hook = true + + .. tab:: ini + + .. code-block:: ini + + [pytest] + enable_assertion_pass_hook = true + + You need to **clean the .pyc** files in your project directory and interpreter libraries + when enabling this option, as assertions will require to be re-written. + + :param item: pytest item object of current test. + :param lineno: Line number of the assert statement. + :param orig: String with the original assertion. + :param expl: String with the assert explanation. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given item, only conftest + files in the item's directory and its parent directories are consulted. + """ + + +# ------------------------------------------------------------------------- +# Hooks for influencing reporting (invoked from _pytest_terminal). +# ------------------------------------------------------------------------- + + +@hookspec( + warn_on_impl_args={ + "startdir": HOOK_LEGACY_PATH_ARG.format( + pylib_path_arg="startdir", pathlib_path_arg="start_path" + ), + }, +) +def pytest_report_header( # type:ignore[empty-body] + config: Config, start_path: Path, startdir: LEGACY_PATH +) -> str | list[str]: + """Return a string or list of strings to be displayed as header info for terminal reporting. + + :param config: The pytest config object. + :param start_path: The starting dir. + :type start_path: pathlib.Path + :param startdir: The starting dir (deprecated). + + .. note:: + + Lines returned by a plugin are displayed before those of plugins which + ran before it. + If you want to have your line(s) displayed first, use + :ref:`trylast=True `. + + .. versionchanged:: 7.0.0 + The ``start_path`` parameter was added as a :class:`pathlib.Path` + equivalent of the ``startdir`` parameter. The ``startdir`` parameter + has been deprecated. + + Use in conftest plugins + ======================= + + This hook is only called for :ref:`initial conftests `. + """ + + +@hookspec( + warn_on_impl_args={ + "startdir": HOOK_LEGACY_PATH_ARG.format( + pylib_path_arg="startdir", pathlib_path_arg="start_path" + ), + }, +) +def pytest_report_collectionfinish( # type:ignore[empty-body] + config: Config, + start_path: Path, + startdir: LEGACY_PATH, + items: Sequence[Item], +) -> str | list[str]: + """Return a string or list of strings to be displayed after collection + has finished successfully. + + These strings will be displayed after the standard "collected X items" message. + + .. versionadded:: 3.2 + + :param config: The pytest config object. + :param start_path: The starting dir. + :type start_path: pathlib.Path + :param startdir: The starting dir (deprecated). + :param items: List of pytest items that are going to be executed; this list should not be modified. + + .. note:: + + Lines returned by a plugin are displayed before those of plugins which + ran before it. + If you want to have your line(s) displayed first, use + :ref:`trylast=True `. + + .. versionchanged:: 7.0.0 + The ``start_path`` parameter was added as a :class:`pathlib.Path` + equivalent of the ``startdir`` parameter. The ``startdir`` parameter + has been deprecated. + + Use in conftest plugins + ======================= + + Any conftest plugin can implement this hook. + """ + + +@hookspec(firstresult=True) +def pytest_report_teststatus( # type:ignore[empty-body] + report: CollectReport | TestReport, config: Config +) -> TestShortLogReport | tuple[str, str, str | tuple[str, Mapping[str, bool]]]: + """Return result-category, shortletter and verbose word for status + reporting. + + The result-category is a category in which to count the result, for + example "passed", "skipped", "error" or the empty string. + + The shortletter is shown as testing progresses, for example ".", "s", + "E" or the empty string. + + The verbose word is shown as testing progresses in verbose mode, for + example "PASSED", "SKIPPED", "ERROR" or the empty string. + + pytest may style these implicitly according to the report outcome. + To provide explicit styling, return a tuple for the verbose word, + for example ``"rerun", "R", ("RERUN", {"yellow": True})``. + + :param report: The report object whose status is to be returned. + :param config: The pytest config object. + :returns: The test status. + + Stops at first non-None result, see :ref:`firstresult`. + + Use in conftest plugins + ======================= + + Any conftest plugin can implement this hook. + """ + + +def pytest_terminal_summary( + terminalreporter: TerminalReporter, + exitstatus: ExitCode, + config: Config, +) -> None: + """Add a section to terminal summary reporting. + + :param terminalreporter: The internal terminal reporter object. + :param exitstatus: The exit status that will be reported back to the OS. + :param config: The pytest config object. + + .. versionadded:: 4.2 + The ``config`` parameter. + + Use in conftest plugins + ======================= + + Any conftest plugin can implement this hook. + """ + + +@hookspec(historic=True) +def pytest_warning_recorded( + warning_message: warnings.WarningMessage, + when: Literal["config", "collect", "runtest"], + nodeid: str, + location: tuple[str, int, str] | None, +) -> None: + """Process a warning captured by the internal pytest warnings plugin. + + :param warning_message: + The captured warning. This is the same object produced by :class:`warnings.catch_warnings`, + and contains the same attributes as the parameters of :py:func:`warnings.showwarning`. + + :param when: + Indicates when the warning was captured. Possible values: + + * ``"config"``: during pytest configuration/initialization stage. + * ``"collect"``: during test collection. + * ``"runtest"``: during test execution. + + :param nodeid: + Full id of the item. Empty string for warnings that are not specific to + a particular node. + + :param location: + When available, holds information about the execution context of the captured + warning (filename, linenumber, function). ``function`` evaluates to + when the execution context is at the module level. + + .. versionadded:: 6.0 + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. If the warning is specific to a + particular node, only conftest files in parent directories of the node are + consulted. + """ + + +# ------------------------------------------------------------------------- +# Hooks for influencing skipping +# ------------------------------------------------------------------------- + + +def pytest_markeval_namespace( # type:ignore[empty-body] + config: Config, +) -> dict[str, Any]: + """Called when constructing the globals dictionary used for + evaluating string conditions in xfail/skipif markers. + + This is useful when the condition for a marker requires + objects that are expensive or impossible to obtain during + collection time, which is required by normal boolean + conditions. + + .. versionadded:: 6.2 + + :param config: The pytest config object. + :returns: A dictionary of additional globals to add. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given item, only conftest + files in parent directories of the item are consulted. + """ + + +# ------------------------------------------------------------------------- +# error handling and internal debugging hooks +# ------------------------------------------------------------------------- + + +def pytest_internalerror( + excrepr: ExceptionRepr, + excinfo: ExceptionInfo[BaseException], +) -> bool | None: + """Called for internal errors. + + Return True to suppress the fallback handling of printing an + INTERNALERROR message directly to sys.stderr. + + :param excrepr: The exception repr object. + :param excinfo: The exception info. + + Use in conftest plugins + ======================= + + Any conftest plugin can implement this hook. + """ + + +def pytest_keyboard_interrupt( + excinfo: ExceptionInfo[KeyboardInterrupt | Exit], +) -> None: + """Called for keyboard interrupt. + + :param excinfo: The exception info. + + Use in conftest plugins + ======================= + + Any conftest plugin can implement this hook. + """ + + +def pytest_exception_interact( + node: Item | Collector, + call: CallInfo[Any], + report: CollectReport | TestReport, +) -> None: + """Called when an exception was raised which can potentially be + interactively handled. + + May be called during collection (see :hook:`pytest_make_collect_report`), + in which case ``report`` is a :class:`~pytest.CollectReport`. + + May be called during runtest of an item (see :hook:`pytest_runtest_protocol`), + in which case ``report`` is a :class:`~pytest.TestReport`. + + This hook is not called if the exception that was raised is an internal + exception like ``skip.Exception``. + + :param node: + The item or collector. + :param call: + The call information. Contains the exception. + :param report: + The collection or test report. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given node, only conftest + files in parent directories of the node are consulted. + """ + + +def pytest_enter_pdb(config: Config, pdb: pdb.Pdb) -> None: + """Called upon pdb.set_trace(). + + Can be used by plugins to take special action just before the python + debugger enters interactive mode. + + :param config: The pytest config object. + :param pdb: The Pdb instance. + + Use in conftest plugins + ======================= + + Any conftest plugin can implement this hook. + """ + + +def pytest_leave_pdb(config: Config, pdb: pdb.Pdb) -> None: + """Called when leaving pdb (e.g. with continue after pdb.set_trace()). + + Can be used by plugins to take special action just after the python + debugger leaves interactive mode. + + :param config: The pytest config object. + :param pdb: The Pdb instance. + + Use in conftest plugins + ======================= + + Any conftest plugin can implement this hook. + """ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/junitxml.py b/Backend/venv/lib/python3.12/site-packages/_pytest/junitxml.py new file mode 100644 index 00000000..ae8d2b94 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/_pytest/junitxml.py @@ -0,0 +1,695 @@ +# mypy: allow-untyped-defs +"""Report test results in JUnit-XML format, for use with Jenkins and build +integration servers. + +Based on initial code from Ross Lawley. + +Output conforms to +https://github.com/jenkinsci/xunit-plugin/blob/master/src/main/resources/org/jenkinsci/plugins/xunit/types/model/xsd/junit-10.xsd +""" + +from __future__ import annotations + +from collections.abc import Callable +import functools +import os +import platform +import re +import xml.etree.ElementTree as ET + +from _pytest import nodes +from _pytest import timing +from _pytest._code.code import ExceptionRepr +from _pytest._code.code import ReprFileLocation +from _pytest.config import Config +from _pytest.config import filename_arg +from _pytest.config.argparsing import Parser +from _pytest.fixtures import FixtureRequest +from _pytest.reports import TestReport +from _pytest.stash import StashKey +from _pytest.terminal import TerminalReporter +import pytest + + +xml_key = StashKey["LogXML"]() + + +def bin_xml_escape(arg: object) -> str: + r"""Visually escape invalid XML characters. + + For example, transforms + 'hello\aworld\b' + into + 'hello#x07world#x08' + Note that the #xABs are *not* XML escapes - missing the ampersand «. + The idea is to escape visually for the user rather than for XML itself. + """ + + def repl(matchobj: re.Match[str]) -> str: + i = ord(matchobj.group()) + if i <= 0xFF: + return f"#x{i:02X}" + else: + return f"#x{i:04X}" + + # The spec range of valid chars is: + # Char ::= #x9 | #xA | #xD | [#x20-#xD7FF] | [#xE000-#xFFFD] | [#x10000-#x10FFFF] + # For an unknown(?) reason, we disallow #x7F (DEL) as well. + illegal_xml_re = ( + "[^\u0009\u000a\u000d\u0020-\u007e\u0080-\ud7ff\ue000-\ufffd\u10000-\u10ffff]" + ) + return re.sub(illegal_xml_re, repl, str(arg)) + + +def merge_family(left, right) -> None: + result = {} + for kl, vl in left.items(): + for kr, vr in right.items(): + if not isinstance(vl, list): + raise TypeError(type(vl)) + result[kl] = vl + vr + left.update(result) + + +families = { # pylint: disable=dict-init-mutate + "_base": {"testcase": ["classname", "name"]}, + "_base_legacy": {"testcase": ["file", "line", "url"]}, +} +# xUnit 1.x inherits legacy attributes. +families["xunit1"] = families["_base"].copy() +merge_family(families["xunit1"], families["_base_legacy"]) + +# xUnit 2.x uses strict base attributes. +families["xunit2"] = families["_base"] + + +class _NodeReporter: + def __init__(self, nodeid: str | TestReport, xml: LogXML) -> None: + self.id = nodeid + self.xml = xml + self.add_stats = self.xml.add_stats + self.family = self.xml.family + self.duration = 0.0 + self.properties: list[tuple[str, str]] = [] + self.nodes: list[ET.Element] = [] + self.attrs: dict[str, str] = {} + + def append(self, node: ET.Element) -> None: + self.xml.add_stats(node.tag) + self.nodes.append(node) + + def add_property(self, name: str, value: object) -> None: + self.properties.append((str(name), bin_xml_escape(value))) + + def add_attribute(self, name: str, value: object) -> None: + self.attrs[str(name)] = bin_xml_escape(value) + + def make_properties_node(self) -> ET.Element | None: + """Return a Junit node containing custom properties, if any.""" + if self.properties: + properties = ET.Element("properties") + for name, value in self.properties: + properties.append(ET.Element("property", name=name, value=value)) + return properties + return None + + def record_testreport(self, testreport: TestReport) -> None: + names = mangle_test_address(testreport.nodeid) + existing_attrs = self.attrs + classnames = names[:-1] + if self.xml.prefix: + classnames.insert(0, self.xml.prefix) + attrs: dict[str, str] = { + "classname": ".".join(classnames), + "name": bin_xml_escape(names[-1]), + "file": testreport.location[0], + } + if testreport.location[1] is not None: + attrs["line"] = str(testreport.location[1]) + if hasattr(testreport, "url"): + attrs["url"] = testreport.url + self.attrs = attrs + self.attrs.update(existing_attrs) # Restore any user-defined attributes. + + # Preserve legacy testcase behavior. + if self.family == "xunit1": + return + + # Filter out attributes not permitted by this test family. + # Including custom attributes because they are not valid here. + temp_attrs = {} + for key in self.attrs: + if key in families[self.family]["testcase"]: + temp_attrs[key] = self.attrs[key] + self.attrs = temp_attrs + + def to_xml(self) -> ET.Element: + testcase = ET.Element("testcase", self.attrs, time=f"{self.duration:.3f}") + properties = self.make_properties_node() + if properties is not None: + testcase.append(properties) + testcase.extend(self.nodes) + return testcase + + def _add_simple(self, tag: str, message: str, data: str | None = None) -> None: + node = ET.Element(tag, message=message) + node.text = bin_xml_escape(data) + self.append(node) + + def write_captured_output(self, report: TestReport) -> None: + if not self.xml.log_passing_tests and report.passed: + return + + content_out = report.capstdout + content_log = report.caplog + content_err = report.capstderr + if self.xml.logging == "no": + return + content_all = "" + if self.xml.logging in ["log", "all"]: + content_all = self._prepare_content(content_log, " Captured Log ") + if self.xml.logging in ["system-out", "out-err", "all"]: + content_all += self._prepare_content(content_out, " Captured Out ") + self._write_content(report, content_all, "system-out") + content_all = "" + if self.xml.logging in ["system-err", "out-err", "all"]: + content_all += self._prepare_content(content_err, " Captured Err ") + self._write_content(report, content_all, "system-err") + content_all = "" + if content_all: + self._write_content(report, content_all, "system-out") + + def _prepare_content(self, content: str, header: str) -> str: + return "\n".join([header.center(80, "-"), content, ""]) + + def _write_content(self, report: TestReport, content: str, jheader: str) -> None: + tag = ET.Element(jheader) + tag.text = bin_xml_escape(content) + self.append(tag) + + def append_pass(self, report: TestReport) -> None: + self.add_stats("passed") + + def append_failure(self, report: TestReport) -> None: + # msg = str(report.longrepr.reprtraceback.extraline) + if hasattr(report, "wasxfail"): + self._add_simple("skipped", "xfail-marked test passes unexpectedly") + else: + assert report.longrepr is not None + reprcrash: ReprFileLocation | None = getattr( + report.longrepr, "reprcrash", None + ) + if reprcrash is not None: + message = reprcrash.message + else: + message = str(report.longrepr) + message = bin_xml_escape(message) + self._add_simple("failure", message, str(report.longrepr)) + + def append_collect_error(self, report: TestReport) -> None: + # msg = str(report.longrepr.reprtraceback.extraline) + assert report.longrepr is not None + self._add_simple("error", "collection failure", str(report.longrepr)) + + def append_collect_skipped(self, report: TestReport) -> None: + self._add_simple("skipped", "collection skipped", str(report.longrepr)) + + def append_error(self, report: TestReport) -> None: + assert report.longrepr is not None + reprcrash: ReprFileLocation | None = getattr(report.longrepr, "reprcrash", None) + if reprcrash is not None: + reason = reprcrash.message + else: + reason = str(report.longrepr) + + if report.when == "teardown": + msg = f'failed on teardown with "{reason}"' + else: + msg = f'failed on setup with "{reason}"' + self._add_simple("error", bin_xml_escape(msg), str(report.longrepr)) + + def append_skipped(self, report: TestReport) -> None: + if hasattr(report, "wasxfail"): + xfailreason = report.wasxfail + if xfailreason.startswith("reason: "): + xfailreason = xfailreason[8:] + xfailreason = bin_xml_escape(xfailreason) + skipped = ET.Element("skipped", type="pytest.xfail", message=xfailreason) + self.append(skipped) + else: + assert isinstance(report.longrepr, tuple) + filename, lineno, skipreason = report.longrepr + if skipreason.startswith("Skipped: "): + skipreason = skipreason[9:] + details = f"{filename}:{lineno}: {skipreason}" + + skipped = ET.Element( + "skipped", type="pytest.skip", message=bin_xml_escape(skipreason) + ) + skipped.text = bin_xml_escape(details) + self.append(skipped) + self.write_captured_output(report) + + def finalize(self) -> None: + data = self.to_xml() + self.__dict__.clear() + # Type ignored because mypy doesn't like overriding a method. + # Also the return value doesn't match... + self.to_xml = lambda: data # type: ignore[method-assign] + + +def _warn_incompatibility_with_xunit2( + request: FixtureRequest, fixture_name: str +) -> None: + """Emit a PytestWarning about the given fixture being incompatible with newer xunit revisions.""" + from _pytest.warning_types import PytestWarning + + xml = request.config.stash.get(xml_key, None) + if xml is not None and xml.family not in ("xunit1", "legacy"): + request.node.warn( + PytestWarning( + f"{fixture_name} is incompatible with junit_family '{xml.family}' (use 'legacy' or 'xunit1')" + ) + ) + + +@pytest.fixture +def record_property(request: FixtureRequest) -> Callable[[str, object], None]: + """Add extra properties to the calling test. + + User properties become part of the test report and are available to the + configured reporters, like JUnit XML. + + The fixture is callable with ``name, value``. The value is automatically + XML-encoded. + + Example:: + + def test_function(record_property): + record_property("example_key", 1) + """ + _warn_incompatibility_with_xunit2(request, "record_property") + + def append_property(name: str, value: object) -> None: + request.node.user_properties.append((name, value)) + + return append_property + + +@pytest.fixture +def record_xml_attribute(request: FixtureRequest) -> Callable[[str, object], None]: + """Add extra xml attributes to the tag for the calling test. + + The fixture is callable with ``name, value``. The value is + automatically XML-encoded. + """ + from _pytest.warning_types import PytestExperimentalApiWarning + + request.node.warn( + PytestExperimentalApiWarning("record_xml_attribute is an experimental feature") + ) + + _warn_incompatibility_with_xunit2(request, "record_xml_attribute") + + # Declare noop + def add_attr_noop(name: str, value: object) -> None: + pass + + attr_func = add_attr_noop + + xml = request.config.stash.get(xml_key, None) + if xml is not None: + node_reporter = xml.node_reporter(request.node.nodeid) + attr_func = node_reporter.add_attribute + + return attr_func + + +def _check_record_param_type(param: str, v: str) -> None: + """Used by record_testsuite_property to check that the given parameter name is of the proper + type.""" + __tracebackhide__ = True + if not isinstance(v, str): + msg = "{param} parameter needs to be a string, but {g} given" # type: ignore[unreachable] + raise TypeError(msg.format(param=param, g=type(v).__name__)) + + +@pytest.fixture(scope="session") +def record_testsuite_property(request: FixtureRequest) -> Callable[[str, object], None]: + """Record a new ```` tag as child of the root ````. + + This is suitable to writing global information regarding the entire test + suite, and is compatible with ``xunit2`` JUnit family. + + This is a ``session``-scoped fixture which is called with ``(name, value)``. Example: + + .. code-block:: python + + def test_foo(record_testsuite_property): + record_testsuite_property("ARCH", "PPC") + record_testsuite_property("STORAGE_TYPE", "CEPH") + + :param name: + The property name. + :param value: + The property value. Will be converted to a string. + + .. warning:: + + Currently this fixture **does not work** with the + `pytest-xdist `__ plugin. See + :issue:`7767` for details. + """ + __tracebackhide__ = True + + def record_func(name: str, value: object) -> None: + """No-op function in case --junit-xml was not passed in the command-line.""" + __tracebackhide__ = True + _check_record_param_type("name", name) + + xml = request.config.stash.get(xml_key, None) + if xml is not None: + record_func = xml.add_global_property + return record_func + + +def pytest_addoption(parser: Parser) -> None: + group = parser.getgroup("terminal reporting") + group.addoption( + "--junitxml", + "--junit-xml", + action="store", + dest="xmlpath", + metavar="path", + type=functools.partial(filename_arg, optname="--junitxml"), + default=None, + help="Create junit-xml style report file at given path", + ) + group.addoption( + "--junitprefix", + "--junit-prefix", + action="store", + metavar="str", + default=None, + help="Prepend prefix to classnames in junit-xml output", + ) + parser.addini( + "junit_suite_name", "Test suite name for JUnit report", default="pytest" + ) + parser.addini( + "junit_logging", + "Write captured log messages to JUnit report: " + "one of no|log|system-out|system-err|out-err|all", + default="no", + ) + parser.addini( + "junit_log_passing_tests", + "Capture log information for passing tests to JUnit report: ", + type="bool", + default=True, + ) + parser.addini( + "junit_duration_report", + "Duration time to report: one of total|call", + default="total", + ) # choices=['total', 'call']) + parser.addini( + "junit_family", + "Emit XML for schema: one of legacy|xunit1|xunit2", + default="xunit2", + ) + + +def pytest_configure(config: Config) -> None: + xmlpath = config.option.xmlpath + # Prevent opening xmllog on worker nodes (xdist). + if xmlpath and not hasattr(config, "workerinput"): + junit_family = config.getini("junit_family") + config.stash[xml_key] = LogXML( + xmlpath, + config.option.junitprefix, + config.getini("junit_suite_name"), + config.getini("junit_logging"), + config.getini("junit_duration_report"), + junit_family, + config.getini("junit_log_passing_tests"), + ) + config.pluginmanager.register(config.stash[xml_key]) + + +def pytest_unconfigure(config: Config) -> None: + xml = config.stash.get(xml_key, None) + if xml: + del config.stash[xml_key] + config.pluginmanager.unregister(xml) + + +def mangle_test_address(address: str) -> list[str]: + path, possible_open_bracket, params = address.partition("[") + names = path.split("::") + # Convert file path to dotted path. + names[0] = names[0].replace(nodes.SEP, ".") + names[0] = re.sub(r"\.py$", "", names[0]) + # Put any params back. + names[-1] += possible_open_bracket + params + return names + + +class LogXML: + def __init__( + self, + logfile, + prefix: str | None, + suite_name: str = "pytest", + logging: str = "no", + report_duration: str = "total", + family="xunit1", + log_passing_tests: bool = True, + ) -> None: + logfile = os.path.expanduser(os.path.expandvars(logfile)) + self.logfile = os.path.normpath(os.path.abspath(logfile)) + self.prefix = prefix + self.suite_name = suite_name + self.logging = logging + self.log_passing_tests = log_passing_tests + self.report_duration = report_duration + self.family = family + self.stats: dict[str, int] = dict.fromkeys( + ["error", "passed", "failure", "skipped"], 0 + ) + self.node_reporters: dict[tuple[str | TestReport, object], _NodeReporter] = {} + self.node_reporters_ordered: list[_NodeReporter] = [] + self.global_properties: list[tuple[str, str]] = [] + + # List of reports that failed on call but teardown is pending. + self.open_reports: list[TestReport] = [] + self.cnt_double_fail_tests = 0 + + # Replaces convenience family with real family. + if self.family == "legacy": + self.family = "xunit1" + + def finalize(self, report: TestReport) -> None: + nodeid = getattr(report, "nodeid", report) + # Local hack to handle xdist report order. + workernode = getattr(report, "node", None) + reporter = self.node_reporters.pop((nodeid, workernode)) + + for propname, propvalue in report.user_properties: + reporter.add_property(propname, str(propvalue)) + + if reporter is not None: + reporter.finalize() + + def node_reporter(self, report: TestReport | str) -> _NodeReporter: + nodeid: str | TestReport = getattr(report, "nodeid", report) + # Local hack to handle xdist report order. + workernode = getattr(report, "node", None) + + key = nodeid, workernode + + if key in self.node_reporters: + # TODO: breaks for --dist=each + return self.node_reporters[key] + + reporter = _NodeReporter(nodeid, self) + + self.node_reporters[key] = reporter + self.node_reporters_ordered.append(reporter) + + return reporter + + def add_stats(self, key: str) -> None: + if key in self.stats: + self.stats[key] += 1 + + def _opentestcase(self, report: TestReport) -> _NodeReporter: + reporter = self.node_reporter(report) + reporter.record_testreport(report) + return reporter + + def pytest_runtest_logreport(self, report: TestReport) -> None: + """Handle a setup/call/teardown report, generating the appropriate + XML tags as necessary. + + Note: due to plugins like xdist, this hook may be called in interlaced + order with reports from other nodes. For example: + + Usual call order: + -> setup node1 + -> call node1 + -> teardown node1 + -> setup node2 + -> call node2 + -> teardown node2 + + Possible call order in xdist: + -> setup node1 + -> call node1 + -> setup node2 + -> call node2 + -> teardown node2 + -> teardown node1 + """ + close_report = None + if report.passed: + if report.when == "call": # ignore setup/teardown + reporter = self._opentestcase(report) + reporter.append_pass(report) + elif report.failed: + if report.when == "teardown": + # The following vars are needed when xdist plugin is used. + report_wid = getattr(report, "worker_id", None) + report_ii = getattr(report, "item_index", None) + close_report = next( + ( + rep + for rep in self.open_reports + if ( + rep.nodeid == report.nodeid + and getattr(rep, "item_index", None) == report_ii + and getattr(rep, "worker_id", None) == report_wid + ) + ), + None, + ) + if close_report: + # We need to open new testcase in case we have failure in + # call and error in teardown in order to follow junit + # schema. + self.finalize(close_report) + self.cnt_double_fail_tests += 1 + reporter = self._opentestcase(report) + if report.when == "call": + reporter.append_failure(report) + self.open_reports.append(report) + if not self.log_passing_tests: + reporter.write_captured_output(report) + else: + reporter.append_error(report) + elif report.skipped: + reporter = self._opentestcase(report) + reporter.append_skipped(report) + self.update_testcase_duration(report) + if report.when == "teardown": + reporter = self._opentestcase(report) + reporter.write_captured_output(report) + + self.finalize(report) + report_wid = getattr(report, "worker_id", None) + report_ii = getattr(report, "item_index", None) + close_report = next( + ( + rep + for rep in self.open_reports + if ( + rep.nodeid == report.nodeid + and getattr(rep, "item_index", None) == report_ii + and getattr(rep, "worker_id", None) == report_wid + ) + ), + None, + ) + if close_report: + self.open_reports.remove(close_report) + + def update_testcase_duration(self, report: TestReport) -> None: + """Accumulate total duration for nodeid from given report and update + the Junit.testcase with the new total if already created.""" + if self.report_duration in {"total", report.when}: + reporter = self.node_reporter(report) + reporter.duration += getattr(report, "duration", 0.0) + + def pytest_collectreport(self, report: TestReport) -> None: + if not report.passed: + reporter = self._opentestcase(report) + if report.failed: + reporter.append_collect_error(report) + else: + reporter.append_collect_skipped(report) + + def pytest_internalerror(self, excrepr: ExceptionRepr) -> None: + reporter = self.node_reporter("internal") + reporter.attrs.update(classname="pytest", name="internal") + reporter._add_simple("error", "internal error", str(excrepr)) + + def pytest_sessionstart(self) -> None: + self.suite_start = timing.Instant() + + def pytest_sessionfinish(self) -> None: + dirname = os.path.dirname(os.path.abspath(self.logfile)) + # exist_ok avoids filesystem race conditions between checking path existence and requesting creation + os.makedirs(dirname, exist_ok=True) + + with open(self.logfile, "w", encoding="utf-8") as logfile: + duration = self.suite_start.elapsed() + + numtests = ( + self.stats["passed"] + + self.stats["failure"] + + self.stats["skipped"] + + self.stats["error"] + - self.cnt_double_fail_tests + ) + logfile.write('') + + suite_node = ET.Element( + "testsuite", + name=self.suite_name, + errors=str(self.stats["error"]), + failures=str(self.stats["failure"]), + skipped=str(self.stats["skipped"]), + tests=str(numtests), + time=f"{duration.seconds:.3f}", + timestamp=self.suite_start.as_utc().astimezone().isoformat(), + hostname=platform.node(), + ) + global_properties = self._get_global_properties_node() + if global_properties is not None: + suite_node.append(global_properties) + for node_reporter in self.node_reporters_ordered: + suite_node.append(node_reporter.to_xml()) + testsuites = ET.Element("testsuites") + testsuites.set("name", "pytest tests") + testsuites.append(suite_node) + logfile.write(ET.tostring(testsuites, encoding="unicode")) + + def pytest_terminal_summary( + self, terminalreporter: TerminalReporter, config: pytest.Config + ) -> None: + if config.get_verbosity() >= 0: + terminalreporter.write_sep("-", f"generated xml file: {self.logfile}") + + def add_global_property(self, name: str, value: object) -> None: + __tracebackhide__ = True + _check_record_param_type("name", name) + self.global_properties.append((name, bin_xml_escape(value))) + + def _get_global_properties_node(self) -> ET.Element | None: + """Return a Junit node containing custom properties, if any.""" + if self.global_properties: + properties = ET.Element("properties") + for name, value in self.global_properties: + properties.append(ET.Element("property", name=name, value=value)) + return properties + return None diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/legacypath.py b/Backend/venv/lib/python3.12/site-packages/_pytest/legacypath.py new file mode 100644 index 00000000..59e8ef6e --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/_pytest/legacypath.py @@ -0,0 +1,468 @@ +# mypy: allow-untyped-defs +"""Add backward compatibility support for the legacy py path type.""" + +from __future__ import annotations + +import dataclasses +from pathlib import Path +import shlex +import subprocess +from typing import Final +from typing import final +from typing import TYPE_CHECKING + +from iniconfig import SectionWrapper + +from _pytest.cacheprovider import Cache +from _pytest.compat import LEGACY_PATH +from _pytest.compat import legacy_path +from _pytest.config import Config +from _pytest.config import hookimpl +from _pytest.config import PytestPluginManager +from _pytest.deprecated import check_ispytest +from _pytest.fixtures import fixture +from _pytest.fixtures import FixtureRequest +from _pytest.main import Session +from _pytest.monkeypatch import MonkeyPatch +from _pytest.nodes import Collector +from _pytest.nodes import Item +from _pytest.nodes import Node +from _pytest.pytester import HookRecorder +from _pytest.pytester import Pytester +from _pytest.pytester import RunResult +from _pytest.terminal import TerminalReporter +from _pytest.tmpdir import TempPathFactory + + +if TYPE_CHECKING: + import pexpect + + +@final +class Testdir: + """ + Similar to :class:`Pytester`, but this class works with legacy legacy_path objects instead. + + All methods just forward to an internal :class:`Pytester` instance, converting results + to `legacy_path` objects as necessary. + """ + + __test__ = False + + CLOSE_STDIN: Final = Pytester.CLOSE_STDIN + TimeoutExpired: Final = Pytester.TimeoutExpired + + def __init__(self, pytester: Pytester, *, _ispytest: bool = False) -> None: + check_ispytest(_ispytest) + self._pytester = pytester + + @property + def tmpdir(self) -> LEGACY_PATH: + """Temporary directory where tests are executed.""" + return legacy_path(self._pytester.path) + + @property + def test_tmproot(self) -> LEGACY_PATH: + return legacy_path(self._pytester._test_tmproot) + + @property + def request(self): + return self._pytester._request + + @property + def plugins(self): + return self._pytester.plugins + + @plugins.setter + def plugins(self, plugins): + self._pytester.plugins = plugins + + @property + def monkeypatch(self) -> MonkeyPatch: + return self._pytester._monkeypatch + + def make_hook_recorder(self, pluginmanager) -> HookRecorder: + """See :meth:`Pytester.make_hook_recorder`.""" + return self._pytester.make_hook_recorder(pluginmanager) + + def chdir(self) -> None: + """See :meth:`Pytester.chdir`.""" + return self._pytester.chdir() + + def finalize(self) -> None: + return self._pytester._finalize() + + def makefile(self, ext, *args, **kwargs) -> LEGACY_PATH: + """See :meth:`Pytester.makefile`.""" + if ext and not ext.startswith("."): + # pytester.makefile is going to throw a ValueError in a way that + # testdir.makefile did not, because + # pathlib.Path is stricter suffixes than py.path + # This ext arguments is likely user error, but since testdir has + # allowed this, we will prepend "." as a workaround to avoid breaking + # testdir usage that worked before + ext = "." + ext + return legacy_path(self._pytester.makefile(ext, *args, **kwargs)) + + def makeconftest(self, source) -> LEGACY_PATH: + """See :meth:`Pytester.makeconftest`.""" + return legacy_path(self._pytester.makeconftest(source)) + + def makeini(self, source) -> LEGACY_PATH: + """See :meth:`Pytester.makeini`.""" + return legacy_path(self._pytester.makeini(source)) + + def getinicfg(self, source: str) -> SectionWrapper: + """See :meth:`Pytester.getinicfg`.""" + return self._pytester.getinicfg(source) + + def makepyprojecttoml(self, source) -> LEGACY_PATH: + """See :meth:`Pytester.makepyprojecttoml`.""" + return legacy_path(self._pytester.makepyprojecttoml(source)) + + def makepyfile(self, *args, **kwargs) -> LEGACY_PATH: + """See :meth:`Pytester.makepyfile`.""" + return legacy_path(self._pytester.makepyfile(*args, **kwargs)) + + def maketxtfile(self, *args, **kwargs) -> LEGACY_PATH: + """See :meth:`Pytester.maketxtfile`.""" + return legacy_path(self._pytester.maketxtfile(*args, **kwargs)) + + def syspathinsert(self, path=None) -> None: + """See :meth:`Pytester.syspathinsert`.""" + return self._pytester.syspathinsert(path) + + def mkdir(self, name) -> LEGACY_PATH: + """See :meth:`Pytester.mkdir`.""" + return legacy_path(self._pytester.mkdir(name)) + + def mkpydir(self, name) -> LEGACY_PATH: + """See :meth:`Pytester.mkpydir`.""" + return legacy_path(self._pytester.mkpydir(name)) + + def copy_example(self, name=None) -> LEGACY_PATH: + """See :meth:`Pytester.copy_example`.""" + return legacy_path(self._pytester.copy_example(name)) + + def getnode(self, config: Config, arg) -> Item | Collector | None: + """See :meth:`Pytester.getnode`.""" + return self._pytester.getnode(config, arg) + + def getpathnode(self, path): + """See :meth:`Pytester.getpathnode`.""" + return self._pytester.getpathnode(path) + + def genitems(self, colitems: list[Item | Collector]) -> list[Item]: + """See :meth:`Pytester.genitems`.""" + return self._pytester.genitems(colitems) + + def runitem(self, source): + """See :meth:`Pytester.runitem`.""" + return self._pytester.runitem(source) + + def inline_runsource(self, source, *cmdlineargs): + """See :meth:`Pytester.inline_runsource`.""" + return self._pytester.inline_runsource(source, *cmdlineargs) + + def inline_genitems(self, *args): + """See :meth:`Pytester.inline_genitems`.""" + return self._pytester.inline_genitems(*args) + + def inline_run(self, *args, plugins=(), no_reraise_ctrlc: bool = False): + """See :meth:`Pytester.inline_run`.""" + return self._pytester.inline_run( + *args, plugins=plugins, no_reraise_ctrlc=no_reraise_ctrlc + ) + + def runpytest_inprocess(self, *args, **kwargs) -> RunResult: + """See :meth:`Pytester.runpytest_inprocess`.""" + return self._pytester.runpytest_inprocess(*args, **kwargs) + + def runpytest(self, *args, **kwargs) -> RunResult: + """See :meth:`Pytester.runpytest`.""" + return self._pytester.runpytest(*args, **kwargs) + + def parseconfig(self, *args) -> Config: + """See :meth:`Pytester.parseconfig`.""" + return self._pytester.parseconfig(*args) + + def parseconfigure(self, *args) -> Config: + """See :meth:`Pytester.parseconfigure`.""" + return self._pytester.parseconfigure(*args) + + def getitem(self, source, funcname="test_func"): + """See :meth:`Pytester.getitem`.""" + return self._pytester.getitem(source, funcname) + + def getitems(self, source): + """See :meth:`Pytester.getitems`.""" + return self._pytester.getitems(source) + + def getmodulecol(self, source, configargs=(), withinit=False): + """See :meth:`Pytester.getmodulecol`.""" + return self._pytester.getmodulecol( + source, configargs=configargs, withinit=withinit + ) + + def collect_by_name(self, modcol: Collector, name: str) -> Item | Collector | None: + """See :meth:`Pytester.collect_by_name`.""" + return self._pytester.collect_by_name(modcol, name) + + def popen( + self, + cmdargs, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + stdin=CLOSE_STDIN, + **kw, + ): + """See :meth:`Pytester.popen`.""" + return self._pytester.popen(cmdargs, stdout, stderr, stdin, **kw) + + def run(self, *cmdargs, timeout=None, stdin=CLOSE_STDIN) -> RunResult: + """See :meth:`Pytester.run`.""" + return self._pytester.run(*cmdargs, timeout=timeout, stdin=stdin) + + def runpython(self, script) -> RunResult: + """See :meth:`Pytester.runpython`.""" + return self._pytester.runpython(script) + + def runpython_c(self, command): + """See :meth:`Pytester.runpython_c`.""" + return self._pytester.runpython_c(command) + + def runpytest_subprocess(self, *args, timeout=None) -> RunResult: + """See :meth:`Pytester.runpytest_subprocess`.""" + return self._pytester.runpytest_subprocess(*args, timeout=timeout) + + def spawn_pytest(self, string: str, expect_timeout: float = 10.0) -> pexpect.spawn: + """See :meth:`Pytester.spawn_pytest`.""" + return self._pytester.spawn_pytest(string, expect_timeout=expect_timeout) + + def spawn(self, cmd: str, expect_timeout: float = 10.0) -> pexpect.spawn: + """See :meth:`Pytester.spawn`.""" + return self._pytester.spawn(cmd, expect_timeout=expect_timeout) + + def __repr__(self) -> str: + return f"" + + def __str__(self) -> str: + return str(self.tmpdir) + + +class LegacyTestdirPlugin: + @staticmethod + @fixture + def testdir(pytester: Pytester) -> Testdir: + """ + Identical to :fixture:`pytester`, and provides an instance whose methods return + legacy ``LEGACY_PATH`` objects instead when applicable. + + New code should avoid using :fixture:`testdir` in favor of :fixture:`pytester`. + """ + return Testdir(pytester, _ispytest=True) + + +@final +@dataclasses.dataclass +class TempdirFactory: + """Backward compatibility wrapper that implements ``py.path.local`` + for :class:`TempPathFactory`. + + .. note:: + These days, it is preferred to use ``tmp_path_factory``. + + :ref:`About the tmpdir and tmpdir_factory fixtures`. + + """ + + _tmppath_factory: TempPathFactory + + def __init__( + self, tmppath_factory: TempPathFactory, *, _ispytest: bool = False + ) -> None: + check_ispytest(_ispytest) + self._tmppath_factory = tmppath_factory + + def mktemp(self, basename: str, numbered: bool = True) -> LEGACY_PATH: + """Same as :meth:`TempPathFactory.mktemp`, but returns a ``py.path.local`` object.""" + return legacy_path(self._tmppath_factory.mktemp(basename, numbered).resolve()) + + def getbasetemp(self) -> LEGACY_PATH: + """Same as :meth:`TempPathFactory.getbasetemp`, but returns a ``py.path.local`` object.""" + return legacy_path(self._tmppath_factory.getbasetemp().resolve()) + + +class LegacyTmpdirPlugin: + @staticmethod + @fixture(scope="session") + def tmpdir_factory(request: FixtureRequest) -> TempdirFactory: + """Return a :class:`pytest.TempdirFactory` instance for the test session.""" + # Set dynamically by pytest_configure(). + return request.config._tmpdirhandler # type: ignore + + @staticmethod + @fixture + def tmpdir(tmp_path: Path) -> LEGACY_PATH: + """Return a temporary directory (as `legacy_path`_ object) + which is unique to each test function invocation. + The temporary directory is created as a subdirectory + of the base temporary directory, with configurable retention, + as discussed in :ref:`temporary directory location and retention`. + + .. note:: + These days, it is preferred to use ``tmp_path``. + + :ref:`About the tmpdir and tmpdir_factory fixtures`. + + .. _legacy_path: https://py.readthedocs.io/en/latest/path.html + """ + return legacy_path(tmp_path) + + +def Cache_makedir(self: Cache, name: str) -> LEGACY_PATH: + """Return a directory path object with the given name. + + Same as :func:`mkdir`, but returns a legacy py path instance. + """ + return legacy_path(self.mkdir(name)) + + +def FixtureRequest_fspath(self: FixtureRequest) -> LEGACY_PATH: + """(deprecated) The file system path of the test module which collected this test.""" + return legacy_path(self.path) + + +def TerminalReporter_startdir(self: TerminalReporter) -> LEGACY_PATH: + """The directory from which pytest was invoked. + + Prefer to use ``startpath`` which is a :class:`pathlib.Path`. + + :type: LEGACY_PATH + """ + return legacy_path(self.startpath) + + +def Config_invocation_dir(self: Config) -> LEGACY_PATH: + """The directory from which pytest was invoked. + + Prefer to use :attr:`invocation_params.dir `, + which is a :class:`pathlib.Path`. + + :type: LEGACY_PATH + """ + return legacy_path(str(self.invocation_params.dir)) + + +def Config_rootdir(self: Config) -> LEGACY_PATH: + """The path to the :ref:`rootdir `. + + Prefer to use :attr:`rootpath`, which is a :class:`pathlib.Path`. + + :type: LEGACY_PATH + """ + return legacy_path(str(self.rootpath)) + + +def Config_inifile(self: Config) -> LEGACY_PATH | None: + """The path to the :ref:`configfile `. + + Prefer to use :attr:`inipath`, which is a :class:`pathlib.Path`. + + :type: Optional[LEGACY_PATH] + """ + return legacy_path(str(self.inipath)) if self.inipath else None + + +def Session_startdir(self: Session) -> LEGACY_PATH: + """The path from which pytest was invoked. + + Prefer to use ``startpath`` which is a :class:`pathlib.Path`. + + :type: LEGACY_PATH + """ + return legacy_path(self.startpath) + + +def Config__getini_unknown_type(self, name: str, type: str, value: str | list[str]): + if type == "pathlist": + # TODO: This assert is probably not valid in all cases. + assert self.inipath is not None + dp = self.inipath.parent + input_values = shlex.split(value) if isinstance(value, str) else value + return [legacy_path(str(dp / x)) for x in input_values] + else: + raise ValueError(f"unknown configuration type: {type}", value) + + +def Node_fspath(self: Node) -> LEGACY_PATH: + """(deprecated) returns a legacy_path copy of self.path""" + return legacy_path(self.path) + + +def Node_fspath_set(self: Node, value: LEGACY_PATH) -> None: + self.path = Path(value) + + +@hookimpl(tryfirst=True) +def pytest_load_initial_conftests(early_config: Config) -> None: + """Monkeypatch legacy path attributes in several classes, as early as possible.""" + mp = MonkeyPatch() + early_config.add_cleanup(mp.undo) + + # Add Cache.makedir(). + mp.setattr(Cache, "makedir", Cache_makedir, raising=False) + + # Add FixtureRequest.fspath property. + mp.setattr(FixtureRequest, "fspath", property(FixtureRequest_fspath), raising=False) + + # Add TerminalReporter.startdir property. + mp.setattr( + TerminalReporter, "startdir", property(TerminalReporter_startdir), raising=False + ) + + # Add Config.{invocation_dir,rootdir,inifile} properties. + mp.setattr(Config, "invocation_dir", property(Config_invocation_dir), raising=False) + mp.setattr(Config, "rootdir", property(Config_rootdir), raising=False) + mp.setattr(Config, "inifile", property(Config_inifile), raising=False) + + # Add Session.startdir property. + mp.setattr(Session, "startdir", property(Session_startdir), raising=False) + + # Add pathlist configuration type. + mp.setattr(Config, "_getini_unknown_type", Config__getini_unknown_type) + + # Add Node.fspath property. + mp.setattr(Node, "fspath", property(Node_fspath, Node_fspath_set), raising=False) + + +@hookimpl +def pytest_configure(config: Config) -> None: + """Installs the LegacyTmpdirPlugin if the ``tmpdir`` plugin is also installed.""" + if config.pluginmanager.has_plugin("tmpdir"): + mp = MonkeyPatch() + config.add_cleanup(mp.undo) + # Create TmpdirFactory and attach it to the config object. + # + # This is to comply with existing plugins which expect the handler to be + # available at pytest_configure time, but ideally should be moved entirely + # to the tmpdir_factory session fixture. + try: + tmp_path_factory = config._tmp_path_factory # type: ignore[attr-defined] + except AttributeError: + # tmpdir plugin is blocked. + pass + else: + _tmpdirhandler = TempdirFactory(tmp_path_factory, _ispytest=True) + mp.setattr(config, "_tmpdirhandler", _tmpdirhandler, raising=False) + + config.pluginmanager.register(LegacyTmpdirPlugin, "legacypath-tmpdir") + + +@hookimpl +def pytest_plugin_registered(plugin: object, manager: PytestPluginManager) -> None: + # pytester is not loaded by default and is commonly loaded from a conftest, + # so checking for it in `pytest_configure` is not enough. + is_pytester = plugin is manager.get_plugin("pytester") + if is_pytester and not manager.is_registered(LegacyTestdirPlugin): + manager.register(LegacyTestdirPlugin, "legacypath-pytester") diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/logging.py b/Backend/venv/lib/python3.12/site-packages/_pytest/logging.py new file mode 100644 index 00000000..e4fed579 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/_pytest/logging.py @@ -0,0 +1,960 @@ +# mypy: allow-untyped-defs +"""Access and control log capturing.""" + +from __future__ import annotations + +from collections.abc import Generator +from collections.abc import Mapping +from collections.abc import Set as AbstractSet +from contextlib import contextmanager +from contextlib import nullcontext +from datetime import datetime +from datetime import timedelta +from datetime import timezone +import io +from io import StringIO +import logging +from logging import LogRecord +import os +from pathlib import Path +import re +from types import TracebackType +from typing import final +from typing import Generic +from typing import Literal +from typing import TYPE_CHECKING +from typing import TypeVar + +from _pytest import nodes +from _pytest._io import TerminalWriter +from _pytest.capture import CaptureManager +from _pytest.config import _strtobool +from _pytest.config import Config +from _pytest.config import create_terminal_writer +from _pytest.config import hookimpl +from _pytest.config import UsageError +from _pytest.config.argparsing import Parser +from _pytest.deprecated import check_ispytest +from _pytest.fixtures import fixture +from _pytest.fixtures import FixtureRequest +from _pytest.main import Session +from _pytest.stash import StashKey +from _pytest.terminal import TerminalReporter + + +if TYPE_CHECKING: + logging_StreamHandler = logging.StreamHandler[StringIO] +else: + logging_StreamHandler = logging.StreamHandler + +DEFAULT_LOG_FORMAT = "%(levelname)-8s %(name)s:%(filename)s:%(lineno)d %(message)s" +DEFAULT_LOG_DATE_FORMAT = "%H:%M:%S" +_ANSI_ESCAPE_SEQ = re.compile(r"\x1b\[[\d;]+m") +caplog_handler_key = StashKey["LogCaptureHandler"]() +caplog_records_key = StashKey[dict[str, list[logging.LogRecord]]]() + + +def _remove_ansi_escape_sequences(text: str) -> str: + return _ANSI_ESCAPE_SEQ.sub("", text) + + +class DatetimeFormatter(logging.Formatter): + """A logging formatter which formats record with + :func:`datetime.datetime.strftime` formatter instead of + :func:`time.strftime` in case of microseconds in format string. + """ + + def formatTime(self, record: LogRecord, datefmt: str | None = None) -> str: + if datefmt and "%f" in datefmt: + ct = self.converter(record.created) + tz = timezone(timedelta(seconds=ct.tm_gmtoff), ct.tm_zone) + # Construct `datetime.datetime` object from `struct_time` + # and msecs information from `record` + # Using int() instead of round() to avoid it exceeding 1_000_000 and causing a ValueError (#11861). + dt = datetime(*ct[0:6], microsecond=int(record.msecs * 1000), tzinfo=tz) + return dt.strftime(datefmt) + # Use `logging.Formatter` for non-microsecond formats + return super().formatTime(record, datefmt) + + +class ColoredLevelFormatter(DatetimeFormatter): + """A logging formatter which colorizes the %(levelname)..s part of the + log format passed to __init__.""" + + LOGLEVEL_COLOROPTS: Mapping[int, AbstractSet[str]] = { + logging.CRITICAL: {"red"}, + logging.ERROR: {"red", "bold"}, + logging.WARNING: {"yellow"}, + logging.WARN: {"yellow"}, + logging.INFO: {"green"}, + logging.DEBUG: {"purple"}, + logging.NOTSET: set(), + } + LEVELNAME_FMT_REGEX = re.compile(r"%\(levelname\)([+-.]?\d*(?:\.\d+)?s)") + + def __init__(self, terminalwriter: TerminalWriter, *args, **kwargs) -> None: + super().__init__(*args, **kwargs) + self._terminalwriter = terminalwriter + self._original_fmt = self._style._fmt + self._level_to_fmt_mapping: dict[int, str] = {} + + for level, color_opts in self.LOGLEVEL_COLOROPTS.items(): + self.add_color_level(level, *color_opts) + + def add_color_level(self, level: int, *color_opts: str) -> None: + """Add or update color opts for a log level. + + :param level: + Log level to apply a style to, e.g. ``logging.INFO``. + :param color_opts: + ANSI escape sequence color options. Capitalized colors indicates + background color, i.e. ``'green', 'Yellow', 'bold'`` will give bold + green text on yellow background. + + .. warning:: + This is an experimental API. + """ + assert self._fmt is not None + levelname_fmt_match = self.LEVELNAME_FMT_REGEX.search(self._fmt) + if not levelname_fmt_match: + return + levelname_fmt = levelname_fmt_match.group() + + formatted_levelname = levelname_fmt % {"levelname": logging.getLevelName(level)} + + # add ANSI escape sequences around the formatted levelname + color_kwargs = {name: True for name in color_opts} + colorized_formatted_levelname = self._terminalwriter.markup( + formatted_levelname, **color_kwargs + ) + self._level_to_fmt_mapping[level] = self.LEVELNAME_FMT_REGEX.sub( + colorized_formatted_levelname, self._fmt + ) + + def format(self, record: logging.LogRecord) -> str: + fmt = self._level_to_fmt_mapping.get(record.levelno, self._original_fmt) + self._style._fmt = fmt + return super().format(record) + + +class PercentStyleMultiline(logging.PercentStyle): + """A logging style with special support for multiline messages. + + If the message of a record consists of multiple lines, this style + formats the message as if each line were logged separately. + """ + + def __init__(self, fmt: str, auto_indent: int | str | bool | None) -> None: + super().__init__(fmt) + self._auto_indent = self._get_auto_indent(auto_indent) + + @staticmethod + def _get_auto_indent(auto_indent_option: int | str | bool | None) -> int: + """Determine the current auto indentation setting. + + Specify auto indent behavior (on/off/fixed) by passing in + extra={"auto_indent": [value]} to the call to logging.log() or + using a --log-auto-indent [value] command line or the + log_auto_indent [value] config option. + + Default behavior is auto-indent off. + + Using the string "True" or "on" or the boolean True as the value + turns auto indent on, using the string "False" or "off" or the + boolean False or the int 0 turns it off, and specifying a + positive integer fixes the indentation position to the value + specified. + + Any other values for the option are invalid, and will silently be + converted to the default. + + :param None|bool|int|str auto_indent_option: + User specified option for indentation from command line, config + or extra kwarg. Accepts int, bool or str. str option accepts the + same range of values as boolean config options, as well as + positive integers represented in str form. + + :returns: + Indentation value, which can be + -1 (automatically determine indentation) or + 0 (auto-indent turned off) or + >0 (explicitly set indentation position). + """ + if auto_indent_option is None: + return 0 + elif isinstance(auto_indent_option, bool): + if auto_indent_option: + return -1 + else: + return 0 + elif isinstance(auto_indent_option, int): + return int(auto_indent_option) + elif isinstance(auto_indent_option, str): + try: + return int(auto_indent_option) + except ValueError: + pass + try: + if _strtobool(auto_indent_option): + return -1 + except ValueError: + return 0 + + return 0 + + def format(self, record: logging.LogRecord) -> str: + if "\n" in record.message: + if hasattr(record, "auto_indent"): + # Passed in from the "extra={}" kwarg on the call to logging.log(). + auto_indent = self._get_auto_indent(record.auto_indent) + else: + auto_indent = self._auto_indent + + if auto_indent: + lines = record.message.splitlines() + formatted = self._fmt % {**record.__dict__, "message": lines[0]} + + if auto_indent < 0: + indentation = _remove_ansi_escape_sequences(formatted).find( + lines[0] + ) + else: + # Optimizes logging by allowing a fixed indentation. + indentation = auto_indent + lines[0] = formatted + return ("\n" + " " * indentation).join(lines) + return self._fmt % record.__dict__ + + +def get_option_ini(config: Config, *names: str): + for name in names: + ret = config.getoption(name) # 'default' arg won't work as expected + if ret is None: + ret = config.getini(name) + if ret: + return ret + + +def pytest_addoption(parser: Parser) -> None: + """Add options to control log capturing.""" + group = parser.getgroup("logging") + + def add_option_ini(option, dest, default=None, type=None, **kwargs): + parser.addini( + dest, default=default, type=type, help="Default value for " + option + ) + group.addoption(option, dest=dest, **kwargs) + + add_option_ini( + "--log-level", + dest="log_level", + default=None, + metavar="LEVEL", + help=( + "Level of messages to catch/display." + " Not set by default, so it depends on the root/parent log handler's" + ' effective level, where it is "WARNING" by default.' + ), + ) + add_option_ini( + "--log-format", + dest="log_format", + default=DEFAULT_LOG_FORMAT, + help="Log format used by the logging module", + ) + add_option_ini( + "--log-date-format", + dest="log_date_format", + default=DEFAULT_LOG_DATE_FORMAT, + help="Log date format used by the logging module", + ) + parser.addini( + "log_cli", + default=False, + type="bool", + help='Enable log display during test run (also known as "live logging")', + ) + add_option_ini( + "--log-cli-level", dest="log_cli_level", default=None, help="CLI logging level" + ) + add_option_ini( + "--log-cli-format", + dest="log_cli_format", + default=None, + help="Log format used by the logging module", + ) + add_option_ini( + "--log-cli-date-format", + dest="log_cli_date_format", + default=None, + help="Log date format used by the logging module", + ) + add_option_ini( + "--log-file", + dest="log_file", + default=None, + help="Path to a file when logging will be written to", + ) + add_option_ini( + "--log-file-mode", + dest="log_file_mode", + default="w", + choices=["w", "a"], + help="Log file open mode", + ) + add_option_ini( + "--log-file-level", + dest="log_file_level", + default=None, + help="Log file logging level", + ) + add_option_ini( + "--log-file-format", + dest="log_file_format", + default=None, + help="Log format used by the logging module", + ) + add_option_ini( + "--log-file-date-format", + dest="log_file_date_format", + default=None, + help="Log date format used by the logging module", + ) + add_option_ini( + "--log-auto-indent", + dest="log_auto_indent", + default=None, + help="Auto-indent multiline messages passed to the logging module. Accepts true|on, false|off or an integer.", + ) + group.addoption( + "--log-disable", + action="append", + default=[], + dest="logger_disable", + help="Disable a logger by name. Can be passed multiple times.", + ) + + +_HandlerType = TypeVar("_HandlerType", bound=logging.Handler) + + +# Not using @contextmanager for performance reasons. +class catching_logs(Generic[_HandlerType]): + """Context manager that prepares the whole logging machinery properly.""" + + __slots__ = ("handler", "level", "orig_level") + + def __init__(self, handler: _HandlerType, level: int | None = None) -> None: + self.handler = handler + self.level = level + + def __enter__(self) -> _HandlerType: + root_logger = logging.getLogger() + if self.level is not None: + self.handler.setLevel(self.level) + root_logger.addHandler(self.handler) + if self.level is not None: + self.orig_level = root_logger.level + root_logger.setLevel(min(self.orig_level, self.level)) + return self.handler + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + root_logger = logging.getLogger() + if self.level is not None: + root_logger.setLevel(self.orig_level) + root_logger.removeHandler(self.handler) + + +class LogCaptureHandler(logging_StreamHandler): + """A logging handler that stores log records and the log text.""" + + def __init__(self) -> None: + """Create a new log handler.""" + super().__init__(StringIO()) + self.records: list[logging.LogRecord] = [] + + def emit(self, record: logging.LogRecord) -> None: + """Keep the log records in a list in addition to the log text.""" + self.records.append(record) + super().emit(record) + + def reset(self) -> None: + self.records = [] + self.stream = StringIO() + + def clear(self) -> None: + self.records.clear() + self.stream = StringIO() + + def handleError(self, record: logging.LogRecord) -> None: + if logging.raiseExceptions: + # Fail the test if the log message is bad (emit failed). + # The default behavior of logging is to print "Logging error" + # to stderr with the call stack and some extra details. + # pytest wants to make such mistakes visible during testing. + raise # noqa: PLE0704 + + +@final +class LogCaptureFixture: + """Provides access and control of log capturing.""" + + def __init__(self, item: nodes.Node, *, _ispytest: bool = False) -> None: + check_ispytest(_ispytest) + self._item = item + self._initial_handler_level: int | None = None + # Dict of log name -> log level. + self._initial_logger_levels: dict[str | None, int] = {} + self._initial_disabled_logging_level: int | None = None + + def _finalize(self) -> None: + """Finalize the fixture. + + This restores the log levels and the disabled logging levels changed by :meth:`set_level`. + """ + # Restore log levels. + if self._initial_handler_level is not None: + self.handler.setLevel(self._initial_handler_level) + for logger_name, level in self._initial_logger_levels.items(): + logger = logging.getLogger(logger_name) + logger.setLevel(level) + # Disable logging at the original disabled logging level. + if self._initial_disabled_logging_level is not None: + logging.disable(self._initial_disabled_logging_level) + self._initial_disabled_logging_level = None + + @property + def handler(self) -> LogCaptureHandler: + """Get the logging handler used by the fixture.""" + return self._item.stash[caplog_handler_key] + + def get_records( + self, when: Literal["setup", "call", "teardown"] + ) -> list[logging.LogRecord]: + """Get the logging records for one of the possible test phases. + + :param when: + Which test phase to obtain the records from. + Valid values are: "setup", "call" and "teardown". + + :returns: The list of captured records at the given stage. + + .. versionadded:: 3.4 + """ + return self._item.stash[caplog_records_key].get(when, []) + + @property + def text(self) -> str: + """The formatted log text.""" + return _remove_ansi_escape_sequences(self.handler.stream.getvalue()) + + @property + def records(self) -> list[logging.LogRecord]: + """The list of log records.""" + return self.handler.records + + @property + def record_tuples(self) -> list[tuple[str, int, str]]: + """A list of a stripped down version of log records intended + for use in assertion comparison. + + The format of the tuple is: + + (logger_name, log_level, message) + """ + return [(r.name, r.levelno, r.getMessage()) for r in self.records] + + @property + def messages(self) -> list[str]: + """A list of format-interpolated log messages. + + Unlike 'records', which contains the format string and parameters for + interpolation, log messages in this list are all interpolated. + + Unlike 'text', which contains the output from the handler, log + messages in this list are unadorned with levels, timestamps, etc, + making exact comparisons more reliable. + + Note that traceback or stack info (from :func:`logging.exception` or + the `exc_info` or `stack_info` arguments to the logging functions) is + not included, as this is added by the formatter in the handler. + + .. versionadded:: 3.7 + """ + return [r.getMessage() for r in self.records] + + def clear(self) -> None: + """Reset the list of log records and the captured log text.""" + self.handler.clear() + + def _force_enable_logging( + self, level: int | str, logger_obj: logging.Logger + ) -> int: + """Enable the desired logging level if the global level was disabled via ``logging.disabled``. + + Only enables logging levels greater than or equal to the requested ``level``. + + Does nothing if the desired ``level`` wasn't disabled. + + :param level: + The logger level caplog should capture. + All logging is enabled if a non-standard logging level string is supplied. + Valid level strings are in :data:`logging._nameToLevel`. + :param logger_obj: The logger object to check. + + :return: The original disabled logging level. + """ + original_disable_level: int = logger_obj.manager.disable + + if isinstance(level, str): + # Try to translate the level string to an int for `logging.disable()` + level = logging.getLevelName(level) + + if not isinstance(level, int): + # The level provided was not valid, so just un-disable all logging. + logging.disable(logging.NOTSET) + elif not logger_obj.isEnabledFor(level): + # Each level is `10` away from other levels. + # https://docs.python.org/3/library/logging.html#logging-levels + disable_level = max(level - 10, logging.NOTSET) + logging.disable(disable_level) + + return original_disable_level + + def set_level(self, level: int | str, logger: str | None = None) -> None: + """Set the threshold level of a logger for the duration of a test. + + Logging messages which are less severe than this level will not be captured. + + .. versionchanged:: 3.4 + The levels of the loggers changed by this function will be + restored to their initial values at the end of the test. + + Will enable the requested logging level if it was disabled via :func:`logging.disable`. + + :param level: The level. + :param logger: The logger to update. If not given, the root logger. + """ + logger_obj = logging.getLogger(logger) + # Save the original log-level to restore it during teardown. + self._initial_logger_levels.setdefault(logger, logger_obj.level) + logger_obj.setLevel(level) + if self._initial_handler_level is None: + self._initial_handler_level = self.handler.level + self.handler.setLevel(level) + initial_disabled_logging_level = self._force_enable_logging(level, logger_obj) + if self._initial_disabled_logging_level is None: + self._initial_disabled_logging_level = initial_disabled_logging_level + + @contextmanager + def at_level(self, level: int | str, logger: str | None = None) -> Generator[None]: + """Context manager that sets the level for capturing of logs. After + the end of the 'with' statement the level is restored to its original + value. + + Will enable the requested logging level if it was disabled via :func:`logging.disable`. + + :param level: The level. + :param logger: The logger to update. If not given, the root logger. + """ + logger_obj = logging.getLogger(logger) + orig_level = logger_obj.level + logger_obj.setLevel(level) + handler_orig_level = self.handler.level + self.handler.setLevel(level) + original_disable_level = self._force_enable_logging(level, logger_obj) + try: + yield + finally: + logger_obj.setLevel(orig_level) + self.handler.setLevel(handler_orig_level) + logging.disable(original_disable_level) + + @contextmanager + def filtering(self, filter_: logging.Filter) -> Generator[None]: + """Context manager that temporarily adds the given filter to the caplog's + :meth:`handler` for the 'with' statement block, and removes that filter at the + end of the block. + + :param filter_: A custom :class:`logging.Filter` object. + + .. versionadded:: 7.5 + """ + self.handler.addFilter(filter_) + try: + yield + finally: + self.handler.removeFilter(filter_) + + +@fixture +def caplog(request: FixtureRequest) -> Generator[LogCaptureFixture]: + """Access and control log capturing. + + Captured logs are available through the following properties/methods:: + + * caplog.messages -> list of format-interpolated log messages + * caplog.text -> string containing formatted log output + * caplog.records -> list of logging.LogRecord instances + * caplog.record_tuples -> list of (logger_name, level, message) tuples + * caplog.clear() -> clear captured records and formatted log output string + """ + result = LogCaptureFixture(request.node, _ispytest=True) + yield result + result._finalize() + + +def get_log_level_for_setting(config: Config, *setting_names: str) -> int | None: + for setting_name in setting_names: + log_level = config.getoption(setting_name) + if log_level is None: + log_level = config.getini(setting_name) + if log_level: + break + else: + return None + + if isinstance(log_level, str): + log_level = log_level.upper() + try: + return int(getattr(logging, log_level, log_level)) + except ValueError as e: + # Python logging does not recognise this as a logging level + raise UsageError( + f"'{log_level}' is not recognized as a logging level name for " + f"'{setting_name}'. Please consider passing the " + "logging level num instead." + ) from e + + +# run after terminalreporter/capturemanager are configured +@hookimpl(trylast=True) +def pytest_configure(config: Config) -> None: + config.pluginmanager.register(LoggingPlugin(config), "logging-plugin") + + +class LoggingPlugin: + """Attaches to the logging module and captures log messages for each test.""" + + def __init__(self, config: Config) -> None: + """Create a new plugin to capture log messages. + + The formatter can be safely shared across all handlers so + create a single one for the entire test session here. + """ + self._config = config + + # Report logging. + self.formatter = self._create_formatter( + get_option_ini(config, "log_format"), + get_option_ini(config, "log_date_format"), + get_option_ini(config, "log_auto_indent"), + ) + self.log_level = get_log_level_for_setting(config, "log_level") + self.caplog_handler = LogCaptureHandler() + self.caplog_handler.setFormatter(self.formatter) + self.report_handler = LogCaptureHandler() + self.report_handler.setFormatter(self.formatter) + + # File logging. + self.log_file_level = get_log_level_for_setting( + config, "log_file_level", "log_level" + ) + log_file = get_option_ini(config, "log_file") or os.devnull + if log_file != os.devnull: + directory = os.path.dirname(os.path.abspath(log_file)) + if not os.path.isdir(directory): + os.makedirs(directory) + + self.log_file_mode = get_option_ini(config, "log_file_mode") or "w" + self.log_file_handler = _FileHandler( + log_file, mode=self.log_file_mode, encoding="UTF-8" + ) + log_file_format = get_option_ini(config, "log_file_format", "log_format") + log_file_date_format = get_option_ini( + config, "log_file_date_format", "log_date_format" + ) + + log_file_formatter = DatetimeFormatter( + log_file_format, datefmt=log_file_date_format + ) + self.log_file_handler.setFormatter(log_file_formatter) + + # CLI/live logging. + self.log_cli_level = get_log_level_for_setting( + config, "log_cli_level", "log_level" + ) + if self._log_cli_enabled(): + terminal_reporter = config.pluginmanager.get_plugin("terminalreporter") + # Guaranteed by `_log_cli_enabled()`. + assert terminal_reporter is not None + capture_manager = config.pluginmanager.get_plugin("capturemanager") + # if capturemanager plugin is disabled, live logging still works. + self.log_cli_handler: ( + _LiveLoggingStreamHandler | _LiveLoggingNullHandler + ) = _LiveLoggingStreamHandler(terminal_reporter, capture_manager) + else: + self.log_cli_handler = _LiveLoggingNullHandler() + log_cli_formatter = self._create_formatter( + get_option_ini(config, "log_cli_format", "log_format"), + get_option_ini(config, "log_cli_date_format", "log_date_format"), + get_option_ini(config, "log_auto_indent"), + ) + self.log_cli_handler.setFormatter(log_cli_formatter) + self._disable_loggers(loggers_to_disable=config.option.logger_disable) + + def _disable_loggers(self, loggers_to_disable: list[str]) -> None: + if not loggers_to_disable: + return + + for name in loggers_to_disable: + logger = logging.getLogger(name) + logger.disabled = True + + def _create_formatter(self, log_format, log_date_format, auto_indent): + # Color option doesn't exist if terminal plugin is disabled. + color = getattr(self._config.option, "color", "no") + if color != "no" and ColoredLevelFormatter.LEVELNAME_FMT_REGEX.search( + log_format + ): + formatter: logging.Formatter = ColoredLevelFormatter( + create_terminal_writer(self._config), log_format, log_date_format + ) + else: + formatter = DatetimeFormatter(log_format, log_date_format) + + formatter._style = PercentStyleMultiline( + formatter._style._fmt, auto_indent=auto_indent + ) + + return formatter + + def set_log_path(self, fname: str) -> None: + """Set the filename parameter for Logging.FileHandler(). + + Creates parent directory if it does not exist. + + .. warning:: + This is an experimental API. + """ + fpath = Path(fname) + + if not fpath.is_absolute(): + fpath = self._config.rootpath / fpath + + if not fpath.parent.exists(): + fpath.parent.mkdir(exist_ok=True, parents=True) + + # https://github.com/python/mypy/issues/11193 + stream: io.TextIOWrapper = fpath.open(mode=self.log_file_mode, encoding="UTF-8") # type: ignore[assignment] + old_stream = self.log_file_handler.setStream(stream) + if old_stream: + old_stream.close() + + def _log_cli_enabled(self) -> bool: + """Return whether live logging is enabled.""" + enabled = self._config.getoption( + "--log-cli-level" + ) is not None or self._config.getini("log_cli") + if not enabled: + return False + + terminal_reporter = self._config.pluginmanager.get_plugin("terminalreporter") + if terminal_reporter is None: + # terminal reporter is disabled e.g. by pytest-xdist. + return False + + return True + + @hookimpl(wrapper=True, tryfirst=True) + def pytest_sessionstart(self) -> Generator[None]: + self.log_cli_handler.set_when("sessionstart") + + with catching_logs(self.log_cli_handler, level=self.log_cli_level): + with catching_logs(self.log_file_handler, level=self.log_file_level): + return (yield) + + @hookimpl(wrapper=True, tryfirst=True) + def pytest_collection(self) -> Generator[None]: + self.log_cli_handler.set_when("collection") + + with catching_logs(self.log_cli_handler, level=self.log_cli_level): + with catching_logs(self.log_file_handler, level=self.log_file_level): + return (yield) + + @hookimpl(wrapper=True) + def pytest_runtestloop(self, session: Session) -> Generator[None, object, object]: + if session.config.option.collectonly: + return (yield) + + if self._log_cli_enabled() and self._config.get_verbosity() < 1: + # The verbose flag is needed to avoid messy test progress output. + self._config.option.verbose = 1 + + with catching_logs(self.log_cli_handler, level=self.log_cli_level): + with catching_logs(self.log_file_handler, level=self.log_file_level): + return (yield) # Run all the tests. + + @hookimpl + def pytest_runtest_logstart(self) -> None: + self.log_cli_handler.reset() + self.log_cli_handler.set_when("start") + + @hookimpl + def pytest_runtest_logreport(self) -> None: + self.log_cli_handler.set_when("logreport") + + @contextmanager + def _runtest_for(self, item: nodes.Item, when: str) -> Generator[None]: + """Implement the internals of the pytest_runtest_xxx() hooks.""" + with ( + catching_logs( + self.caplog_handler, + level=self.log_level, + ) as caplog_handler, + catching_logs( + self.report_handler, + level=self.log_level, + ) as report_handler, + ): + caplog_handler.reset() + report_handler.reset() + item.stash[caplog_records_key][when] = caplog_handler.records + item.stash[caplog_handler_key] = caplog_handler + + try: + yield + finally: + log = report_handler.stream.getvalue().strip() + item.add_report_section(when, "log", log) + + @hookimpl(wrapper=True) + def pytest_runtest_setup(self, item: nodes.Item) -> Generator[None]: + self.log_cli_handler.set_when("setup") + + empty: dict[str, list[logging.LogRecord]] = {} + item.stash[caplog_records_key] = empty + with self._runtest_for(item, "setup"): + yield + + @hookimpl(wrapper=True) + def pytest_runtest_call(self, item: nodes.Item) -> Generator[None]: + self.log_cli_handler.set_when("call") + + with self._runtest_for(item, "call"): + yield + + @hookimpl(wrapper=True) + def pytest_runtest_teardown(self, item: nodes.Item) -> Generator[None]: + self.log_cli_handler.set_when("teardown") + + try: + with self._runtest_for(item, "teardown"): + yield + finally: + del item.stash[caplog_records_key] + del item.stash[caplog_handler_key] + + @hookimpl + def pytest_runtest_logfinish(self) -> None: + self.log_cli_handler.set_when("finish") + + @hookimpl(wrapper=True, tryfirst=True) + def pytest_sessionfinish(self) -> Generator[None]: + self.log_cli_handler.set_when("sessionfinish") + + with catching_logs(self.log_cli_handler, level=self.log_cli_level): + with catching_logs(self.log_file_handler, level=self.log_file_level): + return (yield) + + @hookimpl + def pytest_unconfigure(self) -> None: + # Close the FileHandler explicitly. + # (logging.shutdown might have lost the weakref?!) + self.log_file_handler.close() + + +class _FileHandler(logging.FileHandler): + """A logging FileHandler with pytest tweaks.""" + + def handleError(self, record: logging.LogRecord) -> None: + # Handled by LogCaptureHandler. + pass + + +class _LiveLoggingStreamHandler(logging_StreamHandler): + """A logging StreamHandler used by the live logging feature: it will + write a newline before the first log message in each test. + + During live logging we must also explicitly disable stdout/stderr + capturing otherwise it will get captured and won't appear in the + terminal. + """ + + # Officially stream needs to be a IO[str], but TerminalReporter + # isn't. So force it. + stream: TerminalReporter = None # type: ignore + + def __init__( + self, + terminal_reporter: TerminalReporter, + capture_manager: CaptureManager | None, + ) -> None: + super().__init__(stream=terminal_reporter) # type: ignore[arg-type] + self.capture_manager = capture_manager + self.reset() + self.set_when(None) + self._test_outcome_written = False + + def reset(self) -> None: + """Reset the handler; should be called before the start of each test.""" + self._first_record_emitted = False + + def set_when(self, when: str | None) -> None: + """Prepare for the given test phase (setup/call/teardown).""" + self._when = when + self._section_name_shown = False + if when == "start": + self._test_outcome_written = False + + def emit(self, record: logging.LogRecord) -> None: + ctx_manager = ( + self.capture_manager.global_and_fixture_disabled() + if self.capture_manager + else nullcontext() + ) + with ctx_manager: + if not self._first_record_emitted: + self.stream.write("\n") + self._first_record_emitted = True + elif self._when in ("teardown", "finish"): + if not self._test_outcome_written: + self._test_outcome_written = True + self.stream.write("\n") + if not self._section_name_shown and self._when: + self.stream.section("live log " + self._when, sep="-", bold=True) + self._section_name_shown = True + super().emit(record) + + def handleError(self, record: logging.LogRecord) -> None: + # Handled by LogCaptureHandler. + pass + + +class _LiveLoggingNullHandler(logging.NullHandler): + """A logging handler used when live logging is disabled.""" + + def reset(self) -> None: + pass + + def set_when(self, when: str) -> None: + pass + + def handleError(self, record: logging.LogRecord) -> None: + # Handled by LogCaptureHandler. + pass diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/main.py b/Backend/venv/lib/python3.12/site-packages/_pytest/main.py new file mode 100644 index 00000000..9bc930df --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/_pytest/main.py @@ -0,0 +1,1203 @@ +"""Core implementation of the testing process: init, session, runtest loop.""" + +from __future__ import annotations + +import argparse +from collections.abc import Callable +from collections.abc import Iterable +from collections.abc import Iterator +from collections.abc import Sequence +from collections.abc import Set as AbstractSet +import dataclasses +import fnmatch +import functools +import importlib +import importlib.util +import os +from pathlib import Path +import sys +from typing import final +from typing import Literal +from typing import overload +from typing import TYPE_CHECKING +import warnings + +import pluggy + +from _pytest import nodes +import _pytest._code +from _pytest.config import Config +from _pytest.config import directory_arg +from _pytest.config import ExitCode +from _pytest.config import hookimpl +from _pytest.config import PytestPluginManager +from _pytest.config import UsageError +from _pytest.config.argparsing import OverrideIniAction +from _pytest.config.argparsing import Parser +from _pytest.config.compat import PathAwareHookProxy +from _pytest.outcomes import exit +from _pytest.pathlib import absolutepath +from _pytest.pathlib import bestrelpath +from _pytest.pathlib import fnmatch_ex +from _pytest.pathlib import safe_exists +from _pytest.pathlib import samefile_nofollow +from _pytest.pathlib import scandir +from _pytest.reports import CollectReport +from _pytest.reports import TestReport +from _pytest.runner import collect_one_node +from _pytest.runner import SetupState +from _pytest.warning_types import PytestWarning + + +if TYPE_CHECKING: + from typing_extensions import Self + + from _pytest.fixtures import FixtureManager + + +def pytest_addoption(parser: Parser) -> None: + group = parser.getgroup("general") + group._addoption( # private to use reserved lower-case short option + "-x", + "--exitfirst", + action="store_const", + dest="maxfail", + const=1, + help="Exit instantly on first error or failed test", + ) + group.addoption( + "--maxfail", + metavar="num", + action="store", + type=int, + dest="maxfail", + default=0, + help="Exit after first num failures or errors", + ) + group.addoption( + "--strict-config", + action=OverrideIniAction, + ini_option="strict_config", + ini_value="true", + help="Enables the strict_config option", + ) + group.addoption( + "--strict-markers", + action=OverrideIniAction, + ini_option="strict_markers", + ini_value="true", + help="Enables the strict_markers option", + ) + group.addoption( + "--strict", + action=OverrideIniAction, + ini_option="strict", + ini_value="true", + help="Enables the strict option", + ) + parser.addini( + "strict_config", + "Any warnings encountered while parsing the `pytest` section of the " + "configuration file raise errors", + type="bool", + # None => fallback to `strict`. + default=None, + ) + parser.addini( + "strict_markers", + "Markers not registered in the `markers` section of the configuration " + "file raise errors", + type="bool", + # None => fallback to `strict`. + default=None, + ) + parser.addini( + "strict", + "Enables all strictness options, currently: " + "strict_config, strict_markers, strict_xfail, strict_parametrization_ids", + type="bool", + default=False, + ) + + group = parser.getgroup("pytest-warnings") + group.addoption( + "-W", + "--pythonwarnings", + action="append", + help="Set which warnings to report, see -W option of Python itself", + ) + parser.addini( + "filterwarnings", + type="linelist", + help="Each line specifies a pattern for " + "warnings.filterwarnings. " + "Processed after -W/--pythonwarnings.", + ) + + group = parser.getgroup("collect", "collection") + group.addoption( + "--collectonly", + "--collect-only", + "--co", + action="store_true", + help="Only collect tests, don't execute them", + ) + group.addoption( + "--pyargs", + action="store_true", + help="Try to interpret all arguments as Python packages", + ) + group.addoption( + "--ignore", + action="append", + metavar="path", + help="Ignore path during collection (multi-allowed)", + ) + group.addoption( + "--ignore-glob", + action="append", + metavar="path", + help="Ignore path pattern during collection (multi-allowed)", + ) + group.addoption( + "--deselect", + action="append", + metavar="nodeid_prefix", + help="Deselect item (via node id prefix) during collection (multi-allowed)", + ) + group.addoption( + "--confcutdir", + dest="confcutdir", + default=None, + metavar="dir", + type=functools.partial(directory_arg, optname="--confcutdir"), + help="Only load conftest.py's relative to specified dir", + ) + group.addoption( + "--noconftest", + action="store_true", + dest="noconftest", + default=False, + help="Don't load any conftest.py files", + ) + group.addoption( + "--keepduplicates", + "--keep-duplicates", + action="store_true", + dest="keepduplicates", + default=False, + help="Keep duplicate tests", + ) + group.addoption( + "--collect-in-virtualenv", + action="store_true", + dest="collect_in_virtualenv", + default=False, + help="Don't ignore tests in a local virtualenv directory", + ) + group.addoption( + "--continue-on-collection-errors", + action="store_true", + default=False, + dest="continue_on_collection_errors", + help="Force test execution even if collection errors occur", + ) + group.addoption( + "--import-mode", + default="prepend", + choices=["prepend", "append", "importlib"], + dest="importmode", + help="Prepend/append to sys.path when importing test modules and conftest " + "files. Default: prepend.", + ) + parser.addini( + "norecursedirs", + "Directory patterns to avoid for recursion", + type="args", + default=[ + "*.egg", + ".*", + "_darcs", + "build", + "CVS", + "dist", + "node_modules", + "venv", + "{arch}", + ], + ) + parser.addini( + "testpaths", + "Directories to search for tests when no files or directories are given on the " + "command line", + type="args", + default=[], + ) + parser.addini( + "collect_imported_tests", + "Whether to collect tests in imported modules outside `testpaths`", + type="bool", + default=True, + ) + parser.addini( + "consider_namespace_packages", + type="bool", + default=False, + help="Consider namespace packages when resolving module names during import", + ) + + group = parser.getgroup("debugconfig", "test session debugging and configuration") + group._addoption( # private to use reserved lower-case short option + "-c", + "--config-file", + metavar="FILE", + type=str, + dest="inifilename", + help="Load configuration from `FILE` instead of trying to locate one of the " + "implicit configuration files.", + ) + group.addoption( + "--rootdir", + action="store", + dest="rootdir", + help="Define root directory for tests. Can be relative path: 'root_dir', './root_dir', " + "'root_dir/another_dir/'; absolute path: '/home/user/root_dir'; path with variables: " + "'$HOME/root_dir'.", + ) + group.addoption( + "--basetemp", + dest="basetemp", + default=None, + type=validate_basetemp, + metavar="dir", + help=( + "Base temporary directory for this test run. " + "(Warning: this directory is removed if it exists.)" + ), + ) + + +def validate_basetemp(path: str) -> str: + # GH 7119 + msg = "basetemp must not be empty, the current working directory or any parent directory of it" + + # empty path + if not path: + raise argparse.ArgumentTypeError(msg) + + def is_ancestor(base: Path, query: Path) -> bool: + """Return whether query is an ancestor of base.""" + if base == query: + return True + return query in base.parents + + # check if path is an ancestor of cwd + if is_ancestor(Path.cwd(), Path(path).absolute()): + raise argparse.ArgumentTypeError(msg) + + # check symlinks for ancestors + if is_ancestor(Path.cwd().resolve(), Path(path).resolve()): + raise argparse.ArgumentTypeError(msg) + + return path + + +def wrap_session( + config: Config, doit: Callable[[Config, Session], int | ExitCode | None] +) -> int | ExitCode: + """Skeleton command line program.""" + session = Session.from_config(config) + session.exitstatus = ExitCode.OK + initstate = 0 + try: + try: + config._do_configure() + initstate = 1 + config.hook.pytest_sessionstart(session=session) + initstate = 2 + session.exitstatus = doit(config, session) or 0 + except UsageError: + session.exitstatus = ExitCode.USAGE_ERROR + raise + except Failed: + session.exitstatus = ExitCode.TESTS_FAILED + except (KeyboardInterrupt, exit.Exception): + excinfo = _pytest._code.ExceptionInfo.from_current() + exitstatus: int | ExitCode = ExitCode.INTERRUPTED + if isinstance(excinfo.value, exit.Exception): + if excinfo.value.returncode is not None: + exitstatus = excinfo.value.returncode + if initstate < 2: + sys.stderr.write(f"{excinfo.typename}: {excinfo.value.msg}\n") + config.hook.pytest_keyboard_interrupt(excinfo=excinfo) + session.exitstatus = exitstatus + except BaseException: + session.exitstatus = ExitCode.INTERNAL_ERROR + excinfo = _pytest._code.ExceptionInfo.from_current() + try: + config.notify_exception(excinfo, config.option) + except exit.Exception as exc: + if exc.returncode is not None: + session.exitstatus = exc.returncode + sys.stderr.write(f"{type(exc).__name__}: {exc}\n") + else: + if isinstance(excinfo.value, SystemExit): + sys.stderr.write("mainloop: caught unexpected SystemExit!\n") + + finally: + # Explicitly break reference cycle. + excinfo = None # type: ignore + os.chdir(session.startpath) + if initstate >= 2: + try: + config.hook.pytest_sessionfinish( + session=session, exitstatus=session.exitstatus + ) + except exit.Exception as exc: + if exc.returncode is not None: + session.exitstatus = exc.returncode + sys.stderr.write(f"{type(exc).__name__}: {exc}\n") + config._ensure_unconfigure() + return session.exitstatus + + +def pytest_cmdline_main(config: Config) -> int | ExitCode: + return wrap_session(config, _main) + + +def _main(config: Config, session: Session) -> int | ExitCode | None: + """Default command line protocol for initialization, session, + running tests and reporting.""" + config.hook.pytest_collection(session=session) + config.hook.pytest_runtestloop(session=session) + + if session.testsfailed: + return ExitCode.TESTS_FAILED + elif session.testscollected == 0: + return ExitCode.NO_TESTS_COLLECTED + return None + + +def pytest_collection(session: Session) -> None: + session.perform_collect() + + +def pytest_runtestloop(session: Session) -> bool: + if session.testsfailed and not session.config.option.continue_on_collection_errors: + raise session.Interrupted( + f"{session.testsfailed} error{'s' if session.testsfailed != 1 else ''} during collection" + ) + + if session.config.option.collectonly: + return True + + for i, item in enumerate(session.items): + nextitem = session.items[i + 1] if i + 1 < len(session.items) else None + item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) + if session.shouldfail: + raise session.Failed(session.shouldfail) + if session.shouldstop: + raise session.Interrupted(session.shouldstop) + return True + + +def _in_venv(path: Path) -> bool: + """Attempt to detect if ``path`` is the root of a Virtual Environment by + checking for the existence of the pyvenv.cfg file. + + [https://peps.python.org/pep-0405/] + + For regression protection we also check for conda environments that do not include pyenv.cfg yet -- + https://github.com/conda/conda/issues/13337 is the conda issue tracking adding pyenv.cfg. + + Checking for the `conda-meta/history` file per https://github.com/pytest-dev/pytest/issues/12652#issuecomment-2246336902. + + """ + try: + return ( + path.joinpath("pyvenv.cfg").is_file() + or path.joinpath("conda-meta", "history").is_file() + ) + except OSError: + return False + + +def pytest_ignore_collect(collection_path: Path, config: Config) -> bool | None: + if collection_path.name == "__pycache__": + return True + + ignore_paths = config._getconftest_pathlist( + "collect_ignore", path=collection_path.parent + ) + ignore_paths = ignore_paths or [] + excludeopt = config.getoption("ignore") + if excludeopt: + ignore_paths.extend(absolutepath(x) for x in excludeopt) + + if collection_path in ignore_paths: + return True + + ignore_globs = config._getconftest_pathlist( + "collect_ignore_glob", path=collection_path.parent + ) + ignore_globs = ignore_globs or [] + excludeglobopt = config.getoption("ignore_glob") + if excludeglobopt: + ignore_globs.extend(absolutepath(x) for x in excludeglobopt) + + if any(fnmatch.fnmatch(str(collection_path), str(glob)) for glob in ignore_globs): + return True + + allow_in_venv = config.getoption("collect_in_virtualenv") + if not allow_in_venv and _in_venv(collection_path): + return True + + if collection_path.is_dir(): + norecursepatterns = config.getini("norecursedirs") + if any(fnmatch_ex(pat, collection_path) for pat in norecursepatterns): + return True + + return None + + +def pytest_collect_directory( + path: Path, parent: nodes.Collector +) -> nodes.Collector | None: + return Dir.from_parent(parent, path=path) + + +def pytest_collection_modifyitems(items: list[nodes.Item], config: Config) -> None: + deselect_prefixes = tuple(config.getoption("deselect") or []) + if not deselect_prefixes: + return + + remaining = [] + deselected = [] + for colitem in items: + if colitem.nodeid.startswith(deselect_prefixes): + deselected.append(colitem) + else: + remaining.append(colitem) + + if deselected: + config.hook.pytest_deselected(items=deselected) + items[:] = remaining + + +class FSHookProxy: + def __init__( + self, + pm: PytestPluginManager, + remove_mods: AbstractSet[object], + ) -> None: + self.pm = pm + self.remove_mods = remove_mods + + def __getattr__(self, name: str) -> pluggy.HookCaller: + x = self.pm.subset_hook_caller(name, remove_plugins=self.remove_mods) + self.__dict__[name] = x + return x + + +class Interrupted(KeyboardInterrupt): + """Signals that the test run was interrupted.""" + + __module__ = "builtins" # For py3. + + +class Failed(Exception): + """Signals a stop as failed test run.""" + + +@dataclasses.dataclass +class _bestrelpath_cache(dict[Path, str]): + __slots__ = ("path",) + + path: Path + + def __missing__(self, path: Path) -> str: + r = bestrelpath(self.path, path) + self[path] = r + return r + + +@final +class Dir(nodes.Directory): + """Collector of files in a file system directory. + + .. versionadded:: 8.0 + + .. note:: + + Python directories with an `__init__.py` file are instead collected by + :class:`~pytest.Package` by default. Both are :class:`~pytest.Directory` + collectors. + """ + + @classmethod + def from_parent( # type: ignore[override] + cls, + parent: nodes.Collector, + *, + path: Path, + ) -> Self: + """The public constructor. + + :param parent: The parent collector of this Dir. + :param path: The directory's path. + :type path: pathlib.Path + """ + return super().from_parent(parent=parent, path=path) + + def collect(self) -> Iterable[nodes.Item | nodes.Collector]: + config = self.config + col: nodes.Collector | None + cols: Sequence[nodes.Collector] + ihook = self.ihook + for direntry in scandir(self.path): + if direntry.is_dir(): + path = Path(direntry.path) + if not self.session.isinitpath(path, with_parents=True): + if ihook.pytest_ignore_collect(collection_path=path, config=config): + continue + col = ihook.pytest_collect_directory(path=path, parent=self) + if col is not None: + yield col + + elif direntry.is_file(): + path = Path(direntry.path) + if not self.session.isinitpath(path): + if ihook.pytest_ignore_collect(collection_path=path, config=config): + continue + cols = ihook.pytest_collect_file(file_path=path, parent=self) + yield from cols + + +@final +class Session(nodes.Collector): + """The root of the collection tree. + + ``Session`` collects the initial paths given as arguments to pytest. + """ + + Interrupted = Interrupted + Failed = Failed + # Set on the session by runner.pytest_sessionstart. + _setupstate: SetupState + # Set on the session by fixtures.pytest_sessionstart. + _fixturemanager: FixtureManager + exitstatus: int | ExitCode + + def __init__(self, config: Config) -> None: + super().__init__( + name="", + path=config.rootpath, + fspath=None, + parent=None, + config=config, + session=self, + nodeid="", + ) + self.testsfailed = 0 + self.testscollected = 0 + self._shouldstop: bool | str = False + self._shouldfail: bool | str = False + self.trace = config.trace.root.get("collection") + self._initialpaths: frozenset[Path] = frozenset() + self._initialpaths_with_parents: frozenset[Path] = frozenset() + self._notfound: list[tuple[str, Sequence[nodes.Collector]]] = [] + self._initial_parts: list[CollectionArgument] = [] + self._collection_cache: dict[nodes.Collector, CollectReport] = {} + self.items: list[nodes.Item] = [] + + self._bestrelpathcache: dict[Path, str] = _bestrelpath_cache(config.rootpath) + + self.config.pluginmanager.register(self, name="session") + + @classmethod + def from_config(cls, config: Config) -> Session: + session: Session = cls._create(config=config) + return session + + def __repr__(self) -> str: + return ( + f"<{self.__class__.__name__} {self.name} " + f"exitstatus=%r " + f"testsfailed={self.testsfailed} " + f"testscollected={self.testscollected}>" + ) % getattr(self, "exitstatus", "") + + @property + def shouldstop(self) -> bool | str: + return self._shouldstop + + @shouldstop.setter + def shouldstop(self, value: bool | str) -> None: + # The runner checks shouldfail and assumes that if it is set we are + # definitely stopping, so prevent unsetting it. + if value is False and self._shouldstop: + warnings.warn( + PytestWarning( + "session.shouldstop cannot be unset after it has been set; ignoring." + ), + stacklevel=2, + ) + return + self._shouldstop = value + + @property + def shouldfail(self) -> bool | str: + return self._shouldfail + + @shouldfail.setter + def shouldfail(self, value: bool | str) -> None: + # The runner checks shouldfail and assumes that if it is set we are + # definitely stopping, so prevent unsetting it. + if value is False and self._shouldfail: + warnings.warn( + PytestWarning( + "session.shouldfail cannot be unset after it has been set; ignoring." + ), + stacklevel=2, + ) + return + self._shouldfail = value + + @property + def startpath(self) -> Path: + """The path from which pytest was invoked. + + .. versionadded:: 7.0.0 + """ + return self.config.invocation_params.dir + + def _node_location_to_relpath(self, node_path: Path) -> str: + # bestrelpath is a quite slow function. + return self._bestrelpathcache[node_path] + + @hookimpl(tryfirst=True) + def pytest_collectstart(self) -> None: + if self.shouldfail: + raise self.Failed(self.shouldfail) + if self.shouldstop: + raise self.Interrupted(self.shouldstop) + + @hookimpl(tryfirst=True) + def pytest_runtest_logreport(self, report: TestReport | CollectReport) -> None: + if report.failed and not hasattr(report, "wasxfail"): + self.testsfailed += 1 + maxfail = self.config.getvalue("maxfail") + if maxfail and self.testsfailed >= maxfail: + self.shouldfail = f"stopping after {self.testsfailed} failures" + + pytest_collectreport = pytest_runtest_logreport + + def isinitpath( + self, + path: str | os.PathLike[str], + *, + with_parents: bool = False, + ) -> bool: + """Is path an initial path? + + An initial path is a path explicitly given to pytest on the command + line. + + :param with_parents: + If set, also return True if the path is a parent of an initial path. + + .. versionchanged:: 8.0 + Added the ``with_parents`` parameter. + """ + # Optimization: Path(Path(...)) is much slower than isinstance. + path_ = path if isinstance(path, Path) else Path(path) + if with_parents: + return path_ in self._initialpaths_with_parents + else: + return path_ in self._initialpaths + + def gethookproxy(self, fspath: os.PathLike[str]) -> pluggy.HookRelay: + # Optimization: Path(Path(...)) is much slower than isinstance. + path = fspath if isinstance(fspath, Path) else Path(fspath) + pm = self.config.pluginmanager + # Check if we have the common case of running + # hooks with all conftest.py files. + my_conftestmodules = pm._getconftestmodules(path) + remove_mods = pm._conftest_plugins.difference(my_conftestmodules) + proxy: pluggy.HookRelay + if remove_mods: + # One or more conftests are not in use at this path. + proxy = PathAwareHookProxy(FSHookProxy(pm, remove_mods)) # type: ignore[arg-type,assignment] + else: + # All plugins are active for this fspath. + proxy = self.config.hook + return proxy + + def _collect_path( + self, + path: Path, + path_cache: dict[Path, Sequence[nodes.Collector]], + ) -> Sequence[nodes.Collector]: + """Create a Collector for the given path. + + `path_cache` makes it so the same Collectors are returned for the same + path. + """ + if path in path_cache: + return path_cache[path] + + if path.is_dir(): + ihook = self.gethookproxy(path.parent) + col: nodes.Collector | None = ihook.pytest_collect_directory( + path=path, parent=self + ) + cols: Sequence[nodes.Collector] = (col,) if col is not None else () + + elif path.is_file(): + ihook = self.gethookproxy(path) + cols = ihook.pytest_collect_file(file_path=path, parent=self) + + else: + # Broken symlink or invalid/missing file. + cols = () + + path_cache[path] = cols + return cols + + @overload + def perform_collect( + self, args: Sequence[str] | None = ..., genitems: Literal[True] = ... + ) -> Sequence[nodes.Item]: ... + + @overload + def perform_collect( + self, args: Sequence[str] | None = ..., genitems: bool = ... + ) -> Sequence[nodes.Item | nodes.Collector]: ... + + def perform_collect( + self, args: Sequence[str] | None = None, genitems: bool = True + ) -> Sequence[nodes.Item | nodes.Collector]: + """Perform the collection phase for this session. + + This is called by the default :hook:`pytest_collection` hook + implementation; see the documentation of this hook for more details. + For testing purposes, it may also be called directly on a fresh + ``Session``. + + This function normally recursively expands any collectors collected + from the session to their items, and only items are returned. For + testing purposes, this may be suppressed by passing ``genitems=False``, + in which case the return value contains these collectors unexpanded, + and ``session.items`` is empty. + """ + if args is None: + args = self.config.args + + self.trace("perform_collect", self, args) + self.trace.root.indent += 1 + + hook = self.config.hook + + self._notfound = [] + self._initial_parts = [] + self._collection_cache = {} + self.items = [] + items: Sequence[nodes.Item | nodes.Collector] = self.items + consider_namespace_packages: bool = self.config.getini( + "consider_namespace_packages" + ) + try: + initialpaths: list[Path] = [] + initialpaths_with_parents: list[Path] = [] + + collection_args = [ + resolve_collection_argument( + self.config.invocation_params.dir, + arg, + i, + as_pypath=self.config.option.pyargs, + consider_namespace_packages=consider_namespace_packages, + ) + for i, arg in enumerate(args) + ] + + if not self.config.getoption("keepduplicates"): + # Normalize the collection arguments -- remove duplicates and overlaps. + self._initial_parts = normalize_collection_arguments(collection_args) + else: + self._initial_parts = collection_args + + for collection_argument in self._initial_parts: + initialpaths.append(collection_argument.path) + initialpaths_with_parents.append(collection_argument.path) + initialpaths_with_parents.extend(collection_argument.path.parents) + self._initialpaths = frozenset(initialpaths) + self._initialpaths_with_parents = frozenset(initialpaths_with_parents) + + rep = collect_one_node(self) + self.ihook.pytest_collectreport(report=rep) + self.trace.root.indent -= 1 + if self._notfound: + errors = [] + for arg, collectors in self._notfound: + if collectors: + errors.append( + f"not found: {arg}\n(no match in any of {collectors!r})" + ) + else: + errors.append(f"found no collectors for {arg}") + + raise UsageError(*errors) + + if not genitems: + items = rep.result + else: + if rep.passed: + for node in rep.result: + self.items.extend(self.genitems(node)) + + self.config.pluginmanager.check_pending() + hook.pytest_collection_modifyitems( + session=self, config=self.config, items=items + ) + finally: + self._notfound = [] + self._initial_parts = [] + self._collection_cache = {} + hook.pytest_collection_finish(session=self) + + if genitems: + self.testscollected = len(items) + + return items + + def _collect_one_node( + self, + node: nodes.Collector, + handle_dupes: bool = True, + ) -> tuple[CollectReport, bool]: + if node in self._collection_cache and handle_dupes: + rep = self._collection_cache[node] + return rep, True + else: + rep = collect_one_node(node) + self._collection_cache[node] = rep + return rep, False + + def collect(self) -> Iterator[nodes.Item | nodes.Collector]: + # This is a cache for the root directories of the initial paths. + # We can't use collection_cache for Session because of its special + # role as the bootstrapping collector. + path_cache: dict[Path, Sequence[nodes.Collector]] = {} + + pm = self.config.pluginmanager + + for collection_argument in self._initial_parts: + self.trace("processing argument", collection_argument) + self.trace.root.indent += 1 + + argpath = collection_argument.path + names = collection_argument.parts + parametrization = collection_argument.parametrization + module_name = collection_argument.module_name + + # resolve_collection_argument() ensures this. + if argpath.is_dir(): + assert not names, f"invalid arg {(argpath, names)!r}" + + paths = [argpath] + # Add relevant parents of the path, from the root, e.g. + # /a/b/c.py -> [/, /a, /a/b, /a/b/c.py] + if module_name is None: + # Paths outside of the confcutdir should not be considered. + for path in argpath.parents: + if not pm._is_in_confcutdir(path): + break + paths.insert(0, path) + else: + # For --pyargs arguments, only consider paths matching the module + # name. Paths beyond the package hierarchy are not included. + module_name_parts = module_name.split(".") + for i, path in enumerate(argpath.parents, 2): + if i > len(module_name_parts) or path.stem != module_name_parts[-i]: + break + paths.insert(0, path) + + # Start going over the parts from the root, collecting each level + # and discarding all nodes which don't match the level's part. + any_matched_in_initial_part = False + notfound_collectors = [] + work: list[tuple[nodes.Collector | nodes.Item, list[Path | str]]] = [ + (self, [*paths, *names]) + ] + while work: + matchnode, matchparts = work.pop() + + # Pop'd all of the parts, this is a match. + if not matchparts: + yield matchnode + any_matched_in_initial_part = True + continue + + # Should have been matched by now, discard. + if not isinstance(matchnode, nodes.Collector): + continue + + # Collect this level of matching. + # Collecting Session (self) is done directly to avoid endless + # recursion to this function. + subnodes: Sequence[nodes.Collector | nodes.Item] + if isinstance(matchnode, Session): + assert isinstance(matchparts[0], Path) + subnodes = matchnode._collect_path(matchparts[0], path_cache) + else: + # For backward compat, files given directly multiple + # times on the command line should not be deduplicated. + handle_dupes = not ( + len(matchparts) == 1 + and isinstance(matchparts[0], Path) + and matchparts[0].is_file() + ) + rep, duplicate = self._collect_one_node(matchnode, handle_dupes) + if not duplicate and not rep.passed: + # Report collection failures here to avoid failing to + # run some test specified in the command line because + # the module could not be imported (#134). + matchnode.ihook.pytest_collectreport(report=rep) + if not rep.passed: + continue + subnodes = rep.result + + # Prune this level. + any_matched_in_collector = False + for node in reversed(subnodes): + # Path part e.g. `/a/b/` in `/a/b/test_file.py::TestIt::test_it`. + if isinstance(matchparts[0], Path): + is_match = node.path == matchparts[0] + if sys.platform == "win32" and not is_match: + # In case the file paths do not match, fallback to samefile() to + # account for short-paths on Windows (#11895). But use a version + # which doesn't resolve symlinks, otherwise we might match the + # same file more than once (#12039). + is_match = samefile_nofollow(node.path, matchparts[0]) + + # Name part e.g. `TestIt` in `/a/b/test_file.py::TestIt::test_it`. + else: + if len(matchparts) == 1: + # This the last part, one parametrization goes. + if parametrization is not None: + # A parametrized arg must match exactly. + is_match = node.name == matchparts[0] + parametrization + else: + # A non-parameterized arg matches all parametrizations (if any). + # TODO: Remove the hacky split once the collection structure + # contains parametrization. + is_match = node.name.split("[")[0] == matchparts[0] + else: + is_match = node.name == matchparts[0] + if is_match: + work.append((node, matchparts[1:])) + any_matched_in_collector = True + + if not any_matched_in_collector: + notfound_collectors.append(matchnode) + + if not any_matched_in_initial_part: + report_arg = "::".join((str(argpath), *names)) + self._notfound.append((report_arg, notfound_collectors)) + + self.trace.root.indent -= 1 + + def genitems(self, node: nodes.Item | nodes.Collector) -> Iterator[nodes.Item]: + self.trace("genitems", node) + if isinstance(node, nodes.Item): + node.ihook.pytest_itemcollected(item=node) + yield node + else: + assert isinstance(node, nodes.Collector) + # For backward compat, dedup only applies to files. + handle_dupes = not isinstance(node, nodes.File) + rep, duplicate = self._collect_one_node(node, handle_dupes) + if rep.passed: + for subnode in rep.result: + yield from self.genitems(subnode) + if not duplicate: + node.ihook.pytest_collectreport(report=rep) + + +def search_pypath( + module_name: str, *, consider_namespace_packages: bool = False +) -> str | None: + """Search sys.path for the given a dotted module name, and return its file + system path if found.""" + try: + spec = importlib.util.find_spec(module_name) + # AttributeError: looks like package module, but actually filename + # ImportError: module does not exist + # ValueError: not a module name + except (AttributeError, ImportError, ValueError): + return None + + if spec is None: + return None + + if ( + spec.submodule_search_locations is None + or len(spec.submodule_search_locations) == 0 + ): + # Must be a simple module. + return spec.origin + + if consider_namespace_packages: + # If submodule_search_locations is set, it's a package (regular or namespace). + # Typically there is a single entry, but documentation claims it can be empty too + # (e.g. if the package has no physical location). + return spec.submodule_search_locations[0] + + if spec.origin is None: + # This is only the case for namespace packages + return None + + return os.path.dirname(spec.origin) + + +@dataclasses.dataclass(frozen=True) +class CollectionArgument: + """A resolved collection argument.""" + + path: Path + parts: Sequence[str] + parametrization: str | None + module_name: str | None + original_index: int + + +def resolve_collection_argument( + invocation_path: Path, + arg: str, + arg_index: int, + *, + as_pypath: bool = False, + consider_namespace_packages: bool = False, +) -> CollectionArgument: + """Parse path arguments optionally containing selection parts and return (fspath, names). + + Command-line arguments can point to files and/or directories, and optionally contain + parts for specific tests selection, for example: + + "pkg/tests/test_foo.py::TestClass::test_foo" + + This function ensures the path exists, and returns a resolved `CollectionArgument`: + + CollectionArgument( + path=Path("/full/path/to/pkg/tests/test_foo.py"), + parts=["TestClass", "test_foo"], + module_name=None, + ) + + When as_pypath is True, expects that the command-line argument actually contains + module paths instead of file-system paths: + + "pkg.tests.test_foo::TestClass::test_foo[a,b]" + + In which case we search sys.path for a matching module, and then return the *path* to the + found module, which may look like this: + + CollectionArgument( + path=Path("/home/u/myvenv/lib/site-packages/pkg/tests/test_foo.py"), + parts=["TestClass", "test_foo"], + parametrization="[a,b]", + module_name="pkg.tests.test_foo", + ) + + If the path doesn't exist, raise UsageError. + If the path is a directory and selection parts are present, raise UsageError. + """ + base, squacket, rest = arg.partition("[") + strpath, *parts = base.split("::") + if squacket and not parts: + raise UsageError(f"path cannot contain [] parametrization: {arg}") + parametrization = f"{squacket}{rest}" if squacket else None + module_name = None + if as_pypath: + pyarg_strpath = search_pypath( + strpath, consider_namespace_packages=consider_namespace_packages + ) + if pyarg_strpath is not None: + module_name = strpath + strpath = pyarg_strpath + fspath = invocation_path / strpath + fspath = absolutepath(fspath) + if not safe_exists(fspath): + msg = ( + "module or package not found: {arg} (missing __init__.py?)" + if as_pypath + else "file or directory not found: {arg}" + ) + raise UsageError(msg.format(arg=arg)) + if parts and fspath.is_dir(): + msg = ( + "package argument cannot contain :: selection parts: {arg}" + if as_pypath + else "directory argument cannot contain :: selection parts: {arg}" + ) + raise UsageError(msg.format(arg=arg)) + return CollectionArgument( + path=fspath, + parts=parts, + parametrization=parametrization, + module_name=module_name, + original_index=arg_index, + ) + + +def is_collection_argument_subsumed_by( + arg: CollectionArgument, by: CollectionArgument +) -> bool: + """Check if `arg` is subsumed (contained) by `by`.""" + # First check path subsumption. + if by.path != arg.path: + # `by` subsumes `arg` if `by` is a parent directory of `arg` and has no + # parts (collects everything in that directory). + if not by.parts: + return arg.path.is_relative_to(by.path) + return False + # Paths are equal, check parts. + # For example: ("TestClass",) is a prefix of ("TestClass", "test_method"). + if len(by.parts) > len(arg.parts) or arg.parts[: len(by.parts)] != by.parts: + return False + # Paths and parts are equal, check parametrization. + # A `by` without parametrization (None) matches everything, e.g. + # `pytest x.py::test_it` matches `x.py::test_it[0]`. Otherwise must be + # exactly equal. + if by.parametrization is not None and by.parametrization != arg.parametrization: + return False + return True + + +def normalize_collection_arguments( + collection_args: Sequence[CollectionArgument], +) -> list[CollectionArgument]: + """Normalize collection arguments to eliminate overlapping paths and parts. + + Detects when collection arguments overlap in either paths or parts and only + keeps the shorter prefix, or the earliest argument if duplicate, preserving + order. The result is prefix-free. + """ + # A quadratic algorithm is not acceptable since large inputs are possible. + # So this uses an O(n*log(n)) algorithm which takes advantage of the + # property that after sorting, a collection argument will immediately + # precede collection arguments it subsumes. An O(n) algorithm is not worth + # it. + collection_args_sorted = sorted( + collection_args, + key=lambda arg: (arg.path, arg.parts, arg.parametrization or ""), + ) + normalized: list[CollectionArgument] = [] + last_kept = None + for arg in collection_args_sorted: + if last_kept is None or not is_collection_argument_subsumed_by(arg, last_kept): + normalized.append(arg) + last_kept = arg + normalized.sort(key=lambda arg: arg.original_index) + return normalized diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/mark/__init__.py b/Backend/venv/lib/python3.12/site-packages/_pytest/mark/__init__.py new file mode 100644 index 00000000..841d7811 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/_pytest/mark/__init__.py @@ -0,0 +1,301 @@ +"""Generic mechanism for marking and selecting python functions.""" + +from __future__ import annotations + +import collections +from collections.abc import Collection +from collections.abc import Iterable +from collections.abc import Set as AbstractSet +import dataclasses +from typing import TYPE_CHECKING + +from .expression import Expression +from .structures import _HiddenParam +from .structures import EMPTY_PARAMETERSET_OPTION +from .structures import get_empty_parameterset_mark +from .structures import HIDDEN_PARAM +from .structures import Mark +from .structures import MARK_GEN +from .structures import MarkDecorator +from .structures import MarkGenerator +from .structures import ParameterSet +from _pytest.config import Config +from _pytest.config import ExitCode +from _pytest.config import hookimpl +from _pytest.config import UsageError +from _pytest.config.argparsing import NOT_SET +from _pytest.config.argparsing import Parser +from _pytest.stash import StashKey + + +if TYPE_CHECKING: + from _pytest.nodes import Item + + +__all__ = [ + "HIDDEN_PARAM", + "MARK_GEN", + "Mark", + "MarkDecorator", + "MarkGenerator", + "ParameterSet", + "get_empty_parameterset_mark", +] + + +old_mark_config_key = StashKey[Config | None]() + + +def param( + *values: object, + marks: MarkDecorator | Collection[MarkDecorator | Mark] = (), + id: str | _HiddenParam | None = None, +) -> ParameterSet: + """Specify a parameter in `pytest.mark.parametrize`_ calls or + :ref:`parametrized fixtures `. + + .. code-block:: python + + @pytest.mark.parametrize( + "test_input,expected", + [ + ("3+5", 8), + pytest.param("6*9", 42, marks=pytest.mark.xfail), + ], + ) + def test_eval(test_input, expected): + assert eval(test_input) == expected + + :param values: Variable args of the values of the parameter set, in order. + + :param marks: + A single mark or a list of marks to be applied to this parameter set. + + :ref:`pytest.mark.usefixtures ` cannot be added via this parameter. + + :type id: str | Literal[pytest.HIDDEN_PARAM] | None + :param id: + The id to attribute to this parameter set. + + .. versionadded:: 8.4 + :ref:`hidden-param` means to hide the parameter set + from the test name. Can only be used at most 1 time, as + test names need to be unique. + """ + return ParameterSet.param(*values, marks=marks, id=id) + + +def pytest_addoption(parser: Parser) -> None: + group = parser.getgroup("general") + group._addoption( # private to use reserved lower-case short option + "-k", + action="store", + dest="keyword", + default="", + metavar="EXPRESSION", + help="Only run tests which match the given substring expression. " + "An expression is a Python evaluable expression " + "where all names are substring-matched against test names " + "and their parent classes. Example: -k 'test_method or test_" + "other' matches all test functions and classes whose name " + "contains 'test_method' or 'test_other', while -k 'not test_method' " + "matches those that don't contain 'test_method' in their names. " + "-k 'not test_method and not test_other' will eliminate the matches. " + "Additionally keywords are matched to classes and functions " + "containing extra names in their 'extra_keyword_matches' set, " + "as well as functions which have names assigned directly to them. " + "The matching is case-insensitive.", + ) + + group._addoption( # private to use reserved lower-case short option + "-m", + action="store", + dest="markexpr", + default="", + metavar="MARKEXPR", + help="Only run tests matching given mark expression. " + "For example: -m 'mark1 and not mark2'.", + ) + + group.addoption( + "--markers", + action="store_true", + help="show markers (builtin, plugin and per-project ones).", + ) + + parser.addini("markers", "Register new markers for test functions", "linelist") + parser.addini(EMPTY_PARAMETERSET_OPTION, "Default marker for empty parametersets") + + +@hookimpl(tryfirst=True) +def pytest_cmdline_main(config: Config) -> int | ExitCode | None: + import _pytest.config + + if config.option.markers: + config._do_configure() + tw = _pytest.config.create_terminal_writer(config) + for line in config.getini("markers"): + parts = line.split(":", 1) + name = parts[0] + rest = parts[1] if len(parts) == 2 else "" + tw.write(f"@pytest.mark.{name}:", bold=True) + tw.line(rest) + tw.line() + config._ensure_unconfigure() + return 0 + + return None + + +@dataclasses.dataclass +class KeywordMatcher: + """A matcher for keywords. + + Given a list of names, matches any substring of one of these names. The + string inclusion check is case-insensitive. + + Will match on the name of colitem, including the names of its parents. + Only matches names of items which are either a :class:`Class` or a + :class:`Function`. + + Additionally, matches on names in the 'extra_keyword_matches' set of + any item, as well as names directly assigned to test functions. + """ + + __slots__ = ("_names",) + + _names: AbstractSet[str] + + @classmethod + def from_item(cls, item: Item) -> KeywordMatcher: + mapped_names = set() + + # Add the names of the current item and any parent items, + # except the Session and root Directory's which are not + # interesting for matching. + import pytest + + for node in item.listchain(): + if isinstance(node, pytest.Session): + continue + if isinstance(node, pytest.Directory) and isinstance( + node.parent, pytest.Session + ): + continue + mapped_names.add(node.name) + + # Add the names added as extra keywords to current or parent items. + mapped_names.update(item.listextrakeywords()) + + # Add the names attached to the current function through direct assignment. + function_obj = getattr(item, "function", None) + if function_obj: + mapped_names.update(function_obj.__dict__) + + # Add the markers to the keywords as we no longer handle them correctly. + mapped_names.update(mark.name for mark in item.iter_markers()) + + return cls(mapped_names) + + def __call__(self, subname: str, /, **kwargs: str | int | bool | None) -> bool: + if kwargs: + raise UsageError("Keyword expressions do not support call parameters.") + subname = subname.lower() + return any(subname in name.lower() for name in self._names) + + +def deselect_by_keyword(items: list[Item], config: Config) -> None: + keywordexpr = config.option.keyword.lstrip() + if not keywordexpr: + return + + expr = _parse_expression(keywordexpr, "Wrong expression passed to '-k'") + + remaining = [] + deselected = [] + for colitem in items: + if not expr.evaluate(KeywordMatcher.from_item(colitem)): + deselected.append(colitem) + else: + remaining.append(colitem) + + if deselected: + config.hook.pytest_deselected(items=deselected) + items[:] = remaining + + +@dataclasses.dataclass +class MarkMatcher: + """A matcher for markers which are present. + + Tries to match on any marker names, attached to the given colitem. + """ + + __slots__ = ("own_mark_name_mapping",) + + own_mark_name_mapping: dict[str, list[Mark]] + + @classmethod + def from_markers(cls, markers: Iterable[Mark]) -> MarkMatcher: + mark_name_mapping = collections.defaultdict(list) + for mark in markers: + mark_name_mapping[mark.name].append(mark) + return cls(mark_name_mapping) + + def __call__(self, name: str, /, **kwargs: str | int | bool | None) -> bool: + if not (matches := self.own_mark_name_mapping.get(name, [])): + return False + + for mark in matches: # pylint: disable=consider-using-any-or-all + if all(mark.kwargs.get(k, NOT_SET) == v for k, v in kwargs.items()): + return True + return False + + +def deselect_by_mark(items: list[Item], config: Config) -> None: + matchexpr = config.option.markexpr + if not matchexpr: + return + + expr = _parse_expression(matchexpr, "Wrong expression passed to '-m'") + remaining: list[Item] = [] + deselected: list[Item] = [] + for item in items: + if expr.evaluate(MarkMatcher.from_markers(item.iter_markers())): + remaining.append(item) + else: + deselected.append(item) + if deselected: + config.hook.pytest_deselected(items=deselected) + items[:] = remaining + + +def _parse_expression(expr: str, exc_message: str) -> Expression: + try: + return Expression.compile(expr) + except SyntaxError as e: + raise UsageError( + f"{exc_message}: {e.text}: at column {e.offset}: {e.msg}" + ) from None + + +def pytest_collection_modifyitems(items: list[Item], config: Config) -> None: + deselect_by_keyword(items, config) + deselect_by_mark(items, config) + + +def pytest_configure(config: Config) -> None: + config.stash[old_mark_config_key] = MARK_GEN._config + MARK_GEN._config = config + + empty_parameterset = config.getini(EMPTY_PARAMETERSET_OPTION) + + if empty_parameterset not in ("skip", "xfail", "fail_at_collect", None, ""): + raise UsageError( + f"{EMPTY_PARAMETERSET_OPTION!s} must be one of skip, xfail or fail_at_collect" + f" but it is {empty_parameterset!r}" + ) + + +def pytest_unconfigure(config: Config) -> None: + MARK_GEN._config = config.stash.get(old_mark_config_key, None) diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/mark/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/mark/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..5ce9174e Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/mark/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/mark/__pycache__/expression.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/mark/__pycache__/expression.cpython-312.pyc new file mode 100644 index 00000000..7ad38629 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/mark/__pycache__/expression.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/mark/__pycache__/structures.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_pytest/mark/__pycache__/structures.cpython-312.pyc new file mode 100644 index 00000000..db9d7f6c Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_pytest/mark/__pycache__/structures.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/mark/expression.py b/Backend/venv/lib/python3.12/site-packages/_pytest/mark/expression.py new file mode 100644 index 00000000..3bdbd03c --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/_pytest/mark/expression.py @@ -0,0 +1,353 @@ +r"""Evaluate match expressions, as used by `-k` and `-m`. + +The grammar is: + +expression: expr? EOF +expr: and_expr ('or' and_expr)* +and_expr: not_expr ('and' not_expr)* +not_expr: 'not' not_expr | '(' expr ')' | ident kwargs? + +ident: (\w|:|\+|-|\.|\[|\]|\\|/)+ +kwargs: ('(' name '=' value ( ', ' name '=' value )* ')') +name: a valid ident, but not a reserved keyword +value: (unescaped) string literal | (-)?[0-9]+ | 'False' | 'True' | 'None' + +The semantics are: + +- Empty expression evaluates to False. +- ident evaluates to True or False according to a provided matcher function. +- ident with parentheses and keyword arguments evaluates to True or False according to a provided matcher function. +- or/and/not evaluate according to the usual boolean semantics. +""" + +from __future__ import annotations + +import ast +from collections.abc import Iterator +from collections.abc import Mapping +from collections.abc import Sequence +import dataclasses +import enum +import keyword +import re +import types +from typing import Final +from typing import final +from typing import Literal +from typing import NoReturn +from typing import overload +from typing import Protocol + + +__all__ = [ + "Expression", + "ExpressionMatcher", +] + + +FILE_NAME: Final = "" + + +class TokenType(enum.Enum): + LPAREN = "left parenthesis" + RPAREN = "right parenthesis" + OR = "or" + AND = "and" + NOT = "not" + IDENT = "identifier" + EOF = "end of input" + EQUAL = "=" + STRING = "string literal" + COMMA = "," + + +@dataclasses.dataclass(frozen=True) +class Token: + __slots__ = ("pos", "type", "value") + type: TokenType + value: str + pos: int + + +class Scanner: + __slots__ = ("current", "input", "tokens") + + def __init__(self, input: str) -> None: + self.input = input + self.tokens = self.lex(input) + self.current = next(self.tokens) + + def lex(self, input: str) -> Iterator[Token]: + pos = 0 + while pos < len(input): + if input[pos] in (" ", "\t"): + pos += 1 + elif input[pos] == "(": + yield Token(TokenType.LPAREN, "(", pos) + pos += 1 + elif input[pos] == ")": + yield Token(TokenType.RPAREN, ")", pos) + pos += 1 + elif input[pos] == "=": + yield Token(TokenType.EQUAL, "=", pos) + pos += 1 + elif input[pos] == ",": + yield Token(TokenType.COMMA, ",", pos) + pos += 1 + elif (quote_char := input[pos]) in ("'", '"'): + end_quote_pos = input.find(quote_char, pos + 1) + if end_quote_pos == -1: + raise SyntaxError( + f'closing quote "{quote_char}" is missing', + (FILE_NAME, 1, pos + 1, input), + ) + value = input[pos : end_quote_pos + 1] + if (backslash_pos := input.find("\\")) != -1: + raise SyntaxError( + r'escaping with "\" not supported in marker expression', + (FILE_NAME, 1, backslash_pos + 1, input), + ) + yield Token(TokenType.STRING, value, pos) + pos += len(value) + else: + match = re.match(r"(:?\w|:|\+|-|\.|\[|\]|\\|/)+", input[pos:]) + if match: + value = match.group(0) + if value == "or": + yield Token(TokenType.OR, value, pos) + elif value == "and": + yield Token(TokenType.AND, value, pos) + elif value == "not": + yield Token(TokenType.NOT, value, pos) + else: + yield Token(TokenType.IDENT, value, pos) + pos += len(value) + else: + raise SyntaxError( + f'unexpected character "{input[pos]}"', + (FILE_NAME, 1, pos + 1, input), + ) + yield Token(TokenType.EOF, "", pos) + + @overload + def accept(self, type: TokenType, *, reject: Literal[True]) -> Token: ... + + @overload + def accept( + self, type: TokenType, *, reject: Literal[False] = False + ) -> Token | None: ... + + def accept(self, type: TokenType, *, reject: bool = False) -> Token | None: + if self.current.type is type: + token = self.current + if token.type is not TokenType.EOF: + self.current = next(self.tokens) + return token + if reject: + self.reject((type,)) + return None + + def reject(self, expected: Sequence[TokenType]) -> NoReturn: + raise SyntaxError( + "expected {}; got {}".format( + " OR ".join(type.value for type in expected), + self.current.type.value, + ), + (FILE_NAME, 1, self.current.pos + 1, self.input), + ) + + +# True, False and None are legal match expression identifiers, +# but illegal as Python identifiers. To fix this, this prefix +# is added to identifiers in the conversion to Python AST. +IDENT_PREFIX = "$" + + +def expression(s: Scanner) -> ast.Expression: + if s.accept(TokenType.EOF): + ret: ast.expr = ast.Constant(False) + else: + ret = expr(s) + s.accept(TokenType.EOF, reject=True) + return ast.fix_missing_locations(ast.Expression(ret)) + + +def expr(s: Scanner) -> ast.expr: + ret = and_expr(s) + while s.accept(TokenType.OR): + rhs = and_expr(s) + ret = ast.BoolOp(ast.Or(), [ret, rhs]) + return ret + + +def and_expr(s: Scanner) -> ast.expr: + ret = not_expr(s) + while s.accept(TokenType.AND): + rhs = not_expr(s) + ret = ast.BoolOp(ast.And(), [ret, rhs]) + return ret + + +def not_expr(s: Scanner) -> ast.expr: + if s.accept(TokenType.NOT): + return ast.UnaryOp(ast.Not(), not_expr(s)) + if s.accept(TokenType.LPAREN): + ret = expr(s) + s.accept(TokenType.RPAREN, reject=True) + return ret + ident = s.accept(TokenType.IDENT) + if ident: + name = ast.Name(IDENT_PREFIX + ident.value, ast.Load()) + if s.accept(TokenType.LPAREN): + ret = ast.Call(func=name, args=[], keywords=all_kwargs(s)) + s.accept(TokenType.RPAREN, reject=True) + else: + ret = name + return ret + + s.reject((TokenType.NOT, TokenType.LPAREN, TokenType.IDENT)) + + +BUILTIN_MATCHERS = {"True": True, "False": False, "None": None} + + +def single_kwarg(s: Scanner) -> ast.keyword: + keyword_name = s.accept(TokenType.IDENT, reject=True) + if not keyword_name.value.isidentifier(): + raise SyntaxError( + f"not a valid python identifier {keyword_name.value}", + (FILE_NAME, 1, keyword_name.pos + 1, s.input), + ) + if keyword.iskeyword(keyword_name.value): + raise SyntaxError( + f"unexpected reserved python keyword `{keyword_name.value}`", + (FILE_NAME, 1, keyword_name.pos + 1, s.input), + ) + s.accept(TokenType.EQUAL, reject=True) + + if value_token := s.accept(TokenType.STRING): + value: str | int | bool | None = value_token.value[1:-1] # strip quotes + else: + value_token = s.accept(TokenType.IDENT, reject=True) + if (number := value_token.value).isdigit() or ( + number.startswith("-") and number[1:].isdigit() + ): + value = int(number) + elif value_token.value in BUILTIN_MATCHERS: + value = BUILTIN_MATCHERS[value_token.value] + else: + raise SyntaxError( + f'unexpected character/s "{value_token.value}"', + (FILE_NAME, 1, value_token.pos + 1, s.input), + ) + + ret = ast.keyword(keyword_name.value, ast.Constant(value)) + return ret + + +def all_kwargs(s: Scanner) -> list[ast.keyword]: + ret = [single_kwarg(s)] + while s.accept(TokenType.COMMA): + ret.append(single_kwarg(s)) + return ret + + +class ExpressionMatcher(Protocol): + """A callable which, given an identifier and optional kwargs, should return + whether it matches in an :class:`Expression` evaluation. + + Should be prepared to handle arbitrary strings as input. + + If no kwargs are provided, the expression of the form `foo`. + If kwargs are provided, the expression is of the form `foo(1, b=True, "s")`. + + If the expression is not supported (e.g. don't want to accept the kwargs + syntax variant), should raise :class:`~pytest.UsageError`. + + Example:: + + def matcher(name: str, /, **kwargs: str | int | bool | None) -> bool: + # Match `cat`. + if name == "cat" and not kwargs: + return True + # Match `dog(barks=True)`. + if name == "dog" and kwargs == {"barks": False}: + return True + return False + """ + + def __call__(self, name: str, /, **kwargs: str | int | bool | None) -> bool: ... + + +@dataclasses.dataclass +class MatcherNameAdapter: + matcher: ExpressionMatcher + name: str + + def __bool__(self) -> bool: + return self.matcher(self.name) + + def __call__(self, **kwargs: str | int | bool | None) -> bool: + return self.matcher(self.name, **kwargs) + + +class MatcherAdapter(Mapping[str, MatcherNameAdapter]): + """Adapts a matcher function to a locals mapping as required by eval().""" + + def __init__(self, matcher: ExpressionMatcher) -> None: + self.matcher = matcher + + def __getitem__(self, key: str) -> MatcherNameAdapter: + return MatcherNameAdapter(matcher=self.matcher, name=key[len(IDENT_PREFIX) :]) + + def __iter__(self) -> Iterator[str]: + raise NotImplementedError() + + def __len__(self) -> int: + raise NotImplementedError() + + +@final +class Expression: + """A compiled match expression as used by -k and -m. + + The expression can be evaluated against different matchers. + """ + + __slots__ = ("_code", "input") + + def __init__(self, input: str, code: types.CodeType) -> None: + #: The original input line, as a string. + self.input: Final = input + self._code: Final = code + + @classmethod + def compile(cls, input: str) -> Expression: + """Compile a match expression. + + :param input: The input expression - one line. + + :raises SyntaxError: If the expression is malformed. + """ + astexpr = expression(Scanner(input)) + code = compile( + astexpr, + filename="", + mode="eval", + ) + return Expression(input, code) + + def evaluate(self, matcher: ExpressionMatcher) -> bool: + """Evaluate the match expression. + + :param matcher: + A callback which determines whether an identifier matches or not. + See the :class:`ExpressionMatcher` protocol for details and example. + + :returns: Whether the expression matches or not. + + :raises UsageError: + If the matcher doesn't support the expression. Cannot happen if the + matcher supports all expressions. + """ + return bool(eval(self._code, {"__builtins__": {}}, MatcherAdapter(matcher))) diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/mark/structures.py b/Backend/venv/lib/python3.12/site-packages/_pytest/mark/structures.py new file mode 100644 index 00000000..16bb6d81 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/_pytest/mark/structures.py @@ -0,0 +1,664 @@ +# mypy: allow-untyped-defs +from __future__ import annotations + +import collections.abc +from collections.abc import Callable +from collections.abc import Collection +from collections.abc import Iterable +from collections.abc import Iterator +from collections.abc import Mapping +from collections.abc import MutableMapping +from collections.abc import Sequence +import dataclasses +import enum +import inspect +from typing import Any +from typing import final +from typing import NamedTuple +from typing import overload +from typing import TYPE_CHECKING +from typing import TypeVar +import warnings + +from .._code import getfslineno +from ..compat import NOTSET +from ..compat import NotSetType +from _pytest.config import Config +from _pytest.deprecated import check_ispytest +from _pytest.deprecated import MARKED_FIXTURE +from _pytest.outcomes import fail +from _pytest.raises import AbstractRaises +from _pytest.scope import _ScopeName +from _pytest.warning_types import PytestUnknownMarkWarning + + +if TYPE_CHECKING: + from ..nodes import Node + + +EMPTY_PARAMETERSET_OPTION = "empty_parameter_set_mark" + + +# Singleton type for HIDDEN_PARAM, as described in: +# https://www.python.org/dev/peps/pep-0484/#support-for-singleton-types-in-unions +class _HiddenParam(enum.Enum): + token = 0 + + +#: Can be used as a parameter set id to hide it from the test name. +HIDDEN_PARAM = _HiddenParam.token + + +def istestfunc(func) -> bool: + return callable(func) and getattr(func, "__name__", "") != "" + + +def get_empty_parameterset_mark( + config: Config, argnames: Sequence[str], func +) -> MarkDecorator: + from ..nodes import Collector + + argslisting = ", ".join(argnames) + + _fs, lineno = getfslineno(func) + reason = f"got empty parameter set for ({argslisting})" + requested_mark = config.getini(EMPTY_PARAMETERSET_OPTION) + if requested_mark in ("", None, "skip"): + mark = MARK_GEN.skip(reason=reason) + elif requested_mark == "xfail": + mark = MARK_GEN.xfail(reason=reason, run=False) + elif requested_mark == "fail_at_collect": + raise Collector.CollectError( + f"Empty parameter set in '{func.__name__}' at line {lineno + 1}" + ) + else: + raise LookupError(requested_mark) + return mark + + +class ParameterSet(NamedTuple): + """A set of values for a set of parameters along with associated marks and + an optional ID for the set. + + Examples:: + + pytest.param(1, 2, 3) + # ParameterSet(values=(1, 2, 3), marks=(), id=None) + + pytest.param("hello", id="greeting") + # ParameterSet(values=("hello",), marks=(), id="greeting") + + # Parameter set with marks + pytest.param(42, marks=pytest.mark.xfail) + # ParameterSet(values=(42,), marks=(MarkDecorator(...),), id=None) + + # From parametrize mark (parameter names + list of parameter sets) + pytest.mark.parametrize( + ("a", "b", "expected"), + [ + (1, 2, 3), + pytest.param(40, 2, 42, id="everything"), + ], + ) + # ParameterSet(values=(1, 2, 3), marks=(), id=None) + # ParameterSet(values=(40, 2, 42), marks=(), id="everything") + """ + + values: Sequence[object | NotSetType] + marks: Collection[MarkDecorator | Mark] + id: str | _HiddenParam | None + + @classmethod + def param( + cls, + *values: object, + marks: MarkDecorator | Collection[MarkDecorator | Mark] = (), + id: str | _HiddenParam | None = None, + ) -> ParameterSet: + if isinstance(marks, MarkDecorator): + marks = (marks,) + else: + assert isinstance(marks, collections.abc.Collection) + if any(i.name == "usefixtures" for i in marks): + raise ValueError( + "pytest.param cannot add pytest.mark.usefixtures; see " + "https://docs.pytest.org/en/stable/reference/reference.html#pytest-param" + ) + + if id is not None: + if not isinstance(id, str) and id is not HIDDEN_PARAM: + raise TypeError( + "Expected id to be a string or a `pytest.HIDDEN_PARAM` sentinel, " + f"got {type(id)}: {id!r}", + ) + return cls(values, marks, id) + + @classmethod + def extract_from( + cls, + parameterset: ParameterSet | Sequence[object] | object, + force_tuple: bool = False, + ) -> ParameterSet: + """Extract from an object or objects. + + :param parameterset: + A legacy style parameterset that may or may not be a tuple, + and may or may not be wrapped into a mess of mark objects. + + :param force_tuple: + Enforce tuple wrapping so single argument tuple values + don't get decomposed and break tests. + """ + if isinstance(parameterset, cls): + return parameterset + if force_tuple: + return cls.param(parameterset) + else: + # TODO: Refactor to fix this type-ignore. Currently the following + # passes type-checking but crashes: + # + # @pytest.mark.parametrize(('x', 'y'), [1, 2]) + # def test_foo(x, y): pass + return cls(parameterset, marks=[], id=None) # type: ignore[arg-type] + + @staticmethod + def _parse_parametrize_args( + argnames: str | Sequence[str], + argvalues: Iterable[ParameterSet | Sequence[object] | object], + *args, + **kwargs, + ) -> tuple[Sequence[str], bool]: + if isinstance(argnames, str): + argnames = [x.strip() for x in argnames.split(",") if x.strip()] + force_tuple = len(argnames) == 1 + else: + force_tuple = False + return argnames, force_tuple + + @staticmethod + def _parse_parametrize_parameters( + argvalues: Iterable[ParameterSet | Sequence[object] | object], + force_tuple: bool, + ) -> list[ParameterSet]: + return [ + ParameterSet.extract_from(x, force_tuple=force_tuple) for x in argvalues + ] + + @classmethod + def _for_parametrize( + cls, + argnames: str | Sequence[str], + argvalues: Iterable[ParameterSet | Sequence[object] | object], + func, + config: Config, + nodeid: str, + ) -> tuple[Sequence[str], list[ParameterSet]]: + argnames, force_tuple = cls._parse_parametrize_args(argnames, argvalues) + parameters = cls._parse_parametrize_parameters(argvalues, force_tuple) + del argvalues + + if parameters: + # Check all parameter sets have the correct number of values. + for param in parameters: + if len(param.values) != len(argnames): + msg = ( + '{nodeid}: in "parametrize" the number of names ({names_len}):\n' + " {names}\n" + "must be equal to the number of values ({values_len}):\n" + " {values}" + ) + fail( + msg.format( + nodeid=nodeid, + values=param.values, + names=argnames, + names_len=len(argnames), + values_len=len(param.values), + ), + pytrace=False, + ) + else: + # Empty parameter set (likely computed at runtime): create a single + # parameter set with NOTSET values, with the "empty parameter set" mark applied to it. + mark = get_empty_parameterset_mark(config, argnames, func) + parameters.append( + ParameterSet( + values=(NOTSET,) * len(argnames), marks=[mark], id="NOTSET" + ) + ) + return argnames, parameters + + +@final +@dataclasses.dataclass(frozen=True) +class Mark: + """A pytest mark.""" + + #: Name of the mark. + name: str + #: Positional arguments of the mark decorator. + args: tuple[Any, ...] + #: Keyword arguments of the mark decorator. + kwargs: Mapping[str, Any] + + #: Source Mark for ids with parametrize Marks. + _param_ids_from: Mark | None = dataclasses.field(default=None, repr=False) + #: Resolved/generated ids with parametrize Marks. + _param_ids_generated: Sequence[str] | None = dataclasses.field( + default=None, repr=False + ) + + def __init__( + self, + name: str, + args: tuple[Any, ...], + kwargs: Mapping[str, Any], + param_ids_from: Mark | None = None, + param_ids_generated: Sequence[str] | None = None, + *, + _ispytest: bool = False, + ) -> None: + """:meta private:""" + check_ispytest(_ispytest) + # Weirdness to bypass frozen=True. + object.__setattr__(self, "name", name) + object.__setattr__(self, "args", args) + object.__setattr__(self, "kwargs", kwargs) + object.__setattr__(self, "_param_ids_from", param_ids_from) + object.__setattr__(self, "_param_ids_generated", param_ids_generated) + + def _has_param_ids(self) -> bool: + return "ids" in self.kwargs or len(self.args) >= 4 + + def combined_with(self, other: Mark) -> Mark: + """Return a new Mark which is a combination of this + Mark and another Mark. + + Combines by appending args and merging kwargs. + + :param Mark other: The mark to combine with. + :rtype: Mark + """ + assert self.name == other.name + + # Remember source of ids with parametrize Marks. + param_ids_from: Mark | None = None + if self.name == "parametrize": + if other._has_param_ids(): + param_ids_from = other + elif self._has_param_ids(): + param_ids_from = self + + return Mark( + self.name, + self.args + other.args, + dict(self.kwargs, **other.kwargs), + param_ids_from=param_ids_from, + _ispytest=True, + ) + + +# A generic parameter designating an object to which a Mark may +# be applied -- a test function (callable) or class. +# Note: a lambda is not allowed, but this can't be represented. +Markable = TypeVar("Markable", bound=Callable[..., object] | type) + + +@dataclasses.dataclass +class MarkDecorator: + """A decorator for applying a mark on test functions and classes. + + ``MarkDecorators`` are created with ``pytest.mark``:: + + mark1 = pytest.mark.NAME # Simple MarkDecorator + mark2 = pytest.mark.NAME(name1=value) # Parametrized MarkDecorator + + and can then be applied as decorators to test functions:: + + @mark2 + def test_function(): + pass + + When a ``MarkDecorator`` is called, it does the following: + + 1. If called with a single class as its only positional argument and no + additional keyword arguments, it attaches the mark to the class so it + gets applied automatically to all test cases found in that class. + + 2. If called with a single function as its only positional argument and + no additional keyword arguments, it attaches the mark to the function, + containing all the arguments already stored internally in the + ``MarkDecorator``. + + 3. When called in any other case, it returns a new ``MarkDecorator`` + instance with the original ``MarkDecorator``'s content updated with + the arguments passed to this call. + + Note: The rules above prevent a ``MarkDecorator`` from storing only a + single function or class reference as its positional argument with no + additional keyword or positional arguments. You can work around this by + using `with_args()`. + """ + + mark: Mark + + def __init__(self, mark: Mark, *, _ispytest: bool = False) -> None: + """:meta private:""" + check_ispytest(_ispytest) + self.mark = mark + + @property + def name(self) -> str: + """Alias for mark.name.""" + return self.mark.name + + @property + def args(self) -> tuple[Any, ...]: + """Alias for mark.args.""" + return self.mark.args + + @property + def kwargs(self) -> Mapping[str, Any]: + """Alias for mark.kwargs.""" + return self.mark.kwargs + + @property + def markname(self) -> str: + """:meta private:""" + return self.name # for backward-compat (2.4.1 had this attr) + + def with_args(self, *args: object, **kwargs: object) -> MarkDecorator: + """Return a MarkDecorator with extra arguments added. + + Unlike calling the MarkDecorator, with_args() can be used even + if the sole argument is a callable/class. + """ + mark = Mark(self.name, args, kwargs, _ispytest=True) + return MarkDecorator(self.mark.combined_with(mark), _ispytest=True) + + # Type ignored because the overloads overlap with an incompatible + # return type. Not much we can do about that. Thankfully mypy picks + # the first match so it works out even if we break the rules. + @overload + def __call__(self, arg: Markable) -> Markable: # type: ignore[overload-overlap] + pass + + @overload + def __call__(self, *args: object, **kwargs: object) -> MarkDecorator: + pass + + def __call__(self, *args: object, **kwargs: object): + """Call the MarkDecorator.""" + if args and not kwargs: + func = args[0] + is_class = inspect.isclass(func) + # For staticmethods/classmethods, the marks are eventually fetched from the + # function object, not the descriptor, so unwrap. + unwrapped_func = func + if isinstance(func, staticmethod | classmethod): + unwrapped_func = func.__func__ + if len(args) == 1 and (istestfunc(unwrapped_func) or is_class): + store_mark(unwrapped_func, self.mark, stacklevel=3) + return func + return self.with_args(*args, **kwargs) + + +def get_unpacked_marks( + obj: object | type, + *, + consider_mro: bool = True, +) -> list[Mark]: + """Obtain the unpacked marks that are stored on an object. + + If obj is a class and consider_mro is true, return marks applied to + this class and all of its super-classes in MRO order. If consider_mro + is false, only return marks applied directly to this class. + """ + if isinstance(obj, type): + if not consider_mro: + mark_lists = [obj.__dict__.get("pytestmark", [])] + else: + mark_lists = [ + x.__dict__.get("pytestmark", []) for x in reversed(obj.__mro__) + ] + mark_list = [] + for item in mark_lists: + if isinstance(item, list): + mark_list.extend(item) + else: + mark_list.append(item) + else: + mark_attribute = getattr(obj, "pytestmark", []) + if isinstance(mark_attribute, list): + mark_list = mark_attribute + else: + mark_list = [mark_attribute] + return list(normalize_mark_list(mark_list)) + + +def normalize_mark_list( + mark_list: Iterable[Mark | MarkDecorator], +) -> Iterable[Mark]: + """ + Normalize an iterable of Mark or MarkDecorator objects into a list of marks + by retrieving the `mark` attribute on MarkDecorator instances. + + :param mark_list: marks to normalize + :returns: A new list of the extracted Mark objects + """ + for mark in mark_list: + mark_obj = getattr(mark, "mark", mark) + if not isinstance(mark_obj, Mark): + raise TypeError(f"got {mark_obj!r} instead of Mark") + yield mark_obj + + +def store_mark(obj, mark: Mark, *, stacklevel: int = 2) -> None: + """Store a Mark on an object. + + This is used to implement the Mark declarations/decorators correctly. + """ + assert isinstance(mark, Mark), mark + + from ..fixtures import getfixturemarker + + if getfixturemarker(obj) is not None: + warnings.warn(MARKED_FIXTURE, stacklevel=stacklevel) + + # Always reassign name to avoid updating pytestmark in a reference that + # was only borrowed. + obj.pytestmark = [*get_unpacked_marks(obj, consider_mro=False), mark] + + +# Typing for builtin pytest marks. This is cheating; it gives builtin marks +# special privilege, and breaks modularity. But practicality beats purity... +if TYPE_CHECKING: + + class _SkipMarkDecorator(MarkDecorator): + @overload # type: ignore[override,no-overload-impl] + def __call__(self, arg: Markable) -> Markable: ... + + @overload + def __call__(self, reason: str = ...) -> MarkDecorator: ... + + class _SkipifMarkDecorator(MarkDecorator): + def __call__( # type: ignore[override] + self, + condition: str | bool = ..., + *conditions: str | bool, + reason: str = ..., + ) -> MarkDecorator: ... + + class _XfailMarkDecorator(MarkDecorator): + @overload # type: ignore[override,no-overload-impl] + def __call__(self, arg: Markable) -> Markable: ... + + @overload + def __call__( + self, + condition: str | bool = False, + *conditions: str | bool, + reason: str = ..., + run: bool = ..., + raises: None + | type[BaseException] + | tuple[type[BaseException], ...] + | AbstractRaises[BaseException] = ..., + strict: bool = ..., + ) -> MarkDecorator: ... + + class _ParametrizeMarkDecorator(MarkDecorator): + def __call__( # type: ignore[override] + self, + argnames: str | Sequence[str], + argvalues: Iterable[ParameterSet | Sequence[object] | object], + *, + indirect: bool | Sequence[str] = ..., + ids: Iterable[None | str | float | int | bool] + | Callable[[Any], object | None] + | None = ..., + scope: _ScopeName | None = ..., + ) -> MarkDecorator: ... + + class _UsefixturesMarkDecorator(MarkDecorator): + def __call__(self, *fixtures: str) -> MarkDecorator: # type: ignore[override] + ... + + class _FilterwarningsMarkDecorator(MarkDecorator): + def __call__(self, *filters: str) -> MarkDecorator: # type: ignore[override] + ... + + +@final +class MarkGenerator: + """Factory for :class:`MarkDecorator` objects - exposed as + a ``pytest.mark`` singleton instance. + + Example:: + + import pytest + + + @pytest.mark.slowtest + def test_function(): + pass + + applies a 'slowtest' :class:`Mark` on ``test_function``. + """ + + # See TYPE_CHECKING above. + if TYPE_CHECKING: + skip: _SkipMarkDecorator + skipif: _SkipifMarkDecorator + xfail: _XfailMarkDecorator + parametrize: _ParametrizeMarkDecorator + usefixtures: _UsefixturesMarkDecorator + filterwarnings: _FilterwarningsMarkDecorator + + def __init__(self, *, _ispytest: bool = False) -> None: + check_ispytest(_ispytest) + self._config: Config | None = None + self._markers: set[str] = set() + + def __getattr__(self, name: str) -> MarkDecorator: + """Generate a new :class:`MarkDecorator` with the given name.""" + if name[0] == "_": + raise AttributeError("Marker name must NOT start with underscore") + + if self._config is not None: + # We store a set of markers as a performance optimisation - if a mark + # name is in the set we definitely know it, but a mark may be known and + # not in the set. We therefore start by updating the set! + if name not in self._markers: + for line in self._config.getini("markers"): + # example lines: "skipif(condition): skip the given test if..." + # or "hypothesis: tests which use Hypothesis", so to get the + # marker name we split on both `:` and `(`. + marker = line.split(":")[0].split("(")[0].strip() + self._markers.add(marker) + + # If the name is not in the set of known marks after updating, + # then it really is time to issue a warning or an error. + if name not in self._markers: + # Raise a specific error for common misspellings of "parametrize". + if name in ["parameterize", "parametrise", "parameterise"]: + __tracebackhide__ = True + fail(f"Unknown '{name}' mark, did you mean 'parametrize'?") + + strict_markers = self._config.getini("strict_markers") + if strict_markers is None: + strict_markers = self._config.getini("strict") + if strict_markers: + fail( + f"{name!r} not found in `markers` configuration option", + pytrace=False, + ) + + warnings.warn( + f"Unknown pytest.mark.{name} - is this a typo? You can register " + "custom marks to avoid this warning - for details, see " + "https://docs.pytest.org/en/stable/how-to/mark.html", + PytestUnknownMarkWarning, + 2, + ) + + return MarkDecorator(Mark(name, (), {}, _ispytest=True), _ispytest=True) + + +MARK_GEN = MarkGenerator(_ispytest=True) + + +@final +class NodeKeywords(MutableMapping[str, Any]): + __slots__ = ("_markers", "node", "parent") + + def __init__(self, node: Node) -> None: + self.node = node + self.parent = node.parent + self._markers = {node.name: True} + + def __getitem__(self, key: str) -> Any: + try: + return self._markers[key] + except KeyError: + if self.parent is None: + raise + return self.parent.keywords[key] + + def __setitem__(self, key: str, value: Any) -> None: + self._markers[key] = value + + # Note: we could've avoided explicitly implementing some of the methods + # below and use the collections.abc fallback, but that would be slow. + + def __contains__(self, key: object) -> bool: + return key in self._markers or ( + self.parent is not None and key in self.parent.keywords + ) + + def update( # type: ignore[override] + self, + other: Mapping[str, Any] | Iterable[tuple[str, Any]] = (), + **kwds: Any, + ) -> None: + self._markers.update(other) + self._markers.update(kwds) + + def __delitem__(self, key: str) -> None: + raise ValueError("cannot delete key in keywords dict") + + def __iter__(self) -> Iterator[str]: + # Doesn't need to be fast. + yield from self._markers + if self.parent is not None: + for keyword in self.parent.keywords: + # self._marks and self.parent.keywords can have duplicates. + if keyword not in self._markers: + yield keyword + + def __len__(self) -> int: + # Doesn't need to be fast. + return sum(1 for keyword in self) + + def __repr__(self) -> str: + return f"" diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/monkeypatch.py b/Backend/venv/lib/python3.12/site-packages/_pytest/monkeypatch.py new file mode 100644 index 00000000..07cc3fc4 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/_pytest/monkeypatch.py @@ -0,0 +1,435 @@ +# mypy: allow-untyped-defs +"""Monkeypatching and mocking functionality.""" + +from __future__ import annotations + +from collections.abc import Generator +from collections.abc import Mapping +from collections.abc import MutableMapping +from contextlib import contextmanager +import os +from pathlib import Path +import re +import sys +from typing import Any +from typing import final +from typing import overload +from typing import TypeVar +import warnings + +from _pytest.deprecated import MONKEYPATCH_LEGACY_NAMESPACE_PACKAGES +from _pytest.fixtures import fixture +from _pytest.warning_types import PytestWarning + + +RE_IMPORT_ERROR_NAME = re.compile(r"^No module named (.*)$") + + +K = TypeVar("K") +V = TypeVar("V") + + +@fixture +def monkeypatch() -> Generator[MonkeyPatch]: + """A convenient fixture for monkey-patching. + + The fixture provides these methods to modify objects, dictionaries, or + :data:`os.environ`: + + * :meth:`monkeypatch.setattr(obj, name, value, raising=True) ` + * :meth:`monkeypatch.delattr(obj, name, raising=True) ` + * :meth:`monkeypatch.setitem(mapping, name, value) ` + * :meth:`monkeypatch.delitem(obj, name, raising=True) ` + * :meth:`monkeypatch.setenv(name, value, prepend=None) ` + * :meth:`monkeypatch.delenv(name, raising=True) ` + * :meth:`monkeypatch.syspath_prepend(path) ` + * :meth:`monkeypatch.chdir(path) ` + * :meth:`monkeypatch.context() ` + + All modifications will be undone after the requesting test function or + fixture has finished. The ``raising`` parameter determines if a :class:`KeyError` + or :class:`AttributeError` will be raised if the set/deletion operation does not have the + specified target. + + To undo modifications done by the fixture in a contained scope, + use :meth:`context() `. + """ + mpatch = MonkeyPatch() + yield mpatch + mpatch.undo() + + +def resolve(name: str) -> object: + # Simplified from zope.dottedname. + parts = name.split(".") + + used = parts.pop(0) + found: object = __import__(used) + for part in parts: + used += "." + part + try: + found = getattr(found, part) + except AttributeError: + pass + else: + continue + # We use explicit un-nesting of the handling block in order + # to avoid nested exceptions. + try: + __import__(used) + except ImportError as ex: + expected = str(ex).split()[-1] + if expected == used: + raise + else: + raise ImportError(f"import error in {used}: {ex}") from ex + found = annotated_getattr(found, part, used) + return found + + +def annotated_getattr(obj: object, name: str, ann: str) -> object: + try: + obj = getattr(obj, name) + except AttributeError as e: + raise AttributeError( + f"{type(obj).__name__!r} object at {ann} has no attribute {name!r}" + ) from e + return obj + + +def derive_importpath(import_path: str, raising: bool) -> tuple[str, object]: + if not isinstance(import_path, str) or "." not in import_path: + raise TypeError(f"must be absolute import path string, not {import_path!r}") + module, attr = import_path.rsplit(".", 1) + target = resolve(module) + if raising: + annotated_getattr(target, attr, ann=module) + return attr, target + + +class Notset: + def __repr__(self) -> str: + return "" + + +notset = Notset() + + +@final +class MonkeyPatch: + """Helper to conveniently monkeypatch attributes/items/environment + variables/syspath. + + Returned by the :fixture:`monkeypatch` fixture. + + .. versionchanged:: 6.2 + Can now also be used directly as `pytest.MonkeyPatch()`, for when + the fixture is not available. In this case, use + :meth:`with MonkeyPatch.context() as mp: ` or remember to call + :meth:`undo` explicitly. + """ + + def __init__(self) -> None: + self._setattr: list[tuple[object, str, object]] = [] + self._setitem: list[tuple[Mapping[Any, Any], object, object]] = [] + self._cwd: str | None = None + self._savesyspath: list[str] | None = None + + @classmethod + @contextmanager + def context(cls) -> Generator[MonkeyPatch]: + """Context manager that returns a new :class:`MonkeyPatch` object + which undoes any patching done inside the ``with`` block upon exit. + + Example: + + .. code-block:: python + + import functools + + + def test_partial(monkeypatch): + with monkeypatch.context() as m: + m.setattr(functools, "partial", 3) + + Useful in situations where it is desired to undo some patches before the test ends, + such as mocking ``stdlib`` functions that might break pytest itself if mocked (for examples + of this see :issue:`3290`). + """ + m = cls() + try: + yield m + finally: + m.undo() + + @overload + def setattr( + self, + target: str, + name: object, + value: Notset = ..., + raising: bool = ..., + ) -> None: ... + + @overload + def setattr( + self, + target: object, + name: str, + value: object, + raising: bool = ..., + ) -> None: ... + + def setattr( + self, + target: str | object, + name: object | str, + value: object = notset, + raising: bool = True, + ) -> None: + """ + Set attribute value on target, memorizing the old value. + + For example: + + .. code-block:: python + + import os + + monkeypatch.setattr(os, "getcwd", lambda: "/") + + The code above replaces the :func:`os.getcwd` function by a ``lambda`` which + always returns ``"/"``. + + For convenience, you can specify a string as ``target`` which + will be interpreted as a dotted import path, with the last part + being the attribute name: + + .. code-block:: python + + monkeypatch.setattr("os.getcwd", lambda: "/") + + Raises :class:`AttributeError` if the attribute does not exist, unless + ``raising`` is set to False. + + **Where to patch** + + ``monkeypatch.setattr`` works by (temporarily) changing the object that a name points to with another one. + There can be many names pointing to any individual object, so for patching to work you must ensure + that you patch the name used by the system under test. + + See the section :ref:`Where to patch ` in the :mod:`unittest.mock` + docs for a complete explanation, which is meant for :func:`unittest.mock.patch` but + applies to ``monkeypatch.setattr`` as well. + """ + __tracebackhide__ = True + import inspect + + if isinstance(value, Notset): + if not isinstance(target, str): + raise TypeError( + "use setattr(target, name, value) or " + "setattr(target, value) with target being a dotted " + "import string" + ) + value = name + name, target = derive_importpath(target, raising) + else: + if not isinstance(name, str): + raise TypeError( + "use setattr(target, name, value) with name being a string or " + "setattr(target, value) with target being a dotted " + "import string" + ) + + oldval = getattr(target, name, notset) + if raising and oldval is notset: + raise AttributeError(f"{target!r} has no attribute {name!r}") + + # avoid class descriptors like staticmethod/classmethod + if inspect.isclass(target): + oldval = target.__dict__.get(name, notset) + self._setattr.append((target, name, oldval)) + setattr(target, name, value) + + def delattr( + self, + target: object | str, + name: str | Notset = notset, + raising: bool = True, + ) -> None: + """Delete attribute ``name`` from ``target``. + + If no ``name`` is specified and ``target`` is a string + it will be interpreted as a dotted import path with the + last part being the attribute name. + + Raises AttributeError it the attribute does not exist, unless + ``raising`` is set to False. + """ + __tracebackhide__ = True + import inspect + + if isinstance(name, Notset): + if not isinstance(target, str): + raise TypeError( + "use delattr(target, name) or " + "delattr(target) with target being a dotted " + "import string" + ) + name, target = derive_importpath(target, raising) + + if not hasattr(target, name): + if raising: + raise AttributeError(name) + else: + oldval = getattr(target, name, notset) + # Avoid class descriptors like staticmethod/classmethod. + if inspect.isclass(target): + oldval = target.__dict__.get(name, notset) + self._setattr.append((target, name, oldval)) + delattr(target, name) + + def setitem(self, dic: Mapping[K, V], name: K, value: V) -> None: + """Set dictionary entry ``name`` to value.""" + self._setitem.append((dic, name, dic.get(name, notset))) + # Not all Mapping types support indexing, but MutableMapping doesn't support TypedDict + dic[name] = value # type: ignore[index] + + def delitem(self, dic: Mapping[K, V], name: K, raising: bool = True) -> None: + """Delete ``name`` from dict. + + Raises ``KeyError`` if it doesn't exist, unless ``raising`` is set to + False. + """ + if name not in dic: + if raising: + raise KeyError(name) + else: + self._setitem.append((dic, name, dic.get(name, notset))) + # Not all Mapping types support indexing, but MutableMapping doesn't support TypedDict + del dic[name] # type: ignore[attr-defined] + + def setenv(self, name: str, value: str, prepend: str | None = None) -> None: + """Set environment variable ``name`` to ``value``. + + If ``prepend`` is a character, read the current environment variable + value and prepend the ``value`` adjoined with the ``prepend`` + character. + """ + if not isinstance(value, str): + warnings.warn( # type: ignore[unreachable] + PytestWarning( + f"Value of environment variable {name} type should be str, but got " + f"{value!r} (type: {type(value).__name__}); converted to str implicitly" + ), + stacklevel=2, + ) + value = str(value) + if prepend and name in os.environ: + value = value + prepend + os.environ[name] + self.setitem(os.environ, name, value) + + def delenv(self, name: str, raising: bool = True) -> None: + """Delete ``name`` from the environment. + + Raises ``KeyError`` if it does not exist, unless ``raising`` is set to + False. + """ + environ: MutableMapping[str, str] = os.environ + self.delitem(environ, name, raising=raising) + + def syspath_prepend(self, path) -> None: + """Prepend ``path`` to ``sys.path`` list of import locations.""" + if self._savesyspath is None: + self._savesyspath = sys.path[:] + sys.path.insert(0, str(path)) + + # https://github.com/pypa/setuptools/blob/d8b901bc/docs/pkg_resources.txt#L162-L171 + # this is only needed when pkg_resources was already loaded by the namespace package + if "pkg_resources" in sys.modules: + import pkg_resources + from pkg_resources import fixup_namespace_packages + + # Only issue deprecation warning if this call would actually have an + # effect for this specific path. + if ( + hasattr(pkg_resources, "_namespace_packages") + and pkg_resources._namespace_packages + ): + path_obj = Path(str(path)) + for ns_pkg in pkg_resources._namespace_packages: + if ns_pkg is None: + continue + ns_pkg_path = path_obj / ns_pkg.replace(".", os.sep) + if ns_pkg_path.is_dir(): + warnings.warn( + MONKEYPATCH_LEGACY_NAMESPACE_PACKAGES, stacklevel=2 + ) + break + + fixup_namespace_packages(str(path)) + + # A call to syspathinsert() usually means that the caller wants to + # import some dynamically created files, thus with python3 we + # invalidate its import caches. + # This is especially important when any namespace package is in use, + # since then the mtime based FileFinder cache (that gets created in + # this case already) gets not invalidated when writing the new files + # quickly afterwards. + from importlib import invalidate_caches + + invalidate_caches() + + def chdir(self, path: str | os.PathLike[str]) -> None: + """Change the current working directory to the specified path. + + :param path: + The path to change into. + """ + if self._cwd is None: + self._cwd = os.getcwd() + os.chdir(path) + + def undo(self) -> None: + """Undo previous changes. + + This call consumes the undo stack. Calling it a second time has no + effect unless you do more monkeypatching after the undo call. + + There is generally no need to call `undo()`, since it is + called automatically during tear-down. + + .. note:: + The same `monkeypatch` fixture is used across a + single test function invocation. If `monkeypatch` is used both by + the test function itself and one of the test fixtures, + calling `undo()` will undo all of the changes made in + both functions. + + Prefer to use :meth:`context() ` instead. + """ + for obj, name, value in reversed(self._setattr): + if value is not notset: + setattr(obj, name, value) + else: + delattr(obj, name) + self._setattr[:] = [] + for dictionary, key, value in reversed(self._setitem): + if value is notset: + try: + # Not all Mapping types support indexing, but MutableMapping doesn't support TypedDict + del dictionary[key] # type: ignore[attr-defined] + except KeyError: + pass # Was already deleted, so we have the desired state. + else: + # Not all Mapping types support indexing, but MutableMapping doesn't support TypedDict + dictionary[key] = value # type: ignore[index] + self._setitem[:] = [] + if self._savesyspath is not None: + sys.path[:] = self._savesyspath + self._savesyspath = None + + if self._cwd is not None: + os.chdir(self._cwd) + self._cwd = None diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/nodes.py b/Backend/venv/lib/python3.12/site-packages/_pytest/nodes.py new file mode 100644 index 00000000..6690f6ab --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/_pytest/nodes.py @@ -0,0 +1,772 @@ +# mypy: allow-untyped-defs +from __future__ import annotations + +import abc +from collections.abc import Callable +from collections.abc import Iterable +from collections.abc import Iterator +from collections.abc import MutableMapping +from functools import cached_property +from functools import lru_cache +import os +import pathlib +from pathlib import Path +from typing import Any +from typing import cast +from typing import NoReturn +from typing import overload +from typing import TYPE_CHECKING +from typing import TypeVar +import warnings + +import pluggy + +import _pytest._code +from _pytest._code import getfslineno +from _pytest._code.code import ExceptionInfo +from _pytest._code.code import TerminalRepr +from _pytest._code.code import Traceback +from _pytest._code.code import TracebackStyle +from _pytest.compat import LEGACY_PATH +from _pytest.compat import signature +from _pytest.config import Config +from _pytest.config import ConftestImportFailure +from _pytest.config.compat import _check_path +from _pytest.deprecated import NODE_CTOR_FSPATH_ARG +from _pytest.mark.structures import Mark +from _pytest.mark.structures import MarkDecorator +from _pytest.mark.structures import NodeKeywords +from _pytest.outcomes import fail +from _pytest.pathlib import absolutepath +from _pytest.stash import Stash +from _pytest.warning_types import PytestWarning + + +if TYPE_CHECKING: + from typing_extensions import Self + + # Imported here due to circular import. + from _pytest.main import Session + + +SEP = "/" + +tracebackcutdir = Path(_pytest.__file__).parent + + +_T = TypeVar("_T") + + +def _imply_path( + node_type: type[Node], + path: Path | None, + fspath: LEGACY_PATH | None, +) -> Path: + if fspath is not None: + warnings.warn( + NODE_CTOR_FSPATH_ARG.format( + node_type_name=node_type.__name__, + ), + stacklevel=6, + ) + if path is not None: + if fspath is not None: + _check_path(path, fspath) + return path + else: + assert fspath is not None + return Path(fspath) + + +_NodeType = TypeVar("_NodeType", bound="Node") + + +class NodeMeta(abc.ABCMeta): + """Metaclass used by :class:`Node` to enforce that direct construction raises + :class:`Failed`. + + This behaviour supports the indirection introduced with :meth:`Node.from_parent`, + the named constructor to be used instead of direct construction. The design + decision to enforce indirection with :class:`NodeMeta` was made as a + temporary aid for refactoring the collection tree, which was diagnosed to + have :class:`Node` objects whose creational patterns were overly entangled. + Once the refactoring is complete, this metaclass can be removed. + + See https://github.com/pytest-dev/pytest/projects/3 for an overview of the + progress on detangling the :class:`Node` classes. + """ + + def __call__(cls, *k, **kw) -> NoReturn: + msg = ( + "Direct construction of {name} has been deprecated, please use {name}.from_parent.\n" + "See " + "https://docs.pytest.org/en/stable/deprecations.html#node-construction-changed-to-node-from-parent" + " for more details." + ).format(name=f"{cls.__module__}.{cls.__name__}") + fail(msg, pytrace=False) + + def _create(cls: type[_T], *k, **kw) -> _T: + try: + return super().__call__(*k, **kw) # type: ignore[no-any-return,misc] + except TypeError: + sig = signature(getattr(cls, "__init__")) + known_kw = {k: v for k, v in kw.items() if k in sig.parameters} + from .warning_types import PytestDeprecationWarning + + warnings.warn( + PytestDeprecationWarning( + f"{cls} is not using a cooperative constructor and only takes {set(known_kw)}.\n" + "See https://docs.pytest.org/en/stable/deprecations.html" + "#constructors-of-custom-pytest-node-subclasses-should-take-kwargs " + "for more details." + ) + ) + + return super().__call__(*k, **known_kw) # type: ignore[no-any-return,misc] + + +class Node(abc.ABC, metaclass=NodeMeta): + r"""Base class of :class:`Collector` and :class:`Item`, the components of + the test collection tree. + + ``Collector``\'s are the internal nodes of the tree, and ``Item``\'s are the + leaf nodes. + """ + + # Implemented in the legacypath plugin. + #: A ``LEGACY_PATH`` copy of the :attr:`path` attribute. Intended for usage + #: for methods not migrated to ``pathlib.Path`` yet, such as + #: :meth:`Item.reportinfo `. Will be deprecated in + #: a future release, prefer using :attr:`path` instead. + fspath: LEGACY_PATH + + # Use __slots__ to make attribute access faster. + # Note that __dict__ is still available. + __slots__ = ( + "__dict__", + "_nodeid", + "_store", + "config", + "name", + "parent", + "path", + "session", + ) + + def __init__( + self, + name: str, + parent: Node | None = None, + config: Config | None = None, + session: Session | None = None, + fspath: LEGACY_PATH | None = None, + path: Path | None = None, + nodeid: str | None = None, + ) -> None: + #: A unique name within the scope of the parent node. + self.name: str = name + + #: The parent collector node. + self.parent = parent + + if config: + #: The pytest config object. + self.config: Config = config + else: + if not parent: + raise TypeError("config or parent must be provided") + self.config = parent.config + + if session: + #: The pytest session this node is part of. + self.session: Session = session + else: + if not parent: + raise TypeError("session or parent must be provided") + self.session = parent.session + + if path is None and fspath is None: + path = getattr(parent, "path", None) + #: Filesystem path where this node was collected from (can be None). + self.path: pathlib.Path = _imply_path(type(self), path, fspath=fspath) + + # The explicit annotation is to avoid publicly exposing NodeKeywords. + #: Keywords/markers collected from all scopes. + self.keywords: MutableMapping[str, Any] = NodeKeywords(self) + + #: The marker objects belonging to this node. + self.own_markers: list[Mark] = [] + + #: Allow adding of extra keywords to use for matching. + self.extra_keyword_matches: set[str] = set() + + if nodeid is not None: + assert "::()" not in nodeid + self._nodeid = nodeid + else: + if not self.parent: + raise TypeError("nodeid or parent must be provided") + self._nodeid = self.parent.nodeid + "::" + self.name + + #: A place where plugins can store information on the node for their + #: own use. + self.stash: Stash = Stash() + # Deprecated alias. Was never public. Can be removed in a few releases. + self._store = self.stash + + @classmethod + def from_parent(cls, parent: Node, **kw) -> Self: + """Public constructor for Nodes. + + This indirection got introduced in order to enable removing + the fragile logic from the node constructors. + + Subclasses can use ``super().from_parent(...)`` when overriding the + construction. + + :param parent: The parent node of this Node. + """ + if "config" in kw: + raise TypeError("config is not a valid argument for from_parent") + if "session" in kw: + raise TypeError("session is not a valid argument for from_parent") + return cls._create(parent=parent, **kw) + + @property + def ihook(self) -> pluggy.HookRelay: + """fspath-sensitive hook proxy used to call pytest hooks.""" + return self.session.gethookproxy(self.path) + + def __repr__(self) -> str: + return "<{} {}>".format(self.__class__.__name__, getattr(self, "name", None)) + + def warn(self, warning: Warning) -> None: + """Issue a warning for this Node. + + Warnings will be displayed after the test session, unless explicitly suppressed. + + :param Warning warning: + The warning instance to issue. + + :raises ValueError: If ``warning`` instance is not a subclass of Warning. + + Example usage: + + .. code-block:: python + + node.warn(PytestWarning("some message")) + node.warn(UserWarning("some message")) + + .. versionchanged:: 6.2 + Any subclass of :class:`Warning` is now accepted, rather than only + :class:`PytestWarning ` subclasses. + """ + # enforce type checks here to avoid getting a generic type error later otherwise. + if not isinstance(warning, Warning): + raise ValueError( + f"warning must be an instance of Warning or subclass, got {warning!r}" + ) + path, lineno = get_fslocation_from_item(self) + assert lineno is not None + warnings.warn_explicit( + warning, + category=None, + filename=str(path), + lineno=lineno + 1, + ) + + # Methods for ordering nodes. + + @property + def nodeid(self) -> str: + """A ::-separated string denoting its collection tree address.""" + return self._nodeid + + def __hash__(self) -> int: + return hash(self._nodeid) + + def setup(self) -> None: + pass + + def teardown(self) -> None: + pass + + def iter_parents(self) -> Iterator[Node]: + """Iterate over all parent collectors starting from and including self + up to the root of the collection tree. + + .. versionadded:: 8.1 + """ + parent: Node | None = self + while parent is not None: + yield parent + parent = parent.parent + + def listchain(self) -> list[Node]: + """Return a list of all parent collectors starting from the root of the + collection tree down to and including self.""" + chain = [] + item: Node | None = self + while item is not None: + chain.append(item) + item = item.parent + chain.reverse() + return chain + + def add_marker(self, marker: str | MarkDecorator, append: bool = True) -> None: + """Dynamically add a marker object to the node. + + :param marker: + The marker. + :param append: + Whether to append the marker, or prepend it. + """ + from _pytest.mark import MARK_GEN + + if isinstance(marker, MarkDecorator): + marker_ = marker + elif isinstance(marker, str): + marker_ = getattr(MARK_GEN, marker) + else: + raise ValueError("is not a string or pytest.mark.* Marker") + self.keywords[marker_.name] = marker_ + if append: + self.own_markers.append(marker_.mark) + else: + self.own_markers.insert(0, marker_.mark) + + def iter_markers(self, name: str | None = None) -> Iterator[Mark]: + """Iterate over all markers of the node. + + :param name: If given, filter the results by the name attribute. + :returns: An iterator of the markers of the node. + """ + return (x[1] for x in self.iter_markers_with_node(name=name)) + + def iter_markers_with_node( + self, name: str | None = None + ) -> Iterator[tuple[Node, Mark]]: + """Iterate over all markers of the node. + + :param name: If given, filter the results by the name attribute. + :returns: An iterator of (node, mark) tuples. + """ + for node in self.iter_parents(): + for mark in node.own_markers: + if name is None or getattr(mark, "name", None) == name: + yield node, mark + + @overload + def get_closest_marker(self, name: str) -> Mark | None: ... + + @overload + def get_closest_marker(self, name: str, default: Mark) -> Mark: ... + + def get_closest_marker(self, name: str, default: Mark | None = None) -> Mark | None: + """Return the first marker matching the name, from closest (for + example function) to farther level (for example module level). + + :param default: Fallback return value if no marker was found. + :param name: Name to filter by. + """ + return next(self.iter_markers(name=name), default) + + def listextrakeywords(self) -> set[str]: + """Return a set of all extra keywords in self and any parents.""" + extra_keywords: set[str] = set() + for item in self.listchain(): + extra_keywords.update(item.extra_keyword_matches) + return extra_keywords + + def listnames(self) -> list[str]: + return [x.name for x in self.listchain()] + + def addfinalizer(self, fin: Callable[[], object]) -> None: + """Register a function to be called without arguments when this node is + finalized. + + This method can only be called when this node is active + in a setup chain, for example during self.setup(). + """ + self.session._setupstate.addfinalizer(fin, self) + + def getparent(self, cls: type[_NodeType]) -> _NodeType | None: + """Get the closest parent node (including self) which is an instance of + the given class. + + :param cls: The node class to search for. + :returns: The node, if found. + """ + for node in self.iter_parents(): + if isinstance(node, cls): + return node + return None + + def _traceback_filter(self, excinfo: ExceptionInfo[BaseException]) -> Traceback: + return excinfo.traceback + + def _repr_failure_py( + self, + excinfo: ExceptionInfo[BaseException], + style: TracebackStyle | None = None, + ) -> TerminalRepr: + from _pytest.fixtures import FixtureLookupError + + if isinstance(excinfo.value, ConftestImportFailure): + excinfo = ExceptionInfo.from_exception(excinfo.value.cause) + if isinstance(excinfo.value, fail.Exception): + if not excinfo.value.pytrace: + style = "value" + if isinstance(excinfo.value, FixtureLookupError): + return excinfo.value.formatrepr() + + tbfilter: bool | Callable[[ExceptionInfo[BaseException]], Traceback] + if self.config.getoption("fulltrace", False): + style = "long" + tbfilter = False + else: + tbfilter = self._traceback_filter + if style == "auto": + style = "long" + # XXX should excinfo.getrepr record all data and toterminal() process it? + if style is None: + if self.config.getoption("tbstyle", "auto") == "short": + style = "short" + else: + style = "long" + + if self.config.get_verbosity() > 1: + truncate_locals = False + else: + truncate_locals = True + + truncate_args = False if self.config.get_verbosity() > 2 else True + + # excinfo.getrepr() formats paths relative to the CWD if `abspath` is False. + # It is possible for a fixture/test to change the CWD while this code runs, which + # would then result in the user seeing confusing paths in the failure message. + # To fix this, if the CWD changed, always display the full absolute path. + # It will be better to just always display paths relative to invocation_dir, but + # this requires a lot of plumbing (#6428). + try: + abspath = Path(os.getcwd()) != self.config.invocation_params.dir + except OSError: + abspath = True + + return excinfo.getrepr( + funcargs=True, + abspath=abspath, + showlocals=self.config.getoption("showlocals", False), + style=style, + tbfilter=tbfilter, + truncate_locals=truncate_locals, + truncate_args=truncate_args, + ) + + def repr_failure( + self, + excinfo: ExceptionInfo[BaseException], + style: TracebackStyle | None = None, + ) -> str | TerminalRepr: + """Return a representation of a collection or test failure. + + .. seealso:: :ref:`non-python tests` + + :param excinfo: Exception information for the failure. + """ + return self._repr_failure_py(excinfo, style) + + +def get_fslocation_from_item(node: Node) -> tuple[str | Path, int | None]: + """Try to extract the actual location from a node, depending on available attributes: + + * "location": a pair (path, lineno) + * "obj": a Python object that the node wraps. + * "path": just a path + + :rtype: A tuple of (str|Path, int) with filename and 0-based line number. + """ + # See Item.location. + location: tuple[str, int | None, str] | None = getattr(node, "location", None) + if location is not None: + return location[:2] + obj = getattr(node, "obj", None) + if obj is not None: + return getfslineno(obj) + return getattr(node, "path", "unknown location"), -1 + + +class Collector(Node, abc.ABC): + """Base class of all collectors. + + Collector create children through `collect()` and thus iteratively build + the collection tree. + """ + + class CollectError(Exception): + """An error during collection, contains a custom message.""" + + @abc.abstractmethod + def collect(self) -> Iterable[Item | Collector]: + """Collect children (items and collectors) for this collector.""" + raise NotImplementedError("abstract") + + # TODO: This omits the style= parameter which breaks Liskov Substitution. + def repr_failure( # type: ignore[override] + self, excinfo: ExceptionInfo[BaseException] + ) -> str | TerminalRepr: + """Return a representation of a collection failure. + + :param excinfo: Exception information for the failure. + """ + if isinstance(excinfo.value, self.CollectError) and not self.config.getoption( + "fulltrace", False + ): + exc = excinfo.value + return str(exc.args[0]) + + # Respect explicit tbstyle option, but default to "short" + # (_repr_failure_py uses "long" with "fulltrace" option always). + tbstyle = self.config.getoption("tbstyle", "auto") + if tbstyle == "auto": + tbstyle = "short" + + return self._repr_failure_py(excinfo, style=tbstyle) + + def _traceback_filter(self, excinfo: ExceptionInfo[BaseException]) -> Traceback: + if hasattr(self, "path"): + traceback = excinfo.traceback + ntraceback = traceback.cut(path=self.path) + if ntraceback == traceback: + ntraceback = ntraceback.cut(excludepath=tracebackcutdir) + return ntraceback.filter(excinfo) + return excinfo.traceback + + +@lru_cache(maxsize=1000) +def _check_initialpaths_for_relpath( + initial_paths: frozenset[Path], path: Path +) -> str | None: + if path in initial_paths: + return "" + + for parent in path.parents: + if parent in initial_paths: + return str(path.relative_to(parent)) + + return None + + +class FSCollector(Collector, abc.ABC): + """Base class for filesystem collectors.""" + + def __init__( + self, + fspath: LEGACY_PATH | None = None, + path_or_parent: Path | Node | None = None, + path: Path | None = None, + name: str | None = None, + parent: Node | None = None, + config: Config | None = None, + session: Session | None = None, + nodeid: str | None = None, + ) -> None: + if path_or_parent: + if isinstance(path_or_parent, Node): + assert parent is None + parent = cast(FSCollector, path_or_parent) + elif isinstance(path_or_parent, Path): + assert path is None + path = path_or_parent + + path = _imply_path(type(self), path, fspath=fspath) + if name is None: + name = path.name + if parent is not None and parent.path != path: + try: + rel = path.relative_to(parent.path) + except ValueError: + pass + else: + name = str(rel) + name = name.replace(os.sep, SEP) + self.path = path + + if session is None: + assert parent is not None + session = parent.session + + if nodeid is None: + try: + nodeid = str(self.path.relative_to(session.config.rootpath)) + except ValueError: + nodeid = _check_initialpaths_for_relpath(session._initialpaths, path) + + if nodeid and os.sep != SEP: + nodeid = nodeid.replace(os.sep, SEP) + + super().__init__( + name=name, + parent=parent, + config=config, + session=session, + nodeid=nodeid, + path=path, + ) + + @classmethod + def from_parent( + cls, + parent, + *, + fspath: LEGACY_PATH | None = None, + path: Path | None = None, + **kw, + ) -> Self: + """The public constructor.""" + return super().from_parent(parent=parent, fspath=fspath, path=path, **kw) + + +class File(FSCollector, abc.ABC): + """Base class for collecting tests from a file. + + :ref:`non-python tests`. + """ + + +class Directory(FSCollector, abc.ABC): + """Base class for collecting files from a directory. + + A basic directory collector does the following: goes over the files and + sub-directories in the directory and creates collectors for them by calling + the hooks :hook:`pytest_collect_directory` and :hook:`pytest_collect_file`, + after checking that they are not ignored using + :hook:`pytest_ignore_collect`. + + The default directory collectors are :class:`~pytest.Dir` and + :class:`~pytest.Package`. + + .. versionadded:: 8.0 + + :ref:`custom directory collectors`. + """ + + +class Item(Node, abc.ABC): + """Base class of all test invocation items. + + Note that for a single function there might be multiple test invocation items. + """ + + nextitem = None + + def __init__( + self, + name, + parent=None, + config: Config | None = None, + session: Session | None = None, + nodeid: str | None = None, + **kw, + ) -> None: + # The first two arguments are intentionally passed positionally, + # to keep plugins who define a node type which inherits from + # (pytest.Item, pytest.File) working (see issue #8435). + # They can be made kwargs when the deprecation above is done. + super().__init__( + name, + parent, + config=config, + session=session, + nodeid=nodeid, + **kw, + ) + self._report_sections: list[tuple[str, str, str]] = [] + + #: A list of tuples (name, value) that holds user defined properties + #: for this test. + self.user_properties: list[tuple[str, object]] = [] + + self._check_item_and_collector_diamond_inheritance() + + def _check_item_and_collector_diamond_inheritance(self) -> None: + """ + Check if the current type inherits from both File and Collector + at the same time, emitting a warning accordingly (#8447). + """ + cls = type(self) + + # We inject an attribute in the type to avoid issuing this warning + # for the same class more than once, which is not helpful. + # It is a hack, but was deemed acceptable in order to avoid + # flooding the user in the common case. + attr_name = "_pytest_diamond_inheritance_warning_shown" + if getattr(cls, attr_name, False): + return + setattr(cls, attr_name, True) + + problems = ", ".join( + base.__name__ for base in cls.__bases__ if issubclass(base, Collector) + ) + if problems: + warnings.warn( + f"{cls.__name__} is an Item subclass and should not be a collector, " + f"however its bases {problems} are collectors.\n" + "Please split the Collectors and the Item into separate node types.\n" + "Pytest Doc example: https://docs.pytest.org/en/latest/example/nonpython.html\n" + "example pull request on a plugin: https://github.com/asmeurer/pytest-flakes/pull/40/", + PytestWarning, + ) + + @abc.abstractmethod + def runtest(self) -> None: + """Run the test case for this item. + + Must be implemented by subclasses. + + .. seealso:: :ref:`non-python tests` + """ + raise NotImplementedError("runtest must be implemented by Item subclass") + + def add_report_section(self, when: str, key: str, content: str) -> None: + """Add a new report section, similar to what's done internally to add + stdout and stderr captured output:: + + item.add_report_section("call", "stdout", "report section contents") + + :param str when: + One of the possible capture states, ``"setup"``, ``"call"``, ``"teardown"``. + :param str key: + Name of the section, can be customized at will. Pytest uses ``"stdout"`` and + ``"stderr"`` internally. + :param str content: + The full contents as a string. + """ + if content: + self._report_sections.append((when, key, content)) + + def reportinfo(self) -> tuple[os.PathLike[str] | str, int | None, str]: + """Get location information for this item for test reports. + + Returns a tuple with three elements: + + - The path of the test (default ``self.path``) + - The 0-based line number of the test (default ``None``) + - A name of the test to be shown (default ``""``) + + .. seealso:: :ref:`non-python tests` + """ + return self.path, None, "" + + @cached_property + def location(self) -> tuple[str, int | None, str]: + """ + Returns a tuple of ``(relfspath, lineno, testname)`` for this item + where ``relfspath`` is file path relative to ``config.rootpath`` + and lineno is a 0-based line number. + """ + location = self.reportinfo() + path = absolutepath(location[0]) + relfspath = self.session._node_location_to_relpath(path) + assert type(location[2]) is str + return (relfspath, location[1], location[2]) diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/outcomes.py b/Backend/venv/lib/python3.12/site-packages/_pytest/outcomes.py new file mode 100644 index 00000000..766be95c --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/_pytest/outcomes.py @@ -0,0 +1,308 @@ +"""Exception classes and constants handling test outcomes as well as +functions creating them.""" + +from __future__ import annotations + +import sys +from typing import Any +from typing import ClassVar +from typing import NoReturn + +from .warning_types import PytestDeprecationWarning + + +class OutcomeException(BaseException): + """OutcomeException and its subclass instances indicate and contain info + about test and collection outcomes.""" + + def __init__(self, msg: str | None = None, pytrace: bool = True) -> None: + if msg is not None and not isinstance(msg, str): + error_msg = ( # type: ignore[unreachable] + "{} expected string as 'msg' parameter, got '{}' instead.\n" + "Perhaps you meant to use a mark?" + ) + raise TypeError(error_msg.format(type(self).__name__, type(msg).__name__)) + super().__init__(msg) + self.msg = msg + self.pytrace = pytrace + + def __repr__(self) -> str: + if self.msg is not None: + return self.msg + return f"<{self.__class__.__name__} instance>" + + __str__ = __repr__ + + +TEST_OUTCOME = (OutcomeException, Exception) + + +class Skipped(OutcomeException): + # XXX hackish: on 3k we fake to live in the builtins + # in order to have Skipped exception printing shorter/nicer + __module__ = "builtins" + + def __init__( + self, + msg: str | None = None, + pytrace: bool = True, + allow_module_level: bool = False, + *, + _use_item_location: bool = False, + ) -> None: + super().__init__(msg=msg, pytrace=pytrace) + self.allow_module_level = allow_module_level + # If true, the skip location is reported as the item's location, + # instead of the place that raises the exception/calls skip(). + self._use_item_location = _use_item_location + + +class Failed(OutcomeException): + """Raised from an explicit call to pytest.fail().""" + + __module__ = "builtins" + + +class Exit(Exception): + """Raised for immediate program exits (no tracebacks/summaries).""" + + def __init__( + self, msg: str = "unknown reason", returncode: int | None = None + ) -> None: + self.msg = msg + self.returncode = returncode + super().__init__(msg) + + +class XFailed(Failed): + """Raised from an explicit call to pytest.xfail().""" + + +class _Exit: + """Exit testing process. + + :param reason: + The message to show as the reason for exiting pytest. reason has a default value + only because `msg` is deprecated. + + :param returncode: + Return code to be used when exiting pytest. None means the same as ``0`` (no error), + same as :func:`sys.exit`. + + :raises pytest.exit.Exception: + The exception that is raised. + """ + + Exception: ClassVar[type[Exit]] = Exit + + def __call__(self, reason: str = "", returncode: int | None = None) -> NoReturn: + __tracebackhide__ = True + raise Exit(msg=reason, returncode=returncode) + + +exit: _Exit = _Exit() + + +class _Skip: + """Skip an executing test with the given message. + + This function should be called only during testing (setup, call or teardown) or + during collection by using the ``allow_module_level`` flag. This function can + be called in doctests as well. + + :param reason: + The message to show the user as reason for the skip. + + :param allow_module_level: + Allows this function to be called at module level. + Raising the skip exception at module level will stop + the execution of the module and prevent the collection of all tests in the module, + even those defined before the `skip` call. + + Defaults to False. + + :raises pytest.skip.Exception: + The exception that is raised. + + .. note:: + It is better to use the :ref:`pytest.mark.skipif ref` marker when + possible to declare a test to be skipped under certain conditions + like mismatching platforms or dependencies. + Similarly, use the ``# doctest: +SKIP`` directive (see :py:data:`doctest.SKIP`) + to skip a doctest statically. + """ + + Exception: ClassVar[type[Skipped]] = Skipped + + def __call__(self, reason: str = "", allow_module_level: bool = False) -> NoReturn: + __tracebackhide__ = True + raise Skipped(msg=reason, allow_module_level=allow_module_level) + + +skip: _Skip = _Skip() + + +class _Fail: + """Explicitly fail an executing test with the given message. + + :param reason: + The message to show the user as reason for the failure. + + :param pytrace: + If False, msg represents the full failure information and no + python traceback will be reported. + + :raises pytest.fail.Exception: + The exception that is raised. + """ + + Exception: ClassVar[type[Failed]] = Failed + + def __call__(self, reason: str = "", pytrace: bool = True) -> NoReturn: + __tracebackhide__ = True + raise Failed(msg=reason, pytrace=pytrace) + + +fail: _Fail = _Fail() + + +class _XFail: + """Imperatively xfail an executing test or setup function with the given reason. + + This function should be called only during testing (setup, call or teardown). + + No other code is executed after using ``xfail()`` (it is implemented + internally by raising an exception). + + :param reason: + The message to show the user as reason for the xfail. + + .. note:: + It is better to use the :ref:`pytest.mark.xfail ref` marker when + possible to declare a test to be xfailed under certain conditions + like known bugs or missing features. + + :raises pytest.xfail.Exception: + The exception that is raised. + """ + + Exception: ClassVar[type[XFailed]] = XFailed + + def __call__(self, reason: str = "") -> NoReturn: + __tracebackhide__ = True + raise XFailed(msg=reason) + + +xfail: _XFail = _XFail() + + +def importorskip( + modname: str, + minversion: str | None = None, + reason: str | None = None, + *, + exc_type: type[ImportError] | None = None, +) -> Any: + """Import and return the requested module ``modname``, or skip the + current test if the module cannot be imported. + + :param modname: + The name of the module to import. + :param minversion: + If given, the imported module's ``__version__`` attribute must be at + least this minimal version, otherwise the test is still skipped. + :param reason: + If given, this reason is shown as the message when the module cannot + be imported. + :param exc_type: + The exception that should be captured in order to skip modules. + Must be :py:class:`ImportError` or a subclass. + + If the module can be imported but raises :class:`ImportError`, pytest will + issue a warning to the user, as often users expect the module not to be + found (which would raise :class:`ModuleNotFoundError` instead). + + This warning can be suppressed by passing ``exc_type=ImportError`` explicitly. + + See :ref:`import-or-skip-import-error` for details. + + + :returns: + The imported module. This should be assigned to its canonical name. + + :raises pytest.skip.Exception: + If the module cannot be imported. + + Example:: + + docutils = pytest.importorskip("docutils") + + .. versionadded:: 8.2 + + The ``exc_type`` parameter. + """ + import warnings + + __tracebackhide__ = True + compile(modname, "", "eval") # to catch syntaxerrors + + # Until pytest 9.1, we will warn the user if we catch ImportError (instead of ModuleNotFoundError), + # as this might be hiding an installation/environment problem, which is not usually what is intended + # when using importorskip() (#11523). + # In 9.1, to keep the function signature compatible, we just change the code below to: + # 1. Use `exc_type = ModuleNotFoundError` if `exc_type` is not given. + # 2. Remove `warn_on_import` and the warning handling. + if exc_type is None: + exc_type = ImportError + warn_on_import_error = True + else: + warn_on_import_error = False + + skipped: Skipped | None = None + warning: Warning | None = None + + with warnings.catch_warnings(): + # Make sure to ignore ImportWarnings that might happen because + # of existing directories with the same name we're trying to + # import but without a __init__.py file. + warnings.simplefilter("ignore") + + try: + __import__(modname) + except exc_type as exc: + # Do not raise or issue warnings inside the catch_warnings() block. + if reason is None: + reason = f"could not import {modname!r}: {exc}" + skipped = Skipped(reason, allow_module_level=True) + + if warn_on_import_error and not isinstance(exc, ModuleNotFoundError): + lines = [ + "", + f"Module '{modname}' was found, but when imported by pytest it raised:", + f" {exc!r}", + "In pytest 9.1 this warning will become an error by default.", + "You can fix the underlying problem, or alternatively overwrite this behavior and silence this " + "warning by passing exc_type=ImportError explicitly.", + "See https://docs.pytest.org/en/stable/deprecations.html#pytest-importorskip-default-behavior-regarding-importerror", + ] + warning = PytestDeprecationWarning("\n".join(lines)) + + if warning: + warnings.warn(warning, stacklevel=2) + if skipped: + raise skipped + + mod = sys.modules[modname] + if minversion is None: + return mod + verattr = getattr(mod, "__version__", None) + if minversion is not None: + # Imported lazily to improve start-up time. + from packaging.version import Version + + if verattr is None or Version(verattr) < Version(minversion): + raise Skipped( + f"module {modname!r} has __version__ {verattr!r}, required is: {minversion!r}", + allow_module_level=True, + ) + return mod diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/pastebin.py b/Backend/venv/lib/python3.12/site-packages/_pytest/pastebin.py new file mode 100644 index 00000000..c7b39d96 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/_pytest/pastebin.py @@ -0,0 +1,117 @@ +# mypy: allow-untyped-defs +"""Submit failure or test session information to a pastebin service.""" + +from __future__ import annotations + +from io import StringIO +import tempfile +from typing import IO + +from _pytest.config import Config +from _pytest.config import create_terminal_writer +from _pytest.config.argparsing import Parser +from _pytest.stash import StashKey +from _pytest.terminal import TerminalReporter +import pytest + + +pastebinfile_key = StashKey[IO[bytes]]() + + +def pytest_addoption(parser: Parser) -> None: + group = parser.getgroup("terminal reporting") + group.addoption( + "--pastebin", + metavar="mode", + action="store", + dest="pastebin", + default=None, + choices=["failed", "all"], + help="Send failed|all info to bpaste.net pastebin service", + ) + + +@pytest.hookimpl(trylast=True) +def pytest_configure(config: Config) -> None: + if config.option.pastebin == "all": + tr = config.pluginmanager.getplugin("terminalreporter") + # If no terminal reporter plugin is present, nothing we can do here; + # this can happen when this function executes in a worker node + # when using pytest-xdist, for example. + if tr is not None: + # pastebin file will be UTF-8 encoded binary file. + config.stash[pastebinfile_key] = tempfile.TemporaryFile("w+b") + oldwrite = tr._tw.write + + def tee_write(s, **kwargs): + oldwrite(s, **kwargs) + if isinstance(s, str): + s = s.encode("utf-8") + config.stash[pastebinfile_key].write(s) + + tr._tw.write = tee_write + + +def pytest_unconfigure(config: Config) -> None: + if pastebinfile_key in config.stash: + pastebinfile = config.stash[pastebinfile_key] + # Get terminal contents and delete file. + pastebinfile.seek(0) + sessionlog = pastebinfile.read() + pastebinfile.close() + del config.stash[pastebinfile_key] + # Undo our patching in the terminal reporter. + tr = config.pluginmanager.getplugin("terminalreporter") + del tr._tw.__dict__["write"] + # Write summary. + tr.write_sep("=", "Sending information to Paste Service") + pastebinurl = create_new_paste(sessionlog) + tr.write_line(f"pastebin session-log: {pastebinurl}\n") + + +def create_new_paste(contents: str | bytes) -> str: + """Create a new paste using the bpaste.net service. + + :contents: Paste contents string. + :returns: URL to the pasted contents, or an error message. + """ + import re + from urllib.error import HTTPError + from urllib.parse import urlencode + from urllib.request import urlopen + + params = {"code": contents, "lexer": "text", "expiry": "1week"} + url = "https://bpa.st" + try: + response: str = ( + urlopen(url, data=urlencode(params).encode("ascii")).read().decode("utf-8") + ) + except HTTPError as e: + with e: # HTTPErrors are also http responses that must be closed! + return f"bad response: {e}" + except OSError as e: # eg urllib.error.URLError + return f"bad response: {e}" + m = re.search(r'href="/raw/(\w+)"', response) + if m: + return f"{url}/show/{m.group(1)}" + else: + return "bad response: invalid format ('" + response + "')" + + +def pytest_terminal_summary(terminalreporter: TerminalReporter) -> None: + if terminalreporter.config.option.pastebin != "failed": + return + if "failed" in terminalreporter.stats: + terminalreporter.write_sep("=", "Sending information to Paste Service") + for rep in terminalreporter.stats["failed"]: + try: + msg = rep.longrepr.reprtraceback.reprentries[-1].reprfileloc + except AttributeError: + msg = terminalreporter._getfailureheadline(rep) + file = StringIO() + tw = create_terminal_writer(terminalreporter.config, file) + rep.toterminal(tw) + s = file.getvalue() + assert len(s) + pastebinurl = create_new_paste(s) + terminalreporter.write_line(f"{msg} --> {pastebinurl}") diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/pathlib.py b/Backend/venv/lib/python3.12/site-packages/_pytest/pathlib.py new file mode 100644 index 00000000..cd154346 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/_pytest/pathlib.py @@ -0,0 +1,1063 @@ +from __future__ import annotations + +import atexit +from collections.abc import Callable +from collections.abc import Iterable +from collections.abc import Iterator +import contextlib +from enum import Enum +from errno import EBADF +from errno import ELOOP +from errno import ENOENT +from errno import ENOTDIR +import fnmatch +from functools import partial +from importlib.machinery import ModuleSpec +from importlib.machinery import PathFinder +import importlib.util +import itertools +import os +from os.path import expanduser +from os.path import expandvars +from os.path import isabs +from os.path import sep +from pathlib import Path +from pathlib import PurePath +from posixpath import sep as posix_sep +import shutil +import sys +import types +from types import ModuleType +from typing import Any +from typing import TypeVar +import uuid +import warnings + +from _pytest.compat import assert_never +from _pytest.outcomes import skip +from _pytest.warning_types import PytestWarning + + +if sys.version_info < (3, 11): + from importlib._bootstrap_external import _NamespaceLoader as NamespaceLoader +else: + from importlib.machinery import NamespaceLoader + +LOCK_TIMEOUT = 60 * 60 * 24 * 3 + +_AnyPurePath = TypeVar("_AnyPurePath", bound=PurePath) + +# The following function, variables and comments were +# copied from cpython 3.9 Lib/pathlib.py file. + +# EBADF - guard against macOS `stat` throwing EBADF +_IGNORED_ERRORS = (ENOENT, ENOTDIR, EBADF, ELOOP) + +_IGNORED_WINERRORS = ( + 21, # ERROR_NOT_READY - drive exists but is not accessible + 1921, # ERROR_CANT_RESOLVE_FILENAME - fix for broken symlink pointing to itself +) + + +def _ignore_error(exception: Exception) -> bool: + return ( + getattr(exception, "errno", None) in _IGNORED_ERRORS + or getattr(exception, "winerror", None) in _IGNORED_WINERRORS + ) + + +def get_lock_path(path: _AnyPurePath) -> _AnyPurePath: + return path.joinpath(".lock") + + +def on_rm_rf_error( + func: Callable[..., Any] | None, + path: str, + excinfo: BaseException + | tuple[type[BaseException], BaseException, types.TracebackType | None], + *, + start_path: Path, +) -> bool: + """Handle known read-only errors during rmtree. + + The returned value is used only by our own tests. + """ + if isinstance(excinfo, BaseException): + exc = excinfo + else: + exc = excinfo[1] + + # Another process removed the file in the middle of the "rm_rf" (xdist for example). + # More context: https://github.com/pytest-dev/pytest/issues/5974#issuecomment-543799018 + if isinstance(exc, FileNotFoundError): + return False + + if not isinstance(exc, PermissionError): + warnings.warn( + PytestWarning(f"(rm_rf) error removing {path}\n{type(exc)}: {exc}") + ) + return False + + if func not in (os.rmdir, os.remove, os.unlink): + if func not in (os.open,): + warnings.warn( + PytestWarning( + f"(rm_rf) unknown function {func} when removing {path}:\n{type(exc)}: {exc}" + ) + ) + return False + + # Chmod + retry. + import stat + + def chmod_rw(p: str) -> None: + mode = os.stat(p).st_mode + os.chmod(p, mode | stat.S_IRUSR | stat.S_IWUSR) + + # For files, we need to recursively go upwards in the directories to + # ensure they all are also writable. + p = Path(path) + if p.is_file(): + for parent in p.parents: + chmod_rw(str(parent)) + # Stop when we reach the original path passed to rm_rf. + if parent == start_path: + break + chmod_rw(str(path)) + + func(path) + return True + + +def ensure_extended_length_path(path: Path) -> Path: + """Get the extended-length version of a path (Windows). + + On Windows, by default, the maximum length of a path (MAX_PATH) is 260 + characters, and operations on paths longer than that fail. But it is possible + to overcome this by converting the path to "extended-length" form before + performing the operation: + https://docs.microsoft.com/en-us/windows/win32/fileio/naming-a-file#maximum-path-length-limitation + + On Windows, this function returns the extended-length absolute version of path. + On other platforms it returns path unchanged. + """ + if sys.platform.startswith("win32"): + path = path.resolve() + path = Path(get_extended_length_path_str(str(path))) + return path + + +def get_extended_length_path_str(path: str) -> str: + """Convert a path to a Windows extended length path.""" + long_path_prefix = "\\\\?\\" + unc_long_path_prefix = "\\\\?\\UNC\\" + if path.startswith((long_path_prefix, unc_long_path_prefix)): + return path + # UNC + if path.startswith("\\\\"): + return unc_long_path_prefix + path[2:] + return long_path_prefix + path + + +def rm_rf(path: Path) -> None: + """Remove the path contents recursively, even if some elements + are read-only.""" + path = ensure_extended_length_path(path) + onerror = partial(on_rm_rf_error, start_path=path) + if sys.version_info >= (3, 12): + shutil.rmtree(str(path), onexc=onerror) + else: + shutil.rmtree(str(path), onerror=onerror) + + +def find_prefixed(root: Path, prefix: str) -> Iterator[os.DirEntry[str]]: + """Find all elements in root that begin with the prefix, case-insensitive.""" + l_prefix = prefix.lower() + for x in os.scandir(root): + if x.name.lower().startswith(l_prefix): + yield x + + +def extract_suffixes(iter: Iterable[os.DirEntry[str]], prefix: str) -> Iterator[str]: + """Return the parts of the paths following the prefix. + + :param iter: Iterator over path names. + :param prefix: Expected prefix of the path names. + """ + p_len = len(prefix) + for entry in iter: + yield entry.name[p_len:] + + +def find_suffixes(root: Path, prefix: str) -> Iterator[str]: + """Combine find_prefixes and extract_suffixes.""" + return extract_suffixes(find_prefixed(root, prefix), prefix) + + +def parse_num(maybe_num: str) -> int: + """Parse number path suffixes, returns -1 on error.""" + try: + return int(maybe_num) + except ValueError: + return -1 + + +def _force_symlink(root: Path, target: str | PurePath, link_to: str | Path) -> None: + """Helper to create the current symlink. + + It's full of race conditions that are reasonably OK to ignore + for the context of best effort linking to the latest test run. + + The presumption being that in case of much parallelism + the inaccuracy is going to be acceptable. + """ + current_symlink = root.joinpath(target) + try: + current_symlink.unlink() + except OSError: + pass + try: + current_symlink.symlink_to(link_to) + except Exception: + pass + + +def make_numbered_dir(root: Path, prefix: str, mode: int = 0o700) -> Path: + """Create a directory with an increased number as suffix for the given prefix.""" + for i in range(10): + # try up to 10 times to create the folder + max_existing = max(map(parse_num, find_suffixes(root, prefix)), default=-1) + new_number = max_existing + 1 + new_path = root.joinpath(f"{prefix}{new_number}") + try: + new_path.mkdir(mode=mode) + except Exception: + pass + else: + _force_symlink(root, prefix + "current", new_path) + return new_path + else: + raise OSError( + "could not create numbered dir with prefix " + f"{prefix} in {root} after 10 tries" + ) + + +def create_cleanup_lock(p: Path) -> Path: + """Create a lock to prevent premature folder cleanup.""" + lock_path = get_lock_path(p) + try: + fd = os.open(str(lock_path), os.O_WRONLY | os.O_CREAT | os.O_EXCL, 0o644) + except FileExistsError as e: + raise OSError(f"cannot create lockfile in {p}") from e + else: + pid = os.getpid() + spid = str(pid).encode() + os.write(fd, spid) + os.close(fd) + if not lock_path.is_file(): + raise OSError("lock path got renamed after successful creation") + return lock_path + + +def register_cleanup_lock_removal( + lock_path: Path, register: Any = atexit.register +) -> Any: + """Register a cleanup function for removing a lock, by default on atexit.""" + pid = os.getpid() + + def cleanup_on_exit(lock_path: Path = lock_path, original_pid: int = pid) -> None: + current_pid = os.getpid() + if current_pid != original_pid: + # fork + return + try: + lock_path.unlink() + except OSError: + pass + + return register(cleanup_on_exit) + + +def maybe_delete_a_numbered_dir(path: Path) -> None: + """Remove a numbered directory if its lock can be obtained and it does + not seem to be in use.""" + path = ensure_extended_length_path(path) + lock_path = None + try: + lock_path = create_cleanup_lock(path) + parent = path.parent + + garbage = parent.joinpath(f"garbage-{uuid.uuid4()}") + path.rename(garbage) + rm_rf(garbage) + except OSError: + # known races: + # * other process did a cleanup at the same time + # * deletable folder was found + # * process cwd (Windows) + return + finally: + # If we created the lock, ensure we remove it even if we failed + # to properly remove the numbered dir. + if lock_path is not None: + try: + lock_path.unlink() + except OSError: + pass + + +def ensure_deletable(path: Path, consider_lock_dead_if_created_before: float) -> bool: + """Check if `path` is deletable based on whether the lock file is expired.""" + if path.is_symlink(): + return False + lock = get_lock_path(path) + try: + if not lock.is_file(): + return True + except OSError: + # we might not have access to the lock file at all, in this case assume + # we don't have access to the entire directory (#7491). + return False + try: + lock_time = lock.stat().st_mtime + except Exception: + return False + else: + if lock_time < consider_lock_dead_if_created_before: + # We want to ignore any errors while trying to remove the lock such as: + # - PermissionDenied, like the file permissions have changed since the lock creation; + # - FileNotFoundError, in case another pytest process got here first; + # and any other cause of failure. + with contextlib.suppress(OSError): + lock.unlink() + return True + return False + + +def try_cleanup(path: Path, consider_lock_dead_if_created_before: float) -> None: + """Try to cleanup a folder if we can ensure it's deletable.""" + if ensure_deletable(path, consider_lock_dead_if_created_before): + maybe_delete_a_numbered_dir(path) + + +def cleanup_candidates(root: Path, prefix: str, keep: int) -> Iterator[Path]: + """List candidates for numbered directories to be removed - follows py.path.""" + max_existing = max(map(parse_num, find_suffixes(root, prefix)), default=-1) + max_delete = max_existing - keep + entries = find_prefixed(root, prefix) + entries, entries2 = itertools.tee(entries) + numbers = map(parse_num, extract_suffixes(entries2, prefix)) + for entry, number in zip(entries, numbers, strict=True): + if number <= max_delete: + yield Path(entry) + + +def cleanup_dead_symlinks(root: Path) -> None: + for left_dir in root.iterdir(): + if left_dir.is_symlink(): + if not left_dir.resolve().exists(): + left_dir.unlink() + + +def cleanup_numbered_dir( + root: Path, prefix: str, keep: int, consider_lock_dead_if_created_before: float +) -> None: + """Cleanup for lock driven numbered directories.""" + if not root.exists(): + return + for path in cleanup_candidates(root, prefix, keep): + try_cleanup(path, consider_lock_dead_if_created_before) + for path in root.glob("garbage-*"): + try_cleanup(path, consider_lock_dead_if_created_before) + + cleanup_dead_symlinks(root) + + +def make_numbered_dir_with_cleanup( + root: Path, + prefix: str, + keep: int, + lock_timeout: float, + mode: int, +) -> Path: + """Create a numbered dir with a cleanup lock and remove old ones.""" + e = None + for i in range(10): + try: + p = make_numbered_dir(root, prefix, mode) + # Only lock the current dir when keep is not 0 + if keep != 0: + lock_path = create_cleanup_lock(p) + register_cleanup_lock_removal(lock_path) + except Exception as exc: + e = exc + else: + consider_lock_dead_if_created_before = p.stat().st_mtime - lock_timeout + # Register a cleanup for program exit + atexit.register( + cleanup_numbered_dir, + root, + prefix, + keep, + consider_lock_dead_if_created_before, + ) + return p + assert e is not None + raise e + + +def resolve_from_str(input: str, rootpath: Path) -> Path: + input = expanduser(input) + input = expandvars(input) + if isabs(input): + return Path(input) + else: + return rootpath.joinpath(input) + + +def fnmatch_ex(pattern: str, path: str | os.PathLike[str]) -> bool: + """A port of FNMatcher from py.path.common which works with PurePath() instances. + + The difference between this algorithm and PurePath.match() is that the + latter matches "**" glob expressions for each part of the path, while + this algorithm uses the whole path instead. + + For example: + "tests/foo/bar/doc/test_foo.py" matches pattern "tests/**/doc/test*.py" + with this algorithm, but not with PurePath.match(). + + This algorithm was ported to keep backward-compatibility with existing + settings which assume paths match according this logic. + + References: + * https://bugs.python.org/issue29249 + * https://bugs.python.org/issue34731 + """ + path = PurePath(path) + iswin32 = sys.platform.startswith("win") + + if iswin32 and sep not in pattern and posix_sep in pattern: + # Running on Windows, the pattern has no Windows path separators, + # and the pattern has one or more Posix path separators. Replace + # the Posix path separators with the Windows path separator. + pattern = pattern.replace(posix_sep, sep) + + if sep not in pattern: + name = path.name + else: + name = str(path) + if path.is_absolute() and not os.path.isabs(pattern): + pattern = f"*{os.sep}{pattern}" + return fnmatch.fnmatch(name, pattern) + + +def parts(s: str) -> set[str]: + parts = s.split(sep) + return {sep.join(parts[: i + 1]) or sep for i in range(len(parts))} + + +def symlink_or_skip( + src: os.PathLike[str] | str, + dst: os.PathLike[str] | str, + **kwargs: Any, +) -> None: + """Make a symlink, or skip the test in case symlinks are not supported.""" + try: + os.symlink(src, dst, **kwargs) + except OSError as e: + skip(f"symlinks not supported: {e}") + + +class ImportMode(Enum): + """Possible values for `mode` parameter of `import_path`.""" + + prepend = "prepend" + append = "append" + importlib = "importlib" + + +class ImportPathMismatchError(ImportError): + """Raised on import_path() if there is a mismatch of __file__'s. + + This can happen when `import_path` is called multiple times with different filenames that has + the same basename but reside in packages + (for example "/tests1/test_foo.py" and "/tests2/test_foo.py"). + """ + + +def import_path( + path: str | os.PathLike[str], + *, + mode: str | ImportMode = ImportMode.prepend, + root: Path, + consider_namespace_packages: bool, +) -> ModuleType: + """ + Import and return a module from the given path, which can be a file (a module) or + a directory (a package). + + :param path: + Path to the file to import. + + :param mode: + Controls the underlying import mechanism that will be used: + + * ImportMode.prepend: the directory containing the module (or package, taking + `__init__.py` files into account) will be put at the *start* of `sys.path` before + being imported with `importlib.import_module`. + + * ImportMode.append: same as `prepend`, but the directory will be appended + to the end of `sys.path`, if not already in `sys.path`. + + * ImportMode.importlib: uses more fine control mechanisms provided by `importlib` + to import the module, which avoids having to muck with `sys.path` at all. It effectively + allows having same-named test modules in different places. + + :param root: + Used as an anchor when mode == ImportMode.importlib to obtain + a unique name for the module being imported so it can safely be stored + into ``sys.modules``. + + :param consider_namespace_packages: + If True, consider namespace packages when resolving module names. + + :raises ImportPathMismatchError: + If after importing the given `path` and the module `__file__` + are different. Only raised in `prepend` and `append` modes. + """ + path = Path(path) + mode = ImportMode(mode) + + if not path.exists(): + raise ImportError(path) + + if mode is ImportMode.importlib: + # Try to import this module using the standard import mechanisms, but + # without touching sys.path. + try: + pkg_root, module_name = resolve_pkg_root_and_module_name( + path, consider_namespace_packages=consider_namespace_packages + ) + except CouldNotResolvePathError: + pass + else: + # If the given module name is already in sys.modules, do not import it again. + with contextlib.suppress(KeyError): + return sys.modules[module_name] + + mod = _import_module_using_spec( + module_name, path, pkg_root, insert_modules=False + ) + if mod is not None: + return mod + + # Could not import the module with the current sys.path, so we fall back + # to importing the file as a single module, not being a part of a package. + module_name = module_name_from_path(path, root) + with contextlib.suppress(KeyError): + return sys.modules[module_name] + + mod = _import_module_using_spec( + module_name, path, path.parent, insert_modules=True + ) + if mod is None: + raise ImportError(f"Can't find module {module_name} at location {path}") + return mod + + try: + pkg_root, module_name = resolve_pkg_root_and_module_name( + path, consider_namespace_packages=consider_namespace_packages + ) + except CouldNotResolvePathError: + pkg_root, module_name = path.parent, path.stem + + # Change sys.path permanently: restoring it at the end of this function would cause surprising + # problems because of delayed imports: for example, a conftest.py file imported by this function + # might have local imports, which would fail at runtime if we restored sys.path. + if mode is ImportMode.append: + if str(pkg_root) not in sys.path: + sys.path.append(str(pkg_root)) + elif mode is ImportMode.prepend: + if str(pkg_root) != sys.path[0]: + sys.path.insert(0, str(pkg_root)) + else: + assert_never(mode) + + importlib.import_module(module_name) + + mod = sys.modules[module_name] + if path.name == "__init__.py": + return mod + + ignore = os.environ.get("PY_IGNORE_IMPORTMISMATCH", "") + if ignore != "1": + module_file = mod.__file__ + if module_file is None: + raise ImportPathMismatchError(module_name, module_file, path) + + if module_file.endswith((".pyc", ".pyo")): + module_file = module_file[:-1] + if module_file.endswith(os.sep + "__init__.py"): + module_file = module_file[: -(len(os.sep + "__init__.py"))] + + try: + is_same = _is_same(str(path), module_file) + except FileNotFoundError: + is_same = False + + if not is_same: + raise ImportPathMismatchError(module_name, module_file, path) + + return mod + + +def _import_module_using_spec( + module_name: str, module_path: Path, module_location: Path, *, insert_modules: bool +) -> ModuleType | None: + """ + Tries to import a module by its canonical name, path, and its parent location. + + :param module_name: + The expected module name, will become the key of `sys.modules`. + + :param module_path: + The file path of the module, for example `/foo/bar/test_demo.py`. + If module is a package, pass the path to the `__init__.py` of the package. + If module is a namespace package, pass directory path. + + :param module_location: + The parent location of the module. + If module is a package, pass the directory containing the `__init__.py` file. + + :param insert_modules: + If True, will call `insert_missing_modules` to create empty intermediate modules + with made-up module names (when importing test files not reachable from `sys.path`). + + Example 1 of parent_module_*: + + module_name: "a.b.c.demo" + module_path: Path("a/b/c/demo.py") + module_location: Path("a/b/c/") + if "a.b.c" is package ("a/b/c/__init__.py" exists), then + parent_module_name: "a.b.c" + parent_module_path: Path("a/b/c/__init__.py") + parent_module_location: Path("a/b/c/") + else: + parent_module_name: "a.b.c" + parent_module_path: Path("a/b/c") + parent_module_location: Path("a/b/") + + Example 2 of parent_module_*: + + module_name: "a.b.c" + module_path: Path("a/b/c/__init__.py") + module_location: Path("a/b/c/") + if "a.b" is package ("a/b/__init__.py" exists), then + parent_module_name: "a.b" + parent_module_path: Path("a/b/__init__.py") + parent_module_location: Path("a/b/") + else: + parent_module_name: "a.b" + parent_module_path: Path("a/b/") + parent_module_location: Path("a/") + """ + # Attempt to import the parent module, seems is our responsibility: + # https://github.com/python/cpython/blob/73906d5c908c1e0b73c5436faeff7d93698fc074/Lib/importlib/_bootstrap.py#L1308-L1311 + parent_module_name, _, name = module_name.rpartition(".") + parent_module: ModuleType | None = None + if parent_module_name: + parent_module = sys.modules.get(parent_module_name) + # If the parent_module lacks the `__path__` attribute, AttributeError when finding a submodule's spec, + # requiring re-import according to the path. + need_reimport = not hasattr(parent_module, "__path__") + if parent_module is None or need_reimport: + # Get parent_location based on location, get parent_path based on path. + if module_path.name == "__init__.py": + # If the current module is in a package, + # need to leave the package first and then enter the parent module. + parent_module_path = module_path.parent.parent + else: + parent_module_path = module_path.parent + + if (parent_module_path / "__init__.py").is_file(): + # If the parent module is a package, loading by __init__.py file. + parent_module_path = parent_module_path / "__init__.py" + + parent_module = _import_module_using_spec( + parent_module_name, + parent_module_path, + parent_module_path.parent, + insert_modules=insert_modules, + ) + + # Checking with sys.meta_path first in case one of its hooks can import this module, + # such as our own assertion-rewrite hook. + for meta_importer in sys.meta_path: + module_name_of_meta = getattr(meta_importer.__class__, "__module__", "") + if module_name_of_meta == "_pytest.assertion.rewrite" and module_path.is_file(): + # Import modules in subdirectories by module_path + # to ensure assertion rewrites are not missed (#12659). + find_spec_path = [str(module_location), str(module_path)] + else: + find_spec_path = [str(module_location)] + + spec = meta_importer.find_spec(module_name, find_spec_path) + + if spec_matches_module_path(spec, module_path): + break + else: + loader = None + if module_path.is_dir(): + # The `spec_from_file_location` matches a loader based on the file extension by default. + # For a namespace package, need to manually specify a loader. + loader = NamespaceLoader(name, module_path, PathFinder()) # type: ignore[arg-type] + + spec = importlib.util.spec_from_file_location( + module_name, str(module_path), loader=loader + ) + + if spec_matches_module_path(spec, module_path): + assert spec is not None + # Find spec and import this module. + mod = importlib.util.module_from_spec(spec) + sys.modules[module_name] = mod + spec.loader.exec_module(mod) # type: ignore[union-attr] + + # Set this module as an attribute of the parent module (#12194). + if parent_module is not None: + setattr(parent_module, name, mod) + + if insert_modules: + insert_missing_modules(sys.modules, module_name) + return mod + + return None + + +def spec_matches_module_path(module_spec: ModuleSpec | None, module_path: Path) -> bool: + """Return true if the given ModuleSpec can be used to import the given module path.""" + if module_spec is None: + return False + + if module_spec.origin: + return Path(module_spec.origin) == module_path + + # Compare the path with the `module_spec.submodule_Search_Locations` in case + # the module is part of a namespace package. + # https://docs.python.org/3/library/importlib.html#importlib.machinery.ModuleSpec.submodule_search_locations + if module_spec.submodule_search_locations: # can be None. + for path in module_spec.submodule_search_locations: + if Path(path) == module_path: + return True + + return False + + +# Implement a special _is_same function on Windows which returns True if the two filenames +# compare equal, to circumvent os.path.samefile returning False for mounts in UNC (#7678). +if sys.platform.startswith("win"): + + def _is_same(f1: str, f2: str) -> bool: + return Path(f1) == Path(f2) or os.path.samefile(f1, f2) + +else: + + def _is_same(f1: str, f2: str) -> bool: + return os.path.samefile(f1, f2) + + +def module_name_from_path(path: Path, root: Path) -> str: + """ + Return a dotted module name based on the given path, anchored on root. + + For example: path="projects/src/tests/test_foo.py" and root="/projects", the + resulting module name will be "src.tests.test_foo". + """ + path = path.with_suffix("") + try: + relative_path = path.relative_to(root) + except ValueError: + # If we can't get a relative path to root, use the full path, except + # for the first part ("d:\\" or "/" depending on the platform, for example). + path_parts = path.parts[1:] + else: + # Use the parts for the relative path to the root path. + path_parts = relative_path.parts + + # Module name for packages do not contain the __init__ file, unless + # the `__init__.py` file is at the root. + if len(path_parts) >= 2 and path_parts[-1] == "__init__": + path_parts = path_parts[:-1] + + # Module names cannot contain ".", normalize them to "_". This prevents + # a directory having a "." in the name (".env.310" for example) causing extra intermediate modules. + # Also, important to replace "." at the start of paths, as those are considered relative imports. + path_parts = tuple(x.replace(".", "_") for x in path_parts) + + return ".".join(path_parts) + + +def insert_missing_modules(modules: dict[str, ModuleType], module_name: str) -> None: + """ + Used by ``import_path`` to create intermediate modules when using mode=importlib. + + When we want to import a module as "src.tests.test_foo" for example, we need + to create empty modules "src" and "src.tests" after inserting "src.tests.test_foo", + otherwise "src.tests.test_foo" is not importable by ``__import__``. + """ + module_parts = module_name.split(".") + while module_name: + parent_module_name, _, child_name = module_name.rpartition(".") + if parent_module_name: + parent_module = modules.get(parent_module_name) + if parent_module is None: + try: + # If sys.meta_path is empty, calling import_module will issue + # a warning and raise ModuleNotFoundError. To avoid the + # warning, we check sys.meta_path explicitly and raise the error + # ourselves to fall back to creating a dummy module. + if not sys.meta_path: + raise ModuleNotFoundError + parent_module = importlib.import_module(parent_module_name) + except ModuleNotFoundError: + parent_module = ModuleType( + module_name, + doc="Empty module created by pytest's importmode=importlib.", + ) + modules[parent_module_name] = parent_module + + # Add child attribute to the parent that can reference the child + # modules. + if not hasattr(parent_module, child_name): + setattr(parent_module, child_name, modules[module_name]) + + module_parts.pop(-1) + module_name = ".".join(module_parts) + + +def resolve_package_path(path: Path) -> Path | None: + """Return the Python package path by looking for the last + directory upwards which still contains an __init__.py. + + Returns None if it cannot be determined. + """ + result = None + for parent in itertools.chain((path,), path.parents): + if parent.is_dir(): + if not (parent / "__init__.py").is_file(): + break + if not parent.name.isidentifier(): + break + result = parent + return result + + +def resolve_pkg_root_and_module_name( + path: Path, *, consider_namespace_packages: bool = False +) -> tuple[Path, str]: + """ + Return the path to the directory of the root package that contains the + given Python file, and its module name: + + src/ + app/ + __init__.py + core/ + __init__.py + models.py + + Passing the full path to `models.py` will yield Path("src") and "app.core.models". + + If consider_namespace_packages is True, then we additionally check upwards in the hierarchy + for namespace packages: + + https://packaging.python.org/en/latest/guides/packaging-namespace-packages + + Raises CouldNotResolvePathError if the given path does not belong to a package (missing any __init__.py files). + """ + pkg_root: Path | None = None + pkg_path = resolve_package_path(path) + if pkg_path is not None: + pkg_root = pkg_path.parent + if consider_namespace_packages: + start = pkg_root if pkg_root is not None else path.parent + for candidate in (start, *start.parents): + module_name = compute_module_name(candidate, path) + if module_name and is_importable(module_name, path): + # Point the pkg_root to the root of the namespace package. + pkg_root = candidate + break + + if pkg_root is not None: + module_name = compute_module_name(pkg_root, path) + if module_name: + return pkg_root, module_name + + raise CouldNotResolvePathError(f"Could not resolve for {path}") + + +def is_importable(module_name: str, module_path: Path) -> bool: + """ + Return if the given module path could be imported normally by Python, akin to the user + entering the REPL and importing the corresponding module name directly, and corresponds + to the module_path specified. + + :param module_name: + Full module name that we want to check if is importable. + For example, "app.models". + + :param module_path: + Full path to the python module/package we want to check if is importable. + For example, "/projects/src/app/models.py". + """ + try: + # Note this is different from what we do in ``_import_module_using_spec``, where we explicitly search through + # sys.meta_path to be able to pass the path of the module that we want to import (``meta_importer.find_spec``). + # Using importlib.util.find_spec() is different, it gives the same results as trying to import + # the module normally in the REPL. + spec = importlib.util.find_spec(module_name) + except (ImportError, ValueError, ImportWarning): + return False + else: + return spec_matches_module_path(spec, module_path) + + +def compute_module_name(root: Path, module_path: Path) -> str | None: + """Compute a module name based on a path and a root anchor.""" + try: + path_without_suffix = module_path.with_suffix("") + except ValueError: + # Empty paths (such as Path.cwd()) might break meta_path hooks (like our own assertion rewriter). + return None + + try: + relative = path_without_suffix.relative_to(root) + except ValueError: # pragma: no cover + return None + names = list(relative.parts) + if not names: + return None + if names[-1] == "__init__": + names.pop() + return ".".join(names) + + +class CouldNotResolvePathError(Exception): + """Custom exception raised by resolve_pkg_root_and_module_name.""" + + +def scandir( + path: str | os.PathLike[str], + sort_key: Callable[[os.DirEntry[str]], object] = lambda entry: entry.name, +) -> list[os.DirEntry[str]]: + """Scan a directory recursively, in breadth-first order. + + The returned entries are sorted according to the given key. + The default is to sort by name. + If the directory does not exist, return an empty list. + """ + entries = [] + # Attempt to create a scandir iterator for the given path. + try: + scandir_iter = os.scandir(path) + except FileNotFoundError: + # If the directory does not exist, return an empty list. + return [] + # Use the scandir iterator in a context manager to ensure it is properly closed. + with scandir_iter as s: + for entry in s: + try: + entry.is_file() + except OSError as err: + if _ignore_error(err): + continue + # Reraise non-ignorable errors to avoid hiding issues. + raise + entries.append(entry) + entries.sort(key=sort_key) # type: ignore[arg-type] + return entries + + +def visit( + path: str | os.PathLike[str], recurse: Callable[[os.DirEntry[str]], bool] +) -> Iterator[os.DirEntry[str]]: + """Walk a directory recursively, in breadth-first order. + + The `recurse` predicate determines whether a directory is recursed. + + Entries at each directory level are sorted. + """ + entries = scandir(path) + yield from entries + for entry in entries: + if entry.is_dir() and recurse(entry): + yield from visit(entry.path, recurse) + + +def absolutepath(path: str | os.PathLike[str]) -> Path: + """Convert a path to an absolute path using os.path.abspath. + + Prefer this over Path.resolve() (see #6523). + Prefer this over Path.absolute() (not public, doesn't normalize). + """ + return Path(os.path.abspath(path)) + + +def commonpath(path1: Path, path2: Path) -> Path | None: + """Return the common part shared with the other path, or None if there is + no common part. + + If one path is relative and one is absolute, returns None. + """ + try: + return Path(os.path.commonpath((str(path1), str(path2)))) + except ValueError: + return None + + +def bestrelpath(directory: Path, dest: Path) -> str: + """Return a string which is a relative path from directory to dest such + that directory/bestrelpath == dest. + + The paths must be either both absolute or both relative. + + If no such path can be determined, returns dest. + """ + assert isinstance(directory, Path) + assert isinstance(dest, Path) + if dest == directory: + return os.curdir + # Find the longest common directory. + base = commonpath(directory, dest) + # Can be the case on Windows for two absolute paths on different drives. + # Can be the case for two relative paths without common prefix. + # Can be the case for a relative path and an absolute path. + if not base: + return str(dest) + reldirectory = directory.relative_to(base) + reldest = dest.relative_to(base) + return os.path.join( + # Back from directory to base. + *([os.pardir] * len(reldirectory.parts)), + # Forward from base to dest. + *reldest.parts, + ) + + +def safe_exists(p: Path) -> bool: + """Like Path.exists(), but account for input arguments that might be too long (#11394).""" + try: + return p.exists() + except (ValueError, OSError): + # ValueError: stat: path too long for Windows + # OSError: [WinError 123] The filename, directory name, or volume label syntax is incorrect + return False + + +def samefile_nofollow(p1: Path, p2: Path) -> bool: + """Test whether two paths reference the same actual file or directory. + + Unlike Path.samefile(), does not resolve symlinks. + """ + return os.path.samestat(p1.lstat(), p2.lstat()) diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/py.typed b/Backend/venv/lib/python3.12/site-packages/_pytest/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/pytester.py b/Backend/venv/lib/python3.12/site-packages/_pytest/pytester.py new file mode 100644 index 00000000..1cd5f05d --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/_pytest/pytester.py @@ -0,0 +1,1791 @@ +# mypy: allow-untyped-defs +"""(Disabled by default) support for testing pytest and pytest plugins. + +PYTEST_DONT_REWRITE +""" + +from __future__ import annotations + +import collections.abc +from collections.abc import Callable +from collections.abc import Generator +from collections.abc import Iterable +from collections.abc import Sequence +import contextlib +from fnmatch import fnmatch +import gc +import importlib +from io import StringIO +import locale +import os +from pathlib import Path +import platform +import re +import shutil +import subprocess +import sys +import traceback +from typing import Any +from typing import Final +from typing import final +from typing import IO +from typing import Literal +from typing import overload +from typing import TextIO +from typing import TYPE_CHECKING +from weakref import WeakKeyDictionary + +from iniconfig import IniConfig +from iniconfig import SectionWrapper + +from _pytest import timing +from _pytest._code import Source +from _pytest.capture import _get_multicapture +from _pytest.compat import NOTSET +from _pytest.compat import NotSetType +from _pytest.config import _PluggyPlugin +from _pytest.config import Config +from _pytest.config import ExitCode +from _pytest.config import hookimpl +from _pytest.config import main +from _pytest.config import PytestPluginManager +from _pytest.config.argparsing import Parser +from _pytest.deprecated import check_ispytest +from _pytest.fixtures import fixture +from _pytest.fixtures import FixtureRequest +from _pytest.main import Session +from _pytest.monkeypatch import MonkeyPatch +from _pytest.nodes import Collector +from _pytest.nodes import Item +from _pytest.outcomes import fail +from _pytest.outcomes import importorskip +from _pytest.outcomes import skip +from _pytest.pathlib import bestrelpath +from _pytest.pathlib import make_numbered_dir +from _pytest.reports import CollectReport +from _pytest.reports import TestReport +from _pytest.tmpdir import TempPathFactory +from _pytest.warning_types import PytestFDWarning + + +if TYPE_CHECKING: + import pexpect + + +pytest_plugins = ["pytester_assertions"] + + +IGNORE_PAM = [ # filenames added when obtaining details about the current user + "/var/lib/sss/mc/passwd" +] + + +def pytest_addoption(parser: Parser) -> None: + parser.addoption( + "--lsof", + action="store_true", + dest="lsof", + default=False, + help="Run FD checks if lsof is available", + ) + + parser.addoption( + "--runpytest", + default="inprocess", + dest="runpytest", + choices=("inprocess", "subprocess"), + help=( + "Run pytest sub runs in tests using an 'inprocess' " + "or 'subprocess' (python -m main) method" + ), + ) + + parser.addini( + "pytester_example_dir", help="Directory to take the pytester example files from" + ) + + +def pytest_configure(config: Config) -> None: + if config.getvalue("lsof"): + checker = LsofFdLeakChecker() + if checker.matching_platform(): + config.pluginmanager.register(checker) + + config.addinivalue_line( + "markers", + "pytester_example_path(*path_segments): join the given path " + "segments to `pytester_example_dir` for this test.", + ) + + +class LsofFdLeakChecker: + def get_open_files(self) -> list[tuple[str, str]]: + if sys.version_info >= (3, 11): + # New in Python 3.11, ignores utf-8 mode + encoding = locale.getencoding() + else: + encoding = locale.getpreferredencoding(False) + out = subprocess.run( + ("lsof", "-Ffn0", "-p", str(os.getpid())), + stdout=subprocess.PIPE, + stderr=subprocess.DEVNULL, + check=True, + text=True, + encoding=encoding, + ).stdout + + def isopen(line: str) -> bool: + return line.startswith("f") and ( + "deleted" not in line + and "mem" not in line + and "txt" not in line + and "cwd" not in line + ) + + open_files = [] + + for line in out.split("\n"): + if isopen(line): + fields = line.split("\0") + fd = fields[0][1:] + filename = fields[1][1:] + if filename in IGNORE_PAM: + continue + if filename.startswith("/"): + open_files.append((fd, filename)) + + return open_files + + def matching_platform(self) -> bool: + try: + subprocess.run(("lsof", "-v"), check=True) + except (OSError, subprocess.CalledProcessError): + return False + else: + return True + + @hookimpl(wrapper=True, tryfirst=True) + def pytest_runtest_protocol(self, item: Item) -> Generator[None, object, object]: + lines1 = self.get_open_files() + try: + return (yield) + finally: + if hasattr(sys, "pypy_version_info"): + gc.collect() + lines2 = self.get_open_files() + + new_fds = {t[0] for t in lines2} - {t[0] for t in lines1} + leaked_files = [t for t in lines2 if t[0] in new_fds] + if leaked_files: + error = [ + f"***** {len(leaked_files)} FD leakage detected", + *(str(f) for f in leaked_files), + "*** Before:", + *(str(f) for f in lines1), + "*** After:", + *(str(f) for f in lines2), + f"***** {len(leaked_files)} FD leakage detected", + "*** function {}:{}: {} ".format(*item.location), + "See issue #2366", + ] + item.warn(PytestFDWarning("\n".join(error))) + + +# used at least by pytest-xdist plugin + + +@fixture +def _pytest(request: FixtureRequest) -> PytestArg: + """Return a helper which offers a gethookrecorder(hook) method which + returns a HookRecorder instance which helps to make assertions about called + hooks.""" + return PytestArg(request) + + +class PytestArg: + def __init__(self, request: FixtureRequest) -> None: + self._request = request + + def gethookrecorder(self, hook) -> HookRecorder: + hookrecorder = HookRecorder(hook._pm) + self._request.addfinalizer(hookrecorder.finish_recording) + return hookrecorder + + +def get_public_names(values: Iterable[str]) -> list[str]: + """Only return names from iterator values without a leading underscore.""" + return [x for x in values if x[0] != "_"] + + +@final +class RecordedHookCall: + """A recorded call to a hook. + + The arguments to the hook call are set as attributes. + For example: + + .. code-block:: python + + calls = hook_recorder.getcalls("pytest_runtest_setup") + # Suppose pytest_runtest_setup was called once with `item=an_item`. + assert calls[0].item is an_item + """ + + def __init__(self, name: str, kwargs) -> None: + self.__dict__.update(kwargs) + self._name = name + + def __repr__(self) -> str: + d = self.__dict__.copy() + del d["_name"] + return f"" + + if TYPE_CHECKING: + # The class has undetermined attributes, this tells mypy about it. + def __getattr__(self, key: str): ... + + +@final +class HookRecorder: + """Record all hooks called in a plugin manager. + + Hook recorders are created by :class:`Pytester`. + + This wraps all the hook calls in the plugin manager, recording each call + before propagating the normal calls. + """ + + def __init__( + self, pluginmanager: PytestPluginManager, *, _ispytest: bool = False + ) -> None: + check_ispytest(_ispytest) + + self._pluginmanager = pluginmanager + self.calls: list[RecordedHookCall] = [] + self.ret: int | ExitCode | None = None + + def before(hook_name: str, hook_impls, kwargs) -> None: + self.calls.append(RecordedHookCall(hook_name, kwargs)) + + def after(outcome, hook_name: str, hook_impls, kwargs) -> None: + pass + + self._undo_wrapping = pluginmanager.add_hookcall_monitoring(before, after) + + def finish_recording(self) -> None: + self._undo_wrapping() + + def getcalls(self, names: str | Iterable[str]) -> list[RecordedHookCall]: + """Get all recorded calls to hooks with the given names (or name).""" + if isinstance(names, str): + names = names.split() + return [call for call in self.calls if call._name in names] + + def assert_contains(self, entries: Sequence[tuple[str, str]]) -> None: + __tracebackhide__ = True + i = 0 + entries = list(entries) + # Since Python 3.13, f_locals is not a dict, but eval requires a dict. + backlocals = dict(sys._getframe(1).f_locals) + while entries: + name, check = entries.pop(0) + for ind, call in enumerate(self.calls[i:]): + if call._name == name: + print("NAMEMATCH", name, call) + if eval(check, backlocals, call.__dict__): + print("CHECKERMATCH", repr(check), "->", call) + else: + print("NOCHECKERMATCH", repr(check), "-", call) + continue + i += ind + 1 + break + print("NONAMEMATCH", name, "with", call) + else: + fail(f"could not find {name!r} check {check!r}") + + def popcall(self, name: str) -> RecordedHookCall: + __tracebackhide__ = True + for i, call in enumerate(self.calls): + if call._name == name: + del self.calls[i] + return call + lines = [f"could not find call {name!r}, in:"] + lines.extend([f" {x}" for x in self.calls]) + fail("\n".join(lines)) + + def getcall(self, name: str) -> RecordedHookCall: + values = self.getcalls(name) + assert len(values) == 1, (name, values) + return values[0] + + # functionality for test reports + + @overload + def getreports( + self, + names: Literal["pytest_collectreport"], + ) -> Sequence[CollectReport]: ... + + @overload + def getreports( + self, + names: Literal["pytest_runtest_logreport"], + ) -> Sequence[TestReport]: ... + + @overload + def getreports( + self, + names: str | Iterable[str] = ( + "pytest_collectreport", + "pytest_runtest_logreport", + ), + ) -> Sequence[CollectReport | TestReport]: ... + + def getreports( + self, + names: str | Iterable[str] = ( + "pytest_collectreport", + "pytest_runtest_logreport", + ), + ) -> Sequence[CollectReport | TestReport]: + return [x.report for x in self.getcalls(names)] + + def matchreport( + self, + inamepart: str = "", + names: str | Iterable[str] = ( + "pytest_runtest_logreport", + "pytest_collectreport", + ), + when: str | None = None, + ) -> CollectReport | TestReport: + """Return a testreport whose dotted import path matches.""" + values = [] + for rep in self.getreports(names=names): + if not when and rep.when != "call" and rep.passed: + # setup/teardown passing reports - let's ignore those + continue + if when and rep.when != when: + continue + if not inamepart or inamepart in rep.nodeid.split("::"): + values.append(rep) + if not values: + raise ValueError( + f"could not find test report matching {inamepart!r}: " + "no test reports at all!" + ) + if len(values) > 1: + raise ValueError( + f"found 2 or more testreports matching {inamepart!r}: {values}" + ) + return values[0] + + @overload + def getfailures( + self, + names: Literal["pytest_collectreport"], + ) -> Sequence[CollectReport]: ... + + @overload + def getfailures( + self, + names: Literal["pytest_runtest_logreport"], + ) -> Sequence[TestReport]: ... + + @overload + def getfailures( + self, + names: str | Iterable[str] = ( + "pytest_collectreport", + "pytest_runtest_logreport", + ), + ) -> Sequence[CollectReport | TestReport]: ... + + def getfailures( + self, + names: str | Iterable[str] = ( + "pytest_collectreport", + "pytest_runtest_logreport", + ), + ) -> Sequence[CollectReport | TestReport]: + return [rep for rep in self.getreports(names) if rep.failed] + + def getfailedcollections(self) -> Sequence[CollectReport]: + return self.getfailures("pytest_collectreport") + + def listoutcomes( + self, + ) -> tuple[ + Sequence[TestReport], + Sequence[CollectReport | TestReport], + Sequence[CollectReport | TestReport], + ]: + passed = [] + skipped = [] + failed = [] + for rep in self.getreports( + ("pytest_collectreport", "pytest_runtest_logreport") + ): + if rep.passed: + if rep.when == "call": + assert isinstance(rep, TestReport) + passed.append(rep) + elif rep.skipped: + skipped.append(rep) + else: + assert rep.failed, f"Unexpected outcome: {rep!r}" + failed.append(rep) + return passed, skipped, failed + + def countoutcomes(self) -> list[int]: + return [len(x) for x in self.listoutcomes()] + + def assertoutcome(self, passed: int = 0, skipped: int = 0, failed: int = 0) -> None: + __tracebackhide__ = True + from _pytest.pytester_assertions import assertoutcome + + outcomes = self.listoutcomes() + assertoutcome( + outcomes, + passed=passed, + skipped=skipped, + failed=failed, + ) + + def clear(self) -> None: + self.calls[:] = [] + + +@fixture +def linecomp() -> LineComp: + """A :class: `LineComp` instance for checking that an input linearly + contains a sequence of strings.""" + return LineComp() + + +@fixture(name="LineMatcher") +def LineMatcher_fixture(request: FixtureRequest) -> type[LineMatcher]: + """A reference to the :class: `LineMatcher`. + + This is instantiable with a list of lines (without their trailing newlines). + This is useful for testing large texts, such as the output of commands. + """ + return LineMatcher + + +@fixture +def pytester( + request: FixtureRequest, tmp_path_factory: TempPathFactory, monkeypatch: MonkeyPatch +) -> Pytester: + """ + Facilities to write tests/configuration files, execute pytest in isolation, and match + against expected output, perfect for black-box testing of pytest plugins. + + It attempts to isolate the test run from external factors as much as possible, modifying + the current working directory to ``path`` and environment variables during initialization. + + It is particularly useful for testing plugins. It is similar to the :fixture:`tmp_path` + fixture but provides methods which aid in testing pytest itself. + """ + return Pytester(request, tmp_path_factory, monkeypatch, _ispytest=True) + + +@fixture +def _sys_snapshot() -> Generator[None]: + snappaths = SysPathsSnapshot() + snapmods = SysModulesSnapshot() + yield + snapmods.restore() + snappaths.restore() + + +@fixture +def _config_for_test() -> Generator[Config]: + from _pytest.config import get_config + + config = get_config() + yield config + config._ensure_unconfigure() # cleanup, e.g. capman closing tmpfiles. + + +# Regex to match the session duration string in the summary: "74.34s". +rex_session_duration = re.compile(r"\d+\.\d\ds") +# Regex to match all the counts and phrases in the summary line: "34 passed, 111 skipped". +rex_outcome = re.compile(r"(\d+) (\w+)") + + +@final +class RunResult: + """The result of running a command from :class:`~pytest.Pytester`.""" + + def __init__( + self, + ret: int | ExitCode, + outlines: list[str], + errlines: list[str], + duration: float, + ) -> None: + try: + self.ret: int | ExitCode = ExitCode(ret) + """The return value.""" + except ValueError: + self.ret = ret + self.outlines = outlines + """List of lines captured from stdout.""" + self.errlines = errlines + """List of lines captured from stderr.""" + self.stdout = LineMatcher(outlines) + """:class:`~pytest.LineMatcher` of stdout. + + Use e.g. :func:`str(stdout) ` to reconstruct stdout, or the commonly used + :func:`stdout.fnmatch_lines() ` method. + """ + self.stderr = LineMatcher(errlines) + """:class:`~pytest.LineMatcher` of stderr.""" + self.duration = duration + """Duration in seconds.""" + + def __repr__(self) -> str: + return ( + f"" + ) + + def parseoutcomes(self) -> dict[str, int]: + """Return a dictionary of outcome noun -> count from parsing the terminal + output that the test process produced. + + The returned nouns will always be in plural form:: + + ======= 1 failed, 1 passed, 1 warning, 1 error in 0.13s ==== + + Will return ``{"failed": 1, "passed": 1, "warnings": 1, "errors": 1}``. + """ + return self.parse_summary_nouns(self.outlines) + + @classmethod + def parse_summary_nouns(cls, lines) -> dict[str, int]: + """Extract the nouns from a pytest terminal summary line. + + It always returns the plural noun for consistency:: + + ======= 1 failed, 1 passed, 1 warning, 1 error in 0.13s ==== + + Will return ``{"failed": 1, "passed": 1, "warnings": 1, "errors": 1}``. + """ + for line in reversed(lines): + if rex_session_duration.search(line): + outcomes = rex_outcome.findall(line) + ret = {noun: int(count) for (count, noun) in outcomes} + break + else: + raise ValueError("Pytest terminal summary report not found") + + to_plural = { + "warning": "warnings", + "error": "errors", + } + return {to_plural.get(k, k): v for k, v in ret.items()} + + def assert_outcomes( + self, + passed: int = 0, + skipped: int = 0, + failed: int = 0, + errors: int = 0, + xpassed: int = 0, + xfailed: int = 0, + warnings: int | None = None, + deselected: int | None = None, + ) -> None: + """ + Assert that the specified outcomes appear with the respective + numbers (0 means it didn't occur) in the text output from a test run. + + ``warnings`` and ``deselected`` are only checked if not None. + """ + __tracebackhide__ = True + from _pytest.pytester_assertions import assert_outcomes + + outcomes = self.parseoutcomes() + assert_outcomes( + outcomes, + passed=passed, + skipped=skipped, + failed=failed, + errors=errors, + xpassed=xpassed, + xfailed=xfailed, + warnings=warnings, + deselected=deselected, + ) + + +class SysModulesSnapshot: + def __init__(self, preserve: Callable[[str], bool] | None = None) -> None: + self.__preserve = preserve + self.__saved = dict(sys.modules) + + def restore(self) -> None: + if self.__preserve: + self.__saved.update( + (k, m) for k, m in sys.modules.items() if self.__preserve(k) + ) + sys.modules.clear() + sys.modules.update(self.__saved) + + +class SysPathsSnapshot: + def __init__(self) -> None: + self.__saved = list(sys.path), list(sys.meta_path) + + def restore(self) -> None: + sys.path[:], sys.meta_path[:] = self.__saved + + +@final +class Pytester: + """ + Facilities to write tests/configuration files, execute pytest in isolation, and match + against expected output, perfect for black-box testing of pytest plugins. + + It attempts to isolate the test run from external factors as much as possible, modifying + the current working directory to :attr:`path` and environment variables during initialization. + """ + + __test__ = False + + CLOSE_STDIN: Final = NOTSET + + class TimeoutExpired(Exception): + pass + + def __init__( + self, + request: FixtureRequest, + tmp_path_factory: TempPathFactory, + monkeypatch: MonkeyPatch, + *, + _ispytest: bool = False, + ) -> None: + check_ispytest(_ispytest) + self._request = request + self._mod_collections: WeakKeyDictionary[Collector, list[Item | Collector]] = ( + WeakKeyDictionary() + ) + if request.function: + name: str = request.function.__name__ + else: + name = request.node.name + self._name = name + self._path: Path = tmp_path_factory.mktemp(name, numbered=True) + #: A list of plugins to use with :py:meth:`parseconfig` and + #: :py:meth:`runpytest`. Initially this is an empty list but plugins can + #: be added to the list. + #: + #: When running in subprocess mode, specify plugins by name (str) - adding + #: plugin objects directly is not supported. + self.plugins: list[str | _PluggyPlugin] = [] + self._sys_path_snapshot = SysPathsSnapshot() + self._sys_modules_snapshot = self.__take_sys_modules_snapshot() + self._request.addfinalizer(self._finalize) + self._method = self._request.config.getoption("--runpytest") + self._test_tmproot = tmp_path_factory.mktemp(f"tmp-{name}", numbered=True) + + self._monkeypatch = mp = monkeypatch + self.chdir() + mp.setenv("PYTEST_DEBUG_TEMPROOT", str(self._test_tmproot)) + # Ensure no unexpected caching via tox. + mp.delenv("TOX_ENV_DIR", raising=False) + # Discard outer pytest options. + mp.delenv("PYTEST_ADDOPTS", raising=False) + # Ensure no user config is used. + tmphome = str(self.path) + mp.setenv("HOME", tmphome) + mp.setenv("USERPROFILE", tmphome) + # Do not use colors for inner runs by default. + mp.setenv("PY_COLORS", "0") + + @property + def path(self) -> Path: + """Temporary directory path used to create files/run tests from, etc.""" + return self._path + + def __repr__(self) -> str: + return f"" + + def _finalize(self) -> None: + """ + Clean up global state artifacts. + + Some methods modify the global interpreter state and this tries to + clean this up. It does not remove the temporary directory however so + it can be looked at after the test run has finished. + """ + self._sys_modules_snapshot.restore() + self._sys_path_snapshot.restore() + + def __take_sys_modules_snapshot(self) -> SysModulesSnapshot: + # Some zope modules used by twisted-related tests keep internal state + # and can't be deleted; we had some trouble in the past with + # `zope.interface` for example. + # + # Preserve readline due to https://bugs.python.org/issue41033. + # pexpect issues a SIGWINCH. + def preserve_module(name): + return name.startswith(("zope", "readline")) + + return SysModulesSnapshot(preserve=preserve_module) + + def make_hook_recorder(self, pluginmanager: PytestPluginManager) -> HookRecorder: + """Create a new :class:`HookRecorder` for a :class:`PytestPluginManager`.""" + pluginmanager.reprec = reprec = HookRecorder(pluginmanager, _ispytest=True) # type: ignore[attr-defined] + self._request.addfinalizer(reprec.finish_recording) + return reprec + + def chdir(self) -> None: + """Cd into the temporary directory. + + This is done automatically upon instantiation. + """ + self._monkeypatch.chdir(self.path) + + def _makefile( + self, + ext: str, + lines: Sequence[Any | bytes], + files: dict[str, str], + encoding: str = "utf-8", + ) -> Path: + items = list(files.items()) + + if ext is None: + raise TypeError("ext must not be None") + + if ext and not ext.startswith("."): + raise ValueError( + f"pytester.makefile expects a file extension, try .{ext} instead of {ext}" + ) + + def to_text(s: Any | bytes) -> str: + return s.decode(encoding) if isinstance(s, bytes) else str(s) + + if lines: + source = "\n".join(to_text(x) for x in lines) + basename = self._name + items.insert(0, (basename, source)) + + ret = None + for basename, value in items: + p = self.path.joinpath(basename).with_suffix(ext) + p.parent.mkdir(parents=True, exist_ok=True) + source_ = Source(value) + source = "\n".join(to_text(line) for line in source_.lines) + p.write_text(source.strip(), encoding=encoding) + if ret is None: + ret = p + assert ret is not None + return ret + + def makefile(self, ext: str, *args: str, **kwargs: str) -> Path: + r"""Create new text file(s) in the test directory. + + :param ext: + The extension the file(s) should use, including the dot, e.g. `.py`. + :param args: + All args are treated as strings and joined using newlines. + The result is written as contents to the file. The name of the + file is based on the test function requesting this fixture. + :param kwargs: + Each keyword is the name of a file, while the value of it will + be written as contents of the file. + :returns: + The first created file. + + Examples: + + .. code-block:: python + + pytester.makefile(".txt", "line1", "line2") + + pytester.makefile(".ini", pytest="[pytest]\naddopts=-rs\n") + + To create binary files, use :meth:`pathlib.Path.write_bytes` directly: + + .. code-block:: python + + filename = pytester.path.joinpath("foo.bin") + filename.write_bytes(b"...") + """ + return self._makefile(ext, args, kwargs) + + def makeconftest(self, source: str) -> Path: + """Write a conftest.py file. + + :param source: The contents. + :returns: The conftest.py file. + """ + return self.makepyfile(conftest=source) + + def makeini(self, source: str) -> Path: + """Write a tox.ini file. + + :param source: The contents. + :returns: The tox.ini file. + """ + return self.makefile(".ini", tox=source) + + def maketoml(self, source: str) -> Path: + """Write a pytest.toml file. + + :param source: The contents. + :returns: The pytest.toml file. + + .. versionadded:: 9.0 + """ + return self.makefile(".toml", pytest=source) + + def getinicfg(self, source: str) -> SectionWrapper: + """Return the pytest section from the tox.ini config file.""" + p = self.makeini(source) + return IniConfig(str(p))["pytest"] + + def makepyprojecttoml(self, source: str) -> Path: + """Write a pyproject.toml file. + + :param source: The contents. + :returns: The pyproject.ini file. + + .. versionadded:: 6.0 + """ + return self.makefile(".toml", pyproject=source) + + def makepyfile(self, *args, **kwargs) -> Path: + r"""Shortcut for .makefile() with a .py extension. + + Defaults to the test name with a '.py' extension, e.g test_foobar.py, overwriting + existing files. + + Examples: + + .. code-block:: python + + def test_something(pytester): + # Initial file is created test_something.py. + pytester.makepyfile("foobar") + # To create multiple files, pass kwargs accordingly. + pytester.makepyfile(custom="foobar") + # At this point, both 'test_something.py' & 'custom.py' exist in the test directory. + + """ + return self._makefile(".py", args, kwargs) + + def maketxtfile(self, *args, **kwargs) -> Path: + r"""Shortcut for .makefile() with a .txt extension. + + Defaults to the test name with a '.txt' extension, e.g test_foobar.txt, overwriting + existing files. + + Examples: + + .. code-block:: python + + def test_something(pytester): + # Initial file is created test_something.txt. + pytester.maketxtfile("foobar") + # To create multiple files, pass kwargs accordingly. + pytester.maketxtfile(custom="foobar") + # At this point, both 'test_something.txt' & 'custom.txt' exist in the test directory. + + """ + return self._makefile(".txt", args, kwargs) + + def syspathinsert(self, path: str | os.PathLike[str] | None = None) -> None: + """Prepend a directory to sys.path, defaults to :attr:`path`. + + This is undone automatically when this object dies at the end of each + test. + + :param path: + The path. + """ + if path is None: + path = self.path + + self._monkeypatch.syspath_prepend(str(path)) + + def mkdir(self, name: str | os.PathLike[str]) -> Path: + """Create a new (sub)directory. + + :param name: + The name of the directory, relative to the pytester path. + :returns: + The created directory. + :rtype: pathlib.Path + """ + p = self.path / name + p.mkdir() + return p + + def mkpydir(self, name: str | os.PathLike[str]) -> Path: + """Create a new python package. + + This creates a (sub)directory with an empty ``__init__.py`` file so it + gets recognised as a Python package. + """ + p = self.path / name + p.mkdir() + p.joinpath("__init__.py").touch() + return p + + def copy_example(self, name: str | None = None) -> Path: + """Copy file from project's directory into the testdir. + + :param name: + The name of the file to copy. + :return: + Path to the copied directory (inside ``self.path``). + :rtype: pathlib.Path + """ + example_dir_ = self._request.config.getini("pytester_example_dir") + if example_dir_ is None: + raise ValueError("pytester_example_dir is unset, can't copy examples") + example_dir: Path = self._request.config.rootpath / example_dir_ + + for extra_element in self._request.node.iter_markers("pytester_example_path"): + assert extra_element.args + example_dir = example_dir.joinpath(*extra_element.args) + + if name is None: + func_name = self._name + maybe_dir = example_dir / func_name + maybe_file = example_dir / (func_name + ".py") + + if maybe_dir.is_dir(): + example_path = maybe_dir + elif maybe_file.is_file(): + example_path = maybe_file + else: + raise LookupError( + f"{func_name} can't be found as module or package in {example_dir}" + ) + else: + example_path = example_dir.joinpath(name) + + if example_path.is_dir() and not example_path.joinpath("__init__.py").is_file(): + shutil.copytree(example_path, self.path, symlinks=True, dirs_exist_ok=True) + return self.path + elif example_path.is_file(): + result = self.path.joinpath(example_path.name) + shutil.copy(example_path, result) + return result + else: + raise LookupError( + f'example "{example_path}" is not found as a file or directory' + ) + + def getnode(self, config: Config, arg: str | os.PathLike[str]) -> Collector | Item: + """Get the collection node of a file. + + :param config: + A pytest config. + See :py:meth:`parseconfig` and :py:meth:`parseconfigure` for creating it. + :param arg: + Path to the file. + :returns: + The node. + """ + session = Session.from_config(config) + assert "::" not in str(arg) + p = Path(os.path.abspath(arg)) + config.hook.pytest_sessionstart(session=session) + res = session.perform_collect([str(p)], genitems=False)[0] + config.hook.pytest_sessionfinish(session=session, exitstatus=ExitCode.OK) + return res + + def getpathnode(self, path: str | os.PathLike[str]) -> Collector | Item: + """Return the collection node of a file. + + This is like :py:meth:`getnode` but uses :py:meth:`parseconfigure` to + create the (configured) pytest Config instance. + + :param path: + Path to the file. + :returns: + The node. + """ + path = Path(path) + config = self.parseconfigure(path) + session = Session.from_config(config) + x = bestrelpath(session.path, path) + config.hook.pytest_sessionstart(session=session) + res = session.perform_collect([x], genitems=False)[0] + config.hook.pytest_sessionfinish(session=session, exitstatus=ExitCode.OK) + return res + + def genitems(self, colitems: Sequence[Item | Collector]) -> list[Item]: + """Generate all test items from a collection node. + + This recurses into the collection node and returns a list of all the + test items contained within. + + :param colitems: + The collection nodes. + :returns: + The collected items. + """ + session = colitems[0].session + result: list[Item] = [] + for colitem in colitems: + result.extend(session.genitems(colitem)) + return result + + def runitem(self, source: str) -> Any: + """Run the "test_func" Item. + + The calling test instance (class containing the test method) must + provide a ``.getrunner()`` method which should return a runner which + can run the test protocol for a single item, e.g. + ``_pytest.runner.runtestprotocol``. + """ + # used from runner functional tests + item = self.getitem(source) + # the test class where we are called from wants to provide the runner + testclassinstance = self._request.instance + runner = testclassinstance.getrunner() + return runner(item) + + def inline_runsource(self, source: str, *cmdlineargs) -> HookRecorder: + """Run a test module in process using ``pytest.main()``. + + This run writes "source" into a temporary file and runs + ``pytest.main()`` on it, returning a :py:class:`HookRecorder` instance + for the result. + + :param source: The source code of the test module. + :param cmdlineargs: Any extra command line arguments to use. + """ + p = self.makepyfile(source) + values = [*list(cmdlineargs), p] + return self.inline_run(*values) + + def inline_genitems(self, *args) -> tuple[list[Item], HookRecorder]: + """Run ``pytest.main(['--collect-only'])`` in-process. + + Runs the :py:func:`pytest.main` function to run all of pytest inside + the test process itself like :py:meth:`inline_run`, but returns a + tuple of the collected items and a :py:class:`HookRecorder` instance. + """ + rec = self.inline_run("--collect-only", *args) + items = [x.item for x in rec.getcalls("pytest_itemcollected")] + return items, rec + + def inline_run( + self, + *args: str | os.PathLike[str], + plugins=(), + no_reraise_ctrlc: bool = False, + ) -> HookRecorder: + """Run ``pytest.main()`` in-process, returning a HookRecorder. + + Runs the :py:func:`pytest.main` function to run all of pytest inside + the test process itself. This means it can return a + :py:class:`HookRecorder` instance which gives more detailed results + from that run than can be done by matching stdout/stderr from + :py:meth:`runpytest`. + + :param args: + Command line arguments to pass to :py:func:`pytest.main`. + :param plugins: + Extra plugin instances the ``pytest.main()`` instance should use. + :param no_reraise_ctrlc: + Typically we reraise keyboard interrupts from the child run. If + True, the KeyboardInterrupt exception is captured. + """ + from _pytest.unraisableexception import gc_collect_iterations_key + + # (maybe a cpython bug?) the importlib cache sometimes isn't updated + # properly between file creation and inline_run (especially if imports + # are interspersed with file creation) + importlib.invalidate_caches() + + plugins = list(plugins) + finalizers = [] + try: + # Any sys.module or sys.path changes done while running pytest + # inline should be reverted after the test run completes to avoid + # clashing with later inline tests run within the same pytest test, + # e.g. just because they use matching test module names. + finalizers.append(self.__take_sys_modules_snapshot().restore) + finalizers.append(SysPathsSnapshot().restore) + + # Important note: + # - our tests should not leave any other references/registrations + # laying around other than possibly loaded test modules + # referenced from sys.modules, as nothing will clean those up + # automatically + + rec = [] + + class PytesterHelperPlugin: + @staticmethod + def pytest_configure(config: Config) -> None: + rec.append(self.make_hook_recorder(config.pluginmanager)) + + # The unraisable plugin GC collect slows down inline + # pytester runs too much. + config.stash[gc_collect_iterations_key] = 0 + + plugins.append(PytesterHelperPlugin()) + ret = main([str(x) for x in args], plugins=plugins) + if len(rec) == 1: + reprec = rec.pop() + else: + + class reprec: # type: ignore + pass + + reprec.ret = ret + + # Typically we reraise keyboard interrupts from the child run + # because it's our user requesting interruption of the testing. + if ret == ExitCode.INTERRUPTED and not no_reraise_ctrlc: + calls = reprec.getcalls("pytest_keyboard_interrupt") + if calls and calls[-1].excinfo.type == KeyboardInterrupt: + raise KeyboardInterrupt() + return reprec + finally: + for finalizer in finalizers: + finalizer() + + def runpytest_inprocess( + self, *args: str | os.PathLike[str], **kwargs: Any + ) -> RunResult: + """Return result of running pytest in-process, providing a similar + interface to what self.runpytest() provides.""" + syspathinsert = kwargs.pop("syspathinsert", False) + + if syspathinsert: + self.syspathinsert() + instant = timing.Instant() + capture = _get_multicapture("sys") + capture.start_capturing() + try: + try: + reprec = self.inline_run(*args, **kwargs) + except SystemExit as e: + ret = e.args[0] + try: + ret = ExitCode(e.args[0]) + except ValueError: + pass + + class reprec: # type: ignore + ret = ret + + except Exception: + traceback.print_exc() + + class reprec: # type: ignore + ret = ExitCode(3) + + finally: + out, err = capture.readouterr() + capture.stop_capturing() + sys.stdout.write(out) + sys.stderr.write(err) + + assert reprec.ret is not None + res = RunResult( + reprec.ret, out.splitlines(), err.splitlines(), instant.elapsed().seconds + ) + res.reprec = reprec # type: ignore + return res + + def runpytest(self, *args: str | os.PathLike[str], **kwargs: Any) -> RunResult: + """Run pytest inline or in a subprocess, depending on the command line + option "--runpytest" and return a :py:class:`~pytest.RunResult`.""" + new_args = self._ensure_basetemp(args) + if self._method == "inprocess": + return self.runpytest_inprocess(*new_args, **kwargs) + elif self._method == "subprocess": + return self.runpytest_subprocess(*new_args, **kwargs) + raise RuntimeError(f"Unrecognized runpytest option: {self._method}") + + def _ensure_basetemp( + self, args: Sequence[str | os.PathLike[str]] + ) -> list[str | os.PathLike[str]]: + new_args = list(args) + for x in new_args: + if str(x).startswith("--basetemp"): + break + else: + new_args.append( + "--basetemp={}".format(self.path.parent.joinpath("basetemp")) + ) + return new_args + + def parseconfig(self, *args: str | os.PathLike[str]) -> Config: + """Return a new pytest :class:`pytest.Config` instance from given + commandline args. + + This invokes the pytest bootstrapping code in _pytest.config to create a + new :py:class:`pytest.PytestPluginManager` and call the + :hook:`pytest_cmdline_parse` hook to create a new :class:`pytest.Config` + instance. + + If :attr:`plugins` has been populated they should be plugin modules + to be registered with the plugin manager. + """ + import _pytest.config + + new_args = [str(x) for x in self._ensure_basetemp(args)] + + config = _pytest.config._prepareconfig(new_args, self.plugins) + # we don't know what the test will do with this half-setup config + # object and thus we make sure it gets unconfigured properly in any + # case (otherwise capturing could still be active, for example) + self._request.addfinalizer(config._ensure_unconfigure) + return config + + def parseconfigure(self, *args: str | os.PathLike[str]) -> Config: + """Return a new pytest configured Config instance. + + Returns a new :py:class:`pytest.Config` instance like + :py:meth:`parseconfig`, but also calls the :hook:`pytest_configure` + hook. + """ + config = self.parseconfig(*args) + config._do_configure() + return config + + def getitem( + self, source: str | os.PathLike[str], funcname: str = "test_func" + ) -> Item: + """Return the test item for a test function. + + Writes the source to a python file and runs pytest's collection on + the resulting module, returning the test item for the requested + function name. + + :param source: + The module source. + :param funcname: + The name of the test function for which to return a test item. + :returns: + The test item. + """ + items = self.getitems(source) + for item in items: + if item.name == funcname: + return item + assert 0, f"{funcname!r} item not found in module:\n{source}\nitems: {items}" + + def getitems(self, source: str | os.PathLike[str]) -> list[Item]: + """Return all test items collected from the module. + + Writes the source to a Python file and runs pytest's collection on + the resulting module, returning all test items contained within. + """ + modcol = self.getmodulecol(source) + return self.genitems([modcol]) + + def getmodulecol( + self, + source: str | os.PathLike[str], + configargs=(), + *, + withinit: bool = False, + ): + """Return the module collection node for ``source``. + + Writes ``source`` to a file using :py:meth:`makepyfile` and then + runs the pytest collection on it, returning the collection node for the + test module. + + :param source: + The source code of the module to collect. + + :param configargs: + Any extra arguments to pass to :py:meth:`parseconfigure`. + + :param withinit: + Whether to also write an ``__init__.py`` file to the same + directory to ensure it is a package. + """ + if isinstance(source, os.PathLike): + path = self.path.joinpath(source) + assert not withinit, "not supported for paths" + else: + kw = {self._name: str(source)} + path = self.makepyfile(**kw) + if withinit: + self.makepyfile(__init__="#") + self.config = config = self.parseconfigure(path, *configargs) + return self.getnode(config, path) + + def collect_by_name(self, modcol: Collector, name: str) -> Item | Collector | None: + """Return the collection node for name from the module collection. + + Searches a module collection node for a collection node matching the + given name. + + :param modcol: A module collection node; see :py:meth:`getmodulecol`. + :param name: The name of the node to return. + """ + if modcol not in self._mod_collections: + self._mod_collections[modcol] = list(modcol.collect()) + for colitem in self._mod_collections[modcol]: + if colitem.name == name: + return colitem + return None + + def popen( + self, + cmdargs: Sequence[str | os.PathLike[str]], + stdout: int | TextIO = subprocess.PIPE, + stderr: int | TextIO = subprocess.PIPE, + stdin: NotSetType | bytes | IO[Any] | int = CLOSE_STDIN, + **kw, + ): + """Invoke :py:class:`subprocess.Popen`. + + Calls :py:class:`subprocess.Popen` making sure the current working + directory is in ``PYTHONPATH``. + + You probably want to use :py:meth:`run` instead. + """ + env = os.environ.copy() + env["PYTHONPATH"] = os.pathsep.join( + filter(None, [os.getcwd(), env.get("PYTHONPATH", "")]) + ) + kw["env"] = env + + if stdin is self.CLOSE_STDIN: + kw["stdin"] = subprocess.PIPE + elif isinstance(stdin, bytes): + kw["stdin"] = subprocess.PIPE + else: + kw["stdin"] = stdin + + popen = subprocess.Popen(cmdargs, stdout=stdout, stderr=stderr, **kw) + if stdin is self.CLOSE_STDIN: + assert popen.stdin is not None + popen.stdin.close() + elif isinstance(stdin, bytes): + assert popen.stdin is not None + popen.stdin.write(stdin) + + return popen + + def run( + self, + *cmdargs: str | os.PathLike[str], + timeout: float | None = None, + stdin: NotSetType | bytes | IO[Any] | int = CLOSE_STDIN, + ) -> RunResult: + """Run a command with arguments. + + Run a process using :py:class:`subprocess.Popen` saving the stdout and + stderr. + + :param cmdargs: + The sequence of arguments to pass to :py:class:`subprocess.Popen`, + with path-like objects being converted to :py:class:`str` + automatically. + :param timeout: + The period in seconds after which to timeout and raise + :py:class:`Pytester.TimeoutExpired`. + :param stdin: + Optional standard input. + + - If it is ``CLOSE_STDIN`` (Default), then this method calls + :py:class:`subprocess.Popen` with ``stdin=subprocess.PIPE``, and + the standard input is closed immediately after the new command is + started. + + - If it is of type :py:class:`bytes`, these bytes are sent to the + standard input of the command. + + - Otherwise, it is passed through to :py:class:`subprocess.Popen`. + For further information in this case, consult the document of the + ``stdin`` parameter in :py:class:`subprocess.Popen`. + :type stdin: _pytest.compat.NotSetType | bytes | IO[Any] | int + :returns: + The result. + + """ + __tracebackhide__ = True + + cmdargs = tuple(os.fspath(arg) for arg in cmdargs) + p1 = self.path.joinpath("stdout") + p2 = self.path.joinpath("stderr") + print("running:", *cmdargs) + print(" in:", Path.cwd()) + + with p1.open("w", encoding="utf8") as f1, p2.open("w", encoding="utf8") as f2: + instant = timing.Instant() + popen = self.popen( + cmdargs, + stdin=stdin, + stdout=f1, + stderr=f2, + ) + if popen.stdin is not None: + popen.stdin.close() + + def handle_timeout() -> None: + __tracebackhide__ = True + + timeout_message = f"{timeout} second timeout expired running: {cmdargs}" + + popen.kill() + popen.wait() + raise self.TimeoutExpired(timeout_message) + + if timeout is None: + ret = popen.wait() + else: + try: + ret = popen.wait(timeout) + except subprocess.TimeoutExpired: + handle_timeout() + f1.flush() + f2.flush() + + with p1.open(encoding="utf8") as f1, p2.open(encoding="utf8") as f2: + out = f1.read().splitlines() + err = f2.read().splitlines() + + self._dump_lines(out, sys.stdout) + self._dump_lines(err, sys.stderr) + + with contextlib.suppress(ValueError): + ret = ExitCode(ret) + return RunResult(ret, out, err, instant.elapsed().seconds) + + def _dump_lines(self, lines, fp): + try: + for line in lines: + print(line, file=fp) + except UnicodeEncodeError: + print(f"couldn't print to {fp} because of encoding") + + def _getpytestargs(self) -> tuple[str, ...]: + return sys.executable, "-mpytest" + + def runpython(self, script: os.PathLike[str]) -> RunResult: + """Run a python script using sys.executable as interpreter.""" + return self.run(sys.executable, script) + + def runpython_c(self, command: str) -> RunResult: + """Run ``python -c "command"``.""" + return self.run(sys.executable, "-c", command) + + def runpytest_subprocess( + self, *args: str | os.PathLike[str], timeout: float | None = None + ) -> RunResult: + """Run pytest as a subprocess with given arguments. + + Any plugins added to the :py:attr:`plugins` list will be added using the + ``-p`` command line option. Additionally ``--basetemp`` is used to put + any temporary files and directories in a numbered directory prefixed + with "runpytest-" to not conflict with the normal numbered pytest + location for temporary files and directories. + + :param args: + The sequence of arguments to pass to the pytest subprocess. + :param timeout: + The period in seconds after which to timeout and raise + :py:class:`Pytester.TimeoutExpired`. + :returns: + The result. + """ + __tracebackhide__ = True + p = make_numbered_dir(root=self.path, prefix="runpytest-", mode=0o700) + args = (f"--basetemp={p}", *args) + for plugin in self.plugins: + if not isinstance(plugin, str): + raise ValueError( + f"Specifying plugins as objects is not supported in pytester subprocess mode; " + f"specify by name instead: {plugin}" + ) + args = ("-p", plugin, *args) + args = self._getpytestargs() + args + return self.run(*args, timeout=timeout) + + def spawn_pytest(self, string: str, expect_timeout: float = 10.0) -> pexpect.spawn: + """Run pytest using pexpect. + + This makes sure to use the right pytest and sets up the temporary + directory locations. + + The pexpect child is returned. + """ + basetemp = self.path / "temp-pexpect" + basetemp.mkdir(mode=0o700) + invoke = " ".join(map(str, self._getpytestargs())) + cmd = f"{invoke} --basetemp={basetemp} {string}" + return self.spawn(cmd, expect_timeout=expect_timeout) + + def spawn(self, cmd: str, expect_timeout: float = 10.0) -> pexpect.spawn: + """Run a command using pexpect. + + The pexpect child is returned. + """ + pexpect = importorskip("pexpect", "3.0") + if hasattr(sys, "pypy_version_info") and "64" in platform.machine(): + skip("pypy-64 bit not supported") + if not hasattr(pexpect, "spawn"): + skip("pexpect.spawn not available") + logfile = self.path.joinpath("spawn.out").open("wb") + + child = pexpect.spawn(cmd, logfile=logfile, timeout=expect_timeout) + self._request.addfinalizer(logfile.close) + return child + + +class LineComp: + def __init__(self) -> None: + self.stringio = StringIO() + """:class:`python:io.StringIO()` instance used for input.""" + + def assert_contains_lines(self, lines2: Sequence[str]) -> None: + """Assert that ``lines2`` are contained (linearly) in :attr:`stringio`'s value. + + Lines are matched using :func:`LineMatcher.fnmatch_lines `. + """ + __tracebackhide__ = True + val = self.stringio.getvalue() + self.stringio.truncate(0) + self.stringio.seek(0) + lines1 = val.split("\n") + LineMatcher(lines1).fnmatch_lines(lines2) + + +class LineMatcher: + """Flexible matching of text. + + This is a convenience class to test large texts like the output of + commands. + + The constructor takes a list of lines without their trailing newlines, i.e. + ``text.splitlines()``. + """ + + def __init__(self, lines: list[str]) -> None: + self.lines = lines + self._log_output: list[str] = [] + + def __str__(self) -> str: + """Return the entire original text. + + .. versionadded:: 6.2 + You can use :meth:`str` in older versions. + """ + return "\n".join(self.lines) + + def _getlines(self, lines2: str | Sequence[str] | Source) -> Sequence[str]: + if isinstance(lines2, str): + lines2 = Source(lines2) + if isinstance(lines2, Source): + lines2 = lines2.strip().lines + return lines2 + + def fnmatch_lines_random(self, lines2: Sequence[str]) -> None: + """Check lines exist in the output in any order (using :func:`python:fnmatch.fnmatch`).""" + __tracebackhide__ = True + self._match_lines_random(lines2, fnmatch) + + def re_match_lines_random(self, lines2: Sequence[str]) -> None: + """Check lines exist in the output in any order (using :func:`python:re.match`).""" + __tracebackhide__ = True + self._match_lines_random(lines2, lambda name, pat: bool(re.match(pat, name))) + + def _match_lines_random( + self, lines2: Sequence[str], match_func: Callable[[str, str], bool] + ) -> None: + __tracebackhide__ = True + lines2 = self._getlines(lines2) + for line in lines2: + for x in self.lines: + if line == x or match_func(x, line): + self._log("matched: ", repr(line)) + break + else: + msg = f"line {line!r} not found in output" + self._log(msg) + self._fail(msg) + + def get_lines_after(self, fnline: str) -> Sequence[str]: + """Return all lines following the given line in the text. + + The given line can contain glob wildcards. + """ + for i, line in enumerate(self.lines): + if fnline == line or fnmatch(line, fnline): + return self.lines[i + 1 :] + raise ValueError(f"line {fnline!r} not found in output") + + def _log(self, *args) -> None: + self._log_output.append(" ".join(str(x) for x in args)) + + @property + def _log_text(self) -> str: + return "\n".join(self._log_output) + + def fnmatch_lines( + self, lines2: Sequence[str], *, consecutive: bool = False + ) -> None: + """Check lines exist in the output (using :func:`python:fnmatch.fnmatch`). + + The argument is a list of lines which have to match and can use glob + wildcards. If they do not match a pytest.fail() is called. The + matches and non-matches are also shown as part of the error message. + + :param lines2: String patterns to match. + :param consecutive: Match lines consecutively? + """ + __tracebackhide__ = True + self._match_lines(lines2, fnmatch, "fnmatch", consecutive=consecutive) + + def re_match_lines( + self, lines2: Sequence[str], *, consecutive: bool = False + ) -> None: + """Check lines exist in the output (using :func:`python:re.match`). + + The argument is a list of lines which have to match using ``re.match``. + If they do not match a pytest.fail() is called. + + The matches and non-matches are also shown as part of the error message. + + :param lines2: string patterns to match. + :param consecutive: match lines consecutively? + """ + __tracebackhide__ = True + self._match_lines( + lines2, + lambda name, pat: bool(re.match(pat, name)), + "re.match", + consecutive=consecutive, + ) + + def _match_lines( + self, + lines2: Sequence[str], + match_func: Callable[[str, str], bool], + match_nickname: str, + *, + consecutive: bool = False, + ) -> None: + """Underlying implementation of ``fnmatch_lines`` and ``re_match_lines``. + + :param Sequence[str] lines2: + List of string patterns to match. The actual format depends on + ``match_func``. + :param match_func: + A callable ``match_func(line, pattern)`` where line is the + captured line from stdout/stderr and pattern is the matching + pattern. + :param str match_nickname: + The nickname for the match function that will be logged to stdout + when a match occurs. + :param consecutive: + Match lines consecutively? + """ + if not isinstance(lines2, collections.abc.Sequence): + raise TypeError(f"invalid type for lines2: {type(lines2).__name__}") + lines2 = self._getlines(lines2) + lines1 = self.lines[:] + extralines = [] + __tracebackhide__ = True + wnick = len(match_nickname) + 1 + started = False + for line in lines2: + nomatchprinted = False + while lines1: + nextline = lines1.pop(0) + if line == nextline: + self._log("exact match:", repr(line)) + started = True + break + elif match_func(nextline, line): + self._log(f"{match_nickname}:", repr(line)) + self._log( + "{:>{width}}".format("with:", width=wnick), repr(nextline) + ) + started = True + break + else: + if consecutive and started: + msg = f"no consecutive match: {line!r}" + self._log(msg) + self._log( + "{:>{width}}".format("with:", width=wnick), repr(nextline) + ) + self._fail(msg) + if not nomatchprinted: + self._log( + "{:>{width}}".format("nomatch:", width=wnick), repr(line) + ) + nomatchprinted = True + self._log("{:>{width}}".format("and:", width=wnick), repr(nextline)) + extralines.append(nextline) + else: + msg = f"remains unmatched: {line!r}" + self._log(msg) + self._fail(msg) + self._log_output = [] + + def no_fnmatch_line(self, pat: str) -> None: + """Ensure captured lines do not match the given pattern, using ``fnmatch.fnmatch``. + + :param str pat: The pattern to match lines. + """ + __tracebackhide__ = True + self._no_match_line(pat, fnmatch, "fnmatch") + + def no_re_match_line(self, pat: str) -> None: + """Ensure captured lines do not match the given pattern, using ``re.match``. + + :param str pat: The regular expression to match lines. + """ + __tracebackhide__ = True + self._no_match_line( + pat, lambda name, pat: bool(re.match(pat, name)), "re.match" + ) + + def _no_match_line( + self, pat: str, match_func: Callable[[str, str], bool], match_nickname: str + ) -> None: + """Ensure captured lines does not have a the given pattern, using ``fnmatch.fnmatch``. + + :param str pat: The pattern to match lines. + """ + __tracebackhide__ = True + nomatch_printed = False + wnick = len(match_nickname) + 1 + for line in self.lines: + if match_func(line, pat): + msg = f"{match_nickname}: {pat!r}" + self._log(msg) + self._log("{:>{width}}".format("with:", width=wnick), repr(line)) + self._fail(msg) + else: + if not nomatch_printed: + self._log("{:>{width}}".format("nomatch:", width=wnick), repr(pat)) + nomatch_printed = True + self._log("{:>{width}}".format("and:", width=wnick), repr(line)) + self._log_output = [] + + def _fail(self, msg: str) -> None: + __tracebackhide__ = True + log_text = self._log_text + self._log_output = [] + fail(log_text) + + def str(self) -> str: + """Return the entire original text.""" + return str(self) diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/pytester_assertions.py b/Backend/venv/lib/python3.12/site-packages/_pytest/pytester_assertions.py new file mode 100644 index 00000000..915cc8a1 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/_pytest/pytester_assertions.py @@ -0,0 +1,74 @@ +"""Helper plugin for pytester; should not be loaded on its own.""" + +# This plugin contains assertions used by pytester. pytester cannot +# contain them itself, since it is imported by the `pytest` module, +# hence cannot be subject to assertion rewriting, which requires a +# module to not be already imported. +from __future__ import annotations + +from collections.abc import Sequence + +from _pytest.reports import CollectReport +from _pytest.reports import TestReport + + +def assertoutcome( + outcomes: tuple[ + Sequence[TestReport], + Sequence[CollectReport | TestReport], + Sequence[CollectReport | TestReport], + ], + passed: int = 0, + skipped: int = 0, + failed: int = 0, +) -> None: + __tracebackhide__ = True + + realpassed, realskipped, realfailed = outcomes + obtained = { + "passed": len(realpassed), + "skipped": len(realskipped), + "failed": len(realfailed), + } + expected = {"passed": passed, "skipped": skipped, "failed": failed} + assert obtained == expected, outcomes + + +def assert_outcomes( + outcomes: dict[str, int], + passed: int = 0, + skipped: int = 0, + failed: int = 0, + errors: int = 0, + xpassed: int = 0, + xfailed: int = 0, + warnings: int | None = None, + deselected: int | None = None, +) -> None: + """Assert that the specified outcomes appear with the respective + numbers (0 means it didn't occur) in the text output from a test run.""" + __tracebackhide__ = True + + obtained = { + "passed": outcomes.get("passed", 0), + "skipped": outcomes.get("skipped", 0), + "failed": outcomes.get("failed", 0), + "errors": outcomes.get("errors", 0), + "xpassed": outcomes.get("xpassed", 0), + "xfailed": outcomes.get("xfailed", 0), + } + expected = { + "passed": passed, + "skipped": skipped, + "failed": failed, + "errors": errors, + "xpassed": xpassed, + "xfailed": xfailed, + } + if warnings is not None: + obtained["warnings"] = outcomes.get("warnings", 0) + expected["warnings"] = warnings + if deselected is not None: + obtained["deselected"] = outcomes.get("deselected", 0) + expected["deselected"] = deselected + assert obtained == expected diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/python.py b/Backend/venv/lib/python3.12/site-packages/_pytest/python.py new file mode 100644 index 00000000..e6375187 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/_pytest/python.py @@ -0,0 +1,1772 @@ +# mypy: allow-untyped-defs +"""Python test discovery, setup and run of test functions.""" + +from __future__ import annotations + +import abc +from collections import Counter +from collections import defaultdict +from collections.abc import Callable +from collections.abc import Generator +from collections.abc import Iterable +from collections.abc import Iterator +from collections.abc import Mapping +from collections.abc import Sequence +import dataclasses +import enum +import fnmatch +from functools import partial +import inspect +import itertools +import os +from pathlib import Path +import re +import textwrap +import types +from typing import Any +from typing import cast +from typing import final +from typing import Literal +from typing import NoReturn +from typing import TYPE_CHECKING +import warnings + +import _pytest +from _pytest import fixtures +from _pytest import nodes +from _pytest._code import filter_traceback +from _pytest._code import getfslineno +from _pytest._code.code import ExceptionInfo +from _pytest._code.code import TerminalRepr +from _pytest._code.code import Traceback +from _pytest._io.saferepr import saferepr +from _pytest.compat import ascii_escaped +from _pytest.compat import get_default_arg_names +from _pytest.compat import get_real_func +from _pytest.compat import getimfunc +from _pytest.compat import is_async_function +from _pytest.compat import LEGACY_PATH +from _pytest.compat import NOTSET +from _pytest.compat import safe_getattr +from _pytest.compat import safe_isclass +from _pytest.config import Config +from _pytest.config import hookimpl +from _pytest.config.argparsing import Parser +from _pytest.deprecated import check_ispytest +from _pytest.fixtures import FixtureDef +from _pytest.fixtures import FixtureRequest +from _pytest.fixtures import FuncFixtureInfo +from _pytest.fixtures import get_scope_node +from _pytest.main import Session +from _pytest.mark import ParameterSet +from _pytest.mark.structures import _HiddenParam +from _pytest.mark.structures import get_unpacked_marks +from _pytest.mark.structures import HIDDEN_PARAM +from _pytest.mark.structures import Mark +from _pytest.mark.structures import MarkDecorator +from _pytest.mark.structures import normalize_mark_list +from _pytest.outcomes import fail +from _pytest.outcomes import skip +from _pytest.pathlib import fnmatch_ex +from _pytest.pathlib import import_path +from _pytest.pathlib import ImportPathMismatchError +from _pytest.pathlib import scandir +from _pytest.scope import _ScopeName +from _pytest.scope import Scope +from _pytest.stash import StashKey +from _pytest.warning_types import PytestCollectionWarning +from _pytest.warning_types import PytestReturnNotNoneWarning + + +if TYPE_CHECKING: + from typing_extensions import Self + + +def pytest_addoption(parser: Parser) -> None: + parser.addini( + "python_files", + type="args", + # NOTE: default is also used in AssertionRewritingHook. + default=["test_*.py", "*_test.py"], + help="Glob-style file patterns for Python test module discovery", + ) + parser.addini( + "python_classes", + type="args", + default=["Test"], + help="Prefixes or glob names for Python test class discovery", + ) + parser.addini( + "python_functions", + type="args", + default=["test"], + help="Prefixes or glob names for Python test function and method discovery", + ) + parser.addini( + "disable_test_id_escaping_and_forfeit_all_rights_to_community_support", + type="bool", + default=False, + help="Disable string escape non-ASCII characters, might cause unwanted " + "side effects(use at your own risk)", + ) + parser.addini( + "strict_parametrization_ids", + type="bool", + # None => fallback to `strict`. + default=None, + help="Emit an error if non-unique parameter set IDs are detected", + ) + + +def pytest_generate_tests(metafunc: Metafunc) -> None: + for marker in metafunc.definition.iter_markers(name="parametrize"): + metafunc.parametrize(*marker.args, **marker.kwargs, _param_mark=marker) + + +def pytest_configure(config: Config) -> None: + config.addinivalue_line( + "markers", + "parametrize(argnames, argvalues): call a test function multiple " + "times passing in different arguments in turn. argvalues generally " + "needs to be a list of values if argnames specifies only one name " + "or a list of tuples of values if argnames specifies multiple names. " + "Example: @parametrize('arg1', [1,2]) would lead to two calls of the " + "decorated test function, one with arg1=1 and another with arg1=2." + "see https://docs.pytest.org/en/stable/how-to/parametrize.html for more info " + "and examples.", + ) + config.addinivalue_line( + "markers", + "usefixtures(fixturename1, fixturename2, ...): mark tests as needing " + "all of the specified fixtures. see " + "https://docs.pytest.org/en/stable/explanation/fixtures.html#usefixtures ", + ) + + +def async_fail(nodeid: str) -> None: + msg = ( + "async def functions are not natively supported.\n" + "You need to install a suitable plugin for your async framework, for example:\n" + " - anyio\n" + " - pytest-asyncio\n" + " - pytest-tornasync\n" + " - pytest-trio\n" + " - pytest-twisted" + ) + fail(msg, pytrace=False) + + +@hookimpl(trylast=True) +def pytest_pyfunc_call(pyfuncitem: Function) -> object | None: + testfunction = pyfuncitem.obj + if is_async_function(testfunction): + async_fail(pyfuncitem.nodeid) + funcargs = pyfuncitem.funcargs + testargs = {arg: funcargs[arg] for arg in pyfuncitem._fixtureinfo.argnames} + result = testfunction(**testargs) + if hasattr(result, "__await__") or hasattr(result, "__aiter__"): + async_fail(pyfuncitem.nodeid) + elif result is not None: + warnings.warn( + PytestReturnNotNoneWarning( + f"Test functions should return None, but {pyfuncitem.nodeid} returned {type(result)!r}.\n" + "Did you mean to use `assert` instead of `return`?\n" + "See https://docs.pytest.org/en/stable/how-to/assert.html#return-not-none for more information." + ) + ) + return True + + +def pytest_collect_directory( + path: Path, parent: nodes.Collector +) -> nodes.Collector | None: + pkginit = path / "__init__.py" + try: + has_pkginit = pkginit.is_file() + except PermissionError: + # See https://github.com/pytest-dev/pytest/issues/12120#issuecomment-2106349096. + return None + if has_pkginit: + return Package.from_parent(parent, path=path) + return None + + +def pytest_collect_file(file_path: Path, parent: nodes.Collector) -> Module | None: + if file_path.suffix == ".py": + if not parent.session.isinitpath(file_path): + if not path_matches_patterns( + file_path, parent.config.getini("python_files") + ): + return None + ihook = parent.session.gethookproxy(file_path) + module: Module = ihook.pytest_pycollect_makemodule( + module_path=file_path, parent=parent + ) + return module + return None + + +def path_matches_patterns(path: Path, patterns: Iterable[str]) -> bool: + """Return whether path matches any of the patterns in the list of globs given.""" + return any(fnmatch_ex(pattern, path) for pattern in patterns) + + +def pytest_pycollect_makemodule(module_path: Path, parent) -> Module: + return Module.from_parent(parent, path=module_path) + + +@hookimpl(trylast=True) +def pytest_pycollect_makeitem( + collector: Module | Class, name: str, obj: object +) -> None | nodes.Item | nodes.Collector | list[nodes.Item | nodes.Collector]: + assert isinstance(collector, Class | Module), type(collector) + # Nothing was collected elsewhere, let's do it here. + if safe_isclass(obj): + if collector.istestclass(obj, name): + return Class.from_parent(collector, name=name, obj=obj) + elif collector.istestfunction(obj, name): + # mock seems to store unbound methods (issue473), normalize it. + obj = getattr(obj, "__func__", obj) + # We need to try and unwrap the function if it's a functools.partial + # or a functools.wrapped. + # We mustn't if it's been wrapped with mock.patch (python 2 only). + if not (inspect.isfunction(obj) or inspect.isfunction(get_real_func(obj))): + filename, lineno = getfslineno(obj) + warnings.warn_explicit( + message=PytestCollectionWarning( + f"cannot collect {name!r} because it is not a function." + ), + category=None, + filename=str(filename), + lineno=lineno + 1, + ) + elif getattr(obj, "__test__", True): + if inspect.isgeneratorfunction(obj): + fail( + f"'yield' keyword is allowed in fixtures, but not in tests ({name})", + pytrace=False, + ) + return list(collector._genfunctions(name, obj)) + return None + return None + + +class PyobjMixin(nodes.Node): + """this mix-in inherits from Node to carry over the typing information + + as its intended to always mix in before a node + its position in the mro is unaffected""" + + _ALLOW_MARKERS = True + + @property + def module(self): + """Python module object this node was collected from (can be None).""" + node = self.getparent(Module) + return node.obj if node is not None else None + + @property + def cls(self): + """Python class object this node was collected from (can be None).""" + node = self.getparent(Class) + return node.obj if node is not None else None + + @property + def instance(self): + """Python instance object the function is bound to. + + Returns None if not a test method, e.g. for a standalone test function, + a class or a module. + """ + # Overridden by Function. + return None + + @property + def obj(self): + """Underlying Python object.""" + obj = getattr(self, "_obj", None) + if obj is None: + self._obj = obj = self._getobj() + # XXX evil hack + # used to avoid Function marker duplication + if self._ALLOW_MARKERS: + self.own_markers.extend(get_unpacked_marks(self.obj)) + # This assumes that `obj` is called before there is a chance + # to add custom keys to `self.keywords`, so no fear of overriding. + self.keywords.update((mark.name, mark) for mark in self.own_markers) + return obj + + @obj.setter + def obj(self, value): + self._obj = value + + def _getobj(self): + """Get the underlying Python object. May be overwritten by subclasses.""" + # TODO: Improve the type of `parent` such that assert/ignore aren't needed. + assert self.parent is not None + obj = self.parent.obj # type: ignore[attr-defined] + return getattr(obj, self.name) + + def getmodpath(self, stopatmodule: bool = True, includemodule: bool = False) -> str: + """Return Python path relative to the containing module.""" + parts = [] + for node in self.iter_parents(): + name = node.name + if isinstance(node, Module): + name = os.path.splitext(name)[0] + if stopatmodule: + if includemodule: + parts.append(name) + break + parts.append(name) + parts.reverse() + return ".".join(parts) + + def reportinfo(self) -> tuple[os.PathLike[str] | str, int | None, str]: + # XXX caching? + path, lineno = getfslineno(self.obj) + modpath = self.getmodpath() + return path, lineno, modpath + + +# As an optimization, these builtin attribute names are pre-ignored when +# iterating over an object during collection -- the pytest_pycollect_makeitem +# hook is not called for them. +# fmt: off +class _EmptyClass: pass # noqa: E701 +IGNORED_ATTRIBUTES = frozenset.union( + frozenset(), + # Module. + dir(types.ModuleType("empty_module")), + # Some extra module attributes the above doesn't catch. + {"__builtins__", "__file__", "__cached__"}, + # Class. + dir(_EmptyClass), + # Instance. + dir(_EmptyClass()), +) +del _EmptyClass +# fmt: on + + +class PyCollector(PyobjMixin, nodes.Collector, abc.ABC): + def funcnamefilter(self, name: str) -> bool: + return self._matches_prefix_or_glob_option("python_functions", name) + + def isnosetest(self, obj: object) -> bool: + """Look for the __test__ attribute, which is applied by the + @nose.tools.istest decorator. + """ + # We explicitly check for "is True" here to not mistakenly treat + # classes with a custom __getattr__ returning something truthy (like a + # function) as test classes. + return safe_getattr(obj, "__test__", False) is True + + def classnamefilter(self, name: str) -> bool: + return self._matches_prefix_or_glob_option("python_classes", name) + + def istestfunction(self, obj: object, name: str) -> bool: + if self.funcnamefilter(name) or self.isnosetest(obj): + if isinstance(obj, staticmethod | classmethod): + # staticmethods and classmethods need to be unwrapped. + obj = safe_getattr(obj, "__func__", False) + return callable(obj) and fixtures.getfixturemarker(obj) is None + else: + return False + + def istestclass(self, obj: object, name: str) -> bool: + if not (self.classnamefilter(name) or self.isnosetest(obj)): + return False + if inspect.isabstract(obj): + return False + return True + + def _matches_prefix_or_glob_option(self, option_name: str, name: str) -> bool: + """Check if the given name matches the prefix or glob-pattern defined + in configuration.""" + for option in self.config.getini(option_name): + if name.startswith(option): + return True + # Check that name looks like a glob-string before calling fnmatch + # because this is called for every name in each collected module, + # and fnmatch is somewhat expensive to call. + elif ("*" in option or "?" in option or "[" in option) and fnmatch.fnmatch( + name, option + ): + return True + return False + + def collect(self) -> Iterable[nodes.Item | nodes.Collector]: + if not getattr(self.obj, "__test__", True): + return [] + + # Avoid random getattrs and peek in the __dict__ instead. + dicts = [getattr(self.obj, "__dict__", {})] + if isinstance(self.obj, type): + for basecls in self.obj.__mro__: + dicts.append(basecls.__dict__) + + # In each class, nodes should be definition ordered. + # __dict__ is definition ordered. + seen: set[str] = set() + dict_values: list[list[nodes.Item | nodes.Collector]] = [] + collect_imported_tests = self.session.config.getini("collect_imported_tests") + ihook = self.ihook + for dic in dicts: + values: list[nodes.Item | nodes.Collector] = [] + # Note: seems like the dict can change during iteration - + # be careful not to remove the list() without consideration. + for name, obj in list(dic.items()): + if name in IGNORED_ATTRIBUTES: + continue + if name in seen: + continue + seen.add(name) + + if not collect_imported_tests and isinstance(self, Module): + # Do not collect functions and classes from other modules. + if inspect.isfunction(obj) or inspect.isclass(obj): + if obj.__module__ != self._getobj().__name__: + continue + + res = ihook.pytest_pycollect_makeitem( + collector=self, name=name, obj=obj + ) + if res is None: + continue + elif isinstance(res, list): + values.extend(res) + else: + values.append(res) + dict_values.append(values) + + # Between classes in the class hierarchy, reverse-MRO order -- nodes + # inherited from base classes should come before subclasses. + result = [] + for values in reversed(dict_values): + result.extend(values) + return result + + def _genfunctions(self, name: str, funcobj) -> Iterator[Function]: + modulecol = self.getparent(Module) + assert modulecol is not None + module = modulecol.obj + clscol = self.getparent(Class) + cls = (clscol and clscol.obj) or None + + definition = FunctionDefinition.from_parent(self, name=name, callobj=funcobj) + fixtureinfo = definition._fixtureinfo + + # pytest_generate_tests impls call metafunc.parametrize() which fills + # metafunc._calls, the outcome of the hook. + metafunc = Metafunc( + definition=definition, + fixtureinfo=fixtureinfo, + config=self.config, + cls=cls, + module=module, + _ispytest=True, + ) + methods = [] + if hasattr(module, "pytest_generate_tests"): + methods.append(module.pytest_generate_tests) + if cls is not None and hasattr(cls, "pytest_generate_tests"): + methods.append(cls().pytest_generate_tests) + self.ihook.pytest_generate_tests.call_extra(methods, dict(metafunc=metafunc)) + + if not metafunc._calls: + yield Function.from_parent(self, name=name, fixtureinfo=fixtureinfo) + else: + metafunc._recompute_direct_params_indices() + # Direct parametrizations taking place in module/class-specific + # `metafunc.parametrize` calls may have shadowed some fixtures, so make sure + # we update what the function really needs a.k.a its fixture closure. Note that + # direct parametrizations using `@pytest.mark.parametrize` have already been considered + # into making the closure using `ignore_args` arg to `getfixtureclosure`. + fixtureinfo.prune_dependency_tree() + + for callspec in metafunc._calls: + subname = f"{name}[{callspec.id}]" if callspec._idlist else name + yield Function.from_parent( + self, + name=subname, + callspec=callspec, + fixtureinfo=fixtureinfo, + keywords={callspec.id: True}, + originalname=name, + ) + + +def importtestmodule( + path: Path, + config: Config, +): + # We assume we are only called once per module. + importmode = config.getoption("--import-mode") + try: + mod = import_path( + path, + mode=importmode, + root=config.rootpath, + consider_namespace_packages=config.getini("consider_namespace_packages"), + ) + except SyntaxError as e: + raise nodes.Collector.CollectError( + ExceptionInfo.from_current().getrepr(style="short") + ) from e + except ImportPathMismatchError as e: + raise nodes.Collector.CollectError( + "import file mismatch:\n" + "imported module {!r} has this __file__ attribute:\n" + " {}\n" + "which is not the same as the test file we want to collect:\n" + " {}\n" + "HINT: remove __pycache__ / .pyc files and/or use a " + "unique basename for your test file modules".format(*e.args) + ) from e + except ImportError as e: + exc_info = ExceptionInfo.from_current() + if config.get_verbosity() < 2: + exc_info.traceback = exc_info.traceback.filter(filter_traceback) + exc_repr = ( + exc_info.getrepr(style="short") + if exc_info.traceback + else exc_info.exconly() + ) + formatted_tb = str(exc_repr) + raise nodes.Collector.CollectError( + f"ImportError while importing test module '{path}'.\n" + "Hint: make sure your test modules/packages have valid Python names.\n" + "Traceback:\n" + f"{formatted_tb}" + ) from e + except skip.Exception as e: + if e.allow_module_level: + raise + raise nodes.Collector.CollectError( + "Using pytest.skip outside of a test will skip the entire module. " + "If that's your intention, pass `allow_module_level=True`. " + "If you want to skip a specific test or an entire class, " + "use the @pytest.mark.skip or @pytest.mark.skipif decorators." + ) from e + config.pluginmanager.consider_module(mod) + return mod + + +class Module(nodes.File, PyCollector): + """Collector for test classes and functions in a Python module.""" + + def _getobj(self): + return importtestmodule(self.path, self.config) + + def collect(self) -> Iterable[nodes.Item | nodes.Collector]: + self._register_setup_module_fixture() + self._register_setup_function_fixture() + self.session._fixturemanager.parsefactories(self) + return super().collect() + + def _register_setup_module_fixture(self) -> None: + """Register an autouse, module-scoped fixture for the collected module object + that invokes setUpModule/tearDownModule if either or both are available. + + Using a fixture to invoke this methods ensures we play nicely and unsurprisingly with + other fixtures (#517). + """ + setup_module = _get_first_non_fixture_func( + self.obj, ("setUpModule", "setup_module") + ) + teardown_module = _get_first_non_fixture_func( + self.obj, ("tearDownModule", "teardown_module") + ) + + if setup_module is None and teardown_module is None: + return + + def xunit_setup_module_fixture(request) -> Generator[None]: + module = request.module + if setup_module is not None: + _call_with_optional_argument(setup_module, module) + yield + if teardown_module is not None: + _call_with_optional_argument(teardown_module, module) + + self.session._fixturemanager._register_fixture( + # Use a unique name to speed up lookup. + name=f"_xunit_setup_module_fixture_{self.obj.__name__}", + func=xunit_setup_module_fixture, + nodeid=self.nodeid, + scope="module", + autouse=True, + ) + + def _register_setup_function_fixture(self) -> None: + """Register an autouse, function-scoped fixture for the collected module object + that invokes setup_function/teardown_function if either or both are available. + + Using a fixture to invoke this methods ensures we play nicely and unsurprisingly with + other fixtures (#517). + """ + setup_function = _get_first_non_fixture_func(self.obj, ("setup_function",)) + teardown_function = _get_first_non_fixture_func( + self.obj, ("teardown_function",) + ) + if setup_function is None and teardown_function is None: + return + + def xunit_setup_function_fixture(request) -> Generator[None]: + if request.instance is not None: + # in this case we are bound to an instance, so we need to let + # setup_method handle this + yield + return + function = request.function + if setup_function is not None: + _call_with_optional_argument(setup_function, function) + yield + if teardown_function is not None: + _call_with_optional_argument(teardown_function, function) + + self.session._fixturemanager._register_fixture( + # Use a unique name to speed up lookup. + name=f"_xunit_setup_function_fixture_{self.obj.__name__}", + func=xunit_setup_function_fixture, + nodeid=self.nodeid, + scope="function", + autouse=True, + ) + + +class Package(nodes.Directory): + """Collector for files and directories in a Python packages -- directories + with an `__init__.py` file. + + .. note:: + + Directories without an `__init__.py` file are instead collected by + :class:`~pytest.Dir` by default. Both are :class:`~pytest.Directory` + collectors. + + .. versionchanged:: 8.0 + + Now inherits from :class:`~pytest.Directory`. + """ + + def __init__( + self, + fspath: LEGACY_PATH | None, + parent: nodes.Collector, + # NOTE: following args are unused: + config=None, + session=None, + nodeid=None, + path: Path | None = None, + ) -> None: + # NOTE: Could be just the following, but kept as-is for compat. + # super().__init__(self, fspath, parent=parent) + session = parent.session + super().__init__( + fspath=fspath, + path=path, + parent=parent, + config=config, + session=session, + nodeid=nodeid, + ) + + def setup(self) -> None: + init_mod = importtestmodule(self.path / "__init__.py", self.config) + + # Not using fixtures to call setup_module here because autouse fixtures + # from packages are not called automatically (#4085). + setup_module = _get_first_non_fixture_func( + init_mod, ("setUpModule", "setup_module") + ) + if setup_module is not None: + _call_with_optional_argument(setup_module, init_mod) + + teardown_module = _get_first_non_fixture_func( + init_mod, ("tearDownModule", "teardown_module") + ) + if teardown_module is not None: + func = partial(_call_with_optional_argument, teardown_module, init_mod) + self.addfinalizer(func) + + def collect(self) -> Iterable[nodes.Item | nodes.Collector]: + # Always collect __init__.py first. + def sort_key(entry: os.DirEntry[str]) -> object: + return (entry.name != "__init__.py", entry.name) + + config = self.config + col: nodes.Collector | None + cols: Sequence[nodes.Collector] + ihook = self.ihook + for direntry in scandir(self.path, sort_key): + if direntry.is_dir(): + path = Path(direntry.path) + if not self.session.isinitpath(path, with_parents=True): + if ihook.pytest_ignore_collect(collection_path=path, config=config): + continue + col = ihook.pytest_collect_directory(path=path, parent=self) + if col is not None: + yield col + + elif direntry.is_file(): + path = Path(direntry.path) + if not self.session.isinitpath(path): + if ihook.pytest_ignore_collect(collection_path=path, config=config): + continue + cols = ihook.pytest_collect_file(file_path=path, parent=self) + yield from cols + + +def _call_with_optional_argument(func, arg) -> None: + """Call the given function with the given argument if func accepts one argument, otherwise + calls func without arguments.""" + arg_count = func.__code__.co_argcount + if inspect.ismethod(func): + arg_count -= 1 + if arg_count: + func(arg) + else: + func() + + +def _get_first_non_fixture_func(obj: object, names: Iterable[str]) -> object | None: + """Return the attribute from the given object to be used as a setup/teardown + xunit-style function, but only if not marked as a fixture to avoid calling it twice. + """ + for name in names: + meth: object | None = getattr(obj, name, None) + if meth is not None and fixtures.getfixturemarker(meth) is None: + return meth + return None + + +class Class(PyCollector): + """Collector for test methods (and nested classes) in a Python class.""" + + @classmethod + def from_parent(cls, parent, *, name, obj=None, **kw) -> Self: # type: ignore[override] + """The public constructor.""" + return super().from_parent(name=name, parent=parent, **kw) + + def newinstance(self): + return self.obj() + + def collect(self) -> Iterable[nodes.Item | nodes.Collector]: + if not safe_getattr(self.obj, "__test__", True): + return [] + if hasinit(self.obj): + assert self.parent is not None + self.warn( + PytestCollectionWarning( + f"cannot collect test class {self.obj.__name__!r} because it has a " + f"__init__ constructor (from: {self.parent.nodeid})" + ) + ) + return [] + elif hasnew(self.obj): + assert self.parent is not None + self.warn( + PytestCollectionWarning( + f"cannot collect test class {self.obj.__name__!r} because it has a " + f"__new__ constructor (from: {self.parent.nodeid})" + ) + ) + return [] + + self._register_setup_class_fixture() + self._register_setup_method_fixture() + + self.session._fixturemanager.parsefactories(self.newinstance(), self.nodeid) + + return super().collect() + + def _register_setup_class_fixture(self) -> None: + """Register an autouse, class scoped fixture into the collected class object + that invokes setup_class/teardown_class if either or both are available. + + Using a fixture to invoke this methods ensures we play nicely and unsurprisingly with + other fixtures (#517). + """ + setup_class = _get_first_non_fixture_func(self.obj, ("setup_class",)) + teardown_class = _get_first_non_fixture_func(self.obj, ("teardown_class",)) + if setup_class is None and teardown_class is None: + return + + def xunit_setup_class_fixture(request) -> Generator[None]: + cls = request.cls + if setup_class is not None: + func = getimfunc(setup_class) + _call_with_optional_argument(func, cls) + yield + if teardown_class is not None: + func = getimfunc(teardown_class) + _call_with_optional_argument(func, cls) + + self.session._fixturemanager._register_fixture( + # Use a unique name to speed up lookup. + name=f"_xunit_setup_class_fixture_{self.obj.__qualname__}", + func=xunit_setup_class_fixture, + nodeid=self.nodeid, + scope="class", + autouse=True, + ) + + def _register_setup_method_fixture(self) -> None: + """Register an autouse, function scoped fixture into the collected class object + that invokes setup_method/teardown_method if either or both are available. + + Using a fixture to invoke these methods ensures we play nicely and unsurprisingly with + other fixtures (#517). + """ + setup_name = "setup_method" + setup_method = _get_first_non_fixture_func(self.obj, (setup_name,)) + teardown_name = "teardown_method" + teardown_method = _get_first_non_fixture_func(self.obj, (teardown_name,)) + if setup_method is None and teardown_method is None: + return + + def xunit_setup_method_fixture(request) -> Generator[None]: + instance = request.instance + method = request.function + if setup_method is not None: + func = getattr(instance, setup_name) + _call_with_optional_argument(func, method) + yield + if teardown_method is not None: + func = getattr(instance, teardown_name) + _call_with_optional_argument(func, method) + + self.session._fixturemanager._register_fixture( + # Use a unique name to speed up lookup. + name=f"_xunit_setup_method_fixture_{self.obj.__qualname__}", + func=xunit_setup_method_fixture, + nodeid=self.nodeid, + scope="function", + autouse=True, + ) + + +def hasinit(obj: object) -> bool: + init: object = getattr(obj, "__init__", None) + if init: + return init != object.__init__ + return False + + +def hasnew(obj: object) -> bool: + new: object = getattr(obj, "__new__", None) + if new: + return new != object.__new__ + return False + + +@final +@dataclasses.dataclass(frozen=True) +class IdMaker: + """Make IDs for a parametrization.""" + + __slots__ = ( + "argnames", + "config", + "func_name", + "idfn", + "ids", + "nodeid", + "parametersets", + ) + + # The argnames of the parametrization. + argnames: Sequence[str] + # The ParameterSets of the parametrization. + parametersets: Sequence[ParameterSet] + # Optionally, a user-provided callable to make IDs for parameters in a + # ParameterSet. + idfn: Callable[[Any], object | None] | None + # Optionally, explicit IDs for ParameterSets by index. + ids: Sequence[object | None] | None + # Optionally, the pytest config. + # Used for controlling ASCII escaping, determining parametrization ID + # strictness, and for calling the :hook:`pytest_make_parametrize_id` hook. + config: Config | None + # Optionally, the ID of the node being parametrized. + # Used only for clearer error messages. + nodeid: str | None + # Optionally, the ID of the function being parametrized. + # Used only for clearer error messages. + func_name: str | None + + def make_unique_parameterset_ids(self) -> list[str | _HiddenParam]: + """Make a unique identifier for each ParameterSet, that may be used to + identify the parametrization in a node ID. + + If strict_parametrization_ids is enabled, and duplicates are detected, + raises CollectError. Otherwise makes the IDs unique as follows: + + Format is -...-[counter], where prm_x_token is + - user-provided id, if given + - else an id derived from the value, applicable for certain types + - else + The counter suffix is appended only in case a string wouldn't be unique + otherwise. + """ + resolved_ids = list(self._resolve_ids()) + # All IDs must be unique! + if len(resolved_ids) != len(set(resolved_ids)): + # Record the number of occurrences of each ID. + id_counts = Counter(resolved_ids) + + if self._strict_parametrization_ids_enabled(): + parameters = ", ".join(self.argnames) + parametersets = ", ".join( + [saferepr(list(param.values)) for param in self.parametersets] + ) + ids = ", ".join( + id if id is not HIDDEN_PARAM else "" for id in resolved_ids + ) + duplicates = ", ".join( + id if id is not HIDDEN_PARAM else "" + for id, count in id_counts.items() + if count > 1 + ) + msg = textwrap.dedent(f""" + Duplicate parametrization IDs detected, but strict_parametrization_ids is set. + + Test name: {self.nodeid} + Parameters: {parameters} + Parameter sets: {parametersets} + IDs: {ids} + Duplicates: {duplicates} + + You can fix this problem using `@pytest.mark.parametrize(..., ids=...)` or `pytest.param(..., id=...)`. + """).strip() # noqa: E501 + raise nodes.Collector.CollectError(msg) + + # Map the ID to its next suffix. + id_suffixes: dict[str, int] = defaultdict(int) + # Suffix non-unique IDs to make them unique. + for index, id in enumerate(resolved_ids): + if id_counts[id] > 1: + if id is HIDDEN_PARAM: + self._complain_multiple_hidden_parameter_sets() + suffix = "" + if id and id[-1].isdigit(): + suffix = "_" + new_id = f"{id}{suffix}{id_suffixes[id]}" + while new_id in set(resolved_ids): + id_suffixes[id] += 1 + new_id = f"{id}{suffix}{id_suffixes[id]}" + resolved_ids[index] = new_id + id_suffixes[id] += 1 + assert len(resolved_ids) == len(set(resolved_ids)), ( + f"Internal error: {resolved_ids=}" + ) + return resolved_ids + + def _strict_parametrization_ids_enabled(self) -> bool: + if self.config is None: + return False + strict_parametrization_ids = self.config.getini("strict_parametrization_ids") + if strict_parametrization_ids is None: + strict_parametrization_ids = self.config.getini("strict") + return cast(bool, strict_parametrization_ids) + + def _resolve_ids(self) -> Iterable[str | _HiddenParam]: + """Resolve IDs for all ParameterSets (may contain duplicates).""" + for idx, parameterset in enumerate(self.parametersets): + if parameterset.id is not None: + # ID provided directly - pytest.param(..., id="...") + if parameterset.id is HIDDEN_PARAM: + yield HIDDEN_PARAM + else: + yield _ascii_escaped_by_config(parameterset.id, self.config) + elif self.ids and idx < len(self.ids) and self.ids[idx] is not None: + # ID provided in the IDs list - parametrize(..., ids=[...]). + if self.ids[idx] is HIDDEN_PARAM: + yield HIDDEN_PARAM + else: + yield self._idval_from_value_required(self.ids[idx], idx) + else: + # ID not provided - generate it. + yield "-".join( + self._idval(val, argname, idx) + for val, argname in zip( + parameterset.values, self.argnames, strict=True + ) + ) + + def _idval(self, val: object, argname: str, idx: int) -> str: + """Make an ID for a parameter in a ParameterSet.""" + idval = self._idval_from_function(val, argname, idx) + if idval is not None: + return idval + idval = self._idval_from_hook(val, argname) + if idval is not None: + return idval + idval = self._idval_from_value(val) + if idval is not None: + return idval + return self._idval_from_argname(argname, idx) + + def _idval_from_function(self, val: object, argname: str, idx: int) -> str | None: + """Try to make an ID for a parameter in a ParameterSet using the + user-provided id callable, if given.""" + if self.idfn is None: + return None + try: + id = self.idfn(val) + except Exception as e: + prefix = f"{self.nodeid}: " if self.nodeid is not None else "" + msg = "error raised while trying to determine id of parameter '{}' at position {}" + msg = prefix + msg.format(argname, idx) + raise ValueError(msg) from e + if id is None: + return None + return self._idval_from_value(id) + + def _idval_from_hook(self, val: object, argname: str) -> str | None: + """Try to make an ID for a parameter in a ParameterSet by calling the + :hook:`pytest_make_parametrize_id` hook.""" + if self.config: + id: str | None = self.config.hook.pytest_make_parametrize_id( + config=self.config, val=val, argname=argname + ) + return id + return None + + def _idval_from_value(self, val: object) -> str | None: + """Try to make an ID for a parameter in a ParameterSet from its value, + if the value type is supported.""" + if isinstance(val, str | bytes): + return _ascii_escaped_by_config(val, self.config) + elif val is None or isinstance(val, float | int | bool | complex): + return str(val) + elif isinstance(val, re.Pattern): + return ascii_escaped(val.pattern) + elif val is NOTSET: + # Fallback to default. Note that NOTSET is an enum.Enum. + pass + elif isinstance(val, enum.Enum): + return str(val) + elif isinstance(getattr(val, "__name__", None), str): + # Name of a class, function, module, etc. + name: str = getattr(val, "__name__") + return name + return None + + def _idval_from_value_required(self, val: object, idx: int) -> str: + """Like _idval_from_value(), but fails if the type is not supported.""" + id = self._idval_from_value(val) + if id is not None: + return id + + # Fail. + prefix = self._make_error_prefix() + msg = ( + f"{prefix}ids contains unsupported value {saferepr(val)} (type: {type(val)!r}) at index {idx}. " + "Supported types are: str, bytes, int, float, complex, bool, enum, regex or anything with a __name__." + ) + fail(msg, pytrace=False) + + @staticmethod + def _idval_from_argname(argname: str, idx: int) -> str: + """Make an ID for a parameter in a ParameterSet from the argument name + and the index of the ParameterSet.""" + return str(argname) + str(idx) + + def _complain_multiple_hidden_parameter_sets(self) -> NoReturn: + fail( + f"{self._make_error_prefix()}multiple instances of HIDDEN_PARAM " + "cannot be used in the same parametrize call, " + "because the tests names need to be unique." + ) + + def _make_error_prefix(self) -> str: + if self.func_name is not None: + return f"In {self.func_name}: " + elif self.nodeid is not None: + return f"In {self.nodeid}: " + else: + return "" + + +@final +@dataclasses.dataclass(frozen=True) +class CallSpec2: + """A planned parameterized invocation of a test function. + + Calculated during collection for a given test function's Metafunc. + Once collection is over, each callspec is turned into a single Item + and stored in item.callspec. + """ + + # arg name -> arg value which will be passed to a fixture or pseudo-fixture + # of the same name. (indirect or direct parametrization respectively) + params: dict[str, object] = dataclasses.field(default_factory=dict) + # arg name -> arg index. + indices: dict[str, int] = dataclasses.field(default_factory=dict) + # arg name -> parameter scope. + # Used for sorting parametrized resources. + _arg2scope: Mapping[str, Scope] = dataclasses.field(default_factory=dict) + # Parts which will be added to the item's name in `[..]` separated by "-". + _idlist: Sequence[str] = dataclasses.field(default_factory=tuple) + # Marks which will be applied to the item. + marks: list[Mark] = dataclasses.field(default_factory=list) + + def setmulti( + self, + *, + argnames: Iterable[str], + valset: Iterable[object], + id: str | _HiddenParam, + marks: Iterable[Mark | MarkDecorator], + scope: Scope, + param_index: int, + nodeid: str, + ) -> CallSpec2: + params = self.params.copy() + indices = self.indices.copy() + arg2scope = dict(self._arg2scope) + for arg, val in zip(argnames, valset, strict=True): + if arg in params: + raise nodes.Collector.CollectError( + f"{nodeid}: duplicate parametrization of {arg!r}" + ) + params[arg] = val + indices[arg] = param_index + arg2scope[arg] = scope + return CallSpec2( + params=params, + indices=indices, + _arg2scope=arg2scope, + _idlist=self._idlist if id is HIDDEN_PARAM else [*self._idlist, id], + marks=[*self.marks, *normalize_mark_list(marks)], + ) + + def getparam(self, name: str) -> object: + try: + return self.params[name] + except KeyError as e: + raise ValueError(name) from e + + @property + def id(self) -> str: + return "-".join(self._idlist) + + +def get_direct_param_fixture_func(request: FixtureRequest) -> Any: + return request.param + + +# Used for storing pseudo fixturedefs for direct parametrization. +name2pseudofixturedef_key = StashKey[dict[str, FixtureDef[Any]]]() + + +@final +class Metafunc: + """Objects passed to the :hook:`pytest_generate_tests` hook. + + They help to inspect a test function and to generate tests according to + test configuration or values specified in the class or module where a + test function is defined. + """ + + def __init__( + self, + definition: FunctionDefinition, + fixtureinfo: fixtures.FuncFixtureInfo, + config: Config, + cls=None, + module=None, + *, + _ispytest: bool = False, + ) -> None: + check_ispytest(_ispytest) + + #: Access to the underlying :class:`_pytest.python.FunctionDefinition`. + self.definition = definition + + #: Access to the :class:`pytest.Config` object for the test session. + self.config = config + + #: The module object where the test function is defined in. + self.module = module + + #: Underlying Python test function. + self.function = definition.obj + + #: Set of fixture names required by the test function. + self.fixturenames = fixtureinfo.names_closure + + #: Class object where the test function is defined in or ``None``. + self.cls = cls + + self._arg2fixturedefs = fixtureinfo.name2fixturedefs + + # Result of parametrize(). + self._calls: list[CallSpec2] = [] + + self._params_directness: dict[str, Literal["indirect", "direct"]] = {} + + def parametrize( + self, + argnames: str | Sequence[str], + argvalues: Iterable[ParameterSet | Sequence[object] | object], + indirect: bool | Sequence[str] = False, + ids: Iterable[object | None] | Callable[[Any], object | None] | None = None, + scope: _ScopeName | None = None, + *, + _param_mark: Mark | None = None, + ) -> None: + """Add new invocations to the underlying test function using the list + of argvalues for the given argnames. Parametrization is performed + during the collection phase. If you need to setup expensive resources + see about setting ``indirect`` to do it at test setup time instead. + + Can be called multiple times per test function (but only on different + argument names), in which case each call parametrizes all previous + parametrizations, e.g. + + :: + + unparametrized: t + parametrize ["x", "y"]: t[x], t[y] + parametrize [1, 2]: t[x-1], t[x-2], t[y-1], t[y-2] + + :param argnames: + A comma-separated string denoting one or more argument names, or + a list/tuple of argument strings. + + :param argvalues: + The list of argvalues determines how often a test is invoked with + different argument values. + + If only one argname was specified argvalues is a list of values. + If N argnames were specified, argvalues must be a list of + N-tuples, where each tuple-element specifies a value for its + respective argname. + + :param indirect: + A list of arguments' names (subset of argnames) or a boolean. + If True the list contains all names from the argnames. Each + argvalue corresponding to an argname in this list will + be passed as request.param to its respective argname fixture + function so that it can perform more expensive setups during the + setup phase of a test rather than at collection time. + + :param ids: + Sequence of (or generator for) ids for ``argvalues``, + or a callable to return part of the id for each argvalue. + + With sequences (and generators like ``itertools.count()``) the + returned ids should be of type ``string``, ``int``, ``float``, + ``bool``, or ``None``. + They are mapped to the corresponding index in ``argvalues``. + ``None`` means to use the auto-generated id. + + .. versionadded:: 8.4 + :ref:`hidden-param` means to hide the parameter set + from the test name. Can only be used at most 1 time, as + test names need to be unique. + + If it is a callable it will be called for each entry in + ``argvalues``, and the return value is used as part of the + auto-generated id for the whole set (where parts are joined with + dashes ("-")). + This is useful to provide more specific ids for certain items, e.g. + dates. Returning ``None`` will use an auto-generated id. + + If no ids are provided they will be generated automatically from + the argvalues. + + :param scope: + If specified it denotes the scope of the parameters. + The scope is used for grouping tests by parameter instances. + It will also override any fixture-function defined scope, allowing + to set a dynamic scope using test context or configuration. + """ + nodeid = self.definition.nodeid + + argnames, parametersets = ParameterSet._for_parametrize( + argnames, + argvalues, + self.function, + self.config, + nodeid=self.definition.nodeid, + ) + del argvalues + + if "request" in argnames: + fail( + f"{nodeid}: 'request' is a reserved name and cannot be used in @pytest.mark.parametrize", + pytrace=False, + ) + + if scope is not None: + scope_ = Scope.from_user( + scope, descr=f"parametrize() call in {self.function.__name__}" + ) + else: + scope_ = _find_parametrized_scope(argnames, self._arg2fixturedefs, indirect) + + self._validate_if_using_arg_names(argnames, indirect) + + # Use any already (possibly) generated ids with parametrize Marks. + if _param_mark and _param_mark._param_ids_from: + generated_ids = _param_mark._param_ids_from._param_ids_generated + if generated_ids is not None: + ids = generated_ids + + ids = self._resolve_parameter_set_ids( + argnames, ids, parametersets, nodeid=self.definition.nodeid + ) + + # Store used (possibly generated) ids with parametrize Marks. + if _param_mark and _param_mark._param_ids_from and generated_ids is None: + object.__setattr__(_param_mark._param_ids_from, "_param_ids_generated", ids) + + # Calculate directness. + arg_directness = self._resolve_args_directness(argnames, indirect) + self._params_directness.update(arg_directness) + + # Add direct parametrizations as fixturedefs to arg2fixturedefs by + # registering artificial "pseudo" FixtureDef's such that later at test + # setup time we can rely on FixtureDefs to exist for all argnames. + node = None + # For scopes higher than function, a "pseudo" FixtureDef might have + # already been created for the scope. We thus store and cache the + # FixtureDef on the node related to the scope. + if scope_ is Scope.Function: + name2pseudofixturedef = None + else: + collector = self.definition.parent + assert collector is not None + node = get_scope_node(collector, scope_) + if node is None: + # If used class scope and there is no class, use module-level + # collector (for now). + if scope_ is Scope.Class: + assert isinstance(collector, Module) + node = collector + # If used package scope and there is no package, use session + # (for now). + elif scope_ is Scope.Package: + node = collector.session + else: + assert False, f"Unhandled missing scope: {scope}" + default: dict[str, FixtureDef[Any]] = {} + name2pseudofixturedef = node.stash.setdefault( + name2pseudofixturedef_key, default + ) + for argname in argnames: + if arg_directness[argname] == "indirect": + continue + if name2pseudofixturedef is not None and argname in name2pseudofixturedef: + fixturedef = name2pseudofixturedef[argname] + else: + fixturedef = FixtureDef( + config=self.config, + baseid="", + argname=argname, + func=get_direct_param_fixture_func, + scope=scope_, + params=None, + ids=None, + _ispytest=True, + ) + if name2pseudofixturedef is not None: + name2pseudofixturedef[argname] = fixturedef + self._arg2fixturedefs[argname] = [fixturedef] + + # Create the new calls: if we are parametrize() multiple times (by applying the decorator + # more than once) then we accumulate those calls generating the cartesian product + # of all calls. + newcalls = [] + for callspec in self._calls or [CallSpec2()]: + for param_index, (param_id, param_set) in enumerate( + zip(ids, parametersets, strict=True) + ): + newcallspec = callspec.setmulti( + argnames=argnames, + valset=param_set.values, + id=param_id, + marks=param_set.marks, + scope=scope_, + param_index=param_index, + nodeid=nodeid, + ) + newcalls.append(newcallspec) + self._calls = newcalls + + def _resolve_parameter_set_ids( + self, + argnames: Sequence[str], + ids: Iterable[object | None] | Callable[[Any], object | None] | None, + parametersets: Sequence[ParameterSet], + nodeid: str, + ) -> list[str | _HiddenParam]: + """Resolve the actual ids for the given parameter sets. + + :param argnames: + Argument names passed to ``parametrize()``. + :param ids: + The `ids` parameter of the ``parametrize()`` call (see docs). + :param parametersets: + The parameter sets, each containing a set of values corresponding + to ``argnames``. + :param nodeid str: + The nodeid of the definition item that generated this + parametrization. + :returns: + List with ids for each parameter set given. + """ + if ids is None: + idfn = None + ids_ = None + elif callable(ids): + idfn = ids + ids_ = None + else: + idfn = None + ids_ = self._validate_ids(ids, parametersets, self.function.__name__) + id_maker = IdMaker( + argnames, + parametersets, + idfn, + ids_, + self.config, + nodeid=nodeid, + func_name=self.function.__name__, + ) + return id_maker.make_unique_parameterset_ids() + + def _validate_ids( + self, + ids: Iterable[object | None], + parametersets: Sequence[ParameterSet], + func_name: str, + ) -> list[object | None]: + try: + num_ids = len(ids) # type: ignore[arg-type] + except TypeError: + try: + iter(ids) + except TypeError as e: + raise TypeError("ids must be a callable or an iterable") from e + num_ids = len(parametersets) + + # num_ids == 0 is a special case: https://github.com/pytest-dev/pytest/issues/1849 + if num_ids != len(parametersets) and num_ids != 0: + msg = "In {}: {} parameter sets specified, with different number of ids: {}" + fail(msg.format(func_name, len(parametersets), num_ids), pytrace=False) + + return list(itertools.islice(ids, num_ids)) + + def _resolve_args_directness( + self, + argnames: Sequence[str], + indirect: bool | Sequence[str], + ) -> dict[str, Literal["indirect", "direct"]]: + """Resolve if each parametrized argument must be considered an indirect + parameter to a fixture of the same name, or a direct parameter to the + parametrized function, based on the ``indirect`` parameter of the + parametrized() call. + + :param argnames: + List of argument names passed to ``parametrize()``. + :param indirect: + Same as the ``indirect`` parameter of ``parametrize()``. + :returns + A dict mapping each arg name to either "indirect" or "direct". + """ + arg_directness: dict[str, Literal["indirect", "direct"]] + if isinstance(indirect, bool): + arg_directness = dict.fromkeys( + argnames, "indirect" if indirect else "direct" + ) + elif isinstance(indirect, Sequence): + arg_directness = dict.fromkeys(argnames, "direct") + for arg in indirect: + if arg not in argnames: + fail( + f"In {self.function.__name__}: indirect fixture '{arg}' doesn't exist", + pytrace=False, + ) + arg_directness[arg] = "indirect" + else: + fail( + f"In {self.function.__name__}: expected Sequence or boolean" + f" for indirect, got {type(indirect).__name__}", + pytrace=False, + ) + return arg_directness + + def _validate_if_using_arg_names( + self, + argnames: Sequence[str], + indirect: bool | Sequence[str], + ) -> None: + """Check if all argnames are being used, by default values, or directly/indirectly. + + :param List[str] argnames: List of argument names passed to ``parametrize()``. + :param indirect: Same as the ``indirect`` parameter of ``parametrize()``. + :raises ValueError: If validation fails. + """ + default_arg_names = set(get_default_arg_names(self.function)) + func_name = self.function.__name__ + for arg in argnames: + if arg not in self.fixturenames: + if arg in default_arg_names: + fail( + f"In {func_name}: function already takes an argument '{arg}' with a default value", + pytrace=False, + ) + else: + if isinstance(indirect, Sequence): + name = "fixture" if arg in indirect else "argument" + else: + name = "fixture" if indirect else "argument" + fail( + f"In {func_name}: function uses no {name} '{arg}'", + pytrace=False, + ) + + def _recompute_direct_params_indices(self) -> None: + for argname, param_type in self._params_directness.items(): + if param_type == "direct": + for i, callspec in enumerate(self._calls): + callspec.indices[argname] = i + + +def _find_parametrized_scope( + argnames: Sequence[str], + arg2fixturedefs: Mapping[str, Sequence[fixtures.FixtureDef[object]]], + indirect: bool | Sequence[str], +) -> Scope: + """Find the most appropriate scope for a parametrized call based on its arguments. + + When there's at least one direct argument, always use "function" scope. + + When a test function is parametrized and all its arguments are indirect + (e.g. fixtures), return the most narrow scope based on the fixtures used. + + Related to issue #1832, based on code posted by @Kingdread. + """ + if isinstance(indirect, Sequence): + all_arguments_are_fixtures = len(indirect) == len(argnames) + else: + all_arguments_are_fixtures = bool(indirect) + + if all_arguments_are_fixtures: + fixturedefs = arg2fixturedefs or {} + used_scopes = [ + fixturedef[-1]._scope + for name, fixturedef in fixturedefs.items() + if name in argnames + ] + # Takes the most narrow scope from used fixtures. + return min(used_scopes, default=Scope.Function) + + return Scope.Function + + +def _ascii_escaped_by_config(val: str | bytes, config: Config | None) -> str: + if config is None: + escape_option = False + else: + escape_option = config.getini( + "disable_test_id_escaping_and_forfeit_all_rights_to_community_support" + ) + # TODO: If escaping is turned off and the user passes bytes, + # will return a bytes. For now we ignore this but the + # code *probably* doesn't handle this case. + return val if escape_option else ascii_escaped(val) # type: ignore + + +class Function(PyobjMixin, nodes.Item): + """Item responsible for setting up and executing a Python test function. + + :param name: + The full function name, including any decorations like those + added by parametrization (``my_func[my_param]``). + :param parent: + The parent Node. + :param config: + The pytest Config object. + :param callspec: + If given, this function has been parametrized and the callspec contains + meta information about the parametrization. + :param callobj: + If given, the object which will be called when the Function is invoked, + otherwise the callobj will be obtained from ``parent`` using ``originalname``. + :param keywords: + Keywords bound to the function object for "-k" matching. + :param session: + The pytest Session object. + :param fixtureinfo: + Fixture information already resolved at this fixture node.. + :param originalname: + The attribute name to use for accessing the underlying function object. + Defaults to ``name``. Set this if name is different from the original name, + for example when it contains decorations like those added by parametrization + (``my_func[my_param]``). + """ + + # Disable since functions handle it themselves. + _ALLOW_MARKERS = False + + def __init__( + self, + name: str, + parent, + config: Config | None = None, + callspec: CallSpec2 | None = None, + callobj=NOTSET, + keywords: Mapping[str, Any] | None = None, + session: Session | None = None, + fixtureinfo: FuncFixtureInfo | None = None, + originalname: str | None = None, + ) -> None: + super().__init__(name, parent, config=config, session=session) + + if callobj is not NOTSET: + self._obj = callobj + self._instance = getattr(callobj, "__self__", None) + + #: Original function name, without any decorations (for example + #: parametrization adds a ``"[...]"`` suffix to function names), used to access + #: the underlying function object from ``parent`` (in case ``callobj`` is not given + #: explicitly). + #: + #: .. versionadded:: 3.0 + self.originalname = originalname or name + + # Note: when FunctionDefinition is introduced, we should change ``originalname`` + # to a readonly property that returns FunctionDefinition.name. + + self.own_markers.extend(get_unpacked_marks(self.obj)) + if callspec: + self.callspec = callspec + self.own_markers.extend(callspec.marks) + + # todo: this is a hell of a hack + # https://github.com/pytest-dev/pytest/issues/4569 + # Note: the order of the updates is important here; indicates what + # takes priority (ctor argument over function attributes over markers). + # Take own_markers only; NodeKeywords handles parent traversal on its own. + self.keywords.update((mark.name, mark) for mark in self.own_markers) + self.keywords.update(self.obj.__dict__) + if keywords: + self.keywords.update(keywords) + + if fixtureinfo is None: + fm = self.session._fixturemanager + fixtureinfo = fm.getfixtureinfo(self, self.obj, self.cls) + self._fixtureinfo: FuncFixtureInfo = fixtureinfo + self.fixturenames = fixtureinfo.names_closure + self._initrequest() + + # todo: determine sound type limitations + @classmethod + def from_parent(cls, parent, **kw) -> Self: + """The public constructor.""" + return super().from_parent(parent=parent, **kw) + + def _initrequest(self) -> None: + self.funcargs: dict[str, object] = {} + self._request = fixtures.TopRequest(self, _ispytest=True) + + @property + def function(self): + """Underlying python 'function' object.""" + return getimfunc(self.obj) + + @property + def instance(self): + try: + return self._instance + except AttributeError: + if isinstance(self.parent, Class): + # Each Function gets a fresh class instance. + self._instance = self._getinstance() + else: + self._instance = None + return self._instance + + def _getinstance(self): + if isinstance(self.parent, Class): + # Each Function gets a fresh class instance. + return self.parent.newinstance() + else: + return None + + def _getobj(self): + instance = self.instance + if instance is not None: + parent_obj = instance + else: + assert self.parent is not None + parent_obj = self.parent.obj # type: ignore[attr-defined] + return getattr(parent_obj, self.originalname) + + @property + def _pyfuncitem(self): + """(compatonly) for code expecting pytest-2.2 style request objects.""" + return self + + def runtest(self) -> None: + """Execute the underlying test function.""" + self.ihook.pytest_pyfunc_call(pyfuncitem=self) + + def setup(self) -> None: + self._request._fillfixtures() + + def _traceback_filter(self, excinfo: ExceptionInfo[BaseException]) -> Traceback: + if hasattr(self, "_obj") and not self.config.getoption("fulltrace", False): + code = _pytest._code.Code.from_function(get_real_func(self.obj)) + path, firstlineno = code.path, code.firstlineno + traceback = excinfo.traceback + ntraceback = traceback.cut(path=path, firstlineno=firstlineno) + if ntraceback == traceback: + ntraceback = ntraceback.cut(path=path) + if ntraceback == traceback: + ntraceback = ntraceback.filter(filter_traceback) + if not ntraceback: + ntraceback = traceback + ntraceback = ntraceback.filter(excinfo) + + # issue364: mark all but first and last frames to + # only show a single-line message for each frame. + if self.config.getoption("tbstyle", "auto") == "auto": + if len(ntraceback) > 2: + ntraceback = Traceback( + ( + ntraceback[0], + *(t.with_repr_style("short") for t in ntraceback[1:-1]), + ntraceback[-1], + ) + ) + + return ntraceback + return excinfo.traceback + + # TODO: Type ignored -- breaks Liskov Substitution. + def repr_failure( # type: ignore[override] + self, + excinfo: ExceptionInfo[BaseException], + ) -> str | TerminalRepr: + style = self.config.getoption("tbstyle", "auto") + if style == "auto": + style = "long" + return self._repr_failure_py(excinfo, style=style) + + +class FunctionDefinition(Function): + """This class is a stop gap solution until we evolve to have actual function + definition nodes and manage to get rid of ``metafunc``.""" + + def runtest(self) -> None: + raise RuntimeError("function definitions are not supposed to be run as tests") + + setup = runtest diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/python_api.py b/Backend/venv/lib/python3.12/site-packages/_pytest/python_api.py new file mode 100644 index 00000000..1e389eb0 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/_pytest/python_api.py @@ -0,0 +1,820 @@ +# mypy: allow-untyped-defs +from __future__ import annotations + +from collections.abc import Collection +from collections.abc import Mapping +from collections.abc import Sequence +from collections.abc import Sized +from decimal import Decimal +import math +from numbers import Complex +import pprint +import sys +from typing import Any +from typing import TYPE_CHECKING + + +if TYPE_CHECKING: + from numpy import ndarray + + +def _compare_approx( + full_object: object, + message_data: Sequence[tuple[str, str, str]], + number_of_elements: int, + different_ids: Sequence[object], + max_abs_diff: float, + max_rel_diff: float, +) -> list[str]: + message_list = list(message_data) + message_list.insert(0, ("Index", "Obtained", "Expected")) + max_sizes = [0, 0, 0] + for index, obtained, expected in message_list: + max_sizes[0] = max(max_sizes[0], len(index)) + max_sizes[1] = max(max_sizes[1], len(obtained)) + max_sizes[2] = max(max_sizes[2], len(expected)) + explanation = [ + f"comparison failed. Mismatched elements: {len(different_ids)} / {number_of_elements}:", + f"Max absolute difference: {max_abs_diff}", + f"Max relative difference: {max_rel_diff}", + ] + [ + f"{indexes:<{max_sizes[0]}} | {obtained:<{max_sizes[1]}} | {expected:<{max_sizes[2]}}" + for indexes, obtained, expected in message_list + ] + return explanation + + +# builtin pytest.approx helper + + +class ApproxBase: + """Provide shared utilities for making approximate comparisons between + numbers or sequences of numbers.""" + + # Tell numpy to use our `__eq__` operator instead of its. + __array_ufunc__ = None + __array_priority__ = 100 + + def __init__(self, expected, rel=None, abs=None, nan_ok: bool = False) -> None: + __tracebackhide__ = True + self.expected = expected + self.abs = abs + self.rel = rel + self.nan_ok = nan_ok + self._check_type() + + def __repr__(self) -> str: + raise NotImplementedError + + def _repr_compare(self, other_side: Any) -> list[str]: + return [ + "comparison failed", + f"Obtained: {other_side}", + f"Expected: {self}", + ] + + def __eq__(self, actual) -> bool: + return all( + a == self._approx_scalar(x) for a, x in self._yield_comparisons(actual) + ) + + def __bool__(self): + __tracebackhide__ = True + raise AssertionError( + "approx() is not supported in a boolean context.\nDid you mean: `assert a == approx(b)`?" + ) + + # Ignore type because of https://github.com/python/mypy/issues/4266. + __hash__ = None # type: ignore + + def __ne__(self, actual) -> bool: + return not (actual == self) + + def _approx_scalar(self, x) -> ApproxScalar: + if isinstance(x, Decimal): + return ApproxDecimal(x, rel=self.rel, abs=self.abs, nan_ok=self.nan_ok) + return ApproxScalar(x, rel=self.rel, abs=self.abs, nan_ok=self.nan_ok) + + def _yield_comparisons(self, actual): + """Yield all the pairs of numbers to be compared. + + This is used to implement the `__eq__` method. + """ + raise NotImplementedError + + def _check_type(self) -> None: + """Raise a TypeError if the expected value is not a valid type.""" + # This is only a concern if the expected value is a sequence. In every + # other case, the approx() function ensures that the expected value has + # a numeric type. For this reason, the default is to do nothing. The + # classes that deal with sequences should reimplement this method to + # raise if there are any non-numeric elements in the sequence. + + +def _recursive_sequence_map(f, x): + """Recursively map a function over a sequence of arbitrary depth""" + if isinstance(x, list | tuple): + seq_type = type(x) + return seq_type(_recursive_sequence_map(f, xi) for xi in x) + elif _is_sequence_like(x): + return [_recursive_sequence_map(f, xi) for xi in x] + else: + return f(x) + + +class ApproxNumpy(ApproxBase): + """Perform approximate comparisons where the expected value is numpy array.""" + + def __repr__(self) -> str: + list_scalars = _recursive_sequence_map( + self._approx_scalar, self.expected.tolist() + ) + return f"approx({list_scalars!r})" + + def _repr_compare(self, other_side: ndarray | list[Any]) -> list[str]: + import itertools + import math + + def get_value_from_nested_list( + nested_list: list[Any], nd_index: tuple[Any, ...] + ) -> Any: + """ + Helper function to get the value out of a nested list, given an n-dimensional index. + This mimics numpy's indexing, but for raw nested python lists. + """ + value: Any = nested_list + for i in nd_index: + value = value[i] + return value + + np_array_shape = self.expected.shape + approx_side_as_seq = _recursive_sequence_map( + self._approx_scalar, self.expected.tolist() + ) + + # convert other_side to numpy array to ensure shape attribute is available + other_side_as_array = _as_numpy_array(other_side) + assert other_side_as_array is not None + + if np_array_shape != other_side_as_array.shape: + return [ + "Impossible to compare arrays with different shapes.", + f"Shapes: {np_array_shape} and {other_side_as_array.shape}", + ] + + number_of_elements = self.expected.size + max_abs_diff = -math.inf + max_rel_diff = -math.inf + different_ids = [] + for index in itertools.product(*(range(i) for i in np_array_shape)): + approx_value = get_value_from_nested_list(approx_side_as_seq, index) + other_value = get_value_from_nested_list(other_side_as_array, index) + if approx_value != other_value: + abs_diff = abs(approx_value.expected - other_value) + max_abs_diff = max(max_abs_diff, abs_diff) + if other_value == 0.0: + max_rel_diff = math.inf + else: + max_rel_diff = max(max_rel_diff, abs_diff / abs(other_value)) + different_ids.append(index) + + message_data = [ + ( + str(index), + str(get_value_from_nested_list(other_side_as_array, index)), + str(get_value_from_nested_list(approx_side_as_seq, index)), + ) + for index in different_ids + ] + return _compare_approx( + self.expected, + message_data, + number_of_elements, + different_ids, + max_abs_diff, + max_rel_diff, + ) + + def __eq__(self, actual) -> bool: + import numpy as np + + # self.expected is supposed to always be an array here. + + if not np.isscalar(actual): + try: + actual = np.asarray(actual) + except Exception as e: + raise TypeError(f"cannot compare '{actual}' to numpy.ndarray") from e + + if not np.isscalar(actual) and actual.shape != self.expected.shape: + return False + + return super().__eq__(actual) + + def _yield_comparisons(self, actual): + import numpy as np + + # `actual` can either be a numpy array or a scalar, it is treated in + # `__eq__` before being passed to `ApproxBase.__eq__`, which is the + # only method that calls this one. + + if np.isscalar(actual): + for i in np.ndindex(self.expected.shape): + yield actual, self.expected[i].item() + else: + for i in np.ndindex(self.expected.shape): + yield actual[i].item(), self.expected[i].item() + + +class ApproxMapping(ApproxBase): + """Perform approximate comparisons where the expected value is a mapping + with numeric values (the keys can be anything).""" + + def __repr__(self) -> str: + return f"approx({ ({k: self._approx_scalar(v) for k, v in self.expected.items()})!r})" + + def _repr_compare(self, other_side: Mapping[object, float]) -> list[str]: + import math + + if len(self.expected) != len(other_side): + return [ + "Impossible to compare mappings with different sizes.", + f"Lengths: {len(self.expected)} and {len(other_side)}", + ] + + if set(self.expected.keys()) != set(other_side.keys()): + return [ + "comparison failed.", + f"Mappings has different keys: expected {self.expected.keys()} but got {other_side.keys()}", + ] + + approx_side_as_map = { + k: self._approx_scalar(v) for k, v in self.expected.items() + } + + number_of_elements = len(approx_side_as_map) + max_abs_diff = -math.inf + max_rel_diff = -math.inf + different_ids = [] + for (approx_key, approx_value), other_value in zip( + approx_side_as_map.items(), other_side.values(), strict=True + ): + if approx_value != other_value: + if approx_value.expected is not None and other_value is not None: + try: + max_abs_diff = max( + max_abs_diff, abs(approx_value.expected - other_value) + ) + if approx_value.expected == 0.0: + max_rel_diff = math.inf + else: + max_rel_diff = max( + max_rel_diff, + abs( + (approx_value.expected - other_value) + / approx_value.expected + ), + ) + except ZeroDivisionError: + pass + different_ids.append(approx_key) + + message_data = [ + (str(key), str(other_side[key]), str(approx_side_as_map[key])) + for key in different_ids + ] + + return _compare_approx( + self.expected, + message_data, + number_of_elements, + different_ids, + max_abs_diff, + max_rel_diff, + ) + + def __eq__(self, actual) -> bool: + try: + if set(actual.keys()) != set(self.expected.keys()): + return False + except AttributeError: + return False + + return super().__eq__(actual) + + def _yield_comparisons(self, actual): + for k in self.expected.keys(): + yield actual[k], self.expected[k] + + def _check_type(self) -> None: + __tracebackhide__ = True + for key, value in self.expected.items(): + if isinstance(value, type(self.expected)): + msg = "pytest.approx() does not support nested dictionaries: key={!r} value={!r}\n full mapping={}" + raise TypeError(msg.format(key, value, pprint.pformat(self.expected))) + + +class ApproxSequenceLike(ApproxBase): + """Perform approximate comparisons where the expected value is a sequence of numbers.""" + + def __repr__(self) -> str: + seq_type = type(self.expected) + if seq_type not in (tuple, list): + seq_type = list + return f"approx({seq_type(self._approx_scalar(x) for x in self.expected)!r})" + + def _repr_compare(self, other_side: Sequence[float]) -> list[str]: + import math + + if len(self.expected) != len(other_side): + return [ + "Impossible to compare lists with different sizes.", + f"Lengths: {len(self.expected)} and {len(other_side)}", + ] + + approx_side_as_map = _recursive_sequence_map(self._approx_scalar, self.expected) + + number_of_elements = len(approx_side_as_map) + max_abs_diff = -math.inf + max_rel_diff = -math.inf + different_ids = [] + for i, (approx_value, other_value) in enumerate( + zip(approx_side_as_map, other_side, strict=True) + ): + if approx_value != other_value: + try: + abs_diff = abs(approx_value.expected - other_value) + max_abs_diff = max(max_abs_diff, abs_diff) + # Ignore non-numbers for the diff calculations (#13012). + except TypeError: + pass + else: + if other_value == 0.0: + max_rel_diff = math.inf + else: + max_rel_diff = max(max_rel_diff, abs_diff / abs(other_value)) + different_ids.append(i) + message_data = [ + (str(i), str(other_side[i]), str(approx_side_as_map[i])) + for i in different_ids + ] + + return _compare_approx( + self.expected, + message_data, + number_of_elements, + different_ids, + max_abs_diff, + max_rel_diff, + ) + + def __eq__(self, actual) -> bool: + try: + if len(actual) != len(self.expected): + return False + except TypeError: + return False + return super().__eq__(actual) + + def _yield_comparisons(self, actual): + return zip(actual, self.expected, strict=True) + + def _check_type(self) -> None: + __tracebackhide__ = True + for index, x in enumerate(self.expected): + if isinstance(x, type(self.expected)): + msg = "pytest.approx() does not support nested data structures: {!r} at index {}\n full sequence: {}" + raise TypeError(msg.format(x, index, pprint.pformat(self.expected))) + + +class ApproxScalar(ApproxBase): + """Perform approximate comparisons where the expected value is a single number.""" + + # Using Real should be better than this Union, but not possible yet: + # https://github.com/python/typeshed/pull/3108 + DEFAULT_ABSOLUTE_TOLERANCE: float | Decimal = 1e-12 + DEFAULT_RELATIVE_TOLERANCE: float | Decimal = 1e-6 + + def __repr__(self) -> str: + """Return a string communicating both the expected value and the + tolerance for the comparison being made. + + For example, ``1.0 ± 1e-6``, ``(3+4j) ± 5e-6 ∠ ±180°``. + """ + # Don't show a tolerance for values that aren't compared using + # tolerances, i.e. non-numerics and infinities. Need to call abs to + # handle complex numbers, e.g. (inf + 1j). + if ( + isinstance(self.expected, bool) + or (not isinstance(self.expected, Complex | Decimal)) + or math.isinf(abs(self.expected) or isinstance(self.expected, bool)) + ): + return str(self.expected) + + # If a sensible tolerance can't be calculated, self.tolerance will + # raise a ValueError. In this case, display '???'. + try: + if 1e-3 <= self.tolerance < 1e3: + vetted_tolerance = f"{self.tolerance:n}" + else: + vetted_tolerance = f"{self.tolerance:.1e}" + + if ( + isinstance(self.expected, Complex) + and self.expected.imag + and not math.isinf(self.tolerance) + ): + vetted_tolerance += " ∠ ±180°" + except ValueError: + vetted_tolerance = "???" + + return f"{self.expected} ± {vetted_tolerance}" + + def __eq__(self, actual) -> bool: + """Return whether the given value is equal to the expected value + within the pre-specified tolerance.""" + + def is_bool(val: Any) -> bool: + # Check if `val` is a native bool or numpy bool. + if isinstance(val, bool): + return True + if np := sys.modules.get("numpy"): + return isinstance(val, np.bool_) + return False + + asarray = _as_numpy_array(actual) + if asarray is not None: + # Call ``__eq__()`` manually to prevent infinite-recursion with + # numpy<1.13. See #3748. + return all(self.__eq__(a) for a in asarray.flat) + + # Short-circuit exact equality, except for bool and np.bool_ + if is_bool(self.expected) and not is_bool(actual): + return False + elif actual == self.expected: + return True + + # If either type is non-numeric, fall back to strict equality. + # NB: we need Complex, rather than just Number, to ensure that __abs__, + # __sub__, and __float__ are defined. Also, consider bool to be + # non-numeric, even though it has the required arithmetic. + if is_bool(self.expected) or not ( + isinstance(self.expected, Complex | Decimal) + and isinstance(actual, Complex | Decimal) + ): + return False + + # Allow the user to control whether NaNs are considered equal to each + # other or not. The abs() calls are for compatibility with complex + # numbers. + if math.isnan(abs(self.expected)): + return self.nan_ok and math.isnan(abs(actual)) + + # Infinity shouldn't be approximately equal to anything but itself, but + # if there's a relative tolerance, it will be infinite and infinity + # will seem approximately equal to everything. The equal-to-itself + # case would have been short circuited above, so here we can just + # return false if the expected value is infinite. The abs() call is + # for compatibility with complex numbers. + if math.isinf(abs(self.expected)): + return False + + # Return true if the two numbers are within the tolerance. + result: bool = abs(self.expected - actual) <= self.tolerance + return result + + __hash__ = None + + @property + def tolerance(self): + """Return the tolerance for the comparison. + + This could be either an absolute tolerance or a relative tolerance, + depending on what the user specified or which would be larger. + """ + + def set_default(x, default): + return x if x is not None else default + + # Figure out what the absolute tolerance should be. ``self.abs`` is + # either None or a value specified by the user. + absolute_tolerance = set_default(self.abs, self.DEFAULT_ABSOLUTE_TOLERANCE) + + if absolute_tolerance < 0: + raise ValueError( + f"absolute tolerance can't be negative: {absolute_tolerance}" + ) + if math.isnan(absolute_tolerance): + raise ValueError("absolute tolerance can't be NaN.") + + # If the user specified an absolute tolerance but not a relative one, + # just return the absolute tolerance. + if self.rel is None: + if self.abs is not None: + return absolute_tolerance + + # Figure out what the relative tolerance should be. ``self.rel`` is + # either None or a value specified by the user. This is done after + # we've made sure the user didn't ask for an absolute tolerance only, + # because we don't want to raise errors about the relative tolerance if + # we aren't even going to use it. + relative_tolerance = set_default( + self.rel, self.DEFAULT_RELATIVE_TOLERANCE + ) * abs(self.expected) + + if relative_tolerance < 0: + raise ValueError( + f"relative tolerance can't be negative: {relative_tolerance}" + ) + if math.isnan(relative_tolerance): + raise ValueError("relative tolerance can't be NaN.") + + # Return the larger of the relative and absolute tolerances. + return max(relative_tolerance, absolute_tolerance) + + +class ApproxDecimal(ApproxScalar): + """Perform approximate comparisons where the expected value is a Decimal.""" + + DEFAULT_ABSOLUTE_TOLERANCE = Decimal("1e-12") + DEFAULT_RELATIVE_TOLERANCE = Decimal("1e-6") + + def __repr__(self) -> str: + if isinstance(self.rel, float): + rel = Decimal.from_float(self.rel) + else: + rel = self.rel + + if isinstance(self.abs, float): + abs_ = Decimal.from_float(self.abs) + else: + abs_ = self.abs + + tol_str = "???" + if rel is not None and Decimal("1e-3") <= rel <= Decimal("1e3"): + tol_str = f"{rel:.1e}" + elif abs_ is not None: + tol_str = f"{abs_:.1e}" + + return f"{self.expected} ± {tol_str}" + + +def approx(expected, rel=None, abs=None, nan_ok: bool = False) -> ApproxBase: + """Assert that two numbers (or two ordered sequences of numbers) are equal to each other + within some tolerance. + + Due to the :doc:`python:tutorial/floatingpoint`, numbers that we + would intuitively expect to be equal are not always so:: + + >>> 0.1 + 0.2 == 0.3 + False + + This problem is commonly encountered when writing tests, e.g. when making + sure that floating-point values are what you expect them to be. One way to + deal with this problem is to assert that two floating-point numbers are + equal to within some appropriate tolerance:: + + >>> abs((0.1 + 0.2) - 0.3) < 1e-6 + True + + However, comparisons like this are tedious to write and difficult to + understand. Furthermore, absolute comparisons like the one above are + usually discouraged because there's no tolerance that works well for all + situations. ``1e-6`` is good for numbers around ``1``, but too small for + very big numbers and too big for very small ones. It's better to express + the tolerance as a fraction of the expected value, but relative comparisons + like that are even more difficult to write correctly and concisely. + + The ``approx`` class performs floating-point comparisons using a syntax + that's as intuitive as possible:: + + >>> from pytest import approx + >>> 0.1 + 0.2 == approx(0.3) + True + + The same syntax also works for ordered sequences of numbers:: + + >>> (0.1 + 0.2, 0.2 + 0.4) == approx((0.3, 0.6)) + True + + ``numpy`` arrays:: + + >>> import numpy as np # doctest: +SKIP + >>> np.array([0.1, 0.2]) + np.array([0.2, 0.4]) == approx(np.array([0.3, 0.6])) # doctest: +SKIP + True + + And for a ``numpy`` array against a scalar:: + + >>> import numpy as np # doctest: +SKIP + >>> np.array([0.1, 0.2]) + np.array([0.2, 0.1]) == approx(0.3) # doctest: +SKIP + True + + Only ordered sequences are supported, because ``approx`` needs + to infer the relative position of the sequences without ambiguity. This means + ``sets`` and other unordered sequences are not supported. + + Finally, dictionary *values* can also be compared:: + + >>> {'a': 0.1 + 0.2, 'b': 0.2 + 0.4} == approx({'a': 0.3, 'b': 0.6}) + True + + The comparison will be true if both mappings have the same keys and their + respective values match the expected tolerances. + + **Tolerances** + + By default, ``approx`` considers numbers within a relative tolerance of + ``1e-6`` (i.e. one part in a million) of its expected value to be equal. + This treatment would lead to surprising results if the expected value was + ``0.0``, because nothing but ``0.0`` itself is relatively close to ``0.0``. + To handle this case less surprisingly, ``approx`` also considers numbers + within an absolute tolerance of ``1e-12`` of its expected value to be + equal. Infinity and NaN are special cases. Infinity is only considered + equal to itself, regardless of the relative tolerance. NaN is not + considered equal to anything by default, but you can make it be equal to + itself by setting the ``nan_ok`` argument to True. (This is meant to + facilitate comparing arrays that use NaN to mean "no data".) + + Both the relative and absolute tolerances can be changed by passing + arguments to the ``approx`` constructor:: + + >>> 1.0001 == approx(1) + False + >>> 1.0001 == approx(1, rel=1e-3) + True + >>> 1.0001 == approx(1, abs=1e-3) + True + + If you specify ``abs`` but not ``rel``, the comparison will not consider + the relative tolerance at all. In other words, two numbers that are within + the default relative tolerance of ``1e-6`` will still be considered unequal + if they exceed the specified absolute tolerance. If you specify both + ``abs`` and ``rel``, the numbers will be considered equal if either + tolerance is met:: + + >>> 1 + 1e-8 == approx(1) + True + >>> 1 + 1e-8 == approx(1, abs=1e-12) + False + >>> 1 + 1e-8 == approx(1, rel=1e-6, abs=1e-12) + True + + **Non-numeric types** + + You can also use ``approx`` to compare non-numeric types, or dicts and + sequences containing non-numeric types, in which case it falls back to + strict equality. This can be useful for comparing dicts and sequences that + can contain optional values:: + + >>> {"required": 1.0000005, "optional": None} == approx({"required": 1, "optional": None}) + True + >>> [None, 1.0000005] == approx([None,1]) + True + >>> ["foo", 1.0000005] == approx([None,1]) + False + + If you're thinking about using ``approx``, then you might want to know how + it compares to other good ways of comparing floating-point numbers. All of + these algorithms are based on relative and absolute tolerances and should + agree for the most part, but they do have meaningful differences: + + - ``math.isclose(a, b, rel_tol=1e-9, abs_tol=0.0)``: True if the relative + tolerance is met w.r.t. either ``a`` or ``b`` or if the absolute + tolerance is met. Because the relative tolerance is calculated w.r.t. + both ``a`` and ``b``, this test is symmetric (i.e. neither ``a`` nor + ``b`` is a "reference value"). You have to specify an absolute tolerance + if you want to compare to ``0.0`` because there is no tolerance by + default. More information: :py:func:`math.isclose`. + + - ``numpy.isclose(a, b, rtol=1e-5, atol=1e-8)``: True if the difference + between ``a`` and ``b`` is less that the sum of the relative tolerance + w.r.t. ``b`` and the absolute tolerance. Because the relative tolerance + is only calculated w.r.t. ``b``, this test is asymmetric and you can + think of ``b`` as the reference value. Support for comparing sequences + is provided by :py:func:`numpy.allclose`. More information: + :std:doc:`numpy:reference/generated/numpy.isclose`. + + - ``unittest.TestCase.assertAlmostEqual(a, b)``: True if ``a`` and ``b`` + are within an absolute tolerance of ``1e-7``. No relative tolerance is + considered , so this function is not appropriate for very large or very + small numbers. Also, it's only available in subclasses of ``unittest.TestCase`` + and it's ugly because it doesn't follow PEP8. More information: + :py:meth:`unittest.TestCase.assertAlmostEqual`. + + - ``a == pytest.approx(b, rel=1e-6, abs=1e-12)``: True if the relative + tolerance is met w.r.t. ``b`` or if the absolute tolerance is met. + Because the relative tolerance is only calculated w.r.t. ``b``, this test + is asymmetric and you can think of ``b`` as the reference value. In the + special case that you explicitly specify an absolute tolerance but not a + relative tolerance, only the absolute tolerance is considered. + + .. note:: + + ``approx`` can handle numpy arrays, but we recommend the + specialised test helpers in :std:doc:`numpy:reference/routines.testing` + if you need support for comparisons, NaNs, or ULP-based tolerances. + + To match strings using regex, you can use + `Matches `_ + from the + `re_assert package `_. + + + .. note:: + + Unlike built-in equality, this function considers + booleans unequal to numeric zero or one. For example:: + + >>> 1 == approx(True) + False + + .. warning:: + + .. versionchanged:: 3.2 + + In order to avoid inconsistent behavior, :py:exc:`TypeError` is + raised for ``>``, ``>=``, ``<`` and ``<=`` comparisons. + The example below illustrates the problem:: + + assert approx(0.1) > 0.1 + 1e-10 # calls approx(0.1).__gt__(0.1 + 1e-10) + assert 0.1 + 1e-10 > approx(0.1) # calls approx(0.1).__lt__(0.1 + 1e-10) + + In the second example one expects ``approx(0.1).__le__(0.1 + 1e-10)`` + to be called. But instead, ``approx(0.1).__lt__(0.1 + 1e-10)`` is used to + comparison. This is because the call hierarchy of rich comparisons + follows a fixed behavior. More information: :py:meth:`object.__ge__` + + .. versionchanged:: 3.7.1 + ``approx`` raises ``TypeError`` when it encounters a dict value or + sequence element of non-numeric type. + + .. versionchanged:: 6.1.0 + ``approx`` falls back to strict equality for non-numeric types instead + of raising ``TypeError``. + """ + # Delegate the comparison to a class that knows how to deal with the type + # of the expected value (e.g. int, float, list, dict, numpy.array, etc). + # + # The primary responsibility of these classes is to implement ``__eq__()`` + # and ``__repr__()``. The former is used to actually check if some + # "actual" value is equivalent to the given expected value within the + # allowed tolerance. The latter is used to show the user the expected + # value and tolerance, in the case that a test failed. + # + # The actual logic for making approximate comparisons can be found in + # ApproxScalar, which is used to compare individual numbers. All of the + # other Approx classes eventually delegate to this class. The ApproxBase + # class provides some convenient methods and overloads, but isn't really + # essential. + + __tracebackhide__ = True + + if isinstance(expected, Decimal): + cls: type[ApproxBase] = ApproxDecimal + elif isinstance(expected, Mapping): + cls = ApproxMapping + elif _is_numpy_array(expected): + expected = _as_numpy_array(expected) + cls = ApproxNumpy + elif _is_sequence_like(expected): + cls = ApproxSequenceLike + elif isinstance(expected, Collection) and not isinstance(expected, str | bytes): + msg = f"pytest.approx() only supports ordered sequences, but got: {expected!r}" + raise TypeError(msg) + else: + cls = ApproxScalar + + return cls(expected, rel, abs, nan_ok) + + +def _is_sequence_like(expected: object) -> bool: + return ( + hasattr(expected, "__getitem__") + and isinstance(expected, Sized) + and not isinstance(expected, str | bytes) + ) + + +def _is_numpy_array(obj: object) -> bool: + """ + Return true if the given object is implicitly convertible to ndarray, + and numpy is already imported. + """ + return _as_numpy_array(obj) is not None + + +def _as_numpy_array(obj: object) -> ndarray | None: + """ + Return an ndarray if the given object is implicitly convertible to ndarray, + and numpy is already imported, otherwise None. + """ + np: Any = sys.modules.get("numpy") + if np is not None: + # avoid infinite recursion on numpy scalars, which have __array__ + if np.isscalar(obj): + return None + elif isinstance(obj, np.ndarray): + return obj + elif hasattr(obj, "__array__") or hasattr("obj", "__array_interface__"): + return np.asarray(obj) + return None diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/raises.py b/Backend/venv/lib/python3.12/site-packages/_pytest/raises.py new file mode 100644 index 00000000..7c246fde --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/_pytest/raises.py @@ -0,0 +1,1517 @@ +from __future__ import annotations + +from abc import ABC +from abc import abstractmethod +import re +from re import Pattern +import sys +from textwrap import indent +from typing import Any +from typing import cast +from typing import final +from typing import Generic +from typing import get_args +from typing import get_origin +from typing import Literal +from typing import overload +from typing import TYPE_CHECKING +import warnings + +from _pytest._code import ExceptionInfo +from _pytest._code.code import stringify_exception +from _pytest.outcomes import fail +from _pytest.warning_types import PytestWarning + + +if TYPE_CHECKING: + from collections.abc import Callable + from collections.abc import Sequence + + # for some reason Sphinx does not play well with 'from types import TracebackType' + import types + from typing import TypeGuard + + from typing_extensions import ParamSpec + from typing_extensions import TypeVar + + P = ParamSpec("P") + + # this conditional definition is because we want to allow a TypeVar default + BaseExcT_co_default = TypeVar( + "BaseExcT_co_default", + bound=BaseException, + default=BaseException, + covariant=True, + ) + + # Use short name because it shows up in docs. + E = TypeVar("E", bound=BaseException, default=BaseException) +else: + from typing import TypeVar + + BaseExcT_co_default = TypeVar( + "BaseExcT_co_default", bound=BaseException, covariant=True + ) + +# RaisesGroup doesn't work with a default. +BaseExcT_co = TypeVar("BaseExcT_co", bound=BaseException, covariant=True) +BaseExcT_1 = TypeVar("BaseExcT_1", bound=BaseException) +BaseExcT_2 = TypeVar("BaseExcT_2", bound=BaseException) +ExcT_1 = TypeVar("ExcT_1", bound=Exception) +ExcT_2 = TypeVar("ExcT_2", bound=Exception) + +if sys.version_info < (3, 11): + from exceptiongroup import BaseExceptionGroup + from exceptiongroup import ExceptionGroup + + +# String patterns default to including the unicode flag. +_REGEX_NO_FLAGS = re.compile(r"").flags + + +# pytest.raises helper +@overload +def raises( + expected_exception: type[E] | tuple[type[E], ...], + *, + match: str | re.Pattern[str] | None = ..., + check: Callable[[E], bool] = ..., +) -> RaisesExc[E]: ... + + +@overload +def raises( + *, + match: str | re.Pattern[str], + # If exception_type is not provided, check() must do any typechecks itself. + check: Callable[[BaseException], bool] = ..., +) -> RaisesExc[BaseException]: ... + + +@overload +def raises(*, check: Callable[[BaseException], bool]) -> RaisesExc[BaseException]: ... + + +@overload +def raises( + expected_exception: type[E] | tuple[type[E], ...], + func: Callable[..., Any], + *args: Any, + **kwargs: Any, +) -> ExceptionInfo[E]: ... + + +def raises( + expected_exception: type[E] | tuple[type[E], ...] | None = None, + *args: Any, + **kwargs: Any, +) -> RaisesExc[BaseException] | ExceptionInfo[E]: + r"""Assert that a code block/function call raises an exception type, or one of its subclasses. + + :param expected_exception: + The expected exception type, or a tuple if one of multiple possible + exception types are expected. Note that subclasses of the passed exceptions + will also match. + + This is not a required parameter, you may opt to only use ``match`` and/or + ``check`` for verifying the raised exception. + + :kwparam str | re.Pattern[str] | None match: + If specified, a string containing a regular expression, + or a regular expression object, that is tested against the string + representation of the exception and its :pep:`678` `__notes__` + using :func:`re.search`. + + To match a literal string that may contain :ref:`special characters + `, the pattern can first be escaped with :func:`re.escape`. + + (This is only used when ``pytest.raises`` is used as a context manager, + and passed through to the function otherwise. + When using ``pytest.raises`` as a function, you can use: + ``pytest.raises(Exc, func, match="passed on").match("my pattern")``.) + + :kwparam Callable[[BaseException], bool] check: + + .. versionadded:: 8.4 + + If specified, a callable that will be called with the exception as a parameter + after checking the type and the match regex if specified. + If it returns ``True`` it will be considered a match, if not it will + be considered a failed match. + + + Use ``pytest.raises`` as a context manager, which will capture the exception of the given + type, or any of its subclasses:: + + >>> import pytest + >>> with pytest.raises(ZeroDivisionError): + ... 1/0 + + If the code block does not raise the expected exception (:class:`ZeroDivisionError` in the example + above), or no exception at all, the check will fail instead. + + You can also use the keyword argument ``match`` to assert that the + exception matches a text or regex:: + + >>> with pytest.raises(ValueError, match='must be 0 or None'): + ... raise ValueError("value must be 0 or None") + + >>> with pytest.raises(ValueError, match=r'must be \d+$'): + ... raise ValueError("value must be 42") + + The ``match`` argument searches the formatted exception string, which includes any + `PEP-678 `__ ``__notes__``: + + >>> with pytest.raises(ValueError, match=r"had a note added"): # doctest: +SKIP + ... e = ValueError("value must be 42") + ... e.add_note("had a note added") + ... raise e + + The ``check`` argument, if provided, must return True when passed the raised exception + for the match to be successful, otherwise an :exc:`AssertionError` is raised. + + >>> import errno + >>> with pytest.raises(OSError, check=lambda e: e.errno == errno.EACCES): + ... raise OSError(errno.EACCES, "no permission to view") + + The context manager produces an :class:`ExceptionInfo` object which can be used to inspect the + details of the captured exception:: + + >>> with pytest.raises(ValueError) as exc_info: + ... raise ValueError("value must be 42") + >>> assert exc_info.type is ValueError + >>> assert exc_info.value.args[0] == "value must be 42" + + .. warning:: + + Given that ``pytest.raises`` matches subclasses, be wary of using it to match :class:`Exception` like this:: + + # Careful, this will catch ANY exception raised. + with pytest.raises(Exception): + some_function() + + Because :class:`Exception` is the base class of almost all exceptions, it is easy for this to hide + real bugs, where the user wrote this expecting a specific exception, but some other exception is being + raised due to a bug introduced during a refactoring. + + Avoid using ``pytest.raises`` to catch :class:`Exception` unless certain that you really want to catch + **any** exception raised. + + .. note:: + + When using ``pytest.raises`` as a context manager, it's worthwhile to + note that normal context manager rules apply and that the exception + raised *must* be the final line in the scope of the context manager. + Lines of code after that, within the scope of the context manager will + not be executed. For example:: + + >>> value = 15 + >>> with pytest.raises(ValueError) as exc_info: + ... if value > 10: + ... raise ValueError("value must be <= 10") + ... assert exc_info.type is ValueError # This will not execute. + + Instead, the following approach must be taken (note the difference in + scope):: + + >>> with pytest.raises(ValueError) as exc_info: + ... if value > 10: + ... raise ValueError("value must be <= 10") + ... + >>> assert exc_info.type is ValueError + + **Expecting exception groups** + + When expecting exceptions wrapped in :exc:`BaseExceptionGroup` or + :exc:`ExceptionGroup`, you should instead use :class:`pytest.RaisesGroup`. + + **Using with** ``pytest.mark.parametrize`` + + When using :ref:`pytest.mark.parametrize ref` + it is possible to parametrize tests such that + some runs raise an exception and others do not. + + See :ref:`parametrizing_conditional_raising` for an example. + + .. seealso:: + + :ref:`assertraises` for more examples and detailed discussion. + + **Legacy form** + + It is possible to specify a callable by passing a to-be-called lambda:: + + >>> raises(ZeroDivisionError, lambda: 1/0) + + + or you can specify an arbitrary callable with arguments:: + + >>> def f(x): return 1/x + ... + >>> raises(ZeroDivisionError, f, 0) + + >>> raises(ZeroDivisionError, f, x=0) + + + The form above is fully supported but discouraged for new code because the + context manager form is regarded as more readable and less error-prone. + + .. note:: + Similar to caught exception objects in Python, explicitly clearing + local references to returned ``ExceptionInfo`` objects can + help the Python interpreter speed up its garbage collection. + + Clearing those references breaks a reference cycle + (``ExceptionInfo`` --> caught exception --> frame stack raising + the exception --> current frame stack --> local variables --> + ``ExceptionInfo``) which makes Python keep all objects referenced + from that cycle (including all local variables in the current + frame) alive until the next cyclic garbage collection run. + More detailed information can be found in the official Python + documentation for :ref:`the try statement `. + """ + __tracebackhide__ = True + + if not args: + if set(kwargs) - {"match", "check", "expected_exception"}: + msg = "Unexpected keyword arguments passed to pytest.raises: " + msg += ", ".join(sorted(kwargs)) + msg += "\nUse context-manager form instead?" + raise TypeError(msg) + + if expected_exception is None: + return RaisesExc(**kwargs) + return RaisesExc(expected_exception, **kwargs) + + if not expected_exception: + raise ValueError( + f"Expected an exception type or a tuple of exception types, but got `{expected_exception!r}`. " + f"Raising exceptions is already understood as failing the test, so you don't need " + f"any special code to say 'this should never raise an exception'." + ) + func = args[0] + if not callable(func): + raise TypeError(f"{func!r} object (type: {type(func)}) must be callable") + with RaisesExc(expected_exception) as excinfo: + func(*args[1:], **kwargs) + try: + return excinfo + finally: + del excinfo + + +# note: RaisesExc/RaisesGroup uses fail() internally, so this alias +# indicates (to [internal] plugins?) that `pytest.raises` will +# raise `_pytest.outcomes.Failed`, where +# `outcomes.Failed is outcomes.fail.Exception is raises.Exception` +# note: this is *not* the same as `_pytest.main.Failed` +# note: mypy does not recognize this attribute, and it's not possible +# to use a protocol/decorator like the others in outcomes due to +# https://github.com/python/mypy/issues/18715 +raises.Exception = fail.Exception # type: ignore[attr-defined] + + +def _match_pattern(match: Pattern[str]) -> str | Pattern[str]: + """Helper function to remove redundant `re.compile` calls when printing regex""" + return match.pattern if match.flags == _REGEX_NO_FLAGS else match + + +def repr_callable(fun: Callable[[BaseExcT_1], bool]) -> str: + """Get the repr of a ``check`` parameter. + + Split out so it can be monkeypatched (e.g. by hypothesis) + """ + return repr(fun) + + +def backquote(s: str) -> str: + return "`" + s + "`" + + +def _exception_type_name( + e: type[BaseException] | tuple[type[BaseException], ...], +) -> str: + if isinstance(e, type): + return e.__name__ + if len(e) == 1: + return e[0].__name__ + return "(" + ", ".join(ee.__name__ for ee in e) + ")" + + +def _check_raw_type( + expected_type: type[BaseException] | tuple[type[BaseException], ...] | None, + exception: BaseException, +) -> str | None: + if expected_type is None or expected_type == (): + return None + + if not isinstance( + exception, + expected_type, + ): + actual_type_str = backquote(_exception_type_name(type(exception)) + "()") + expected_type_str = backquote(_exception_type_name(expected_type)) + if ( + isinstance(exception, BaseExceptionGroup) + and isinstance(expected_type, type) + and not issubclass(expected_type, BaseExceptionGroup) + ): + return f"Unexpected nested {actual_type_str}, expected {expected_type_str}" + return f"{actual_type_str} is not an instance of {expected_type_str}" + return None + + +def is_fully_escaped(s: str) -> bool: + # we know we won't compile with re.VERBOSE, so whitespace doesn't need to be escaped + metacharacters = "{}()+.*?^$[]" + return not any( + c in metacharacters and (i == 0 or s[i - 1] != "\\") for (i, c) in enumerate(s) + ) + + +def unescape(s: str) -> str: + return re.sub(r"\\([{}()+-.*?^$\[\]\s\\])", r"\1", s) + + +# These classes conceptually differ from ExceptionInfo in that ExceptionInfo is tied, and +# constructed from, a particular exception - whereas these are constructed with expected +# exceptions, and later allow matching towards particular exceptions. +# But there's overlap in `ExceptionInfo.match` and `AbstractRaises._check_match`, as with +# `AbstractRaises.matches` and `ExceptionInfo.errisinstance`+`ExceptionInfo.group_contains`. +# The interaction between these classes should perhaps be improved. +class AbstractRaises(ABC, Generic[BaseExcT_co]): + """ABC with common functionality shared between RaisesExc and RaisesGroup""" + + def __init__( + self, + *, + match: str | Pattern[str] | None, + check: Callable[[BaseExcT_co], bool] | None, + ) -> None: + if isinstance(match, str): + # juggle error in order to avoid context to fail (necessary?) + re_error = None + try: + self.match: Pattern[str] | None = re.compile(match) + except re.error as e: + re_error = e + if re_error is not None: + fail(f"Invalid regex pattern provided to 'match': {re_error}") + if match == "": + warnings.warn( + PytestWarning( + "matching against an empty string will *always* pass. If you want " + "to check for an empty message you need to pass '^$'. If you don't " + "want to match you should pass `None` or leave out the parameter." + ), + stacklevel=2, + ) + else: + self.match = match + + # check if this is a fully escaped regex and has ^$ to match fully + # in which case we can do a proper diff on error + self.rawmatch: str | None = None + if isinstance(match, str) or ( + isinstance(match, Pattern) and match.flags == _REGEX_NO_FLAGS + ): + if isinstance(match, Pattern): + match = match.pattern + if ( + match + and match[0] == "^" + and match[-1] == "$" + and is_fully_escaped(match[1:-1]) + ): + self.rawmatch = unescape(match[1:-1]) + + self.check = check + self._fail_reason: str | None = None + + # used to suppress repeated printing of `repr(self.check)` + self._nested: bool = False + + # set in self._parse_exc + self.is_baseexception = False + + def _parse_exc( + self, exc: type[BaseExcT_1] | types.GenericAlias, expected: str + ) -> type[BaseExcT_1]: + if isinstance(exc, type) and issubclass(exc, BaseException): + if not issubclass(exc, Exception): + self.is_baseexception = True + return exc + # because RaisesGroup does not support variable number of exceptions there's + # still a use for RaisesExc(ExceptionGroup[Exception]). + origin_exc: type[BaseException] | None = get_origin(exc) + if origin_exc and issubclass(origin_exc, BaseExceptionGroup): + exc_type = get_args(exc)[0] + if ( + issubclass(origin_exc, ExceptionGroup) and exc_type in (Exception, Any) + ) or ( + issubclass(origin_exc, BaseExceptionGroup) + and exc_type in (BaseException, Any) + ): + if not issubclass(origin_exc, ExceptionGroup): + self.is_baseexception = True + return cast(type[BaseExcT_1], origin_exc) + else: + raise ValueError( + f"Only `ExceptionGroup[Exception]` or `BaseExceptionGroup[BaseException]` " + f"are accepted as generic types but got `{exc}`. " + f"As `raises` will catch all instances of the specified group regardless of the " + f"generic argument specific nested exceptions has to be checked " + f"with `RaisesGroup`." + ) + # unclear if the Type/ValueError distinction is even helpful here + msg = f"Expected {expected}, but got " + if isinstance(exc, type): # type: ignore[unreachable] + raise ValueError(msg + f"{exc.__name__!r}") + if isinstance(exc, BaseException): # type: ignore[unreachable] + raise TypeError(msg + f"an exception instance: {type(exc).__name__}") + raise TypeError(msg + repr(type(exc).__name__)) + + @property + def fail_reason(self) -> str | None: + """Set after a call to :meth:`matches` to give a human-readable reason for why the match failed. + When used as a context manager the string will be printed as the reason for the + test failing.""" + return self._fail_reason + + def _check_check( + self: AbstractRaises[BaseExcT_1], + exception: BaseExcT_1, + ) -> bool: + if self.check is None: + return True + + if self.check(exception): + return True + + check_repr = "" if self._nested else " " + repr_callable(self.check) + self._fail_reason = f"check{check_repr} did not return True" + return False + + # TODO: harmonize with ExceptionInfo.match + def _check_match(self, e: BaseException) -> bool: + if self.match is None or re.search( + self.match, + stringified_exception := stringify_exception( + e, include_subexception_msg=False + ), + ): + return True + + # if we're matching a group, make sure we're explicit to reduce confusion + # if they're trying to match an exception contained within the group + maybe_specify_type = ( + f" the `{_exception_type_name(type(e))}()`" + if isinstance(e, BaseExceptionGroup) + else "" + ) + if isinstance(self.rawmatch, str): + # TODO: it instructs to use `-v` to print leading text, but that doesn't work + # I also don't know if this is the proper entry point, or tool to use at all + from _pytest.assertion.util import _diff_text + from _pytest.assertion.util import dummy_highlighter + + diff = _diff_text(self.rawmatch, stringified_exception, dummy_highlighter) + self._fail_reason = ("\n" if diff[0][0] == "-" else "") + "\n".join(diff) + return False + + self._fail_reason = ( + f"Regex pattern did not match{maybe_specify_type}.\n" + f" Expected regex: {_match_pattern(self.match)!r}\n" + f" Actual message: {stringified_exception!r}" + ) + if _match_pattern(self.match) == stringified_exception: + self._fail_reason += "\n Did you mean to `re.escape()` the regex?" + return False + + @abstractmethod + def matches( + self: AbstractRaises[BaseExcT_1], exception: BaseException + ) -> TypeGuard[BaseExcT_1]: + """Check if an exception matches the requirements of this AbstractRaises. + If it fails, :meth:`AbstractRaises.fail_reason` should be set. + """ + + +@final +class RaisesExc(AbstractRaises[BaseExcT_co_default]): + """ + .. versionadded:: 8.4 + + + This is the class constructed when calling :func:`pytest.raises`, but may be used + directly as a helper class with :class:`RaisesGroup` when you want to specify + requirements on sub-exceptions. + + You don't need this if you only want to specify the type, since :class:`RaisesGroup` + accepts ``type[BaseException]``. + + :param type[BaseException] | tuple[type[BaseException]] | None expected_exception: + The expected type, or one of several possible types. + May be ``None`` in order to only make use of ``match`` and/or ``check`` + + The type is checked with :func:`isinstance`, and does not need to be an exact match. + If that is wanted you can use the ``check`` parameter. + + :kwparam str | Pattern[str] match: + A regex to match. + + :kwparam Callable[[BaseException], bool] check: + If specified, a callable that will be called with the exception as a parameter + after checking the type and the match regex if specified. + If it returns ``True`` it will be considered a match, if not it will + be considered a failed match. + + :meth:`RaisesExc.matches` can also be used standalone to check individual exceptions. + + Examples:: + + with RaisesGroup(RaisesExc(ValueError, match="string")) + ... + with RaisesGroup(RaisesExc(check=lambda x: x.args == (3, "hello"))): + ... + with RaisesGroup(RaisesExc(check=lambda x: type(x) is ValueError)): + ... + """ + + # Trio bundled hypothesis monkeypatching, we will probably instead assume that + # hypothesis will handle that in their pytest plugin by the time this is released. + # Alternatively we could add a version of get_pretty_function_description ourselves + # https://github.com/HypothesisWorks/hypothesis/blob/8ced2f59f5c7bea3344e35d2d53e1f8f8eb9fcd8/hypothesis-python/src/hypothesis/internal/reflection.py#L439 + + # At least one of the three parameters must be passed. + @overload + def __init__( + self, + expected_exception: ( + type[BaseExcT_co_default] | tuple[type[BaseExcT_co_default], ...] + ), + /, + *, + match: str | Pattern[str] | None = ..., + check: Callable[[BaseExcT_co_default], bool] | None = ..., + ) -> None: ... + + @overload + def __init__( + self: RaisesExc[BaseException], # Give E a value. + /, + *, + match: str | Pattern[str] | None, + # If exception_type is not provided, check() must do any typechecks itself. + check: Callable[[BaseException], bool] | None = ..., + ) -> None: ... + + @overload + def __init__(self, /, *, check: Callable[[BaseException], bool]) -> None: ... + + def __init__( + self, + expected_exception: ( + type[BaseExcT_co_default] | tuple[type[BaseExcT_co_default], ...] | None + ) = None, + /, + *, + match: str | Pattern[str] | None = None, + check: Callable[[BaseExcT_co_default], bool] | None = None, + ): + super().__init__(match=match, check=check) + if isinstance(expected_exception, tuple): + expected_exceptions = expected_exception + elif expected_exception is None: + expected_exceptions = () + else: + expected_exceptions = (expected_exception,) + + if (expected_exceptions == ()) and match is None and check is None: + raise ValueError("You must specify at least one parameter to match on.") + + self.expected_exceptions = tuple( + self._parse_exc(e, expected="a BaseException type") + for e in expected_exceptions + ) + + self._just_propagate = False + + def matches( + self, + exception: BaseException | None, + ) -> TypeGuard[BaseExcT_co_default]: + """Check if an exception matches the requirements of this :class:`RaisesExc`. + If it fails, :attr:`RaisesExc.fail_reason` will be set. + + Examples:: + + assert RaisesExc(ValueError).matches(my_exception): + # is equivalent to + assert isinstance(my_exception, ValueError) + + # this can be useful when checking e.g. the ``__cause__`` of an exception. + with pytest.raises(ValueError) as excinfo: + ... + assert RaisesExc(SyntaxError, match="foo").matches(excinfo.value.__cause__) + # above line is equivalent to + assert isinstance(excinfo.value.__cause__, SyntaxError) + assert re.search("foo", str(excinfo.value.__cause__) + + """ + self._just_propagate = False + if exception is None: + self._fail_reason = "exception is None" + return False + if not self._check_type(exception): + self._just_propagate = True + return False + + if not self._check_match(exception): + return False + + return self._check_check(exception) + + def __repr__(self) -> str: + parameters = [] + if self.expected_exceptions: + parameters.append(_exception_type_name(self.expected_exceptions)) + if self.match is not None: + # If no flags were specified, discard the redundant re.compile() here. + parameters.append( + f"match={_match_pattern(self.match)!r}", + ) + if self.check is not None: + parameters.append(f"check={repr_callable(self.check)}") + return f"RaisesExc({', '.join(parameters)})" + + def _check_type(self, exception: BaseException) -> TypeGuard[BaseExcT_co_default]: + self._fail_reason = _check_raw_type(self.expected_exceptions, exception) + return self._fail_reason is None + + def __enter__(self) -> ExceptionInfo[BaseExcT_co_default]: + self.excinfo: ExceptionInfo[BaseExcT_co_default] = ExceptionInfo.for_later() + return self.excinfo + + # TODO: move common code into superclass + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: types.TracebackType | None, + ) -> bool: + __tracebackhide__ = True + if exc_type is None: + if not self.expected_exceptions: + fail("DID NOT RAISE any exception") + if len(self.expected_exceptions) > 1: + fail(f"DID NOT RAISE any of {self.expected_exceptions!r}") + + fail(f"DID NOT RAISE {self.expected_exceptions[0]!r}") + + assert self.excinfo is not None, ( + "Internal error - should have been constructed in __enter__" + ) + + if not self.matches(exc_val): + if self._just_propagate: + return False + raise AssertionError(self._fail_reason) + + # Cast to narrow the exception type now that it's verified.... + # even though the TypeGuard in self.matches should be narrowing + exc_info = cast( + "tuple[type[BaseExcT_co_default], BaseExcT_co_default, types.TracebackType]", + (exc_type, exc_val, exc_tb), + ) + self.excinfo.fill_unfilled(exc_info) + return True + + +@final +class RaisesGroup(AbstractRaises[BaseExceptionGroup[BaseExcT_co]]): + """ + .. versionadded:: 8.4 + + Contextmanager for checking for an expected :exc:`ExceptionGroup`. + This works similar to :func:`pytest.raises`, but allows for specifying the structure of an :exc:`ExceptionGroup`. + :meth:`ExceptionInfo.group_contains` also tries to handle exception groups, + but it is very bad at checking that you *didn't* get unexpected exceptions. + + The catching behaviour differs from :ref:`except* `, being much + stricter about the structure by default. + By using ``allow_unwrapped=True`` and ``flatten_subgroups=True`` you can match + :ref:`except* ` fully when expecting a single exception. + + :param args: + Any number of exception types, :class:`RaisesGroup` or :class:`RaisesExc` + to specify the exceptions contained in this exception. + All specified exceptions must be present in the raised group, *and no others*. + + If you expect a variable number of exceptions you need to use + :func:`pytest.raises(ExceptionGroup) ` and manually check + the contained exceptions. Consider making use of :meth:`RaisesExc.matches`. + + It does not care about the order of the exceptions, so + ``RaisesGroup(ValueError, TypeError)`` + is equivalent to + ``RaisesGroup(TypeError, ValueError)``. + :kwparam str | re.Pattern[str] | None match: + If specified, a string containing a regular expression, + or a regular expression object, that is tested against the string + representation of the exception group and its :pep:`678` `__notes__` + using :func:`re.search`. + + To match a literal string that may contain :ref:`special characters + `, the pattern can first be escaped with :func:`re.escape`. + + Note that " (5 subgroups)" will be stripped from the ``repr`` before matching. + :kwparam Callable[[E], bool] check: + If specified, a callable that will be called with the group as a parameter + after successfully matching the expected exceptions. If it returns ``True`` + it will be considered a match, if not it will be considered a failed match. + :kwparam bool allow_unwrapped: + If expecting a single exception or :class:`RaisesExc` it will match even + if the exception is not inside an exceptiongroup. + + Using this together with ``match``, ``check`` or expecting multiple exceptions + will raise an error. + :kwparam bool flatten_subgroups: + "flatten" any groups inside the raised exception group, extracting all exceptions + inside any nested groups, before matching. Without this it expects you to + fully specify the nesting structure by passing :class:`RaisesGroup` as expected + parameter. + + Examples:: + + with RaisesGroup(ValueError): + raise ExceptionGroup("", (ValueError(),)) + # match + with RaisesGroup( + ValueError, + ValueError, + RaisesExc(TypeError, match="^expected int$"), + match="^my group$", + ): + raise ExceptionGroup( + "my group", + [ + ValueError(), + TypeError("expected int"), + ValueError(), + ], + ) + # check + with RaisesGroup( + KeyboardInterrupt, + match="^hello$", + check=lambda x: isinstance(x.__cause__, ValueError), + ): + raise BaseExceptionGroup("hello", [KeyboardInterrupt()]) from ValueError + # nested groups + with RaisesGroup(RaisesGroup(ValueError)): + raise ExceptionGroup("", (ExceptionGroup("", (ValueError(),)),)) + + # flatten_subgroups + with RaisesGroup(ValueError, flatten_subgroups=True): + raise ExceptionGroup("", (ExceptionGroup("", (ValueError(),)),)) + + # allow_unwrapped + with RaisesGroup(ValueError, allow_unwrapped=True): + raise ValueError + + + :meth:`RaisesGroup.matches` can also be used directly to check a standalone exception group. + + + The matching algorithm is greedy, which means cases such as this may fail:: + + with RaisesGroup(ValueError, RaisesExc(ValueError, match="hello")): + raise ExceptionGroup("", (ValueError("hello"), ValueError("goodbye"))) + + even though it generally does not care about the order of the exceptions in the group. + To avoid the above you should specify the first :exc:`ValueError` with a :class:`RaisesExc` as well. + + .. note:: + When raised exceptions don't match the expected ones, you'll get a detailed error + message explaining why. This includes ``repr(check)`` if set, which in Python can be + overly verbose, showing memory locations etc etc. + + If installed and imported (in e.g. ``conftest.py``), the ``hypothesis`` library will + monkeypatch this output to provide shorter & more readable repr's. + """ + + # allow_unwrapped=True requires: singular exception, exception not being + # RaisesGroup instance, match is None, check is None + @overload + def __init__( + self, + expected_exception: type[BaseExcT_co] | RaisesExc[BaseExcT_co], + /, + *, + allow_unwrapped: Literal[True], + flatten_subgroups: bool = False, + ) -> None: ... + + # flatten_subgroups = True also requires no nested RaisesGroup + @overload + def __init__( + self, + expected_exception: type[BaseExcT_co] | RaisesExc[BaseExcT_co], + /, + *other_exceptions: type[BaseExcT_co] | RaisesExc[BaseExcT_co], + flatten_subgroups: Literal[True], + match: str | Pattern[str] | None = None, + check: Callable[[BaseExceptionGroup[BaseExcT_co]], bool] | None = None, + ) -> None: ... + + # simplify the typevars if possible (the following 3 are equivalent but go simpler->complicated) + # ... the first handles RaisesGroup[ValueError], the second RaisesGroup[ExceptionGroup[ValueError]], + # the third RaisesGroup[ValueError | ExceptionGroup[ValueError]]. + # ... otherwise, we will get results like RaisesGroup[ValueError | ExceptionGroup[Never]] (I think) + # (technically correct but misleading) + @overload + def __init__( + self: RaisesGroup[ExcT_1], + expected_exception: type[ExcT_1] | RaisesExc[ExcT_1], + /, + *other_exceptions: type[ExcT_1] | RaisesExc[ExcT_1], + match: str | Pattern[str] | None = None, + check: Callable[[ExceptionGroup[ExcT_1]], bool] | None = None, + ) -> None: ... + + @overload + def __init__( + self: RaisesGroup[ExceptionGroup[ExcT_2]], + expected_exception: RaisesGroup[ExcT_2], + /, + *other_exceptions: RaisesGroup[ExcT_2], + match: str | Pattern[str] | None = None, + check: Callable[[ExceptionGroup[ExceptionGroup[ExcT_2]]], bool] | None = None, + ) -> None: ... + + @overload + def __init__( + self: RaisesGroup[ExcT_1 | ExceptionGroup[ExcT_2]], + expected_exception: type[ExcT_1] | RaisesExc[ExcT_1] | RaisesGroup[ExcT_2], + /, + *other_exceptions: type[ExcT_1] | RaisesExc[ExcT_1] | RaisesGroup[ExcT_2], + match: str | Pattern[str] | None = None, + check: ( + Callable[[ExceptionGroup[ExcT_1 | ExceptionGroup[ExcT_2]]], bool] | None + ) = None, + ) -> None: ... + + # same as the above 3 but handling BaseException + @overload + def __init__( + self: RaisesGroup[BaseExcT_1], + expected_exception: type[BaseExcT_1] | RaisesExc[BaseExcT_1], + /, + *other_exceptions: type[BaseExcT_1] | RaisesExc[BaseExcT_1], + match: str | Pattern[str] | None = None, + check: Callable[[BaseExceptionGroup[BaseExcT_1]], bool] | None = None, + ) -> None: ... + + @overload + def __init__( + self: RaisesGroup[BaseExceptionGroup[BaseExcT_2]], + expected_exception: RaisesGroup[BaseExcT_2], + /, + *other_exceptions: RaisesGroup[BaseExcT_2], + match: str | Pattern[str] | None = None, + check: ( + Callable[[BaseExceptionGroup[BaseExceptionGroup[BaseExcT_2]]], bool] | None + ) = None, + ) -> None: ... + + @overload + def __init__( + self: RaisesGroup[BaseExcT_1 | BaseExceptionGroup[BaseExcT_2]], + expected_exception: type[BaseExcT_1] + | RaisesExc[BaseExcT_1] + | RaisesGroup[BaseExcT_2], + /, + *other_exceptions: type[BaseExcT_1] + | RaisesExc[BaseExcT_1] + | RaisesGroup[BaseExcT_2], + match: str | Pattern[str] | None = None, + check: ( + Callable[ + [BaseExceptionGroup[BaseExcT_1 | BaseExceptionGroup[BaseExcT_2]]], + bool, + ] + | None + ) = None, + ) -> None: ... + + def __init__( + self: RaisesGroup[ExcT_1 | BaseExcT_1 | BaseExceptionGroup[BaseExcT_2]], + expected_exception: type[BaseExcT_1] + | RaisesExc[BaseExcT_1] + | RaisesGroup[BaseExcT_2], + /, + *other_exceptions: type[BaseExcT_1] + | RaisesExc[BaseExcT_1] + | RaisesGroup[BaseExcT_2], + allow_unwrapped: bool = False, + flatten_subgroups: bool = False, + match: str | Pattern[str] | None = None, + check: ( + Callable[[BaseExceptionGroup[BaseExcT_1]], bool] + | Callable[[ExceptionGroup[ExcT_1]], bool] + | None + ) = None, + ): + # The type hint on the `self` and `check` parameters uses different formats + # that are *very* hard to reconcile while adhering to the overloads, so we cast + # it to avoid an error when passing it to super().__init__ + check = cast( + "Callable[[BaseExceptionGroup[ExcT_1|BaseExcT_1|BaseExceptionGroup[BaseExcT_2]]], bool]", + check, + ) + super().__init__(match=match, check=check) + self.allow_unwrapped = allow_unwrapped + self.flatten_subgroups: bool = flatten_subgroups + self.is_baseexception = False + + if allow_unwrapped and other_exceptions: + raise ValueError( + "You cannot specify multiple exceptions with `allow_unwrapped=True.`" + " If you want to match one of multiple possible exceptions you should" + " use a `RaisesExc`." + " E.g. `RaisesExc(check=lambda e: isinstance(e, (...)))`", + ) + if allow_unwrapped and isinstance(expected_exception, RaisesGroup): + raise ValueError( + "`allow_unwrapped=True` has no effect when expecting a `RaisesGroup`." + " You might want it in the expected `RaisesGroup`, or" + " `flatten_subgroups=True` if you don't care about the structure.", + ) + if allow_unwrapped and (match is not None or check is not None): + raise ValueError( + "`allow_unwrapped=True` bypasses the `match` and `check` parameters" + " if the exception is unwrapped. If you intended to match/check the" + " exception you should use a `RaisesExc` object. If you want to match/check" + " the exceptiongroup when the exception *is* wrapped you need to" + " do e.g. `if isinstance(exc.value, ExceptionGroup):" + " assert RaisesGroup(...).matches(exc.value)` afterwards.", + ) + + self.expected_exceptions: tuple[ + type[BaseExcT_co] | RaisesExc[BaseExcT_co] | RaisesGroup[BaseException], ... + ] = tuple( + self._parse_excgroup(e, "a BaseException type, RaisesExc, or RaisesGroup") + for e in ( + expected_exception, + *other_exceptions, + ) + ) + + def _parse_excgroup( + self, + exc: ( + type[BaseExcT_co] + | types.GenericAlias + | RaisesExc[BaseExcT_1] + | RaisesGroup[BaseExcT_2] + ), + expected: str, + ) -> type[BaseExcT_co] | RaisesExc[BaseExcT_1] | RaisesGroup[BaseExcT_2]: + # verify exception type and set `self.is_baseexception` + if isinstance(exc, RaisesGroup): + if self.flatten_subgroups: + raise ValueError( + "You cannot specify a nested structure inside a RaisesGroup with" + " `flatten_subgroups=True`. The parameter will flatten subgroups" + " in the raised exceptiongroup before matching, which would never" + " match a nested structure.", + ) + self.is_baseexception |= exc.is_baseexception + exc._nested = True + return exc + elif isinstance(exc, RaisesExc): + self.is_baseexception |= exc.is_baseexception + exc._nested = True + return exc + elif isinstance(exc, tuple): + raise TypeError( + f"Expected {expected}, but got {type(exc).__name__!r}.\n" + "RaisesGroup does not support tuples of exception types when expecting one of " + "several possible exception types like RaisesExc.\n" + "If you meant to expect a group with multiple exceptions, list them as separate arguments." + ) + else: + return super()._parse_exc(exc, expected) + + @overload + def __enter__( + self: RaisesGroup[ExcT_1], + ) -> ExceptionInfo[ExceptionGroup[ExcT_1]]: ... + @overload + def __enter__( + self: RaisesGroup[BaseExcT_1], + ) -> ExceptionInfo[BaseExceptionGroup[BaseExcT_1]]: ... + + def __enter__(self) -> ExceptionInfo[BaseExceptionGroup[BaseException]]: + self.excinfo: ExceptionInfo[BaseExceptionGroup[BaseExcT_co]] = ( + ExceptionInfo.for_later() + ) + return self.excinfo + + def __repr__(self) -> str: + reqs = [ + e.__name__ if isinstance(e, type) else repr(e) + for e in self.expected_exceptions + ] + if self.allow_unwrapped: + reqs.append(f"allow_unwrapped={self.allow_unwrapped}") + if self.flatten_subgroups: + reqs.append(f"flatten_subgroups={self.flatten_subgroups}") + if self.match is not None: + # If no flags were specified, discard the redundant re.compile() here. + reqs.append(f"match={_match_pattern(self.match)!r}") + if self.check is not None: + reqs.append(f"check={repr_callable(self.check)}") + return f"RaisesGroup({', '.join(reqs)})" + + def _unroll_exceptions( + self, + exceptions: Sequence[BaseException], + ) -> Sequence[BaseException]: + """Used if `flatten_subgroups=True`.""" + res: list[BaseException] = [] + for exc in exceptions: + if isinstance(exc, BaseExceptionGroup): + res.extend(self._unroll_exceptions(exc.exceptions)) + + else: + res.append(exc) + return res + + @overload + def matches( + self: RaisesGroup[ExcT_1], + exception: BaseException | None, + ) -> TypeGuard[ExceptionGroup[ExcT_1]]: ... + @overload + def matches( + self: RaisesGroup[BaseExcT_1], + exception: BaseException | None, + ) -> TypeGuard[BaseExceptionGroup[BaseExcT_1]]: ... + + def matches( + self, + exception: BaseException | None, + ) -> bool: + """Check if an exception matches the requirements of this RaisesGroup. + If it fails, `RaisesGroup.fail_reason` will be set. + + Example:: + + with pytest.raises(TypeError) as excinfo: + ... + assert RaisesGroup(ValueError).matches(excinfo.value.__cause__) + # the above line is equivalent to + myexc = excinfo.value.__cause + assert isinstance(myexc, BaseExceptionGroup) + assert len(myexc.exceptions) == 1 + assert isinstance(myexc.exceptions[0], ValueError) + """ + self._fail_reason = None + if exception is None: + self._fail_reason = "exception is None" + return False + if not isinstance(exception, BaseExceptionGroup): + # we opt to only print type of the exception here, as the repr would + # likely be quite long + not_group_msg = f"`{type(exception).__name__}()` is not an exception group" + if len(self.expected_exceptions) > 1: + self._fail_reason = not_group_msg + return False + # if we have 1 expected exception, check if it would work even if + # allow_unwrapped is not set + res = self._check_expected(self.expected_exceptions[0], exception) + if res is None and self.allow_unwrapped: + return True + + if res is None: + self._fail_reason = ( + f"{not_group_msg}, but would match with `allow_unwrapped=True`" + ) + elif self.allow_unwrapped: + self._fail_reason = res + else: + self._fail_reason = not_group_msg + return False + + actual_exceptions: Sequence[BaseException] = exception.exceptions + if self.flatten_subgroups: + actual_exceptions = self._unroll_exceptions(actual_exceptions) + + if not self._check_match(exception): + self._fail_reason = cast(str, self._fail_reason) + old_reason = self._fail_reason + if ( + len(actual_exceptions) == len(self.expected_exceptions) == 1 + and isinstance(expected := self.expected_exceptions[0], type) + and isinstance(actual := actual_exceptions[0], expected) + and self._check_match(actual) + ): + assert self.match is not None, "can't be None if _check_match failed" + assert self._fail_reason is old_reason is not None + self._fail_reason += ( + f"\n" + f" but matched the expected `{self._repr_expected(expected)}`.\n" + f" You might want " + f"`RaisesGroup(RaisesExc({expected.__name__}, match={_match_pattern(self.match)!r}))`" + ) + else: + self._fail_reason = old_reason + return False + + # do the full check on expected exceptions + if not self._check_exceptions( + exception, + actual_exceptions, + ): + self._fail_reason = cast(str, self._fail_reason) + assert self._fail_reason is not None + old_reason = self._fail_reason + # if we're not expecting a nested structure, and there is one, do a second + # pass where we try flattening it + if ( + not self.flatten_subgroups + and not any( + isinstance(e, RaisesGroup) for e in self.expected_exceptions + ) + and any(isinstance(e, BaseExceptionGroup) for e in actual_exceptions) + and self._check_exceptions( + exception, + self._unroll_exceptions(exception.exceptions), + ) + ): + # only indent if it's a single-line reason. In a multi-line there's already + # indented lines that this does not belong to. + indent = " " if "\n" not in self._fail_reason else "" + self._fail_reason = ( + old_reason + + f"\n{indent}Did you mean to use `flatten_subgroups=True`?" + ) + else: + self._fail_reason = old_reason + return False + + # Only run `self.check` once we know `exception` is of the correct type. + if not self._check_check(exception): + reason = ( + cast(str, self._fail_reason) + f" on the {type(exception).__name__}" + ) + if ( + len(actual_exceptions) == len(self.expected_exceptions) == 1 + and isinstance(expected := self.expected_exceptions[0], type) + # we explicitly break typing here :) + and self._check_check(actual_exceptions[0]) # type: ignore[arg-type] + ): + self._fail_reason = reason + ( + f", but did return True for the expected {self._repr_expected(expected)}." + f" You might want RaisesGroup(RaisesExc({expected.__name__}, check=<...>))" + ) + else: + self._fail_reason = reason + return False + + return True + + @staticmethod + def _check_expected( + expected_type: ( + type[BaseException] | RaisesExc[BaseException] | RaisesGroup[BaseException] + ), + exception: BaseException, + ) -> str | None: + """Helper method for `RaisesGroup.matches` and `RaisesGroup._check_exceptions` + to check one of potentially several expected exceptions.""" + if isinstance(expected_type, type): + return _check_raw_type(expected_type, exception) + res = expected_type.matches(exception) + if res: + return None + assert expected_type.fail_reason is not None + if expected_type.fail_reason.startswith("\n"): + return f"\n{expected_type!r}: {indent(expected_type.fail_reason, ' ')}" + return f"{expected_type!r}: {expected_type.fail_reason}" + + @staticmethod + def _repr_expected(e: type[BaseException] | AbstractRaises[BaseException]) -> str: + """Get the repr of an expected type/RaisesExc/RaisesGroup, but we only want + the name if it's a type""" + if isinstance(e, type): + return _exception_type_name(e) + return repr(e) + + @overload + def _check_exceptions( + self: RaisesGroup[ExcT_1], + _exception: Exception, + actual_exceptions: Sequence[Exception], + ) -> TypeGuard[ExceptionGroup[ExcT_1]]: ... + @overload + def _check_exceptions( + self: RaisesGroup[BaseExcT_1], + _exception: BaseException, + actual_exceptions: Sequence[BaseException], + ) -> TypeGuard[BaseExceptionGroup[BaseExcT_1]]: ... + + def _check_exceptions( + self, + _exception: BaseException, + actual_exceptions: Sequence[BaseException], + ) -> bool: + """Helper method for RaisesGroup.matches that attempts to pair up expected and actual exceptions""" + # The _exception parameter is not used, but necessary for the TypeGuard + + # full table with all results + results = ResultHolder(self.expected_exceptions, actual_exceptions) + + # (indexes of) raised exceptions that haven't (yet) found an expected + remaining_actual = list(range(len(actual_exceptions))) + # (indexes of) expected exceptions that haven't found a matching raised + failed_expected: list[int] = [] + # successful greedy matches + matches: dict[int, int] = {} + + # loop over expected exceptions first to get a more predictable result + for i_exp, expected in enumerate(self.expected_exceptions): + for i_rem in remaining_actual: + res = self._check_expected(expected, actual_exceptions[i_rem]) + results.set_result(i_exp, i_rem, res) + if res is None: + remaining_actual.remove(i_rem) + matches[i_exp] = i_rem + break + else: + failed_expected.append(i_exp) + + # All exceptions matched up successfully + if not remaining_actual and not failed_expected: + return True + + # in case of a single expected and single raised we simplify the output + if 1 == len(actual_exceptions) == len(self.expected_exceptions): + assert not matches + self._fail_reason = res + return False + + # The test case is failing, so we can do a slow and exhaustive check to find + # duplicate matches etc that will be helpful in debugging + for i_exp, expected in enumerate(self.expected_exceptions): + for i_actual, actual in enumerate(actual_exceptions): + if results.has_result(i_exp, i_actual): + continue + results.set_result( + i_exp, i_actual, self._check_expected(expected, actual) + ) + + successful_str = ( + f"{len(matches)} matched exception{'s' if len(matches) > 1 else ''}. " + if matches + else "" + ) + + # all expected were found + if not failed_expected and results.no_match_for_actual(remaining_actual): + self._fail_reason = ( + f"{successful_str}Unexpected exception(s):" + f" {[actual_exceptions[i] for i in remaining_actual]!r}" + ) + return False + # all raised exceptions were expected + if not remaining_actual and results.no_match_for_expected(failed_expected): + no_match_for_str = ", ".join( + self._repr_expected(self.expected_exceptions[i]) + for i in failed_expected + ) + self._fail_reason = f"{successful_str}Too few exceptions raised, found no match for: [{no_match_for_str}]" + return False + + # if there's only one remaining and one failed, and the unmatched didn't match anything else, + # we elect to only print why the remaining and the failed didn't match. + if ( + 1 == len(remaining_actual) == len(failed_expected) + and results.no_match_for_actual(remaining_actual) + and results.no_match_for_expected(failed_expected) + ): + self._fail_reason = f"{successful_str}{results.get_result(failed_expected[0], remaining_actual[0])}" + return False + + # there's both expected and raised exceptions without matches + s = "" + if matches: + s += f"\n{successful_str}" + indent_1 = " " * 2 + indent_2 = " " * 4 + + if not remaining_actual: + s += "\nToo few exceptions raised!" + elif not failed_expected: + s += "\nUnexpected exception(s)!" + + if failed_expected: + s += "\nThe following expected exceptions did not find a match:" + rev_matches = {v: k for k, v in matches.items()} + for i_failed in failed_expected: + s += ( + f"\n{indent_1}{self._repr_expected(self.expected_exceptions[i_failed])}" + ) + for i_actual, actual in enumerate(actual_exceptions): + if results.get_result(i_exp, i_actual) is None: + # we print full repr of match target + s += ( + f"\n{indent_2}It matches {backquote(repr(actual))} which was paired with " + + backquote( + self._repr_expected( + self.expected_exceptions[rev_matches[i_actual]] + ) + ) + ) + + if remaining_actual: + s += "\nThe following raised exceptions did not find a match" + for i_actual in remaining_actual: + s += f"\n{indent_1}{actual_exceptions[i_actual]!r}:" + for i_exp, expected in enumerate(self.expected_exceptions): + res = results.get_result(i_exp, i_actual) + if i_exp in failed_expected: + assert res is not None + if res[0] != "\n": + s += "\n" + s += indent(res, indent_2) + if res is None: + # we print full repr of match target + s += ( + f"\n{indent_2}It matches {backquote(self._repr_expected(expected))} " + f"which was paired with {backquote(repr(actual_exceptions[matches[i_exp]]))}" + ) + + if len(self.expected_exceptions) == len(actual_exceptions) and possible_match( + results + ): + s += ( + "\nThere exist a possible match when attempting an exhaustive check," + " but RaisesGroup uses a greedy algorithm. " + "Please make your expected exceptions more stringent with `RaisesExc` etc" + " so the greedy algorithm can function." + ) + self._fail_reason = s + return False + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: types.TracebackType | None, + ) -> bool: + __tracebackhide__ = True + if exc_type is None: + fail(f"DID NOT RAISE any exception, expected `{self.expected_type()}`") + + assert self.excinfo is not None, ( + "Internal error - should have been constructed in __enter__" + ) + + # group_str is the only thing that differs between RaisesExc and RaisesGroup... + # I might just scrap it? Or make it part of fail_reason + group_str = ( + "(group)" + if self.allow_unwrapped and not issubclass(exc_type, BaseExceptionGroup) + else "group" + ) + + if not self.matches(exc_val): + fail(f"Raised exception {group_str} did not match: {self._fail_reason}") + + # Cast to narrow the exception type now that it's verified.... + # even though the TypeGuard in self.matches should be narrowing + exc_info = cast( + "tuple[type[BaseExceptionGroup[BaseExcT_co]], BaseExceptionGroup[BaseExcT_co], types.TracebackType]", + (exc_type, exc_val, exc_tb), + ) + self.excinfo.fill_unfilled(exc_info) + return True + + def expected_type(self) -> str: + subexcs = [] + for e in self.expected_exceptions: + if isinstance(e, RaisesExc): + subexcs.append(repr(e)) + elif isinstance(e, RaisesGroup): + subexcs.append(e.expected_type()) + elif isinstance(e, type): + subexcs.append(e.__name__) + else: # pragma: no cover + raise AssertionError("unknown type") + group_type = "Base" if self.is_baseexception else "" + return f"{group_type}ExceptionGroup({', '.join(subexcs)})" + + +@final +class NotChecked: + """Singleton for unchecked values in ResultHolder""" + + +class ResultHolder: + """Container for results of checking exceptions. + Used in RaisesGroup._check_exceptions and possible_match. + """ + + def __init__( + self, + expected_exceptions: tuple[ + type[BaseException] | AbstractRaises[BaseException], ... + ], + actual_exceptions: Sequence[BaseException], + ) -> None: + self.results: list[list[str | type[NotChecked] | None]] = [ + [NotChecked for _ in expected_exceptions] for _ in actual_exceptions + ] + + def set_result(self, expected: int, actual: int, result: str | None) -> None: + self.results[actual][expected] = result + + def get_result(self, expected: int, actual: int) -> str | None: + res = self.results[actual][expected] + assert res is not NotChecked + # mypy doesn't support identity checking against anything but None + return res # type: ignore[return-value] + + def has_result(self, expected: int, actual: int) -> bool: + return self.results[actual][expected] is not NotChecked + + def no_match_for_expected(self, expected: list[int]) -> bool: + for i in expected: + for actual_results in self.results: + assert actual_results[i] is not NotChecked + if actual_results[i] is None: + return False + return True + + def no_match_for_actual(self, actual: list[int]) -> bool: + for i in actual: + for res in self.results[i]: + assert res is not NotChecked + if res is None: + return False + return True + + +def possible_match(results: ResultHolder, used: set[int] | None = None) -> bool: + if used is None: + used = set() + curr_row = len(used) + if curr_row == len(results.results): + return True + return any( + val is None and i not in used and possible_match(results, used | {i}) + for (i, val) in enumerate(results.results[curr_row]) + ) diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/recwarn.py b/Backend/venv/lib/python3.12/site-packages/_pytest/recwarn.py new file mode 100644 index 00000000..e3db717b --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/_pytest/recwarn.py @@ -0,0 +1,367 @@ +# mypy: allow-untyped-defs +"""Record warnings during test function execution.""" + +from __future__ import annotations + +from collections.abc import Callable +from collections.abc import Generator +from collections.abc import Iterator +from pprint import pformat +import re +from types import TracebackType +from typing import Any +from typing import final +from typing import overload +from typing import TYPE_CHECKING +from typing import TypeVar + + +if TYPE_CHECKING: + from typing_extensions import Self + +import warnings + +from _pytest.deprecated import check_ispytest +from _pytest.fixtures import fixture +from _pytest.outcomes import Exit +from _pytest.outcomes import fail + + +T = TypeVar("T") + + +@fixture +def recwarn() -> Generator[WarningsRecorder]: + """Return a :class:`WarningsRecorder` instance that records all warnings emitted by test functions. + + See :ref:`warnings` for information on warning categories. + """ + wrec = WarningsRecorder(_ispytest=True) + with wrec: + warnings.simplefilter("default") + yield wrec + + +@overload +def deprecated_call( + *, match: str | re.Pattern[str] | None = ... +) -> WarningsRecorder: ... + + +@overload +def deprecated_call(func: Callable[..., T], *args: Any, **kwargs: Any) -> T: ... + + +def deprecated_call( + func: Callable[..., Any] | None = None, *args: Any, **kwargs: Any +) -> WarningsRecorder | Any: + """Assert that code produces a ``DeprecationWarning`` or ``PendingDeprecationWarning`` or ``FutureWarning``. + + This function can be used as a context manager:: + + >>> import warnings + >>> def api_call_v2(): + ... warnings.warn('use v3 of this api', DeprecationWarning) + ... return 200 + + >>> import pytest + >>> with pytest.deprecated_call(): + ... assert api_call_v2() == 200 + + It can also be used by passing a function and ``*args`` and ``**kwargs``, + in which case it will ensure calling ``func(*args, **kwargs)`` produces one of + the warnings types above. The return value is the return value of the function. + + In the context manager form you may use the keyword argument ``match`` to assert + that the warning matches a text or regex. + + The context manager produces a list of :class:`warnings.WarningMessage` objects, + one for each warning raised. + """ + __tracebackhide__ = True + if func is not None: + args = (func, *args) + return warns( + (DeprecationWarning, PendingDeprecationWarning, FutureWarning), *args, **kwargs + ) + + +@overload +def warns( + expected_warning: type[Warning] | tuple[type[Warning], ...] = ..., + *, + match: str | re.Pattern[str] | None = ..., +) -> WarningsChecker: ... + + +@overload +def warns( + expected_warning: type[Warning] | tuple[type[Warning], ...], + func: Callable[..., T], + *args: Any, + **kwargs: Any, +) -> T: ... + + +def warns( + expected_warning: type[Warning] | tuple[type[Warning], ...] = Warning, + *args: Any, + match: str | re.Pattern[str] | None = None, + **kwargs: Any, +) -> WarningsChecker | Any: + r"""Assert that code raises a particular class of warning. + + Specifically, the parameter ``expected_warning`` can be a warning class or tuple + of warning classes, and the code inside the ``with`` block must issue at least one + warning of that class or classes. + + This helper produces a list of :class:`warnings.WarningMessage` objects, one for + each warning emitted (regardless of whether it is an ``expected_warning`` or not). + Since pytest 8.0, unmatched warnings are also re-emitted when the context closes. + + This function can be used as a context manager:: + + >>> import pytest + >>> with pytest.warns(RuntimeWarning): + ... warnings.warn("my warning", RuntimeWarning) + + In the context manager form you may use the keyword argument ``match`` to assert + that the warning matches a text or regex:: + + >>> with pytest.warns(UserWarning, match='must be 0 or None'): + ... warnings.warn("value must be 0 or None", UserWarning) + + >>> with pytest.warns(UserWarning, match=r'must be \d+$'): + ... warnings.warn("value must be 42", UserWarning) + + >>> with pytest.warns(UserWarning): # catch re-emitted warning + ... with pytest.warns(UserWarning, match=r'must be \d+$'): + ... warnings.warn("this is not here", UserWarning) + Traceback (most recent call last): + ... + Failed: DID NOT WARN. No warnings of type ...UserWarning... were emitted... + + **Using with** ``pytest.mark.parametrize`` + + When using :ref:`pytest.mark.parametrize ref` it is possible to parametrize tests + such that some runs raise a warning and others do not. + + This could be achieved in the same way as with exceptions, see + :ref:`parametrizing_conditional_raising` for an example. + + """ + __tracebackhide__ = True + if not args: + if kwargs: + argnames = ", ".join(sorted(kwargs)) + raise TypeError( + f"Unexpected keyword arguments passed to pytest.warns: {argnames}" + "\nUse context-manager form instead?" + ) + return WarningsChecker(expected_warning, match_expr=match, _ispytest=True) + else: + func = args[0] + if not callable(func): + raise TypeError(f"{func!r} object (type: {type(func)}) must be callable") + with WarningsChecker(expected_warning, _ispytest=True): + return func(*args[1:], **kwargs) + + +class WarningsRecorder(warnings.catch_warnings): + """A context manager to record raised warnings. + + Each recorded warning is an instance of :class:`warnings.WarningMessage`. + + Adapted from `warnings.catch_warnings`. + + .. note:: + ``DeprecationWarning`` and ``PendingDeprecationWarning`` are treated + differently; see :ref:`ensuring_function_triggers`. + + """ + + def __init__(self, *, _ispytest: bool = False) -> None: + check_ispytest(_ispytest) + super().__init__(record=True) + self._entered = False + self._list: list[warnings.WarningMessage] = [] + + @property + def list(self) -> list[warnings.WarningMessage]: + """The list of recorded warnings.""" + return self._list + + def __getitem__(self, i: int) -> warnings.WarningMessage: + """Get a recorded warning by index.""" + return self._list[i] + + def __iter__(self) -> Iterator[warnings.WarningMessage]: + """Iterate through the recorded warnings.""" + return iter(self._list) + + def __len__(self) -> int: + """The number of recorded warnings.""" + return len(self._list) + + def pop(self, cls: type[Warning] = Warning) -> warnings.WarningMessage: + """Pop the first recorded warning which is an instance of ``cls``, + but not an instance of a child class of any other match. + Raises ``AssertionError`` if there is no match. + """ + best_idx: int | None = None + for i, w in enumerate(self._list): + if w.category == cls: + return self._list.pop(i) # exact match, stop looking + if issubclass(w.category, cls) and ( + best_idx is None + or not issubclass(w.category, self._list[best_idx].category) + ): + best_idx = i + if best_idx is not None: + return self._list.pop(best_idx) + __tracebackhide__ = True + raise AssertionError(f"{cls!r} not found in warning list") + + def clear(self) -> None: + """Clear the list of recorded warnings.""" + self._list[:] = [] + + # Type ignored because we basically want the `catch_warnings` generic type + # parameter to be ourselves but that is not possible(?). + def __enter__(self) -> Self: # type: ignore[override] + if self._entered: + __tracebackhide__ = True + raise RuntimeError(f"Cannot enter {self!r} twice") + _list = super().__enter__() + # record=True means it's None. + assert _list is not None + self._list = _list + warnings.simplefilter("always") + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + if not self._entered: + __tracebackhide__ = True + raise RuntimeError(f"Cannot exit {self!r} without entering first") + + super().__exit__(exc_type, exc_val, exc_tb) + + # Built-in catch_warnings does not reset entered state so we do it + # manually here for this context manager to become reusable. + self._entered = False + + +@final +class WarningsChecker(WarningsRecorder): + def __init__( + self, + expected_warning: type[Warning] | tuple[type[Warning], ...] = Warning, + match_expr: str | re.Pattern[str] | None = None, + *, + _ispytest: bool = False, + ) -> None: + check_ispytest(_ispytest) + super().__init__(_ispytest=True) + + msg = "exceptions must be derived from Warning, not %s" + if isinstance(expected_warning, tuple): + for exc in expected_warning: + if not issubclass(exc, Warning): + raise TypeError(msg % type(exc)) + expected_warning_tup = expected_warning + elif isinstance(expected_warning, type) and issubclass( + expected_warning, Warning + ): + expected_warning_tup = (expected_warning,) + else: + raise TypeError(msg % type(expected_warning)) + + self.expected_warning = expected_warning_tup + self.match_expr = match_expr + + def matches(self, warning: warnings.WarningMessage) -> bool: + assert self.expected_warning is not None + return issubclass(warning.category, self.expected_warning) and bool( + self.match_expr is None or re.search(self.match_expr, str(warning.message)) + ) + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + super().__exit__(exc_type, exc_val, exc_tb) + + __tracebackhide__ = True + + # BaseExceptions like pytest.{skip,fail,xfail,exit} or Ctrl-C within + # pytest.warns should *not* trigger "DID NOT WARN" and get suppressed + # when the warning doesn't happen. Control-flow exceptions should always + # propagate. + if exc_val is not None and ( + not isinstance(exc_val, Exception) + # Exit is an Exception, not a BaseException, for some reason. + or isinstance(exc_val, Exit) + ): + return + + def found_str() -> str: + return pformat([record.message for record in self], indent=2) + + try: + if not any(issubclass(w.category, self.expected_warning) for w in self): + fail( + f"DID NOT WARN. No warnings of type {self.expected_warning} were emitted.\n" + f" Emitted warnings: {found_str()}." + ) + elif not any(self.matches(w) for w in self): + fail( + f"DID NOT WARN. No warnings of type {self.expected_warning} matching the regex were emitted.\n" + f" Regex: {self.match_expr}\n" + f" Emitted warnings: {found_str()}." + ) + finally: + # Whether or not any warnings matched, we want to re-emit all unmatched warnings. + for w in self: + if not self.matches(w): + warnings.warn_explicit( + message=w.message, + category=w.category, + filename=w.filename, + lineno=w.lineno, + module=w.__module__, + source=w.source, + ) + + # Currently in Python it is possible to pass other types than an + # `str` message when creating `Warning` instances, however this + # causes an exception when :func:`warnings.filterwarnings` is used + # to filter those warnings. See + # https://github.com/python/cpython/issues/103577 for a discussion. + # While this can be considered a bug in CPython, we put guards in + # pytest as the error message produced without this check in place + # is confusing (#10865). + for w in self: + if type(w.message) is not UserWarning: + # If the warning was of an incorrect type then `warnings.warn()` + # creates a UserWarning. Any other warning must have been specified + # explicitly. + continue + if not w.message.args: + # UserWarning() without arguments must have been specified explicitly. + continue + msg = w.message.args[0] + if isinstance(msg, str): + continue + # It's possible that UserWarning was explicitly specified, and + # its first argument was not a string. But that case can't be + # distinguished from an invalid type. + raise TypeError( + f"Warning must be str or Warning, got {msg!r} (type {type(msg).__name__})" + ) diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/reports.py b/Backend/venv/lib/python3.12/site-packages/_pytest/reports.py new file mode 100644 index 00000000..011a69db --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/_pytest/reports.py @@ -0,0 +1,694 @@ +# mypy: allow-untyped-defs +from __future__ import annotations + +from collections.abc import Iterable +from collections.abc import Iterator +from collections.abc import Mapping +from collections.abc import Sequence +import dataclasses +from io import StringIO +import os +from pprint import pprint +import sys +from typing import Any +from typing import cast +from typing import final +from typing import Literal +from typing import NoReturn +from typing import TYPE_CHECKING + +from _pytest._code.code import ExceptionChainRepr +from _pytest._code.code import ExceptionInfo +from _pytest._code.code import ExceptionRepr +from _pytest._code.code import ReprEntry +from _pytest._code.code import ReprEntryNative +from _pytest._code.code import ReprExceptionInfo +from _pytest._code.code import ReprFileLocation +from _pytest._code.code import ReprFuncArgs +from _pytest._code.code import ReprLocals +from _pytest._code.code import ReprTraceback +from _pytest._code.code import TerminalRepr +from _pytest._io import TerminalWriter +from _pytest.config import Config +from _pytest.nodes import Collector +from _pytest.nodes import Item +from _pytest.outcomes import fail +from _pytest.outcomes import skip + + +if sys.version_info < (3, 11): + from exceptiongroup import BaseExceptionGroup + + +if TYPE_CHECKING: + from typing_extensions import Self + + from _pytest.runner import CallInfo + + +def getworkerinfoline(node): + try: + return node._workerinfocache + except AttributeError: + d = node.workerinfo + ver = "{}.{}.{}".format(*d["version_info"][:3]) + node._workerinfocache = s = "[{}] {} -- Python {} {}".format( + d["id"], d["sysplatform"], ver, d["executable"] + ) + return s + + +class BaseReport: + when: str | None + location: tuple[str, int | None, str] | None + longrepr: ( + None | ExceptionInfo[BaseException] | tuple[str, int, str] | str | TerminalRepr + ) + sections: list[tuple[str, str]] + nodeid: str + outcome: Literal["passed", "failed", "skipped"] + + def __init__(self, **kw: Any) -> None: + self.__dict__.update(kw) + + if TYPE_CHECKING: + # Can have arbitrary fields given to __init__(). + def __getattr__(self, key: str) -> Any: ... + + def toterminal(self, out: TerminalWriter) -> None: + if hasattr(self, "node"): + worker_info = getworkerinfoline(self.node) + if worker_info: + out.line(worker_info) + + longrepr = self.longrepr + if longrepr is None: + return + + if hasattr(longrepr, "toterminal"): + longrepr_terminal = cast(TerminalRepr, longrepr) + longrepr_terminal.toterminal(out) + else: + try: + s = str(longrepr) + except UnicodeEncodeError: + s = "" + out.line(s) + + def get_sections(self, prefix: str) -> Iterator[tuple[str, str]]: + for name, content in self.sections: + if name.startswith(prefix): + yield prefix, content + + @property + def longreprtext(self) -> str: + """Read-only property that returns the full string representation of + ``longrepr``. + + .. versionadded:: 3.0 + """ + file = StringIO() + tw = TerminalWriter(file) + tw.hasmarkup = False + self.toterminal(tw) + exc = file.getvalue() + return exc.strip() + + @property + def caplog(self) -> str: + """Return captured log lines, if log capturing is enabled. + + .. versionadded:: 3.5 + """ + return "\n".join( + content for (prefix, content) in self.get_sections("Captured log") + ) + + @property + def capstdout(self) -> str: + """Return captured text from stdout, if capturing is enabled. + + .. versionadded:: 3.0 + """ + return "".join( + content for (prefix, content) in self.get_sections("Captured stdout") + ) + + @property + def capstderr(self) -> str: + """Return captured text from stderr, if capturing is enabled. + + .. versionadded:: 3.0 + """ + return "".join( + content for (prefix, content) in self.get_sections("Captured stderr") + ) + + @property + def passed(self) -> bool: + """Whether the outcome is passed.""" + return self.outcome == "passed" + + @property + def failed(self) -> bool: + """Whether the outcome is failed.""" + return self.outcome == "failed" + + @property + def skipped(self) -> bool: + """Whether the outcome is skipped.""" + return self.outcome == "skipped" + + @property + def fspath(self) -> str: + """The path portion of the reported node, as a string.""" + return self.nodeid.split("::")[0] + + @property + def count_towards_summary(self) -> bool: + """**Experimental** Whether this report should be counted towards the + totals shown at the end of the test session: "1 passed, 1 failure, etc". + + .. note:: + + This function is considered **experimental**, so beware that it is subject to changes + even in patch releases. + """ + return True + + @property + def head_line(self) -> str | None: + """**Experimental** The head line shown with longrepr output for this + report, more commonly during traceback representation during + failures:: + + ________ Test.foo ________ + + + In the example above, the head_line is "Test.foo". + + .. note:: + + This function is considered **experimental**, so beware that it is subject to changes + even in patch releases. + """ + if self.location is not None: + _fspath, _lineno, domain = self.location + return domain + return None + + def _get_verbose_word_with_markup( + self, config: Config, default_markup: Mapping[str, bool] + ) -> tuple[str, Mapping[str, bool]]: + _category, _short, verbose = config.hook.pytest_report_teststatus( + report=self, config=config + ) + + if isinstance(verbose, str): + return verbose, default_markup + + if isinstance(verbose, Sequence) and len(verbose) == 2: + word, markup = verbose + if isinstance(word, str) and isinstance(markup, Mapping): + return word, markup + + fail( # pragma: no cover + "pytest_report_teststatus() hook (from a plugin) returned " + f"an invalid verbose value: {verbose!r}.\nExpected either a string " + "or a tuple of (word, markup)." + ) + + def _to_json(self) -> dict[str, Any]: + """Return the contents of this report as a dict of builtin entries, + suitable for serialization. + + This was originally the serialize_report() function from xdist (ca03269). + + Experimental method. + """ + return _report_to_json(self) + + @classmethod + def _from_json(cls, reportdict: dict[str, object]) -> Self: + """Create either a TestReport or CollectReport, depending on the calling class. + + It is the callers responsibility to know which class to pass here. + + This was originally the serialize_report() function from xdist (ca03269). + + Experimental method. + """ + kwargs = _report_kwargs_from_json(reportdict) + return cls(**kwargs) + + +def _report_unserialization_failure( + type_name: str, report_class: type[BaseReport], reportdict +) -> NoReturn: + url = "https://github.com/pytest-dev/pytest/issues" + stream = StringIO() + pprint("-" * 100, stream=stream) + pprint(f"INTERNALERROR: Unknown entry type returned: {type_name}", stream=stream) + pprint(f"report_name: {report_class}", stream=stream) + pprint(reportdict, stream=stream) + pprint(f"Please report this bug at {url}", stream=stream) + pprint("-" * 100, stream=stream) + raise RuntimeError(stream.getvalue()) + + +def _format_failed_longrepr( + item: Item, call: CallInfo[None], excinfo: ExceptionInfo[BaseException] +): + if call.when == "call": + longrepr = item.repr_failure(excinfo) + else: + # Exception in setup or teardown. + longrepr = item._repr_failure_py( + excinfo, style=item.config.getoption("tbstyle", "auto") + ) + return longrepr + + +def _format_exception_group_all_skipped_longrepr( + item: Item, + excinfo: ExceptionInfo[BaseExceptionGroup[BaseException | BaseExceptionGroup]], +) -> tuple[str, int, str]: + r = excinfo._getreprcrash() + assert r is not None, ( + "There should always be a traceback entry for skipping a test." + ) + if all( + getattr(skip, "_use_item_location", False) for skip in excinfo.value.exceptions + ): + path, line = item.reportinfo()[:2] + assert line is not None + loc = (os.fspath(path), line + 1) + default_msg = "skipped" + else: + loc = (str(r.path), r.lineno) + default_msg = r.message + + # Get all unique skip messages. + msgs: list[str] = [] + for exception in excinfo.value.exceptions: + m = getattr(exception, "msg", None) or ( + exception.args[0] if exception.args else None + ) + if m and m not in msgs: + msgs.append(m) + + reason = "; ".join(msgs) if msgs else default_msg + longrepr = (*loc, reason) + return longrepr + + +class TestReport(BaseReport): + """Basic test report object (also used for setup and teardown calls if + they fail). + + Reports can contain arbitrary extra attributes. + """ + + __test__ = False + + # Defined by skipping plugin. + # xfail reason if xfailed, otherwise not defined. Use hasattr to distinguish. + wasxfail: str + + def __init__( + self, + nodeid: str, + location: tuple[str, int | None, str], + keywords: Mapping[str, Any], + outcome: Literal["passed", "failed", "skipped"], + longrepr: None + | ExceptionInfo[BaseException] + | tuple[str, int, str] + | str + | TerminalRepr, + when: Literal["setup", "call", "teardown"], + sections: Iterable[tuple[str, str]] = (), + duration: float = 0, + start: float = 0, + stop: float = 0, + user_properties: Iterable[tuple[str, object]] | None = None, + **extra, + ) -> None: + #: Normalized collection nodeid. + self.nodeid = nodeid + + #: A (filesystempath, lineno, domaininfo) tuple indicating the + #: actual location of a test item - it might be different from the + #: collected one e.g. if a method is inherited from a different module. + #: The filesystempath may be relative to ``config.rootdir``. + #: The line number is 0-based. + self.location: tuple[str, int | None, str] = location + + #: A name -> value dictionary containing all keywords and + #: markers associated with a test invocation. + self.keywords: Mapping[str, Any] = keywords + + #: Test outcome, always one of "passed", "failed", "skipped". + self.outcome = outcome + + #: None or a failure representation. + self.longrepr = longrepr + + #: One of 'setup', 'call', 'teardown' to indicate runtest phase. + self.when: Literal["setup", "call", "teardown"] = when + + #: User properties is a list of tuples (name, value) that holds user + #: defined properties of the test. + self.user_properties = list(user_properties or []) + + #: Tuples of str ``(heading, content)`` with extra information + #: for the test report. Used by pytest to add text captured + #: from ``stdout``, ``stderr``, and intercepted logging events. May + #: be used by other plugins to add arbitrary information to reports. + self.sections = list(sections) + + #: Time it took to run just the test. + self.duration: float = duration + + #: The system time when the call started, in seconds since the epoch. + self.start: float = start + #: The system time when the call ended, in seconds since the epoch. + self.stop: float = stop + + self.__dict__.update(extra) + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} {self.nodeid!r} when={self.when!r} outcome={self.outcome!r}>" + + @classmethod + def from_item_and_call(cls, item: Item, call: CallInfo[None]) -> TestReport: + """Create and fill a TestReport with standard item and call info. + + :param item: The item. + :param call: The call info. + """ + when = call.when + # Remove "collect" from the Literal type -- only for collection calls. + assert when != "collect" + duration = call.duration + start = call.start + stop = call.stop + keywords = {x: 1 for x in item.keywords} + excinfo = call.excinfo + sections = [] + if not call.excinfo: + outcome: Literal["passed", "failed", "skipped"] = "passed" + longrepr: ( + None + | ExceptionInfo[BaseException] + | tuple[str, int, str] + | str + | TerminalRepr + ) = None + else: + if not isinstance(excinfo, ExceptionInfo): + outcome = "failed" + longrepr = excinfo + elif isinstance(excinfo.value, skip.Exception): + outcome = "skipped" + r = excinfo._getreprcrash() + assert r is not None, ( + "There should always be a traceback entry for skipping a test." + ) + if excinfo.value._use_item_location: + path, line = item.reportinfo()[:2] + assert line is not None + longrepr = (os.fspath(path), line + 1, r.message) + else: + longrepr = (str(r.path), r.lineno, r.message) + elif isinstance(excinfo.value, BaseExceptionGroup) and ( + excinfo.value.split(skip.Exception)[1] is None + ): + # All exceptions in the group are skip exceptions. + outcome = "skipped" + excinfo = cast( + ExceptionInfo[ + BaseExceptionGroup[BaseException | BaseExceptionGroup] + ], + excinfo, + ) + longrepr = _format_exception_group_all_skipped_longrepr(item, excinfo) + else: + outcome = "failed" + longrepr = _format_failed_longrepr(item, call, excinfo) + for rwhen, key, content in item._report_sections: + sections.append((f"Captured {key} {rwhen}", content)) + return cls( + item.nodeid, + item.location, + keywords, + outcome, + longrepr, + when, + sections, + duration, + start, + stop, + user_properties=item.user_properties, + ) + + +@final +class CollectReport(BaseReport): + """Collection report object. + + Reports can contain arbitrary extra attributes. + """ + + when = "collect" + + def __init__( + self, + nodeid: str, + outcome: Literal["passed", "failed", "skipped"], + longrepr: None + | ExceptionInfo[BaseException] + | tuple[str, int, str] + | str + | TerminalRepr, + result: list[Item | Collector] | None, + sections: Iterable[tuple[str, str]] = (), + **extra, + ) -> None: + #: Normalized collection nodeid. + self.nodeid = nodeid + + #: Test outcome, always one of "passed", "failed", "skipped". + self.outcome = outcome + + #: None or a failure representation. + self.longrepr = longrepr + + #: The collected items and collection nodes. + self.result = result or [] + + #: Tuples of str ``(heading, content)`` with extra information + #: for the test report. Used by pytest to add text captured + #: from ``stdout``, ``stderr``, and intercepted logging events. May + #: be used by other plugins to add arbitrary information to reports. + self.sections = list(sections) + + self.__dict__.update(extra) + + @property + def location( # type:ignore[override] + self, + ) -> tuple[str, int | None, str] | None: + return (self.fspath, None, self.fspath) + + def __repr__(self) -> str: + return f"" + + +class CollectErrorRepr(TerminalRepr): + def __init__(self, msg: str) -> None: + self.longrepr = msg + + def toterminal(self, out: TerminalWriter) -> None: + out.line(self.longrepr, red=True) + + +def pytest_report_to_serializable( + report: CollectReport | TestReport, +) -> dict[str, Any] | None: + if isinstance(report, TestReport | CollectReport): + data = report._to_json() + data["$report_type"] = report.__class__.__name__ + return data + # TODO: Check if this is actually reachable. + return None # type: ignore[unreachable] + + +def pytest_report_from_serializable( + data: dict[str, Any], +) -> CollectReport | TestReport | None: + if "$report_type" in data: + if data["$report_type"] == "TestReport": + return TestReport._from_json(data) + elif data["$report_type"] == "CollectReport": + return CollectReport._from_json(data) + assert False, "Unknown report_type unserialize data: {}".format( + data["$report_type"] + ) + return None + + +def _report_to_json(report: BaseReport) -> dict[str, Any]: + """Return the contents of this report as a dict of builtin entries, + suitable for serialization. + + This was originally the serialize_report() function from xdist (ca03269). + """ + + def serialize_repr_entry( + entry: ReprEntry | ReprEntryNative, + ) -> dict[str, Any]: + data = dataclasses.asdict(entry) + for key, value in data.items(): + if hasattr(value, "__dict__"): + data[key] = dataclasses.asdict(value) + entry_data = {"type": type(entry).__name__, "data": data} + return entry_data + + def serialize_repr_traceback(reprtraceback: ReprTraceback) -> dict[str, Any]: + result = dataclasses.asdict(reprtraceback) + result["reprentries"] = [ + serialize_repr_entry(x) for x in reprtraceback.reprentries + ] + return result + + def serialize_repr_crash( + reprcrash: ReprFileLocation | None, + ) -> dict[str, Any] | None: + if reprcrash is not None: + return dataclasses.asdict(reprcrash) + else: + return None + + def serialize_exception_longrepr(rep: BaseReport) -> dict[str, Any]: + assert rep.longrepr is not None + # TODO: Investigate whether the duck typing is really necessary here. + longrepr = cast(ExceptionRepr, rep.longrepr) + result: dict[str, Any] = { + "reprcrash": serialize_repr_crash(longrepr.reprcrash), + "reprtraceback": serialize_repr_traceback(longrepr.reprtraceback), + "sections": longrepr.sections, + } + if isinstance(longrepr, ExceptionChainRepr): + result["chain"] = [] + for repr_traceback, repr_crash, description in longrepr.chain: + result["chain"].append( + ( + serialize_repr_traceback(repr_traceback), + serialize_repr_crash(repr_crash), + description, + ) + ) + else: + result["chain"] = None + return result + + d = report.__dict__.copy() + if hasattr(report.longrepr, "toterminal"): + if hasattr(report.longrepr, "reprtraceback") and hasattr( + report.longrepr, "reprcrash" + ): + d["longrepr"] = serialize_exception_longrepr(report) + else: + d["longrepr"] = str(report.longrepr) + else: + d["longrepr"] = report.longrepr + for name in d: + if isinstance(d[name], os.PathLike): + d[name] = os.fspath(d[name]) + elif name == "result": + d[name] = None # for now + return d + + +def _report_kwargs_from_json(reportdict: dict[str, Any]) -> dict[str, Any]: + """Return **kwargs that can be used to construct a TestReport or + CollectReport instance. + + This was originally the serialize_report() function from xdist (ca03269). + """ + + def deserialize_repr_entry(entry_data): + data = entry_data["data"] + entry_type = entry_data["type"] + if entry_type == "ReprEntry": + reprfuncargs = None + reprfileloc = None + reprlocals = None + if data["reprfuncargs"]: + reprfuncargs = ReprFuncArgs(**data["reprfuncargs"]) + if data["reprfileloc"]: + reprfileloc = ReprFileLocation(**data["reprfileloc"]) + if data["reprlocals"]: + reprlocals = ReprLocals(data["reprlocals"]["lines"]) + + reprentry: ReprEntry | ReprEntryNative = ReprEntry( + lines=data["lines"], + reprfuncargs=reprfuncargs, + reprlocals=reprlocals, + reprfileloc=reprfileloc, + style=data["style"], + ) + elif entry_type == "ReprEntryNative": + reprentry = ReprEntryNative(data["lines"]) + else: + _report_unserialization_failure(entry_type, TestReport, reportdict) + return reprentry + + def deserialize_repr_traceback(repr_traceback_dict): + repr_traceback_dict["reprentries"] = [ + deserialize_repr_entry(x) for x in repr_traceback_dict["reprentries"] + ] + return ReprTraceback(**repr_traceback_dict) + + def deserialize_repr_crash(repr_crash_dict: dict[str, Any] | None): + if repr_crash_dict is not None: + return ReprFileLocation(**repr_crash_dict) + else: + return None + + if ( + reportdict["longrepr"] + and "reprcrash" in reportdict["longrepr"] + and "reprtraceback" in reportdict["longrepr"] + ): + reprtraceback = deserialize_repr_traceback( + reportdict["longrepr"]["reprtraceback"] + ) + reprcrash = deserialize_repr_crash(reportdict["longrepr"]["reprcrash"]) + if reportdict["longrepr"]["chain"]: + chain = [] + for repr_traceback_data, repr_crash_data, description in reportdict[ + "longrepr" + ]["chain"]: + chain.append( + ( + deserialize_repr_traceback(repr_traceback_data), + deserialize_repr_crash(repr_crash_data), + description, + ) + ) + exception_info: ExceptionChainRepr | ReprExceptionInfo = ExceptionChainRepr( + chain + ) + else: + exception_info = ReprExceptionInfo( + reprtraceback=reprtraceback, + reprcrash=reprcrash, + ) + + for section in reportdict["longrepr"]["sections"]: + exception_info.addsection(*section) + reportdict["longrepr"] = exception_info + + return reportdict diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/runner.py b/Backend/venv/lib/python3.12/site-packages/_pytest/runner.py new file mode 100644 index 00000000..9c20ff9e --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/_pytest/runner.py @@ -0,0 +1,580 @@ +# mypy: allow-untyped-defs +"""Basic collect and runtest protocol implementations.""" + +from __future__ import annotations + +import bdb +from collections.abc import Callable +import dataclasses +import os +import sys +import types +from typing import cast +from typing import final +from typing import Generic +from typing import Literal +from typing import TYPE_CHECKING +from typing import TypeVar + +from .config import Config +from .reports import BaseReport +from .reports import CollectErrorRepr +from .reports import CollectReport +from .reports import TestReport +from _pytest import timing +from _pytest._code.code import ExceptionChainRepr +from _pytest._code.code import ExceptionInfo +from _pytest._code.code import TerminalRepr +from _pytest.config.argparsing import Parser +from _pytest.deprecated import check_ispytest +from _pytest.nodes import Collector +from _pytest.nodes import Directory +from _pytest.nodes import Item +from _pytest.nodes import Node +from _pytest.outcomes import Exit +from _pytest.outcomes import OutcomeException +from _pytest.outcomes import Skipped +from _pytest.outcomes import TEST_OUTCOME + + +if sys.version_info < (3, 11): + from exceptiongroup import BaseExceptionGroup + +if TYPE_CHECKING: + from _pytest.main import Session + from _pytest.terminal import TerminalReporter + +# +# pytest plugin hooks. + + +def pytest_addoption(parser: Parser) -> None: + group = parser.getgroup("terminal reporting", "Reporting", after="general") + group.addoption( + "--durations", + action="store", + type=int, + default=None, + metavar="N", + help="Show N slowest setup/test durations (N=0 for all)", + ) + group.addoption( + "--durations-min", + action="store", + type=float, + default=None, + metavar="N", + help="Minimal duration in seconds for inclusion in slowest list. " + "Default: 0.005 (or 0.0 if -vv is given).", + ) + + +def pytest_terminal_summary(terminalreporter: TerminalReporter) -> None: + durations = terminalreporter.config.option.durations + durations_min = terminalreporter.config.option.durations_min + verbose = terminalreporter.config.get_verbosity() + if durations is None: + return + if durations_min is None: + durations_min = 0.005 if verbose < 2 else 0.0 + tr = terminalreporter + dlist = [] + for replist in tr.stats.values(): + for rep in replist: + if hasattr(rep, "duration"): + dlist.append(rep) + if not dlist: + return + dlist.sort(key=lambda x: x.duration, reverse=True) + if not durations: + tr.write_sep("=", "slowest durations") + else: + tr.write_sep("=", f"slowest {durations} durations") + dlist = dlist[:durations] + + for i, rep in enumerate(dlist): + if rep.duration < durations_min: + tr.write_line("") + message = f"({len(dlist) - i} durations < {durations_min:g}s hidden." + if terminalreporter.config.option.durations_min is None: + message += " Use -vv to show these durations." + message += ")" + tr.write_line(message) + break + tr.write_line(f"{rep.duration:02.2f}s {rep.when:<8} {rep.nodeid}") + + +def pytest_sessionstart(session: Session) -> None: + session._setupstate = SetupState() + + +def pytest_sessionfinish(session: Session) -> None: + session._setupstate.teardown_exact(None) + + +def pytest_runtest_protocol(item: Item, nextitem: Item | None) -> bool: + ihook = item.ihook + ihook.pytest_runtest_logstart(nodeid=item.nodeid, location=item.location) + runtestprotocol(item, nextitem=nextitem) + ihook.pytest_runtest_logfinish(nodeid=item.nodeid, location=item.location) + return True + + +def runtestprotocol( + item: Item, log: bool = True, nextitem: Item | None = None +) -> list[TestReport]: + hasrequest = hasattr(item, "_request") + if hasrequest and not item._request: # type: ignore[attr-defined] + # This only happens if the item is re-run, as is done by + # pytest-rerunfailures. + item._initrequest() # type: ignore[attr-defined] + rep = call_and_report(item, "setup", log) + reports = [rep] + if rep.passed: + if item.config.getoption("setupshow", False): + show_test_item(item) + if not item.config.getoption("setuponly", False): + reports.append(call_and_report(item, "call", log)) + # If the session is about to fail or stop, teardown everything - this is + # necessary to correctly report fixture teardown errors (see #11706) + if item.session.shouldfail or item.session.shouldstop: + nextitem = None + reports.append(call_and_report(item, "teardown", log, nextitem=nextitem)) + # After all teardown hooks have been called + # want funcargs and request info to go away. + if hasrequest: + item._request = False # type: ignore[attr-defined] + item.funcargs = None # type: ignore[attr-defined] + return reports + + +def show_test_item(item: Item) -> None: + """Show test function, parameters and the fixtures of the test item.""" + tw = item.config.get_terminal_writer() + tw.line() + tw.write(" " * 8) + tw.write(item.nodeid) + used_fixtures = sorted(getattr(item, "fixturenames", [])) + if used_fixtures: + tw.write(" (fixtures used: {})".format(", ".join(used_fixtures))) + tw.flush() + + +def pytest_runtest_setup(item: Item) -> None: + _update_current_test_var(item, "setup") + item.session._setupstate.setup(item) + + +def pytest_runtest_call(item: Item) -> None: + _update_current_test_var(item, "call") + try: + del sys.last_type + del sys.last_value + del sys.last_traceback + if sys.version_info >= (3, 12, 0): + del sys.last_exc # type:ignore[attr-defined] + except AttributeError: + pass + try: + item.runtest() + except Exception as e: + # Store trace info to allow postmortem debugging + sys.last_type = type(e) + sys.last_value = e + if sys.version_info >= (3, 12, 0): + sys.last_exc = e # type:ignore[attr-defined] + assert e.__traceback__ is not None + # Skip *this* frame + sys.last_traceback = e.__traceback__.tb_next + raise + + +def pytest_runtest_teardown(item: Item, nextitem: Item | None) -> None: + _update_current_test_var(item, "teardown") + item.session._setupstate.teardown_exact(nextitem) + _update_current_test_var(item, None) + + +def _update_current_test_var( + item: Item, when: Literal["setup", "call", "teardown"] | None +) -> None: + """Update :envvar:`PYTEST_CURRENT_TEST` to reflect the current item and stage. + + If ``when`` is None, delete ``PYTEST_CURRENT_TEST`` from the environment. + """ + var_name = "PYTEST_CURRENT_TEST" + if when: + value = f"{item.nodeid} ({when})" + # don't allow null bytes on environment variables (see #2644, #2957) + value = value.replace("\x00", "(null)") + os.environ[var_name] = value + else: + os.environ.pop(var_name) + + +def pytest_report_teststatus(report: BaseReport) -> tuple[str, str, str] | None: + if report.when in ("setup", "teardown"): + if report.failed: + # category, shortletter, verbose-word + return "error", "E", "ERROR" + elif report.skipped: + return "skipped", "s", "SKIPPED" + else: + return "", "", "" + return None + + +# +# Implementation + + +def call_and_report( + item: Item, when: Literal["setup", "call", "teardown"], log: bool = True, **kwds +) -> TestReport: + ihook = item.ihook + if when == "setup": + runtest_hook: Callable[..., None] = ihook.pytest_runtest_setup + elif when == "call": + runtest_hook = ihook.pytest_runtest_call + elif when == "teardown": + runtest_hook = ihook.pytest_runtest_teardown + else: + assert False, f"Unhandled runtest hook case: {when}" + + call = CallInfo.from_call( + lambda: runtest_hook(item=item, **kwds), + when=when, + reraise=get_reraise_exceptions(item.config), + ) + report: TestReport = ihook.pytest_runtest_makereport(item=item, call=call) + if log: + ihook.pytest_runtest_logreport(report=report) + if check_interactive_exception(call, report): + ihook.pytest_exception_interact(node=item, call=call, report=report) + return report + + +def get_reraise_exceptions(config: Config) -> tuple[type[BaseException], ...]: + """Return exception types that should not be suppressed in general.""" + reraise: tuple[type[BaseException], ...] = (Exit,) + if not config.getoption("usepdb", False): + reraise += (KeyboardInterrupt,) + return reraise + + +def check_interactive_exception(call: CallInfo[object], report: BaseReport) -> bool: + """Check whether the call raised an exception that should be reported as + interactive.""" + if call.excinfo is None: + # Didn't raise. + return False + if hasattr(report, "wasxfail"): + # Exception was expected. + return False + if isinstance(call.excinfo.value, Skipped | bdb.BdbQuit): + # Special control flow exception. + return False + return True + + +TResult = TypeVar("TResult", covariant=True) + + +@final +@dataclasses.dataclass +class CallInfo(Generic[TResult]): + """Result/Exception info of a function invocation.""" + + _result: TResult | None + #: The captured exception of the call, if it raised. + excinfo: ExceptionInfo[BaseException] | None + #: The system time when the call started, in seconds since the epoch. + start: float + #: The system time when the call ended, in seconds since the epoch. + stop: float + #: The call duration, in seconds. + duration: float + #: The context of invocation: "collect", "setup", "call" or "teardown". + when: Literal["collect", "setup", "call", "teardown"] + + def __init__( + self, + result: TResult | None, + excinfo: ExceptionInfo[BaseException] | None, + start: float, + stop: float, + duration: float, + when: Literal["collect", "setup", "call", "teardown"], + *, + _ispytest: bool = False, + ) -> None: + check_ispytest(_ispytest) + self._result = result + self.excinfo = excinfo + self.start = start + self.stop = stop + self.duration = duration + self.when = when + + @property + def result(self) -> TResult: + """The return value of the call, if it didn't raise. + + Can only be accessed if excinfo is None. + """ + if self.excinfo is not None: + raise AttributeError(f"{self!r} has no valid result") + # The cast is safe because an exception wasn't raised, hence + # _result has the expected function return type (which may be + # None, that's why a cast and not an assert). + return cast(TResult, self._result) + + @classmethod + def from_call( + cls, + func: Callable[[], TResult], + when: Literal["collect", "setup", "call", "teardown"], + reraise: type[BaseException] | tuple[type[BaseException], ...] | None = None, + ) -> CallInfo[TResult]: + """Call func, wrapping the result in a CallInfo. + + :param func: + The function to call. Called without arguments. + :type func: Callable[[], _pytest.runner.TResult] + :param when: + The phase in which the function is called. + :param reraise: + Exception or exceptions that shall propagate if raised by the + function, instead of being wrapped in the CallInfo. + """ + excinfo = None + instant = timing.Instant() + try: + result: TResult | None = func() + except BaseException: + excinfo = ExceptionInfo.from_current() + if reraise is not None and isinstance(excinfo.value, reraise): + raise + result = None + duration = instant.elapsed() + return cls( + start=duration.start.time, + stop=duration.stop.time, + duration=duration.seconds, + when=when, + result=result, + excinfo=excinfo, + _ispytest=True, + ) + + def __repr__(self) -> str: + if self.excinfo is None: + return f"" + return f"" + + +def pytest_runtest_makereport(item: Item, call: CallInfo[None]) -> TestReport: + return TestReport.from_item_and_call(item, call) + + +def pytest_make_collect_report(collector: Collector) -> CollectReport: + def collect() -> list[Item | Collector]: + # Before collecting, if this is a Directory, load the conftests. + # If a conftest import fails to load, it is considered a collection + # error of the Directory collector. This is why it's done inside of the + # CallInfo wrapper. + # + # Note: initial conftests are loaded early, not here. + if isinstance(collector, Directory): + collector.config.pluginmanager._loadconftestmodules( + collector.path, + collector.config.getoption("importmode"), + rootpath=collector.config.rootpath, + consider_namespace_packages=collector.config.getini( + "consider_namespace_packages" + ), + ) + + return list(collector.collect()) + + call = CallInfo.from_call( + collect, "collect", reraise=(KeyboardInterrupt, SystemExit) + ) + longrepr: None | tuple[str, int, str] | str | TerminalRepr = None + if not call.excinfo: + outcome: Literal["passed", "skipped", "failed"] = "passed" + else: + skip_exceptions = [Skipped] + unittest = sys.modules.get("unittest") + if unittest is not None: + skip_exceptions.append(unittest.SkipTest) + if isinstance(call.excinfo.value, tuple(skip_exceptions)): + outcome = "skipped" + r_ = collector._repr_failure_py(call.excinfo, "line") + assert isinstance(r_, ExceptionChainRepr), repr(r_) + r = r_.reprcrash + assert r + longrepr = (str(r.path), r.lineno, r.message) + else: + outcome = "failed" + errorinfo = collector.repr_failure(call.excinfo) + if not hasattr(errorinfo, "toterminal"): + assert isinstance(errorinfo, str) + errorinfo = CollectErrorRepr(errorinfo) + longrepr = errorinfo + result = call.result if not call.excinfo else None + rep = CollectReport(collector.nodeid, outcome, longrepr, result) + rep.call = call # type: ignore # see collect_one_node + return rep + + +class SetupState: + """Shared state for setting up/tearing down test items or collectors + in a session. + + Suppose we have a collection tree as follows: + + + + + + + + The SetupState maintains a stack. The stack starts out empty: + + [] + + During the setup phase of item1, setup(item1) is called. What it does + is: + + push session to stack, run session.setup() + push mod1 to stack, run mod1.setup() + push item1 to stack, run item1.setup() + + The stack is: + + [session, mod1, item1] + + While the stack is in this shape, it is allowed to add finalizers to + each of session, mod1, item1 using addfinalizer(). + + During the teardown phase of item1, teardown_exact(item2) is called, + where item2 is the next item to item1. What it does is: + + pop item1 from stack, run its teardowns + pop mod1 from stack, run its teardowns + + mod1 was popped because it ended its purpose with item1. The stack is: + + [session] + + During the setup phase of item2, setup(item2) is called. What it does + is: + + push mod2 to stack, run mod2.setup() + push item2 to stack, run item2.setup() + + Stack: + + [session, mod2, item2] + + During the teardown phase of item2, teardown_exact(None) is called, + because item2 is the last item. What it does is: + + pop item2 from stack, run its teardowns + pop mod2 from stack, run its teardowns + pop session from stack, run its teardowns + + Stack: + + [] + + The end! + """ + + def __init__(self) -> None: + # The stack is in the dict insertion order. + self.stack: dict[ + Node, + tuple[ + # Node's finalizers. + list[Callable[[], object]], + # Node's exception and original traceback, if its setup raised. + tuple[OutcomeException | Exception, types.TracebackType | None] | None, + ], + ] = {} + + def setup(self, item: Item) -> None: + """Setup objects along the collector chain to the item.""" + needed_collectors = item.listchain() + + # If a collector fails its setup, fail its entire subtree of items. + # The setup is not retried for each item - the same exception is used. + for col, (finalizers, exc) in self.stack.items(): + assert col in needed_collectors, "previous item was not torn down properly" + if exc: + raise exc[0].with_traceback(exc[1]) + + for col in needed_collectors[len(self.stack) :]: + assert col not in self.stack + # Push onto the stack. + self.stack[col] = ([col.teardown], None) + try: + col.setup() + except TEST_OUTCOME as exc: + self.stack[col] = (self.stack[col][0], (exc, exc.__traceback__)) + raise + + def addfinalizer(self, finalizer: Callable[[], object], node: Node) -> None: + """Attach a finalizer to the given node. + + The node must be currently active in the stack. + """ + assert node and not isinstance(node, tuple) + assert callable(finalizer) + assert node in self.stack, (node, self.stack) + self.stack[node][0].append(finalizer) + + def teardown_exact(self, nextitem: Item | None) -> None: + """Teardown the current stack up until reaching nodes that nextitem + also descends from. + + When nextitem is None (meaning we're at the last item), the entire + stack is torn down. + """ + needed_collectors = (nextitem and nextitem.listchain()) or [] + exceptions: list[BaseException] = [] + while self.stack: + if list(self.stack.keys()) == needed_collectors[: len(self.stack)]: + break + node, (finalizers, _) = self.stack.popitem() + these_exceptions = [] + while finalizers: + fin = finalizers.pop() + try: + fin() + except TEST_OUTCOME as e: + these_exceptions.append(e) + + if len(these_exceptions) == 1: + exceptions.extend(these_exceptions) + elif these_exceptions: + msg = f"errors while tearing down {node!r}" + exceptions.append(BaseExceptionGroup(msg, these_exceptions[::-1])) + + if len(exceptions) == 1: + raise exceptions[0] + elif exceptions: + raise BaseExceptionGroup("errors during test teardown", exceptions[::-1]) + if nextitem is None: + assert not self.stack + + +def collect_one_node(collector: Collector) -> CollectReport: + ihook = collector.ihook + ihook.pytest_collectstart(collector=collector) + rep: CollectReport = ihook.pytest_make_collect_report(collector=collector) + call = rep.__dict__.pop("call", None) + if call and check_interactive_exception(call, rep): + ihook.pytest_exception_interact(node=collector, call=call, report=rep) + return rep diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/scope.py b/Backend/venv/lib/python3.12/site-packages/_pytest/scope.py new file mode 100644 index 00000000..2b007e87 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/_pytest/scope.py @@ -0,0 +1,91 @@ +""" +Scope definition and related utilities. + +Those are defined here, instead of in the 'fixtures' module because +their use is spread across many other pytest modules, and centralizing it in 'fixtures' +would cause circular references. + +Also this makes the module light to import, as it should. +""" + +from __future__ import annotations + +from enum import Enum +from functools import total_ordering +from typing import Literal + + +_ScopeName = Literal["session", "package", "module", "class", "function"] + + +@total_ordering +class Scope(Enum): + """ + Represents one of the possible fixture scopes in pytest. + + Scopes are ordered from lower to higher, that is: + + ->>> higher ->>> + + Function < Class < Module < Package < Session + + <<<- lower <<<- + """ + + # Scopes need to be listed from lower to higher. + Function = "function" + Class = "class" + Module = "module" + Package = "package" + Session = "session" + + def next_lower(self) -> Scope: + """Return the next lower scope.""" + index = _SCOPE_INDICES[self] + if index == 0: + raise ValueError(f"{self} is the lower-most scope") + return _ALL_SCOPES[index - 1] + + def next_higher(self) -> Scope: + """Return the next higher scope.""" + index = _SCOPE_INDICES[self] + if index == len(_SCOPE_INDICES) - 1: + raise ValueError(f"{self} is the upper-most scope") + return _ALL_SCOPES[index + 1] + + def __lt__(self, other: Scope) -> bool: + self_index = _SCOPE_INDICES[self] + other_index = _SCOPE_INDICES[other] + return self_index < other_index + + @classmethod + def from_user( + cls, scope_name: _ScopeName, descr: str, where: str | None = None + ) -> Scope: + """ + Given a scope name from the user, return the equivalent Scope enum. Should be used + whenever we want to convert a user provided scope name to its enum object. + + If the scope name is invalid, construct a user friendly message and call pytest.fail. + """ + from _pytest.outcomes import fail + + try: + # Holding this reference is necessary for mypy at the moment. + scope = Scope(scope_name) + except ValueError: + fail( + "{} {}got an unexpected scope value '{}'".format( + descr, f"from {where} " if where else "", scope_name + ), + pytrace=False, + ) + return scope + + +_ALL_SCOPES = list(Scope) +_SCOPE_INDICES = {scope: index for index, scope in enumerate(_ALL_SCOPES)} + + +# Ordered list of scopes which can contain many tests (in practice all except Function). +HIGH_SCOPES = [x for x in Scope if x is not Scope.Function] diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/setuponly.py b/Backend/venv/lib/python3.12/site-packages/_pytest/setuponly.py new file mode 100644 index 00000000..7e6b46bc --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/_pytest/setuponly.py @@ -0,0 +1,98 @@ +from __future__ import annotations + +from collections.abc import Generator + +from _pytest._io.saferepr import saferepr +from _pytest.config import Config +from _pytest.config import ExitCode +from _pytest.config.argparsing import Parser +from _pytest.fixtures import FixtureDef +from _pytest.fixtures import SubRequest +from _pytest.scope import Scope +import pytest + + +def pytest_addoption(parser: Parser) -> None: + group = parser.getgroup("debugconfig") + group.addoption( + "--setuponly", + "--setup-only", + action="store_true", + help="Only setup fixtures, do not execute tests", + ) + group.addoption( + "--setupshow", + "--setup-show", + action="store_true", + help="Show setup of fixtures while executing tests", + ) + + +@pytest.hookimpl(wrapper=True) +def pytest_fixture_setup( + fixturedef: FixtureDef[object], request: SubRequest +) -> Generator[None, object, object]: + try: + return (yield) + finally: + if request.config.option.setupshow: + if hasattr(request, "param"): + # Save the fixture parameter so ._show_fixture_action() can + # display it now and during the teardown (in .finish()). + if fixturedef.ids: + if callable(fixturedef.ids): + param = fixturedef.ids(request.param) + else: + param = fixturedef.ids[request.param_index] + else: + param = request.param + fixturedef.cached_param = param # type: ignore[attr-defined] + _show_fixture_action(fixturedef, request.config, "SETUP") + + +def pytest_fixture_post_finalizer( + fixturedef: FixtureDef[object], request: SubRequest +) -> None: + if fixturedef.cached_result is not None: + config = request.config + if config.option.setupshow: + _show_fixture_action(fixturedef, request.config, "TEARDOWN") + if hasattr(fixturedef, "cached_param"): + del fixturedef.cached_param + + +def _show_fixture_action( + fixturedef: FixtureDef[object], config: Config, msg: str +) -> None: + capman = config.pluginmanager.getplugin("capturemanager") + if capman: + capman.suspend_global_capture() + + tw = config.get_terminal_writer() + tw.line() + # Use smaller indentation the higher the scope: Session = 0, Package = 1, etc. + scope_indent = list(reversed(Scope)).index(fixturedef._scope) + tw.write(" " * 2 * scope_indent) + + scopename = fixturedef.scope[0].upper() + tw.write(f"{msg:<8} {scopename} {fixturedef.argname}") + + if msg == "SETUP": + deps = sorted(arg for arg in fixturedef.argnames if arg != "request") + if deps: + tw.write(" (fixtures used: {})".format(", ".join(deps))) + + if hasattr(fixturedef, "cached_param"): + tw.write(f"[{saferepr(fixturedef.cached_param, maxsize=42)}]") + + tw.flush() + + if capman: + capman.resume_global_capture() + + +@pytest.hookimpl(tryfirst=True) +def pytest_cmdline_main(config: Config) -> int | ExitCode | None: + if config.option.setuponly: + config.option.setupshow = True + return None diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/setupplan.py b/Backend/venv/lib/python3.12/site-packages/_pytest/setupplan.py new file mode 100644 index 00000000..4e124cce --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/_pytest/setupplan.py @@ -0,0 +1,39 @@ +from __future__ import annotations + +from _pytest.config import Config +from _pytest.config import ExitCode +from _pytest.config.argparsing import Parser +from _pytest.fixtures import FixtureDef +from _pytest.fixtures import SubRequest +import pytest + + +def pytest_addoption(parser: Parser) -> None: + group = parser.getgroup("debugconfig") + group.addoption( + "--setupplan", + "--setup-plan", + action="store_true", + help="Show what fixtures and tests would be executed but " + "don't execute anything", + ) + + +@pytest.hookimpl(tryfirst=True) +def pytest_fixture_setup( + fixturedef: FixtureDef[object], request: SubRequest +) -> object | None: + # Will return a dummy fixture if the setuponly option is provided. + if request.config.option.setupplan: + my_cache_key = fixturedef.cache_key(request) + fixturedef.cached_result = (None, my_cache_key, None) + return fixturedef.cached_result + return None + + +@pytest.hookimpl(tryfirst=True) +def pytest_cmdline_main(config: Config) -> int | ExitCode | None: + if config.option.setupplan: + config.option.setuponly = True + config.option.setupshow = True + return None diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/skipping.py b/Backend/venv/lib/python3.12/site-packages/_pytest/skipping.py new file mode 100644 index 00000000..3b067629 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/_pytest/skipping.py @@ -0,0 +1,321 @@ +# mypy: allow-untyped-defs +"""Support for skip/xfail functions and markers.""" + +from __future__ import annotations + +from collections.abc import Generator +from collections.abc import Mapping +import dataclasses +import os +import platform +import sys +import traceback + +from _pytest.config import Config +from _pytest.config import hookimpl +from _pytest.config.argparsing import Parser +from _pytest.mark.structures import Mark +from _pytest.nodes import Item +from _pytest.outcomes import fail +from _pytest.outcomes import skip +from _pytest.outcomes import xfail +from _pytest.raises import AbstractRaises +from _pytest.reports import BaseReport +from _pytest.reports import TestReport +from _pytest.runner import CallInfo +from _pytest.stash import StashKey + + +def pytest_addoption(parser: Parser) -> None: + group = parser.getgroup("general") + group.addoption( + "--runxfail", + action="store_true", + dest="runxfail", + default=False, + help="Report the results of xfail tests as if they were not marked", + ) + + parser.addini( + "strict_xfail", + "Default for the strict parameter of xfail " + "markers when not given explicitly (default: False) (alias: xfail_strict)", + type="bool", + # None => fallback to `strict`. + default=None, + aliases=["xfail_strict"], + ) + + +def pytest_configure(config: Config) -> None: + if config.option.runxfail: + # yay a hack + import pytest + + old = pytest.xfail + config.add_cleanup(lambda: setattr(pytest, "xfail", old)) + + def nop(*args, **kwargs): + pass + + nop.Exception = xfail.Exception # type: ignore[attr-defined] + setattr(pytest, "xfail", nop) + + config.addinivalue_line( + "markers", + "skip(reason=None): skip the given test function with an optional reason. " + 'Example: skip(reason="no way of currently testing this") skips the ' + "test.", + ) + config.addinivalue_line( + "markers", + "skipif(condition, ..., *, reason=...): " + "skip the given test function if any of the conditions evaluate to True. " + "Example: skipif(sys.platform == 'win32') skips the test if we are on the win32 platform. " + "See https://docs.pytest.org/en/stable/reference/reference.html#pytest-mark-skipif", + ) + config.addinivalue_line( + "markers", + "xfail(condition, ..., *, reason=..., run=True, raises=None, strict=strict_xfail): " + "mark the test function as an expected failure if any of the conditions " + "evaluate to True. Optionally specify a reason for better reporting " + "and run=False if you don't even want to execute the test function. " + "If only specific exception(s) are expected, you can list them in " + "raises, and if the test fails in other ways, it will be reported as " + "a true failure. See https://docs.pytest.org/en/stable/reference/reference.html#pytest-mark-xfail", + ) + + +def evaluate_condition(item: Item, mark: Mark, condition: object) -> tuple[bool, str]: + """Evaluate a single skipif/xfail condition. + + If an old-style string condition is given, it is eval()'d, otherwise the + condition is bool()'d. If this fails, an appropriately formatted pytest.fail + is raised. + + Returns (result, reason). The reason is only relevant if the result is True. + """ + # String condition. + if isinstance(condition, str): + globals_ = { + "os": os, + "sys": sys, + "platform": platform, + "config": item.config, + } + for dictionary in reversed( + item.ihook.pytest_markeval_namespace(config=item.config) + ): + if not isinstance(dictionary, Mapping): + raise ValueError( + f"pytest_markeval_namespace() needs to return a dict, got {dictionary!r}" + ) + globals_.update(dictionary) + if hasattr(item, "obj"): + globals_.update(item.obj.__globals__) + try: + filename = f"<{mark.name} condition>" + condition_code = compile(condition, filename, "eval") + result = eval(condition_code, globals_) + except SyntaxError as exc: + msglines = [ + f"Error evaluating {mark.name!r} condition", + " " + condition, + " " + " " * (exc.offset or 0) + "^", + "SyntaxError: invalid syntax", + ] + fail("\n".join(msglines), pytrace=False) + except Exception as exc: + msglines = [ + f"Error evaluating {mark.name!r} condition", + " " + condition, + *traceback.format_exception_only(type(exc), exc), + ] + fail("\n".join(msglines), pytrace=False) + + # Boolean condition. + else: + try: + result = bool(condition) + except Exception as exc: + msglines = [ + f"Error evaluating {mark.name!r} condition as a boolean", + *traceback.format_exception_only(type(exc), exc), + ] + fail("\n".join(msglines), pytrace=False) + + reason = mark.kwargs.get("reason", None) + if reason is None: + if isinstance(condition, str): + reason = "condition: " + condition + else: + # XXX better be checked at collection time + msg = ( + f"Error evaluating {mark.name!r}: " + + "you need to specify reason=STRING when using booleans as conditions." + ) + fail(msg, pytrace=False) + + return result, reason + + +@dataclasses.dataclass(frozen=True) +class Skip: + """The result of evaluate_skip_marks().""" + + reason: str = "unconditional skip" + + +def evaluate_skip_marks(item: Item) -> Skip | None: + """Evaluate skip and skipif marks on item, returning Skip if triggered.""" + for mark in item.iter_markers(name="skipif"): + if "condition" not in mark.kwargs: + conditions = mark.args + else: + conditions = (mark.kwargs["condition"],) + + # Unconditional. + if not conditions: + reason = mark.kwargs.get("reason", "") + return Skip(reason) + + # If any of the conditions are true. + for condition in conditions: + result, reason = evaluate_condition(item, mark, condition) + if result: + return Skip(reason) + + for mark in item.iter_markers(name="skip"): + try: + return Skip(*mark.args, **mark.kwargs) + except TypeError as e: + raise TypeError(str(e) + " - maybe you meant pytest.mark.skipif?") from None + + return None + + +@dataclasses.dataclass(frozen=True) +class Xfail: + """The result of evaluate_xfail_marks().""" + + __slots__ = ("raises", "reason", "run", "strict") + + reason: str + run: bool + strict: bool + raises: ( + type[BaseException] + | tuple[type[BaseException], ...] + | AbstractRaises[BaseException] + | None + ) + + +def evaluate_xfail_marks(item: Item) -> Xfail | None: + """Evaluate xfail marks on item, returning Xfail if triggered.""" + for mark in item.iter_markers(name="xfail"): + run = mark.kwargs.get("run", True) + strict = mark.kwargs.get("strict") + if strict is None: + strict = item.config.getini("strict_xfail") + if strict is None: + strict = item.config.getini("strict") + raises = mark.kwargs.get("raises", None) + if "condition" not in mark.kwargs: + conditions = mark.args + else: + conditions = (mark.kwargs["condition"],) + + # Unconditional. + if not conditions: + reason = mark.kwargs.get("reason", "") + return Xfail(reason, run, strict, raises) + + # If any of the conditions are true. + for condition in conditions: + result, reason = evaluate_condition(item, mark, condition) + if result: + return Xfail(reason, run, strict, raises) + + return None + + +# Saves the xfail mark evaluation. Can be refreshed during call if None. +xfailed_key = StashKey[Xfail | None]() + + +@hookimpl(tryfirst=True) +def pytest_runtest_setup(item: Item) -> None: + skipped = evaluate_skip_marks(item) + if skipped: + raise skip.Exception(skipped.reason, _use_item_location=True) + + item.stash[xfailed_key] = xfailed = evaluate_xfail_marks(item) + if xfailed and not item.config.option.runxfail and not xfailed.run: + xfail("[NOTRUN] " + xfailed.reason) + + +@hookimpl(wrapper=True) +def pytest_runtest_call(item: Item) -> Generator[None]: + xfailed = item.stash.get(xfailed_key, None) + if xfailed is None: + item.stash[xfailed_key] = xfailed = evaluate_xfail_marks(item) + + if xfailed and not item.config.option.runxfail and not xfailed.run: + xfail("[NOTRUN] " + xfailed.reason) + + try: + return (yield) + finally: + # The test run may have added an xfail mark dynamically. + xfailed = item.stash.get(xfailed_key, None) + if xfailed is None: + item.stash[xfailed_key] = xfailed = evaluate_xfail_marks(item) + + +@hookimpl(wrapper=True) +def pytest_runtest_makereport( + item: Item, call: CallInfo[None] +) -> Generator[None, TestReport, TestReport]: + rep = yield + xfailed = item.stash.get(xfailed_key, None) + if item.config.option.runxfail: + pass # don't interfere + elif call.excinfo and isinstance(call.excinfo.value, xfail.Exception): + assert call.excinfo.value.msg is not None + rep.wasxfail = call.excinfo.value.msg + rep.outcome = "skipped" + elif not rep.skipped and xfailed: + if call.excinfo: + raises = xfailed.raises + if raises is None or ( + ( + isinstance(raises, type | tuple) + and isinstance(call.excinfo.value, raises) + ) + or ( + isinstance(raises, AbstractRaises) + and raises.matches(call.excinfo.value) + ) + ): + rep.outcome = "skipped" + rep.wasxfail = xfailed.reason + else: + rep.outcome = "failed" + elif call.when == "call": + if xfailed.strict: + rep.outcome = "failed" + rep.longrepr = "[XPASS(strict)] " + xfailed.reason + else: + rep.outcome = "passed" + rep.wasxfail = xfailed.reason + return rep + + +def pytest_report_teststatus(report: BaseReport) -> tuple[str, str, str] | None: + if hasattr(report, "wasxfail"): + if report.skipped: + return "xfailed", "x", "XFAIL" + elif report.passed: + return "xpassed", "X", "XPASS" + return None diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/stash.py b/Backend/venv/lib/python3.12/site-packages/_pytest/stash.py new file mode 100644 index 00000000..6a9ff884 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/_pytest/stash.py @@ -0,0 +1,116 @@ +from __future__ import annotations + +from typing import Any +from typing import cast +from typing import Generic +from typing import TypeVar + + +__all__ = ["Stash", "StashKey"] + + +T = TypeVar("T") +D = TypeVar("D") + + +class StashKey(Generic[T]): + """``StashKey`` is an object used as a key to a :class:`Stash`. + + A ``StashKey`` is associated with the type ``T`` of the value of the key. + + A ``StashKey`` is unique and cannot conflict with another key. + + .. versionadded:: 7.0 + """ + + __slots__ = () + + +class Stash: + r"""``Stash`` is a type-safe heterogeneous mutable mapping that + allows keys and value types to be defined separately from + where it (the ``Stash``) is created. + + Usually you will be given an object which has a ``Stash``, for example + :class:`~pytest.Config` or a :class:`~_pytest.nodes.Node`: + + .. code-block:: python + + stash: Stash = some_object.stash + + If a module or plugin wants to store data in this ``Stash``, it creates + :class:`StashKey`\s for its keys (at the module level): + + .. code-block:: python + + # At the top-level of the module + some_str_key = StashKey[str]() + some_bool_key = StashKey[bool]() + + To store information: + + .. code-block:: python + + # Value type must match the key. + stash[some_str_key] = "value" + stash[some_bool_key] = True + + To retrieve the information: + + .. code-block:: python + + # The static type of some_str is str. + some_str = stash[some_str_key] + # The static type of some_bool is bool. + some_bool = stash[some_bool_key] + + .. versionadded:: 7.0 + """ + + __slots__ = ("_storage",) + + def __init__(self) -> None: + self._storage: dict[StashKey[Any], object] = {} + + def __setitem__(self, key: StashKey[T], value: T) -> None: + """Set a value for key.""" + self._storage[key] = value + + def __getitem__(self, key: StashKey[T]) -> T: + """Get the value for key. + + Raises ``KeyError`` if the key wasn't set before. + """ + return cast(T, self._storage[key]) + + def get(self, key: StashKey[T], default: D) -> T | D: + """Get the value for key, or return default if the key wasn't set + before.""" + try: + return self[key] + except KeyError: + return default + + def setdefault(self, key: StashKey[T], default: T) -> T: + """Return the value of key if already set, otherwise set the value + of key to default and return default.""" + try: + return self[key] + except KeyError: + self[key] = default + return default + + def __delitem__(self, key: StashKey[T]) -> None: + """Delete the value for key. + + Raises ``KeyError`` if the key wasn't set before. + """ + del self._storage[key] + + def __contains__(self, key: StashKey[T]) -> bool: + """Return whether key was set.""" + return key in self._storage + + def __len__(self) -> int: + """Return how many items exist in the stash.""" + return len(self._storage) diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/stepwise.py b/Backend/venv/lib/python3.12/site-packages/_pytest/stepwise.py new file mode 100644 index 00000000..8901540e --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/_pytest/stepwise.py @@ -0,0 +1,209 @@ +from __future__ import annotations + +import dataclasses +from datetime import datetime +from datetime import timedelta +from typing import Any +from typing import TYPE_CHECKING + +from _pytest import nodes +from _pytest.cacheprovider import Cache +from _pytest.config import Config +from _pytest.config.argparsing import Parser +from _pytest.main import Session +from _pytest.reports import TestReport + + +if TYPE_CHECKING: + from typing_extensions import Self + +STEPWISE_CACHE_DIR = "cache/stepwise" + + +def pytest_addoption(parser: Parser) -> None: + group = parser.getgroup("general") + group.addoption( + "--sw", + "--stepwise", + action="store_true", + default=False, + dest="stepwise", + help="Exit on test failure and continue from last failing test next time", + ) + group.addoption( + "--sw-skip", + "--stepwise-skip", + action="store_true", + default=False, + dest="stepwise_skip", + help="Ignore the first failing test but stop on the next failing test. " + "Implicitly enables --stepwise.", + ) + group.addoption( + "--sw-reset", + "--stepwise-reset", + action="store_true", + default=False, + dest="stepwise_reset", + help="Resets stepwise state, restarting the stepwise workflow. " + "Implicitly enables --stepwise.", + ) + + +def pytest_configure(config: Config) -> None: + # --stepwise-skip/--stepwise-reset implies stepwise. + if config.option.stepwise_skip or config.option.stepwise_reset: + config.option.stepwise = True + if config.getoption("stepwise"): + config.pluginmanager.register(StepwisePlugin(config), "stepwiseplugin") + + +def pytest_sessionfinish(session: Session) -> None: + if not session.config.getoption("stepwise"): + assert session.config.cache is not None + if hasattr(session.config, "workerinput"): + # Do not update cache if this process is a xdist worker to prevent + # race conditions (#10641). + return + + +@dataclasses.dataclass +class StepwiseCacheInfo: + # The nodeid of the last failed test. + last_failed: str | None + + # The number of tests in the last time --stepwise was run. + # We use this information as a simple way to invalidate the cache information, avoiding + # confusing behavior in case the cache is stale. + last_test_count: int | None + + # The date when the cache was last updated, for information purposes only. + last_cache_date_str: str + + @property + def last_cache_date(self) -> datetime: + return datetime.fromisoformat(self.last_cache_date_str) + + @classmethod + def empty(cls) -> Self: + return cls( + last_failed=None, + last_test_count=None, + last_cache_date_str=datetime.now().isoformat(), + ) + + def update_date_to_now(self) -> None: + self.last_cache_date_str = datetime.now().isoformat() + + +class StepwisePlugin: + def __init__(self, config: Config) -> None: + self.config = config + self.session: Session | None = None + self.report_status: list[str] = [] + assert config.cache is not None + self.cache: Cache = config.cache + self.skip: bool = config.getoption("stepwise_skip") + self.reset: bool = config.getoption("stepwise_reset") + self.cached_info = self._load_cached_info() + + def _load_cached_info(self) -> StepwiseCacheInfo: + cached_dict: dict[str, Any] | None = self.cache.get(STEPWISE_CACHE_DIR, None) + if cached_dict: + try: + return StepwiseCacheInfo( + cached_dict["last_failed"], + cached_dict["last_test_count"], + cached_dict["last_cache_date_str"], + ) + except (KeyError, TypeError) as e: + error = f"{type(e).__name__}: {e}" + self.report_status.append(f"error reading cache, discarding ({error})") + + # Cache not found or error during load, return a new cache. + return StepwiseCacheInfo.empty() + + def pytest_sessionstart(self, session: Session) -> None: + self.session = session + + def pytest_collection_modifyitems( + self, config: Config, items: list[nodes.Item] + ) -> None: + last_test_count = self.cached_info.last_test_count + self.cached_info.last_test_count = len(items) + + if self.reset: + self.report_status.append("resetting state, not skipping.") + self.cached_info.last_failed = None + return + + if not self.cached_info.last_failed: + self.report_status.append("no previously failed tests, not skipping.") + return + + if last_test_count is not None and last_test_count != len(items): + self.report_status.append( + f"test count changed, not skipping (now {len(items)} tests, previously {last_test_count})." + ) + self.cached_info.last_failed = None + return + + # Check all item nodes until we find a match on last failed. + failed_index = None + for index, item in enumerate(items): + if item.nodeid == self.cached_info.last_failed: + failed_index = index + break + + # If the previously failed test was not found among the test items, + # do not skip any tests. + if failed_index is None: + self.report_status.append("previously failed test not found, not skipping.") + else: + cache_age = datetime.now() - self.cached_info.last_cache_date + # Round up to avoid showing microseconds. + cache_age = timedelta(seconds=int(cache_age.total_seconds())) + self.report_status.append( + f"skipping {failed_index} already passed items (cache from {cache_age} ago," + f" use --sw-reset to discard)." + ) + deselected = items[:failed_index] + del items[:failed_index] + config.hook.pytest_deselected(items=deselected) + + def pytest_runtest_logreport(self, report: TestReport) -> None: + if report.failed: + if self.skip: + # Remove test from the failed ones (if it exists) and unset the skip option + # to make sure the following tests will not be skipped. + if report.nodeid == self.cached_info.last_failed: + self.cached_info.last_failed = None + + self.skip = False + else: + # Mark test as the last failing and interrupt the test session. + self.cached_info.last_failed = report.nodeid + assert self.session is not None + self.session.shouldstop = ( + "Test failed, continuing from this test next run." + ) + + else: + # If the test was actually run and did pass. + if report.when == "call": + # Remove test from the failed ones, if exists. + if report.nodeid == self.cached_info.last_failed: + self.cached_info.last_failed = None + + def pytest_report_collectionfinish(self) -> list[str] | None: + if self.config.get_verbosity() >= 0 and self.report_status: + return [f"stepwise: {x}" for x in self.report_status] + return None + + def pytest_sessionfinish(self) -> None: + if hasattr(self.config, "workerinput"): + # Do not update cache if this process is a xdist worker to prevent + # race conditions (#10641). + return + self.cached_info.update_date_to_now() + self.cache.set(STEPWISE_CACHE_DIR, dataclasses.asdict(self.cached_info)) diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/subtests.py b/Backend/venv/lib/python3.12/site-packages/_pytest/subtests.py new file mode 100644 index 00000000..e0ceb27f --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/_pytest/subtests.py @@ -0,0 +1,411 @@ +"""Builtin plugin that adds subtests support.""" + +from __future__ import annotations + +from collections import defaultdict +from collections.abc import Callable +from collections.abc import Iterator +from collections.abc import Mapping +from contextlib import AbstractContextManager +from contextlib import contextmanager +from contextlib import ExitStack +from contextlib import nullcontext +import dataclasses +import time +from types import TracebackType +from typing import Any +from typing import TYPE_CHECKING + +import pluggy + +from _pytest._code import ExceptionInfo +from _pytest._io.saferepr import saferepr +from _pytest.capture import CaptureFixture +from _pytest.capture import FDCapture +from _pytest.capture import SysCapture +from _pytest.config import Config +from _pytest.config import hookimpl +from _pytest.config.argparsing import Parser +from _pytest.deprecated import check_ispytest +from _pytest.fixtures import fixture +from _pytest.fixtures import SubRequest +from _pytest.logging import catching_logs +from _pytest.logging import LogCaptureHandler +from _pytest.logging import LoggingPlugin +from _pytest.reports import TestReport +from _pytest.runner import CallInfo +from _pytest.runner import check_interactive_exception +from _pytest.runner import get_reraise_exceptions +from _pytest.stash import StashKey + + +if TYPE_CHECKING: + from typing_extensions import Self + + +def pytest_addoption(parser: Parser) -> None: + Config._add_verbosity_ini( + parser, + Config.VERBOSITY_SUBTESTS, + help=( + "Specify verbosity level for subtests. " + "Higher levels will generate output for passed subtests. Failed subtests are always reported." + ), + ) + + +@dataclasses.dataclass(frozen=True, slots=True, kw_only=True) +class SubtestContext: + """The values passed to Subtests.test() that are included in the test report.""" + + msg: str | None + kwargs: Mapping[str, Any] + + def _to_json(self) -> dict[str, Any]: + return dataclasses.asdict(self) + + @classmethod + def _from_json(cls, d: dict[str, Any]) -> Self: + return cls(msg=d["msg"], kwargs=d["kwargs"]) + + +@dataclasses.dataclass(init=False) +class SubtestReport(TestReport): + context: SubtestContext + + @property + def head_line(self) -> str: + _, _, domain = self.location + return f"{domain} {self._sub_test_description()}" + + def _sub_test_description(self) -> str: + parts = [] + if self.context.msg is not None: + parts.append(f"[{self.context.msg}]") + if self.context.kwargs: + params_desc = ", ".join( + f"{k}={saferepr(v)}" for (k, v) in self.context.kwargs.items() + ) + parts.append(f"({params_desc})") + return " ".join(parts) or "()" + + def _to_json(self) -> dict[str, Any]: + data = super()._to_json() + del data["context"] + data["_report_type"] = "SubTestReport" + data["_subtest.context"] = self.context._to_json() + return data + + @classmethod + def _from_json(cls, reportdict: dict[str, Any]) -> SubtestReport: + report = super()._from_json(reportdict) + report.context = SubtestContext._from_json(reportdict["_subtest.context"]) + return report + + @classmethod + def _new( + cls, + test_report: TestReport, + context: SubtestContext, + captured_output: Captured | None, + captured_logs: CapturedLogs | None, + ) -> Self: + result = super()._from_json(test_report._to_json()) + result.context = context + + if captured_output: + if captured_output.out: + result.sections.append(("Captured stdout call", captured_output.out)) + if captured_output.err: + result.sections.append(("Captured stderr call", captured_output.err)) + + if captured_logs and (log := captured_logs.handler.stream.getvalue()): + result.sections.append(("Captured log call", log)) + + return result + + +@fixture +def subtests(request: SubRequest) -> Subtests: + """Provides subtests functionality.""" + capmam = request.node.config.pluginmanager.get_plugin("capturemanager") + suspend_capture_ctx = ( + capmam.global_and_fixture_disabled if capmam is not None else nullcontext + ) + return Subtests(request.node.ihook, suspend_capture_ctx, request, _ispytest=True) + + +class Subtests: + """Subtests fixture, enables declaring subtests inside test functions via the :meth:`test` method.""" + + def __init__( + self, + ihook: pluggy.HookRelay, + suspend_capture_ctx: Callable[[], AbstractContextManager[None]], + request: SubRequest, + *, + _ispytest: bool = False, + ) -> None: + check_ispytest(_ispytest) + self._ihook = ihook + self._suspend_capture_ctx = suspend_capture_ctx + self._request = request + + def test( + self, + msg: str | None = None, + **kwargs: Any, + ) -> _SubTestContextManager: + """ + Context manager for subtests, capturing exceptions raised inside the subtest scope and + reporting assertion failures and errors individually. + + Usage + ----- + + .. code-block:: python + + def test(subtests): + for i in range(5): + with subtests.test("custom message", i=i): + assert i % 2 == 0 + + :param msg: + If given, the message will be shown in the test report in case of subtest failure. + + :param kwargs: + Arbitrary values that are also added to the subtest report. + """ + return _SubTestContextManager( + self._ihook, + msg, + kwargs, + request=self._request, + suspend_capture_ctx=self._suspend_capture_ctx, + config=self._request.config, + ) + + +@dataclasses.dataclass +class _SubTestContextManager: + """ + Context manager for subtests, capturing exceptions raised inside the subtest scope and handling + them through the pytest machinery. + """ + + # Note: initially the logic for this context manager was implemented directly + # in Subtests.test() as a @contextmanager, however, it is not possible to control the output fully when + # exiting from it due to an exception when in `--exitfirst` mode, so this was refactored into an + # explicit context manager class (pytest-dev/pytest-subtests#134). + + ihook: pluggy.HookRelay + msg: str | None + kwargs: dict[str, Any] + suspend_capture_ctx: Callable[[], AbstractContextManager[None]] + request: SubRequest + config: Config + + def __enter__(self) -> None: + __tracebackhide__ = True + + self._start = time.time() + self._precise_start = time.perf_counter() + self._exc_info = None + + self._exit_stack = ExitStack() + self._captured_output = self._exit_stack.enter_context( + capturing_output(self.request) + ) + self._captured_logs = self._exit_stack.enter_context( + capturing_logs(self.request) + ) + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool: + __tracebackhide__ = True + if exc_val is not None: + exc_info = ExceptionInfo.from_exception(exc_val) + else: + exc_info = None + + self._exit_stack.close() + + precise_stop = time.perf_counter() + duration = precise_stop - self._precise_start + stop = time.time() + + call_info = CallInfo[None]( + None, + exc_info, + start=self._start, + stop=stop, + duration=duration, + when="call", + _ispytest=True, + ) + report = self.ihook.pytest_runtest_makereport( + item=self.request.node, call=call_info + ) + sub_report = SubtestReport._new( + report, + SubtestContext(msg=self.msg, kwargs=self.kwargs), + captured_output=self._captured_output, + captured_logs=self._captured_logs, + ) + + if sub_report.failed: + failed_subtests = self.config.stash[failed_subtests_key] + failed_subtests[self.request.node.nodeid] += 1 + + with self.suspend_capture_ctx(): + self.ihook.pytest_runtest_logreport(report=sub_report) + + if check_interactive_exception(call_info, sub_report): + self.ihook.pytest_exception_interact( + node=self.request.node, call=call_info, report=sub_report + ) + + if exc_val is not None: + if isinstance(exc_val, get_reraise_exceptions(self.config)): + return False + if self.request.session.shouldfail: + return False + return True + + +@contextmanager +def capturing_output(request: SubRequest) -> Iterator[Captured]: + option = request.config.getoption("capture", None) + + capman = request.config.pluginmanager.getplugin("capturemanager") + if getattr(capman, "_capture_fixture", None): + # capsys or capfd are active, subtest should not capture. + fixture = None + elif option == "sys": + fixture = CaptureFixture(SysCapture, request, _ispytest=True) + elif option == "fd": + fixture = CaptureFixture(FDCapture, request, _ispytest=True) + else: + fixture = None + + if fixture is not None: + fixture._start() + + captured = Captured() + try: + yield captured + finally: + if fixture is not None: + out, err = fixture.readouterr() + fixture.close() + captured.out = out + captured.err = err + + +@contextmanager +def capturing_logs( + request: SubRequest, +) -> Iterator[CapturedLogs | None]: + logging_plugin: LoggingPlugin | None = request.config.pluginmanager.getplugin( + "logging-plugin" + ) + if logging_plugin is None: + yield None + else: + handler = LogCaptureHandler() + handler.setFormatter(logging_plugin.formatter) + + captured_logs = CapturedLogs(handler) + with catching_logs(handler, level=logging_plugin.log_level): + yield captured_logs + + +@dataclasses.dataclass +class Captured: + out: str = "" + err: str = "" + + +@dataclasses.dataclass +class CapturedLogs: + handler: LogCaptureHandler + + +def pytest_report_to_serializable(report: TestReport) -> dict[str, Any] | None: + if isinstance(report, SubtestReport): + return report._to_json() + return None + + +def pytest_report_from_serializable(data: dict[str, Any]) -> SubtestReport | None: + if data.get("_report_type") == "SubTestReport": + return SubtestReport._from_json(data) + return None + + +# Dict of nodeid -> number of failed subtests. +# Used to fail top-level tests that passed but contain failed subtests. +failed_subtests_key = StashKey[defaultdict[str, int]]() + + +def pytest_configure(config: Config) -> None: + config.stash[failed_subtests_key] = defaultdict(lambda: 0) + + +@hookimpl(tryfirst=True) +def pytest_report_teststatus( + report: TestReport, + config: Config, +) -> tuple[str, str, str | Mapping[str, bool]] | None: + if report.when != "call": + return None + + quiet = config.get_verbosity(Config.VERBOSITY_SUBTESTS) == 0 + if isinstance(report, SubtestReport): + outcome = report.outcome + description = report._sub_test_description() + + if hasattr(report, "wasxfail"): + if quiet: + return "", "", "" + elif outcome == "skipped": + category = "xfailed" + short = "y" # x letter is used for regular xfail, y for subtest xfail + status = "SUBXFAIL" + # outcome == "passed" in an xfail is only possible via a @pytest.mark.xfail mark, which + # is not applicable to a subtest, which only handles pytest.xfail(). + else: # pragma: no cover + # This should not normally happen, unless some plugin is setting wasxfail without + # the correct outcome. Pytest expects the call outcome to be either skipped or + # passed in case of xfail. + # Let's pass this report to the next hook. + return None + return category, short, f"{status}{description}" + + if report.failed: + return outcome, "u", f"SUBFAILED{description}" + else: + if report.passed: + if quiet: + return "", "", "" + else: + return f"subtests {outcome}", "u", f"SUBPASSED{description}" + elif report.skipped: + if quiet: + return "", "", "" + else: + return outcome, "-", f"SUBSKIPPED{description}" + + else: + failed_subtests_count = config.stash[failed_subtests_key][report.nodeid] + # Top-level test, fail if it contains failed subtests and it has passed. + if report.passed and failed_subtests_count > 0: + report.outcome = "failed" + suffix = "s" if failed_subtests_count > 1 else "" + report.longrepr = f"contains {failed_subtests_count} failed subtest{suffix}" + + return None diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/terminal.py b/Backend/venv/lib/python3.12/site-packages/_pytest/terminal.py new file mode 100644 index 00000000..4517b05b --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/_pytest/terminal.py @@ -0,0 +1,1770 @@ +# mypy: allow-untyped-defs +"""Terminal reporting of the full testing process. + +This is a good source for looking at the various reporting hooks. +""" + +from __future__ import annotations + +import argparse +from collections import Counter +from collections.abc import Callable +from collections.abc import Generator +from collections.abc import Mapping +from collections.abc import Sequence +import dataclasses +import datetime +from functools import partial +import inspect +import os +from pathlib import Path +import platform +import sys +import textwrap +from typing import Any +from typing import ClassVar +from typing import final +from typing import Literal +from typing import NamedTuple +from typing import TextIO +from typing import TYPE_CHECKING +import warnings + +import pluggy + +from _pytest import compat +from _pytest import nodes +from _pytest import timing +from _pytest._code import ExceptionInfo +from _pytest._code.code import ExceptionRepr +from _pytest._io import TerminalWriter +from _pytest._io.wcwidth import wcswidth +import _pytest._version +from _pytest.compat import running_on_ci +from _pytest.config import _PluggyPlugin +from _pytest.config import Config +from _pytest.config import ExitCode +from _pytest.config import hookimpl +from _pytest.config.argparsing import Parser +from _pytest.nodes import Item +from _pytest.nodes import Node +from _pytest.pathlib import absolutepath +from _pytest.pathlib import bestrelpath +from _pytest.reports import BaseReport +from _pytest.reports import CollectReport +from _pytest.reports import TestReport + + +if TYPE_CHECKING: + from _pytest.main import Session + + +REPORT_COLLECTING_RESOLUTION = 0.5 + +KNOWN_TYPES = ( + "failed", + "passed", + "skipped", + "deselected", + "xfailed", + "xpassed", + "warnings", + "error", + "subtests passed", + "subtests failed", + "subtests skipped", +) + +_REPORTCHARS_DEFAULT = "fE" + + +class MoreQuietAction(argparse.Action): + """A modified copy of the argparse count action which counts down and updates + the legacy quiet attribute at the same time. + + Used to unify verbosity handling. + """ + + def __init__( + self, + option_strings: Sequence[str], + dest: str, + default: object = None, + required: bool = False, + help: str | None = None, + ) -> None: + super().__init__( + option_strings=option_strings, + dest=dest, + nargs=0, + default=default, + required=required, + help=help, + ) + + def __call__( + self, + parser: argparse.ArgumentParser, + namespace: argparse.Namespace, + values: str | Sequence[object] | None, + option_string: str | None = None, + ) -> None: + new_count = getattr(namespace, self.dest, 0) - 1 + setattr(namespace, self.dest, new_count) + # todo Deprecate config.quiet + namespace.quiet = getattr(namespace, "quiet", 0) + 1 + + +class TestShortLogReport(NamedTuple): + """Used to store the test status result category, shortletter and verbose word. + For example ``"rerun", "R", ("RERUN", {"yellow": True})``. + + :ivar category: + The class of result, for example ``“passed”``, ``“skipped”``, ``“error”``, or the empty string. + + :ivar letter: + The short letter shown as testing progresses, for example ``"."``, ``"s"``, ``"E"``, or the empty string. + + :ivar word: + Verbose word is shown as testing progresses in verbose mode, for example ``"PASSED"``, ``"SKIPPED"``, + ``"ERROR"``, or the empty string. + """ + + category: str + letter: str + word: str | tuple[str, Mapping[str, bool]] + + +def pytest_addoption(parser: Parser) -> None: + group = parser.getgroup("terminal reporting", "Reporting", after="general") + group._addoption( # private to use reserved lower-case short option + "-v", + "--verbose", + action="count", + default=0, + dest="verbose", + help="Increase verbosity", + ) + group.addoption( + "--no-header", + action="store_true", + default=False, + dest="no_header", + help="Disable header", + ) + group.addoption( + "--no-summary", + action="store_true", + default=False, + dest="no_summary", + help="Disable summary", + ) + group.addoption( + "--no-fold-skipped", + action="store_false", + dest="fold_skipped", + default=True, + help="Do not fold skipped tests in short summary.", + ) + group.addoption( + "--force-short-summary", + action="store_true", + dest="force_short_summary", + default=False, + help="Force condensed summary output regardless of verbosity level.", + ) + group._addoption( # private to use reserved lower-case short option + "-q", + "--quiet", + action=MoreQuietAction, + default=0, + dest="verbose", + help="Decrease verbosity", + ) + group.addoption( + "--verbosity", + dest="verbose", + type=int, + default=0, + help="Set verbosity. Default: 0.", + ) + group._addoption( # private to use reserved lower-case short option + "-r", + action="store", + dest="reportchars", + default=_REPORTCHARS_DEFAULT, + metavar="chars", + help="Show extra test summary info as specified by chars: (f)ailed, " + "(E)rror, (s)kipped, (x)failed, (X)passed, " + "(p)assed, (P)assed with output, (a)ll except passed (p/P), or (A)ll. " + "(w)arnings are enabled by default (see --disable-warnings), " + "'N' can be used to reset the list. (default: 'fE').", + ) + group.addoption( + "--disable-warnings", + "--disable-pytest-warnings", + default=False, + dest="disable_warnings", + action="store_true", + help="Disable warnings summary", + ) + group._addoption( # private to use reserved lower-case short option + "-l", + "--showlocals", + action="store_true", + dest="showlocals", + default=False, + help="Show locals in tracebacks (disabled by default)", + ) + group.addoption( + "--no-showlocals", + action="store_false", + dest="showlocals", + help="Hide locals in tracebacks (negate --showlocals passed through addopts)", + ) + group.addoption( + "--tb", + metavar="style", + action="store", + dest="tbstyle", + default="auto", + choices=["auto", "long", "short", "no", "line", "native"], + help="Traceback print mode (auto/long/short/line/native/no)", + ) + group.addoption( + "--xfail-tb", + action="store_true", + dest="xfail_tb", + default=False, + help="Show tracebacks for xfail (as long as --tb != no)", + ) + group.addoption( + "--show-capture", + action="store", + dest="showcapture", + choices=["no", "stdout", "stderr", "log", "all"], + default="all", + help="Controls how captured stdout/stderr/log is shown on failed tests. " + "Default: all.", + ) + group.addoption( + "--fulltrace", + "--full-trace", + action="store_true", + default=False, + help="Don't cut any tracebacks (default is to cut)", + ) + group.addoption( + "--color", + metavar="color", + action="store", + dest="color", + default="auto", + choices=["yes", "no", "auto"], + help="Color terminal output (yes/no/auto)", + ) + group.addoption( + "--code-highlight", + default="yes", + choices=["yes", "no"], + help="Whether code should be highlighted (only if --color is also enabled). " + "Default: yes.", + ) + + parser.addini( + "console_output_style", + help='Console output: "classic", or with additional progress information ' + '("progress" (percentage) | "count" | "progress-even-when-capture-no" (forces ' + "progress even when capture=no)", + default="progress", + ) + Config._add_verbosity_ini( + parser, + Config.VERBOSITY_TEST_CASES, + help=( + "Specify a verbosity level for test case execution, overriding the main level. " + "Higher levels will provide more detailed information about each test case executed." + ), + ) + + +def pytest_configure(config: Config) -> None: + reporter = TerminalReporter(config, sys.stdout) + config.pluginmanager.register(reporter, "terminalreporter") + if config.option.debug or config.option.traceconfig: + + def mywriter(tags, args): + msg = " ".join(map(str, args)) + reporter.write_line("[traceconfig] " + msg) + + config.trace.root.setprocessor("pytest:config", mywriter) + + if reporter.isatty(): + # Some terminals interpret OSC 9;4 as desktop notification, + # skip on those we know (#13896). + should_skip_terminal_progress = ( + # iTerm2 (reported on version 3.6.5). + "ITERM_SESSION_ID" in os.environ + ) + if not should_skip_terminal_progress: + plugin = TerminalProgressPlugin(reporter) + config.pluginmanager.register(plugin, "terminalprogress") + + +def getreportopt(config: Config) -> str: + reportchars: str = config.option.reportchars + + old_aliases = {"F", "S"} + reportopts = "" + for char in reportchars: + if char in old_aliases: + char = char.lower() + if char == "a": + reportopts = "sxXEf" + elif char == "A": + reportopts = "PpsxXEf" + elif char == "N": + reportopts = "" + elif char not in reportopts: + reportopts += char + + if not config.option.disable_warnings and "w" not in reportopts: + reportopts = "w" + reportopts + elif config.option.disable_warnings and "w" in reportopts: + reportopts = reportopts.replace("w", "") + + return reportopts + + +@hookimpl(trylast=True) # after _pytest.runner +def pytest_report_teststatus(report: BaseReport) -> tuple[str, str, str]: + letter = "F" + if report.passed: + letter = "." + elif report.skipped: + letter = "s" + + outcome: str = report.outcome + if report.when in ("collect", "setup", "teardown") and outcome == "failed": + outcome = "error" + letter = "E" + + return outcome, letter, outcome.upper() + + +@dataclasses.dataclass +class WarningReport: + """Simple structure to hold warnings information captured by ``pytest_warning_recorded``. + + :ivar str message: + User friendly message about the warning. + :ivar str|None nodeid: + nodeid that generated the warning (see ``get_location``). + :ivar tuple fslocation: + File system location of the source of the warning (see ``get_location``). + """ + + message: str + nodeid: str | None = None + fslocation: tuple[str, int] | None = None + + count_towards_summary: ClassVar = True + + def get_location(self, config: Config) -> str | None: + """Return the more user-friendly information about the location of a warning, or None.""" + if self.nodeid: + return self.nodeid + if self.fslocation: + filename, linenum = self.fslocation + relpath = bestrelpath(config.invocation_params.dir, absolutepath(filename)) + return f"{relpath}:{linenum}" + return None + + +@final +class TerminalReporter: + def __init__(self, config: Config, file: TextIO | None = None) -> None: + import _pytest.config + + self.config = config + self._numcollected = 0 + self._session: Session | None = None + self._showfspath: bool | None = None + + self.stats: dict[str, list[Any]] = {} + self._main_color: str | None = None + self._known_types: list[str] | None = None + self.startpath = config.invocation_params.dir + if file is None: + file = sys.stdout + self._tw = _pytest.config.create_terminal_writer(config, file) + self._screen_width = self._tw.fullwidth + self.currentfspath: None | Path | str | int = None + self.reportchars = getreportopt(config) + self.foldskipped = config.option.fold_skipped + self.hasmarkup = self._tw.hasmarkup + # isatty should be a method but was wrongly implemented as a boolean. + # We use CallableBool here to support both. + self.isatty = compat.CallableBool(file.isatty()) + self._progress_nodeids_reported: set[str] = set() + self._timing_nodeids_reported: set[str] = set() + self._show_progress_info = self._determine_show_progress_info() + self._collect_report_last_write = timing.Instant() + self._already_displayed_warnings: int | None = None + self._keyboardinterrupt_memo: ExceptionRepr | None = None + + def _determine_show_progress_info( + self, + ) -> Literal["progress", "count", "times", False]: + """Return whether we should display progress information based on the current config.""" + # do not show progress if we are not capturing output (#3038) unless explicitly + # overridden by progress-even-when-capture-no + if ( + self.config.getoption("capture", "no") == "no" + and self.config.getini("console_output_style") + != "progress-even-when-capture-no" + ): + return False + # do not show progress if we are showing fixture setup/teardown + if self.config.getoption("setupshow", False): + return False + cfg: str = self.config.getini("console_output_style") + if cfg in {"progress", "progress-even-when-capture-no"}: + return "progress" + elif cfg == "count": + return "count" + elif cfg == "times": + return "times" + else: + return False + + @property + def verbosity(self) -> int: + verbosity: int = self.config.option.verbose + return verbosity + + @property + def showheader(self) -> bool: + return self.verbosity >= 0 + + @property + def no_header(self) -> bool: + return bool(self.config.option.no_header) + + @property + def no_summary(self) -> bool: + return bool(self.config.option.no_summary) + + @property + def showfspath(self) -> bool: + if self._showfspath is None: + return self.config.get_verbosity(Config.VERBOSITY_TEST_CASES) >= 0 + return self._showfspath + + @showfspath.setter + def showfspath(self, value: bool | None) -> None: + self._showfspath = value + + @property + def showlongtestinfo(self) -> bool: + return self.config.get_verbosity(Config.VERBOSITY_TEST_CASES) > 0 + + @property + def reported_progress(self) -> int: + """The amount of items reported in the progress so far. + + :meta private: + """ + return len(self._progress_nodeids_reported) + + def hasopt(self, char: str) -> bool: + char = {"xfailed": "x", "skipped": "s"}.get(char, char) + return char in self.reportchars + + def write_fspath_result(self, nodeid: str, res: str, **markup: bool) -> None: + fspath = self.config.rootpath / nodeid.split("::")[0] + if self.currentfspath is None or fspath != self.currentfspath: + if self.currentfspath is not None and self._show_progress_info: + self._write_progress_information_filling_space() + self.currentfspath = fspath + relfspath = bestrelpath(self.startpath, fspath) + self._tw.line() + self._tw.write(relfspath + " ") + self._tw.write(res, flush=True, **markup) + + def write_ensure_prefix(self, prefix: str, extra: str = "", **kwargs) -> None: + if self.currentfspath != prefix: + self._tw.line() + self.currentfspath = prefix + self._tw.write(prefix) + if extra: + self._tw.write(extra, **kwargs) + self.currentfspath = -2 + + def ensure_newline(self) -> None: + if self.currentfspath: + self._tw.line() + self.currentfspath = None + + def wrap_write( + self, + content: str, + *, + flush: bool = False, + margin: int = 8, + line_sep: str = "\n", + **markup: bool, + ) -> None: + """Wrap message with margin for progress info.""" + width_of_current_line = self._tw.width_of_current_line + wrapped = line_sep.join( + textwrap.wrap( + " " * width_of_current_line + content, + width=self._screen_width - margin, + drop_whitespace=True, + replace_whitespace=False, + ), + ) + wrapped = wrapped[width_of_current_line:] + self._tw.write(wrapped, flush=flush, **markup) + + def write(self, content: str, *, flush: bool = False, **markup: bool) -> None: + self._tw.write(content, flush=flush, **markup) + + def write_raw(self, content: str, *, flush: bool = False) -> None: + self._tw.write_raw(content, flush=flush) + + def flush(self) -> None: + self._tw.flush() + + def write_line(self, line: str | bytes, **markup: bool) -> None: + if not isinstance(line, str): + line = str(line, errors="replace") + self.ensure_newline() + self._tw.line(line, **markup) + + def rewrite(self, line: str, **markup: bool) -> None: + """Rewinds the terminal cursor to the beginning and writes the given line. + + :param erase: + If True, will also add spaces until the full terminal width to ensure + previous lines are properly erased. + + The rest of the keyword arguments are markup instructions. + """ + erase = markup.pop("erase", False) + if erase: + fill_count = self._tw.fullwidth - len(line) - 1 + fill = " " * fill_count + else: + fill = "" + line = str(line) + self._tw.write("\r" + line + fill, **markup) + + def write_sep( + self, + sep: str, + title: str | None = None, + fullwidth: int | None = None, + **markup: bool, + ) -> None: + self.ensure_newline() + self._tw.sep(sep, title, fullwidth, **markup) + + def section(self, title: str, sep: str = "=", **kw: bool) -> None: + self._tw.sep(sep, title, **kw) + + def line(self, msg: str, **kw: bool) -> None: + self._tw.line(msg, **kw) + + def _add_stats(self, category: str, items: Sequence[Any]) -> None: + set_main_color = category not in self.stats + self.stats.setdefault(category, []).extend(items) + if set_main_color: + self._set_main_color() + + def pytest_internalerror(self, excrepr: ExceptionRepr) -> bool: + for line in str(excrepr).split("\n"): + self.write_line("INTERNALERROR> " + line) + return True + + def pytest_warning_recorded( + self, + warning_message: warnings.WarningMessage, + nodeid: str, + ) -> None: + from _pytest.warnings import warning_record_to_str + + fslocation = warning_message.filename, warning_message.lineno + message = warning_record_to_str(warning_message) + + warning_report = WarningReport( + fslocation=fslocation, message=message, nodeid=nodeid + ) + self._add_stats("warnings", [warning_report]) + + def pytest_plugin_registered(self, plugin: _PluggyPlugin) -> None: + if self.config.option.traceconfig: + msg = f"PLUGIN registered: {plugin}" + # XXX This event may happen during setup/teardown time + # which unfortunately captures our output here + # which garbles our output if we use self.write_line. + self.write_line(msg) + + def pytest_deselected(self, items: Sequence[Item]) -> None: + self._add_stats("deselected", items) + + def pytest_runtest_logstart( + self, nodeid: str, location: tuple[str, int | None, str] + ) -> None: + fspath, lineno, domain = location + # Ensure that the path is printed before the + # 1st test of a module starts running. + if self.showlongtestinfo: + line = self._locationline(nodeid, fspath, lineno, domain) + self.write_ensure_prefix(line, "") + self.flush() + elif self.showfspath: + self.write_fspath_result(nodeid, "") + self.flush() + + def pytest_runtest_logreport(self, report: TestReport) -> None: + self._tests_ran = True + rep = report + + res = TestShortLogReport( + *self.config.hook.pytest_report_teststatus(report=rep, config=self.config) + ) + category, letter, word = res.category, res.letter, res.word + if not isinstance(word, tuple): + markup = None + else: + word, markup = word + self._add_stats(category, [rep]) + if not letter and not word: + # Probably passed setup/teardown. + return + if markup is None: + was_xfail = hasattr(report, "wasxfail") + if rep.passed and not was_xfail: + markup = {"green": True} + elif rep.passed and was_xfail: + markup = {"yellow": True} + elif rep.failed: + markup = {"red": True} + elif rep.skipped: + markup = {"yellow": True} + else: + markup = {} + self._progress_nodeids_reported.add(rep.nodeid) + if self.config.get_verbosity(Config.VERBOSITY_TEST_CASES) <= 0: + self._tw.write(letter, **markup) + # When running in xdist, the logreport and logfinish of multiple + # items are interspersed, e.g. `logreport`, `logreport`, + # `logfinish`, `logfinish`. To avoid the "past edge" calculation + # from getting confused and overflowing (#7166), do the past edge + # printing here and not in logfinish, except for the 100% which + # should only be printed after all teardowns are finished. + if self._show_progress_info and not self._is_last_item: + self._write_progress_information_if_past_edge() + else: + line = self._locationline(rep.nodeid, *rep.location) + running_xdist = hasattr(rep, "node") + if not running_xdist: + self.write_ensure_prefix(line, word, **markup) + if rep.skipped or hasattr(report, "wasxfail"): + reason = _get_raw_skip_reason(rep) + if self.config.get_verbosity(Config.VERBOSITY_TEST_CASES) < 2: + available_width = ( + (self._tw.fullwidth - self._tw.width_of_current_line) + - len(" [100%]") + - 1 + ) + formatted_reason = _format_trimmed( + " ({})", reason, available_width + ) + else: + formatted_reason = f" ({reason})" + + if reason and formatted_reason is not None: + self.wrap_write(formatted_reason) + if self._show_progress_info: + self._write_progress_information_filling_space() + else: + self.ensure_newline() + self._tw.write(f"[{rep.node.gateway.id}]") + if self._show_progress_info: + self._tw.write( + self._get_progress_information_message() + " ", cyan=True + ) + else: + self._tw.write(" ") + self._tw.write(word, **markup) + self._tw.write(" " + line) + self.currentfspath = -2 + self.flush() + + @property + def _is_last_item(self) -> bool: + assert self._session is not None + return self.reported_progress == self._session.testscollected + + @hookimpl(wrapper=True) + def pytest_runtestloop(self) -> Generator[None, object, object]: + result = yield + + # Write the final/100% progress -- deferred until the loop is complete. + if ( + self.config.get_verbosity(Config.VERBOSITY_TEST_CASES) <= 0 + and self._show_progress_info + and self.reported_progress + ): + self._write_progress_information_filling_space() + + return result + + def _get_progress_information_message(self) -> str: + assert self._session + collected = self._session.testscollected + if self._show_progress_info == "count": + if collected: + progress = self.reported_progress + counter_format = f"{{:{len(str(collected))}d}}" + format_string = f" [{counter_format}/{{}}]" + return format_string.format(progress, collected) + return f" [ {collected} / {collected} ]" + if self._show_progress_info == "times": + if not collected: + return "" + all_reports = ( + self._get_reports_to_display("passed") + + self._get_reports_to_display("xpassed") + + self._get_reports_to_display("failed") + + self._get_reports_to_display("xfailed") + + self._get_reports_to_display("skipped") + + self._get_reports_to_display("error") + + self._get_reports_to_display("") + ) + current_location = all_reports[-1].location[0] + not_reported = [ + r for r in all_reports if r.nodeid not in self._timing_nodeids_reported + ] + tests_in_module = sum( + i.location[0] == current_location for i in self._session.items + ) + tests_completed = sum( + r.when == "setup" + for r in not_reported + if r.location[0] == current_location + ) + last_in_module = tests_completed == tests_in_module + if self.showlongtestinfo or last_in_module: + self._timing_nodeids_reported.update(r.nodeid for r in not_reported) + return format_node_duration( + sum(r.duration for r in not_reported if isinstance(r, TestReport)) + ) + return "" + if collected: + return f" [{self.reported_progress * 100 // collected:3d}%]" + return " [100%]" + + def _write_progress_information_if_past_edge(self) -> None: + w = self._width_of_current_line + if self._show_progress_info == "count": + assert self._session + num_tests = self._session.testscollected + progress_length = len(f" [{num_tests}/{num_tests}]") + elif self._show_progress_info == "times": + progress_length = len(" 99h 59m") + else: + progress_length = len(" [100%]") + past_edge = w + progress_length + 1 >= self._screen_width + if past_edge: + main_color, _ = self._get_main_color() + msg = self._get_progress_information_message() + self._tw.write(msg + "\n", **{main_color: True}) + + def _write_progress_information_filling_space(self) -> None: + color, _ = self._get_main_color() + msg = self._get_progress_information_message() + w = self._width_of_current_line + fill = self._tw.fullwidth - w - 1 + self.write(msg.rjust(fill), flush=True, **{color: True}) + + @property + def _width_of_current_line(self) -> int: + """Return the width of the current line.""" + return self._tw.width_of_current_line + + def pytest_collection(self) -> None: + if self.isatty(): + if self.config.option.verbose >= 0: + self.write("collecting ... ", flush=True, bold=True) + elif self.config.option.verbose >= 1: + self.write("collecting ... ", flush=True, bold=True) + + def pytest_collectreport(self, report: CollectReport) -> None: + if report.failed: + self._add_stats("error", [report]) + elif report.skipped: + self._add_stats("skipped", [report]) + items = [x for x in report.result if isinstance(x, Item)] + self._numcollected += len(items) + if self.isatty(): + self.report_collect() + + def report_collect(self, final: bool = False) -> None: + if self.config.option.verbose < 0: + return + + if not final: + # Only write the "collecting" report every `REPORT_COLLECTING_RESOLUTION`. + if ( + self._collect_report_last_write.elapsed().seconds + < REPORT_COLLECTING_RESOLUTION + ): + return + self._collect_report_last_write = timing.Instant() + + errors = len(self.stats.get("error", [])) + skipped = len(self.stats.get("skipped", [])) + deselected = len(self.stats.get("deselected", [])) + selected = self._numcollected - deselected + line = "collected " if final else "collecting " + line += ( + str(self._numcollected) + " item" + ("" if self._numcollected == 1 else "s") + ) + if errors: + line += f" / {errors} error{'s' if errors != 1 else ''}" + if deselected: + line += f" / {deselected} deselected" + if skipped: + line += f" / {skipped} skipped" + if self._numcollected > selected: + line += f" / {selected} selected" + if self.isatty(): + self.rewrite(line, bold=True, erase=True) + if final: + self.write("\n") + else: + self.write_line(line) + + @hookimpl(trylast=True) + def pytest_sessionstart(self, session: Session) -> None: + self._session = session + self._session_start = timing.Instant() + if not self.showheader: + return + self.write_sep("=", "test session starts", bold=True) + verinfo = platform.python_version() + if not self.no_header: + msg = f"platform {sys.platform} -- Python {verinfo}" + pypy_version_info = getattr(sys, "pypy_version_info", None) + if pypy_version_info: + verinfo = ".".join(map(str, pypy_version_info[:3])) + msg += f"[pypy-{verinfo}-{pypy_version_info[3]}]" + msg += f", pytest-{_pytest._version.version}, pluggy-{pluggy.__version__}" + if ( + self.verbosity > 0 + or self.config.option.debug + or getattr(self.config.option, "pastebin", None) + ): + msg += " -- " + str(sys.executable) + self.write_line(msg) + lines = self.config.hook.pytest_report_header( + config=self.config, start_path=self.startpath + ) + self._write_report_lines_from_hooks(lines) + + def _write_report_lines_from_hooks( + self, lines: Sequence[str | Sequence[str]] + ) -> None: + for line_or_lines in reversed(lines): + if isinstance(line_or_lines, str): + self.write_line(line_or_lines) + else: + for line in line_or_lines: + self.write_line(line) + + def pytest_report_header(self, config: Config) -> list[str]: + result = [f"rootdir: {config.rootpath}"] + + if config.inipath: + warning = "" + if config._ignored_config_files: + warning = f" (WARNING: ignoring pytest config in {', '.join(config._ignored_config_files)}!)" + result.append( + "configfile: " + bestrelpath(config.rootpath, config.inipath) + warning + ) + + if config.args_source == Config.ArgsSource.TESTPATHS: + testpaths: list[str] = config.getini("testpaths") + result.append("testpaths: {}".format(", ".join(testpaths))) + + plugininfo = config.pluginmanager.list_plugin_distinfo() + if plugininfo: + result.append( + "plugins: {}".format(", ".join(_plugin_nameversions(plugininfo))) + ) + return result + + def pytest_collection_finish(self, session: Session) -> None: + self.report_collect(True) + + lines = self.config.hook.pytest_report_collectionfinish( + config=self.config, + start_path=self.startpath, + items=session.items, + ) + self._write_report_lines_from_hooks(lines) + + if self.config.getoption("collectonly"): + if session.items: + if self.config.option.verbose > -1: + self._tw.line("") + self._printcollecteditems(session.items) + + failed = self.stats.get("failed") + if failed: + self._tw.sep("!", "collection failures") + for rep in failed: + rep.toterminal(self._tw) + + def _printcollecteditems(self, items: Sequence[Item]) -> None: + test_cases_verbosity = self.config.get_verbosity(Config.VERBOSITY_TEST_CASES) + if test_cases_verbosity < 0: + if test_cases_verbosity < -1: + counts = Counter(item.nodeid.split("::", 1)[0] for item in items) + for name, count in sorted(counts.items()): + self._tw.line(f"{name}: {count}") + else: + for item in items: + self._tw.line(item.nodeid) + return + stack: list[Node] = [] + indent = "" + for item in items: + needed_collectors = item.listchain()[1:] # strip root node + while stack: + if stack == needed_collectors[: len(stack)]: + break + stack.pop() + for col in needed_collectors[len(stack) :]: + stack.append(col) + indent = (len(stack) - 1) * " " + self._tw.line(f"{indent}{col}") + if test_cases_verbosity >= 1: + obj = getattr(col, "obj", None) + doc = inspect.getdoc(obj) if obj else None + if doc: + for line in doc.splitlines(): + self._tw.line("{}{}".format(indent + " ", line)) + + @hookimpl(wrapper=True) + def pytest_sessionfinish( + self, session: Session, exitstatus: int | ExitCode + ) -> Generator[None]: + result = yield + self._tw.line("") + summary_exit_codes = ( + ExitCode.OK, + ExitCode.TESTS_FAILED, + ExitCode.INTERRUPTED, + ExitCode.USAGE_ERROR, + ExitCode.NO_TESTS_COLLECTED, + ) + if exitstatus in summary_exit_codes and not self.no_summary: + self.config.hook.pytest_terminal_summary( + terminalreporter=self, exitstatus=exitstatus, config=self.config + ) + if session.shouldfail: + self.write_sep("!", str(session.shouldfail), red=True) + if exitstatus == ExitCode.INTERRUPTED: + self._report_keyboardinterrupt() + self._keyboardinterrupt_memo = None + elif session.shouldstop: + self.write_sep("!", str(session.shouldstop), red=True) + self.summary_stats() + return result + + @hookimpl(wrapper=True) + def pytest_terminal_summary(self) -> Generator[None]: + self.summary_errors() + self.summary_failures() + self.summary_xfailures() + self.summary_warnings() + self.summary_passes() + self.summary_xpasses() + try: + return (yield) + finally: + self.short_test_summary() + # Display any extra warnings from teardown here (if any). + self.summary_warnings() + + def pytest_keyboard_interrupt(self, excinfo: ExceptionInfo[BaseException]) -> None: + self._keyboardinterrupt_memo = excinfo.getrepr(funcargs=True) + + def pytest_unconfigure(self) -> None: + if self._keyboardinterrupt_memo is not None: + self._report_keyboardinterrupt() + + def _report_keyboardinterrupt(self) -> None: + excrepr = self._keyboardinterrupt_memo + assert excrepr is not None + assert excrepr.reprcrash is not None + msg = excrepr.reprcrash.message + self.write_sep("!", msg) + if "KeyboardInterrupt" in msg: + if self.config.option.fulltrace: + excrepr.toterminal(self._tw) + else: + excrepr.reprcrash.toterminal(self._tw) + self._tw.line( + "(to show a full traceback on KeyboardInterrupt use --full-trace)", + yellow=True, + ) + + def _locationline( + self, nodeid: str, fspath: str, lineno: int | None, domain: str + ) -> str: + def mkrel(nodeid: str) -> str: + line = self.config.cwd_relative_nodeid(nodeid) + if domain and line.endswith(domain): + line = line[: -len(domain)] + values = domain.split("[") + values[0] = values[0].replace(".", "::") # don't replace '.' in params + line += "[".join(values) + return line + + # fspath comes from testid which has a "/"-normalized path. + if fspath: + res = mkrel(nodeid) + if self.verbosity >= 2 and nodeid.split("::")[0] != fspath.replace( + "\\", nodes.SEP + ): + res += " <- " + bestrelpath(self.startpath, Path(fspath)) + else: + res = "[location]" + return res + " " + + def _getfailureheadline(self, rep): + head_line = rep.head_line + if head_line: + return head_line + return "test session" # XXX? + + def _getcrashline(self, rep): + try: + return str(rep.longrepr.reprcrash) + except AttributeError: + try: + return str(rep.longrepr)[:50] + except AttributeError: + return "" + + # + # Summaries for sessionfinish. + # + def getreports(self, name: str): + return [x for x in self.stats.get(name, ()) if not hasattr(x, "_pdbshown")] + + def summary_warnings(self) -> None: + if self.hasopt("w"): + all_warnings: list[WarningReport] | None = self.stats.get("warnings") + if not all_warnings: + return + + final = self._already_displayed_warnings is not None + if final: + warning_reports = all_warnings[self._already_displayed_warnings :] + else: + warning_reports = all_warnings + self._already_displayed_warnings = len(warning_reports) + if not warning_reports: + return + + reports_grouped_by_message: dict[str, list[WarningReport]] = {} + for wr in warning_reports: + reports_grouped_by_message.setdefault(wr.message, []).append(wr) + + def collapsed_location_report(reports: list[WarningReport]) -> str: + locations = [] + for w in reports: + location = w.get_location(self.config) + if location: + locations.append(location) + + if len(locations) < 10: + return "\n".join(map(str, locations)) + + counts_by_filename = Counter( + str(loc).split("::", 1)[0] for loc in locations + ) + return "\n".join( + "{}: {} warning{}".format(k, v, "s" if v > 1 else "") + for k, v in counts_by_filename.items() + ) + + title = "warnings summary (final)" if final else "warnings summary" + self.write_sep("=", title, yellow=True, bold=False) + for message, message_reports in reports_grouped_by_message.items(): + maybe_location = collapsed_location_report(message_reports) + if maybe_location: + self._tw.line(maybe_location) + lines = message.splitlines() + indented = "\n".join(" " + x for x in lines) + message = indented.rstrip() + else: + message = message.rstrip() + self._tw.line(message) + self._tw.line() + self._tw.line( + "-- Docs: https://docs.pytest.org/en/stable/how-to/capture-warnings.html" + ) + + def summary_passes(self) -> None: + self.summary_passes_combined("passed", "PASSES", "P") + + def summary_xpasses(self) -> None: + self.summary_passes_combined("xpassed", "XPASSES", "X") + + def summary_passes_combined( + self, which_reports: str, sep_title: str, needed_opt: str + ) -> None: + if self.config.option.tbstyle != "no": + if self.hasopt(needed_opt): + reports: list[TestReport] = self.getreports(which_reports) + if not reports: + return + self.write_sep("=", sep_title) + for rep in reports: + if rep.sections: + msg = self._getfailureheadline(rep) + self.write_sep("_", msg, green=True, bold=True) + self._outrep_summary(rep) + self._handle_teardown_sections(rep.nodeid) + + def _get_teardown_reports(self, nodeid: str) -> list[TestReport]: + reports = self.getreports("") + return [ + report + for report in reports + if report.when == "teardown" and report.nodeid == nodeid + ] + + def _handle_teardown_sections(self, nodeid: str) -> None: + for report in self._get_teardown_reports(nodeid): + self.print_teardown_sections(report) + + def print_teardown_sections(self, rep: TestReport) -> None: + showcapture = self.config.option.showcapture + if showcapture == "no": + return + for secname, content in rep.sections: + if showcapture != "all" and showcapture not in secname: + continue + if "teardown" in secname: + self._tw.sep("-", secname) + if content[-1:] == "\n": + content = content[:-1] + self._tw.line(content) + + def summary_failures(self) -> None: + style = self.config.option.tbstyle + self.summary_failures_combined("failed", "FAILURES", style=style) + + def summary_xfailures(self) -> None: + show_tb = self.config.option.xfail_tb + style = self.config.option.tbstyle if show_tb else "no" + self.summary_failures_combined("xfailed", "XFAILURES", style=style) + + def summary_failures_combined( + self, + which_reports: str, + sep_title: str, + *, + style: str, + needed_opt: str | None = None, + ) -> None: + if style != "no": + if not needed_opt or self.hasopt(needed_opt): + reports: list[BaseReport] = self.getreports(which_reports) + if not reports: + return + self.write_sep("=", sep_title) + if style == "line": + for rep in reports: + line = self._getcrashline(rep) + self._outrep_summary(rep) + self.write_line(line) + else: + for rep in reports: + msg = self._getfailureheadline(rep) + self.write_sep("_", msg, red=True, bold=True) + self._outrep_summary(rep) + self._handle_teardown_sections(rep.nodeid) + + def summary_errors(self) -> None: + if self.config.option.tbstyle != "no": + reports: list[BaseReport] = self.getreports("error") + if not reports: + return + self.write_sep("=", "ERRORS") + for rep in self.stats["error"]: + msg = self._getfailureheadline(rep) + if rep.when == "collect": + msg = "ERROR collecting " + msg + else: + msg = f"ERROR at {rep.when} of {msg}" + self.write_sep("_", msg, red=True, bold=True) + self._outrep_summary(rep) + + def _outrep_summary(self, rep: BaseReport) -> None: + rep.toterminal(self._tw) + showcapture = self.config.option.showcapture + if showcapture == "no": + return + for secname, content in rep.sections: + if showcapture != "all" and showcapture not in secname: + continue + self._tw.sep("-", secname) + if content[-1:] == "\n": + content = content[:-1] + self._tw.line(content) + + def summary_stats(self) -> None: + if self.verbosity < -1: + return + + session_duration = self._session_start.elapsed() + (parts, main_color) = self.build_summary_stats_line() + line_parts = [] + + display_sep = self.verbosity >= 0 + if display_sep: + fullwidth = self._tw.fullwidth + for text, markup in parts: + with_markup = self._tw.markup(text, **markup) + if display_sep: + fullwidth += len(with_markup) - len(text) + line_parts.append(with_markup) + msg = ", ".join(line_parts) + + main_markup = {main_color: True} + duration = f" in {format_session_duration(session_duration.seconds)}" + duration_with_markup = self._tw.markup(duration, **main_markup) + if display_sep: + fullwidth += len(duration_with_markup) - len(duration) + msg += duration_with_markup + + if display_sep: + markup_for_end_sep = self._tw.markup("", **main_markup) + if markup_for_end_sep.endswith("\x1b[0m"): + markup_for_end_sep = markup_for_end_sep[:-4] + fullwidth += len(markup_for_end_sep) + msg += markup_for_end_sep + + if display_sep: + self.write_sep("=", msg, fullwidth=fullwidth, **main_markup) + else: + self.write_line(msg, **main_markup) + + def short_test_summary(self) -> None: + if not self.reportchars: + return + + def show_simple(lines: list[str], *, stat: str) -> None: + failed = self.stats.get(stat, []) + if not failed: + return + config = self.config + for rep in failed: + color = _color_for_type.get(stat, _color_for_type_default) + line = _get_line_with_reprcrash_message( + config, rep, self._tw, {color: True} + ) + lines.append(line) + + def show_xfailed(lines: list[str]) -> None: + xfailed = self.stats.get("xfailed", []) + for rep in xfailed: + verbose_word, verbose_markup = rep._get_verbose_word_with_markup( + self.config, {_color_for_type["warnings"]: True} + ) + markup_word = self._tw.markup(verbose_word, **verbose_markup) + nodeid = _get_node_id_with_markup(self._tw, self.config, rep) + line = f"{markup_word} {nodeid}" + reason = rep.wasxfail + if reason: + line += " - " + str(reason) + + lines.append(line) + + def show_xpassed(lines: list[str]) -> None: + xpassed = self.stats.get("xpassed", []) + for rep in xpassed: + verbose_word, verbose_markup = rep._get_verbose_word_with_markup( + self.config, {_color_for_type["warnings"]: True} + ) + markup_word = self._tw.markup(verbose_word, **verbose_markup) + nodeid = _get_node_id_with_markup(self._tw, self.config, rep) + line = f"{markup_word} {nodeid}" + reason = rep.wasxfail + if reason: + line += " - " + str(reason) + lines.append(line) + + def show_skipped_folded(lines: list[str]) -> None: + skipped: list[CollectReport] = self.stats.get("skipped", []) + fskips = _folded_skips(self.startpath, skipped) if skipped else [] + if not fskips: + return + verbose_word, verbose_markup = skipped[0]._get_verbose_word_with_markup( + self.config, {_color_for_type["warnings"]: True} + ) + markup_word = self._tw.markup(verbose_word, **verbose_markup) + prefix = "Skipped: " + for num, fspath, lineno, reason in fskips: + if reason.startswith(prefix): + reason = reason[len(prefix) :] + if lineno is not None: + lines.append(f"{markup_word} [{num}] {fspath}:{lineno}: {reason}") + else: + lines.append(f"{markup_word} [{num}] {fspath}: {reason}") + + def show_skipped_unfolded(lines: list[str]) -> None: + skipped: list[CollectReport] = self.stats.get("skipped", []) + + for rep in skipped: + assert rep.longrepr is not None + assert isinstance(rep.longrepr, tuple), (rep, rep.longrepr) + assert len(rep.longrepr) == 3, (rep, rep.longrepr) + + verbose_word, verbose_markup = rep._get_verbose_word_with_markup( + self.config, {_color_for_type["warnings"]: True} + ) + markup_word = self._tw.markup(verbose_word, **verbose_markup) + nodeid = _get_node_id_with_markup(self._tw, self.config, rep) + line = f"{markup_word} {nodeid}" + reason = rep.longrepr[2] + if reason: + line += " - " + str(reason) + lines.append(line) + + def show_skipped(lines: list[str]) -> None: + if self.foldskipped: + show_skipped_folded(lines) + else: + show_skipped_unfolded(lines) + + REPORTCHAR_ACTIONS: Mapping[str, Callable[[list[str]], None]] = { + "x": show_xfailed, + "X": show_xpassed, + "f": partial(show_simple, stat="failed"), + "s": show_skipped, + "p": partial(show_simple, stat="passed"), + "E": partial(show_simple, stat="error"), + } + + lines: list[str] = [] + for char in self.reportchars: + action = REPORTCHAR_ACTIONS.get(char) + if action: # skipping e.g. "P" (passed with output) here. + action(lines) + + if lines: + self.write_sep("=", "short test summary info", cyan=True, bold=True) + for line in lines: + self.write_line(line) + + def _get_main_color(self) -> tuple[str, list[str]]: + if self._main_color is None or self._known_types is None or self._is_last_item: + self._set_main_color() + assert self._main_color + assert self._known_types + return self._main_color, self._known_types + + def _determine_main_color(self, unknown_type_seen: bool) -> str: + stats = self.stats + if "failed" in stats or "error" in stats: + main_color = "red" + elif "warnings" in stats or "xpassed" in stats or unknown_type_seen: + main_color = "yellow" + elif "passed" in stats or not self._is_last_item: + main_color = "green" + else: + main_color = "yellow" + return main_color + + def _set_main_color(self) -> None: + unknown_types: list[str] = [] + for found_type in self.stats: + if found_type: # setup/teardown reports have an empty key, ignore them + if found_type not in KNOWN_TYPES and found_type not in unknown_types: + unknown_types.append(found_type) + self._known_types = list(KNOWN_TYPES) + unknown_types + self._main_color = self._determine_main_color(bool(unknown_types)) + + def build_summary_stats_line(self) -> tuple[list[tuple[str, dict[str, bool]]], str]: + """ + Build the parts used in the last summary stats line. + + The summary stats line is the line shown at the end, "=== 12 passed, 2 errors in Xs===". + + This function builds a list of the "parts" that make up for the text in that line, in + the example above it would be:: + + [ + ("12 passed", {"green": True}), + ("2 errors", {"red": True} + ] + + That last dict for each line is a "markup dictionary", used by TerminalWriter to + color output. + + The final color of the line is also determined by this function, and is the second + element of the returned tuple. + """ + if self.config.getoption("collectonly"): + return self._build_collect_only_summary_stats_line() + else: + return self._build_normal_summary_stats_line() + + def _get_reports_to_display(self, key: str) -> list[Any]: + """Get test/collection reports for the given status key, such as `passed` or `error`.""" + reports = self.stats.get(key, []) + return [x for x in reports if getattr(x, "count_towards_summary", True)] + + def _build_normal_summary_stats_line( + self, + ) -> tuple[list[tuple[str, dict[str, bool]]], str]: + main_color, known_types = self._get_main_color() + parts = [] + + for key in known_types: + reports = self._get_reports_to_display(key) + if reports: + count = len(reports) + color = _color_for_type.get(key, _color_for_type_default) + markup = {color: True, "bold": color == main_color} + parts.append(("%d %s" % pluralize(count, key), markup)) # noqa: UP031 + + if not parts: + parts = [("no tests ran", {_color_for_type_default: True})] + + return parts, main_color + + def _build_collect_only_summary_stats_line( + self, + ) -> tuple[list[tuple[str, dict[str, bool]]], str]: + deselected = len(self._get_reports_to_display("deselected")) + errors = len(self._get_reports_to_display("error")) + + if self._numcollected == 0: + parts = [("no tests collected", {"yellow": True})] + main_color = "yellow" + + elif deselected == 0: + main_color = "green" + collected_output = "%d %s collected" % pluralize(self._numcollected, "test") # noqa: UP031 + parts = [(collected_output, {main_color: True})] + else: + all_tests_were_deselected = self._numcollected == deselected + if all_tests_were_deselected: + main_color = "yellow" + collected_output = f"no tests collected ({deselected} deselected)" + else: + main_color = "green" + selected = self._numcollected - deselected + collected_output = f"{selected}/{self._numcollected} tests collected ({deselected} deselected)" + + parts = [(collected_output, {main_color: True})] + + if errors: + main_color = _color_for_type["error"] + parts += [("%d %s" % pluralize(errors, "error"), {main_color: True})] # noqa: UP031 + + return parts, main_color + + +def _get_node_id_with_markup(tw: TerminalWriter, config: Config, rep: BaseReport): + nodeid = config.cwd_relative_nodeid(rep.nodeid) + path, *parts = nodeid.split("::") + if parts: + parts_markup = tw.markup("::".join(parts), bold=True) + return path + "::" + parts_markup + else: + return path + + +def _format_trimmed(format: str, msg: str, available_width: int) -> str | None: + """Format msg into format, ellipsizing it if doesn't fit in available_width. + + Returns None if even the ellipsis can't fit. + """ + # Only use the first line. + i = msg.find("\n") + if i != -1: + msg = msg[:i] + + ellipsis = "..." + format_width = wcswidth(format.format("")) + if format_width + len(ellipsis) > available_width: + return None + + if format_width + wcswidth(msg) > available_width: + available_width -= len(ellipsis) + msg = msg[:available_width] + while format_width + wcswidth(msg) > available_width: + msg = msg[:-1] + msg += ellipsis + + return format.format(msg) + + +def _get_line_with_reprcrash_message( + config: Config, rep: BaseReport, tw: TerminalWriter, word_markup: dict[str, bool] +) -> str: + """Get summary line for a report, trying to add reprcrash message.""" + verbose_word, verbose_markup = rep._get_verbose_word_with_markup( + config, word_markup + ) + word = tw.markup(verbose_word, **verbose_markup) + node = _get_node_id_with_markup(tw, config, rep) + + line = f"{word} {node}" + line_width = wcswidth(line) + + msg: str | None + try: + if isinstance(rep.longrepr, str): + msg = rep.longrepr + else: + # Type ignored intentionally -- possible AttributeError expected. + msg = rep.longrepr.reprcrash.message # type: ignore[union-attr] + except AttributeError: + pass + else: + if ( + running_on_ci() or config.option.verbose >= 2 + ) and not config.option.force_short_summary: + msg = f" - {msg}" + else: + available_width = tw.fullwidth - line_width + msg = _format_trimmed(" - {}", msg, available_width) + if msg is not None: + line += msg + + return line + + +def _folded_skips( + startpath: Path, + skipped: Sequence[CollectReport], +) -> list[tuple[int, str, int | None, str]]: + d: dict[tuple[str, int | None, str], list[CollectReport]] = {} + for event in skipped: + assert event.longrepr is not None + assert isinstance(event.longrepr, tuple), (event, event.longrepr) + assert len(event.longrepr) == 3, (event, event.longrepr) + fspath, lineno, reason = event.longrepr + # For consistency, report all fspaths in relative form. + fspath = bestrelpath(startpath, Path(fspath)) + keywords = getattr(event, "keywords", {}) + # Folding reports with global pytestmark variable. + # This is a workaround, because for now we cannot identify the scope of a skip marker + # TODO: Revisit after marks scope would be fixed. + if ( + event.when == "setup" + and "skip" in keywords + and "pytestmark" not in keywords + ): + key: tuple[str, int | None, str] = (fspath, None, reason) + else: + key = (fspath, lineno, reason) + d.setdefault(key, []).append(event) + values: list[tuple[int, str, int | None, str]] = [] + for key, events in d.items(): + values.append((len(events), *key)) + return values + + +_color_for_type = { + "failed": "red", + "error": "red", + "warnings": "yellow", + "passed": "green", + "subtests passed": "green", + "subtests failed": "red", +} +_color_for_type_default = "yellow" + + +def pluralize(count: int, noun: str) -> tuple[int, str]: + # No need to pluralize words such as `failed` or `passed`. + if noun not in ["error", "warnings", "test"]: + return count, noun + + # The `warnings` key is plural. To avoid API breakage, we keep it that way but + # set it to singular here so we can determine plurality in the same way as we do + # for `error`. + noun = noun.replace("warnings", "warning") + + return count, noun + "s" if count != 1 else noun + + +def _plugin_nameversions(plugininfo) -> list[str]: + values: list[str] = [] + for plugin, dist in plugininfo: + # Gets us name and version! + name = f"{dist.project_name}-{dist.version}" + # Questionable convenience, but it keeps things short. + if name.startswith("pytest-"): + name = name[7:] + # We decided to print python package names they can have more than one plugin. + if name not in values: + values.append(name) + return values + + +def format_session_duration(seconds: float) -> str: + """Format the given seconds in a human readable manner to show in the final summary.""" + if seconds < 60: + return f"{seconds:.2f}s" + else: + dt = datetime.timedelta(seconds=int(seconds)) + return f"{seconds:.2f}s ({dt})" + + +def format_node_duration(seconds: float) -> str: + """Format the given seconds in a human readable manner to show in the test progress.""" + # The formatting is designed to be compact and readable, with at most 7 characters + # for durations below 100 hours. + if seconds < 0.00001: + return f" {seconds * 1000000:.3f}us" + if seconds < 0.0001: + return f" {seconds * 1000000:.2f}us" + if seconds < 0.001: + return f" {seconds * 1000000:.1f}us" + if seconds < 0.01: + return f" {seconds * 1000:.3f}ms" + if seconds < 0.1: + return f" {seconds * 1000:.2f}ms" + if seconds < 1: + return f" {seconds * 1000:.1f}ms" + if seconds < 60: + return f" {seconds:.3f}s" + if seconds < 3600: + return f" {seconds // 60:.0f}m {seconds % 60:.0f}s" + return f" {seconds // 3600:.0f}h {(seconds % 3600) // 60:.0f}m" + + +def _get_raw_skip_reason(report: TestReport) -> str: + """Get the reason string of a skip/xfail/xpass test report. + + The string is just the part given by the user. + """ + if hasattr(report, "wasxfail"): + reason = report.wasxfail + if reason.startswith("reason: "): + reason = reason[len("reason: ") :] + return reason + else: + assert report.skipped + assert isinstance(report.longrepr, tuple) + _, _, reason = report.longrepr + if reason.startswith("Skipped: "): + reason = reason[len("Skipped: ") :] + elif reason == "Skipped": + reason = "" + return reason + + +class TerminalProgressPlugin: + """Terminal progress reporting plugin using OSC 9;4 ANSI sequences. + + Emits OSC 9;4 sequences to indicate test progress to terminal + tabs/windows/etc. + + Not all terminal emulators support this feature. + + Ref: https://conemu.github.io/en/AnsiEscapeCodes.html#ConEmu_specific_OSC + """ + + def __init__(self, tr: TerminalReporter) -> None: + self._tr = tr + self._session: Session | None = None + self._has_failures = False + + def _emit_progress( + self, + state: Literal["remove", "normal", "error", "indeterminate", "paused"], + progress: int | None = None, + ) -> None: + """Emit OSC 9;4 sequence for indicating progress to the terminal. + + :param state: + Progress state to set. + :param progress: + Progress value 0-100. Required for "normal", optional for "error" + and "paused", otherwise ignored. + """ + assert progress is None or 0 <= progress <= 100 + + # OSC 9;4 sequence: ESC ] 9 ; 4 ; state ; progress ST + # ST can be ESC \ or BEL. ESC \ seems better supported. + match state: + case "remove": + sequence = "\x1b]9;4;0;\x1b\\" + case "normal": + assert progress is not None + sequence = f"\x1b]9;4;1;{progress}\x1b\\" + case "error": + if progress is not None: + sequence = f"\x1b]9;4;2;{progress}\x1b\\" + else: + sequence = "\x1b]9;4;2;\x1b\\" + case "indeterminate": + sequence = "\x1b]9;4;3;\x1b\\" + case "paused": + if progress is not None: + sequence = f"\x1b]9;4;4;{progress}\x1b\\" + else: + sequence = "\x1b]9;4;4;\x1b\\" + + self._tr.write_raw(sequence, flush=True) + + @hookimpl + def pytest_sessionstart(self, session: Session) -> None: + self._session = session + # Show indeterminate progress during collection. + self._emit_progress("indeterminate") + + @hookimpl + def pytest_collection_finish(self) -> None: + assert self._session is not None + if self._session.testscollected > 0: + # Switch from indeterminate to 0% progress. + self._emit_progress("normal", 0) + + @hookimpl + def pytest_runtest_logreport(self, report: TestReport) -> None: + if report.failed: + self._has_failures = True + + # Let's consider the "call" phase for progress. + if report.when != "call": + return + + # Calculate and emit progress. + assert self._session is not None + collected = self._session.testscollected + if collected > 0: + reported = self._tr.reported_progress + progress = min(reported * 100 // collected, 100) + self._emit_progress("error" if self._has_failures else "normal", progress) + + @hookimpl + def pytest_sessionfinish(self) -> None: + self._emit_progress("remove") diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/threadexception.py b/Backend/venv/lib/python3.12/site-packages/_pytest/threadexception.py new file mode 100644 index 00000000..eb57783b --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/_pytest/threadexception.py @@ -0,0 +1,152 @@ +from __future__ import annotations + +import collections +from collections.abc import Callable +import functools +import sys +import threading +import traceback +from typing import NamedTuple +from typing import TYPE_CHECKING +import warnings + +from _pytest.config import Config +from _pytest.nodes import Item +from _pytest.stash import StashKey +from _pytest.tracemalloc import tracemalloc_message +import pytest + + +if TYPE_CHECKING: + pass + +if sys.version_info < (3, 11): + from exceptiongroup import ExceptionGroup + + +class ThreadExceptionMeta(NamedTuple): + msg: str + cause_msg: str + exc_value: BaseException | None + + +thread_exceptions: StashKey[collections.deque[ThreadExceptionMeta | BaseException]] = ( + StashKey() +) + + +def collect_thread_exception(config: Config) -> None: + pop_thread_exception = config.stash[thread_exceptions].pop + errors: list[pytest.PytestUnhandledThreadExceptionWarning | RuntimeError] = [] + meta = None + hook_error = None + try: + while True: + try: + meta = pop_thread_exception() + except IndexError: + break + + if isinstance(meta, BaseException): + hook_error = RuntimeError("Failed to process thread exception") + hook_error.__cause__ = meta + errors.append(hook_error) + continue + + msg = meta.msg + try: + warnings.warn(pytest.PytestUnhandledThreadExceptionWarning(msg)) + except pytest.PytestUnhandledThreadExceptionWarning as e: + # This except happens when the warning is treated as an error (e.g. `-Werror`). + if meta.exc_value is not None: + # Exceptions have a better way to show the traceback, but + # warnings do not, so hide the traceback from the msg and + # set the cause so the traceback shows up in the right place. + e.args = (meta.cause_msg,) + e.__cause__ = meta.exc_value + errors.append(e) + + if len(errors) == 1: + raise errors[0] + if errors: + raise ExceptionGroup("multiple thread exception warnings", errors) + finally: + del errors, meta, hook_error + + +def cleanup( + *, config: Config, prev_hook: Callable[[threading.ExceptHookArgs], object] +) -> None: + try: + try: + # We don't join threads here, so exceptions raised from any + # threads still running by the time _threading_atexits joins them + # do not get captured (see #13027). + collect_thread_exception(config) + finally: + threading.excepthook = prev_hook + finally: + del config.stash[thread_exceptions] + + +def thread_exception_hook( + args: threading.ExceptHookArgs, + /, + *, + append: Callable[[ThreadExceptionMeta | BaseException], object], +) -> None: + try: + # we need to compute these strings here as they might change after + # the excepthook finishes and before the metadata object is + # collected by a pytest hook + thread_name = "" if args.thread is None else args.thread.name + summary = f"Exception in thread {thread_name}" + traceback_message = "\n\n" + "".join( + traceback.format_exception( + args.exc_type, + args.exc_value, + args.exc_traceback, + ) + ) + tracemalloc_tb = "\n" + tracemalloc_message(args.thread) + msg = summary + traceback_message + tracemalloc_tb + cause_msg = summary + tracemalloc_tb + + append( + ThreadExceptionMeta( + # Compute these strings here as they might change later + msg=msg, + cause_msg=cause_msg, + exc_value=args.exc_value, + ) + ) + except BaseException as e: + append(e) + # Raising this will cause the exception to be logged twice, once in our + # collect_thread_exception and once by sys.excepthook + # which is fine - this should never happen anyway and if it does + # it should probably be reported as a pytest bug. + raise + + +def pytest_configure(config: Config) -> None: + prev_hook = threading.excepthook + deque: collections.deque[ThreadExceptionMeta | BaseException] = collections.deque() + config.stash[thread_exceptions] = deque + config.add_cleanup(functools.partial(cleanup, config=config, prev_hook=prev_hook)) + threading.excepthook = functools.partial(thread_exception_hook, append=deque.append) + + +@pytest.hookimpl(trylast=True) +def pytest_runtest_setup(item: Item) -> None: + collect_thread_exception(item.config) + + +@pytest.hookimpl(trylast=True) +def pytest_runtest_call(item: Item) -> None: + collect_thread_exception(item.config) + + +@pytest.hookimpl(trylast=True) +def pytest_runtest_teardown(item: Item) -> None: + collect_thread_exception(item.config) diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/timing.py b/Backend/venv/lib/python3.12/site-packages/_pytest/timing.py new file mode 100644 index 00000000..51c3db23 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/_pytest/timing.py @@ -0,0 +1,95 @@ +"""Indirection for time functions. + +We intentionally grab some "time" functions internally to avoid tests mocking "time" to affect +pytest runtime information (issue #185). + +Fixture "mock_timing" also interacts with this module for pytest's own tests. +""" + +from __future__ import annotations + +import dataclasses +from datetime import datetime +from datetime import timezone +from time import perf_counter +from time import sleep +from time import time +from typing import TYPE_CHECKING + + +if TYPE_CHECKING: + from pytest import MonkeyPatch + + +@dataclasses.dataclass(frozen=True) +class Instant: + """ + Represents an instant in time, used to both get the timestamp value and to measure + the duration of a time span. + + Inspired by Rust's `std::time::Instant`. + """ + + # Creation time of this instant, using time.time(), to measure actual time. + # Note: using a `lambda` to correctly get the mocked time via `MockTiming`. + time: float = dataclasses.field(default_factory=lambda: time(), init=False) + + # Performance counter tick of the instant, used to measure precise elapsed time. + # Note: using a `lambda` to correctly get the mocked time via `MockTiming`. + perf_count: float = dataclasses.field( + default_factory=lambda: perf_counter(), init=False + ) + + def elapsed(self) -> Duration: + """Measure the duration since `Instant` was created.""" + return Duration(start=self, stop=Instant()) + + def as_utc(self) -> datetime: + """Instant as UTC datetime.""" + return datetime.fromtimestamp(self.time, timezone.utc) + + +@dataclasses.dataclass(frozen=True) +class Duration: + """A span of time as measured by `Instant.elapsed()`.""" + + start: Instant + stop: Instant + + @property + def seconds(self) -> float: + """Elapsed time of the duration in seconds, measured using a performance counter for precise timing.""" + return self.stop.perf_count - self.start.perf_count + + +@dataclasses.dataclass +class MockTiming: + """Mocks _pytest.timing with a known object that can be used to control timing in tests + deterministically. + + pytest itself should always use functions from `_pytest.timing` instead of `time` directly. + + This then allows us more control over time during testing, if testing code also + uses `_pytest.timing` functions. + + Time is static, and only advances through `sleep` calls, thus tests might sleep over large + numbers and obtain accurate time() calls at the end, making tests reliable and instant.""" + + _current_time: float = datetime(2020, 5, 22, 14, 20, 50).timestamp() + + def sleep(self, seconds: float) -> None: + self._current_time += seconds + + def time(self) -> float: + return self._current_time + + def patch(self, monkeypatch: MonkeyPatch) -> None: + # pylint: disable-next=import-self + from _pytest import timing # noqa: PLW0406 + + monkeypatch.setattr(timing, "sleep", self.sleep) + monkeypatch.setattr(timing, "time", self.time) + monkeypatch.setattr(timing, "perf_counter", self.time) + + +__all__ = ["perf_counter", "sleep", "time"] diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/tmpdir.py b/Backend/venv/lib/python3.12/site-packages/_pytest/tmpdir.py new file mode 100644 index 00000000..dcd5784f --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/_pytest/tmpdir.py @@ -0,0 +1,312 @@ +# mypy: allow-untyped-defs +"""Support for providing temporary directories to test functions.""" + +from __future__ import annotations + +from collections.abc import Generator +import dataclasses +import os +from pathlib import Path +import re +from shutil import rmtree +import tempfile +from typing import Any +from typing import final +from typing import Literal + +from .pathlib import cleanup_dead_symlinks +from .pathlib import LOCK_TIMEOUT +from .pathlib import make_numbered_dir +from .pathlib import make_numbered_dir_with_cleanup +from .pathlib import rm_rf +from _pytest.compat import get_user_id +from _pytest.config import Config +from _pytest.config import ExitCode +from _pytest.config import hookimpl +from _pytest.config.argparsing import Parser +from _pytest.deprecated import check_ispytest +from _pytest.fixtures import fixture +from _pytest.fixtures import FixtureRequest +from _pytest.monkeypatch import MonkeyPatch +from _pytest.nodes import Item +from _pytest.reports import TestReport +from _pytest.stash import StashKey + + +tmppath_result_key = StashKey[dict[str, bool]]() +RetentionType = Literal["all", "failed", "none"] + + +@final +@dataclasses.dataclass +class TempPathFactory: + """Factory for temporary directories under the common base temp directory, + as discussed at :ref:`temporary directory location and retention`. + """ + + _given_basetemp: Path | None + # pluggy TagTracerSub, not currently exposed, so Any. + _trace: Any + _basetemp: Path | None + _retention_count: int + _retention_policy: RetentionType + + def __init__( + self, + given_basetemp: Path | None, + retention_count: int, + retention_policy: RetentionType, + trace, + basetemp: Path | None = None, + *, + _ispytest: bool = False, + ) -> None: + check_ispytest(_ispytest) + if given_basetemp is None: + self._given_basetemp = None + else: + # Use os.path.abspath() to get absolute path instead of resolve() as it + # does not work the same in all platforms (see #4427). + # Path.absolute() exists, but it is not public (see https://bugs.python.org/issue25012). + self._given_basetemp = Path(os.path.abspath(str(given_basetemp))) + self._trace = trace + self._retention_count = retention_count + self._retention_policy = retention_policy + self._basetemp = basetemp + + @classmethod + def from_config( + cls, + config: Config, + *, + _ispytest: bool = False, + ) -> TempPathFactory: + """Create a factory according to pytest configuration. + + :meta private: + """ + check_ispytest(_ispytest) + count = int(config.getini("tmp_path_retention_count")) + if count < 0: + raise ValueError( + f"tmp_path_retention_count must be >= 0. Current input: {count}." + ) + + policy = config.getini("tmp_path_retention_policy") + if policy not in ("all", "failed", "none"): + raise ValueError( + f"tmp_path_retention_policy must be either all, failed, none. Current input: {policy}." + ) + + return cls( + given_basetemp=config.option.basetemp, + trace=config.trace.get("tmpdir"), + retention_count=count, + retention_policy=policy, + _ispytest=True, + ) + + def _ensure_relative_to_basetemp(self, basename: str) -> str: + basename = os.path.normpath(basename) + if (self.getbasetemp() / basename).resolve().parent != self.getbasetemp(): + raise ValueError(f"{basename} is not a normalized and relative path") + return basename + + def mktemp(self, basename: str, numbered: bool = True) -> Path: + """Create a new temporary directory managed by the factory. + + :param basename: + Directory base name, must be a relative path. + + :param numbered: + If ``True``, ensure the directory is unique by adding a numbered + suffix greater than any existing one: ``basename="foo-"`` and ``numbered=True`` + means that this function will create directories named ``"foo-0"``, + ``"foo-1"``, ``"foo-2"`` and so on. + + :returns: + The path to the new directory. + """ + basename = self._ensure_relative_to_basetemp(basename) + if not numbered: + p = self.getbasetemp().joinpath(basename) + p.mkdir(mode=0o700) + else: + p = make_numbered_dir(root=self.getbasetemp(), prefix=basename, mode=0o700) + self._trace("mktemp", p) + return p + + def getbasetemp(self) -> Path: + """Return the base temporary directory, creating it if needed. + + :returns: + The base temporary directory. + """ + if self._basetemp is not None: + return self._basetemp + + if self._given_basetemp is not None: + basetemp = self._given_basetemp + if basetemp.exists(): + rm_rf(basetemp) + basetemp.mkdir(mode=0o700) + basetemp = basetemp.resolve() + else: + from_env = os.environ.get("PYTEST_DEBUG_TEMPROOT") + temproot = Path(from_env or tempfile.gettempdir()).resolve() + user = get_user() or "unknown" + # use a sub-directory in the temproot to speed-up + # make_numbered_dir() call + rootdir = temproot.joinpath(f"pytest-of-{user}") + try: + rootdir.mkdir(mode=0o700, exist_ok=True) + except OSError: + # getuser() likely returned illegal characters for the platform, use unknown back off mechanism + rootdir = temproot.joinpath("pytest-of-unknown") + rootdir.mkdir(mode=0o700, exist_ok=True) + # Because we use exist_ok=True with a predictable name, make sure + # we are the owners, to prevent any funny business (on unix, where + # temproot is usually shared). + # Also, to keep things private, fixup any world-readable temp + # rootdir's permissions. Historically 0o755 was used, so we can't + # just error out on this, at least for a while. + uid = get_user_id() + if uid is not None: + rootdir_stat = rootdir.stat() + if rootdir_stat.st_uid != uid: + raise OSError( + f"The temporary directory {rootdir} is not owned by the current user. " + "Fix this and try again." + ) + if (rootdir_stat.st_mode & 0o077) != 0: + os.chmod(rootdir, rootdir_stat.st_mode & ~0o077) + keep = self._retention_count + if self._retention_policy == "none": + keep = 0 + basetemp = make_numbered_dir_with_cleanup( + prefix="pytest-", + root=rootdir, + keep=keep, + lock_timeout=LOCK_TIMEOUT, + mode=0o700, + ) + assert basetemp is not None, basetemp + self._basetemp = basetemp + self._trace("new basetemp", basetemp) + return basetemp + + +def get_user() -> str | None: + """Return the current user name, or None if getuser() does not work + in the current environment (see #1010).""" + try: + # In some exotic environments, getpass may not be importable. + import getpass + + return getpass.getuser() + except (ImportError, OSError, KeyError): + return None + + +def pytest_configure(config: Config) -> None: + """Create a TempPathFactory and attach it to the config object. + + This is to comply with existing plugins which expect the handler to be + available at pytest_configure time, but ideally should be moved entirely + to the tmp_path_factory session fixture. + """ + mp = MonkeyPatch() + config.add_cleanup(mp.undo) + _tmp_path_factory = TempPathFactory.from_config(config, _ispytest=True) + mp.setattr(config, "_tmp_path_factory", _tmp_path_factory, raising=False) + + +def pytest_addoption(parser: Parser) -> None: + parser.addini( + "tmp_path_retention_count", + help="How many sessions should we keep the `tmp_path` directories, according to `tmp_path_retention_policy`.", + default=3, + ) + + parser.addini( + "tmp_path_retention_policy", + help="Controls which directories created by the `tmp_path` fixture are kept around, based on test outcome. " + "(all/failed/none)", + default="all", + ) + + +@fixture(scope="session") +def tmp_path_factory(request: FixtureRequest) -> TempPathFactory: + """Return a :class:`pytest.TempPathFactory` instance for the test session.""" + # Set dynamically by pytest_configure() above. + return request.config._tmp_path_factory # type: ignore + + +def _mk_tmp(request: FixtureRequest, factory: TempPathFactory) -> Path: + name = request.node.name + name = re.sub(r"[\W]", "_", name) + MAXVAL = 30 + name = name[:MAXVAL] + return factory.mktemp(name, numbered=True) + + +@fixture +def tmp_path( + request: FixtureRequest, tmp_path_factory: TempPathFactory +) -> Generator[Path]: + """Return a temporary directory (as :class:`pathlib.Path` object) + which is unique to each test function invocation. + The temporary directory is created as a subdirectory + of the base temporary directory, with configurable retention, + as discussed in :ref:`temporary directory location and retention`. + """ + path = _mk_tmp(request, tmp_path_factory) + yield path + + # Remove the tmpdir if the policy is "failed" and the test passed. + policy = tmp_path_factory._retention_policy + result_dict = request.node.stash[tmppath_result_key] + + if policy == "failed" and result_dict.get("call", True): + # We do a "best effort" to remove files, but it might not be possible due to some leaked resource, + # permissions, etc, in which case we ignore it. + rmtree(path, ignore_errors=True) + + del request.node.stash[tmppath_result_key] + + +def pytest_sessionfinish(session, exitstatus: int | ExitCode): + """After each session, remove base directory if all the tests passed, + the policy is "failed", and the basetemp is not specified by a user. + """ + tmp_path_factory: TempPathFactory = session.config._tmp_path_factory + basetemp = tmp_path_factory._basetemp + if basetemp is None: + return + + policy = tmp_path_factory._retention_policy + if ( + exitstatus == 0 + and policy == "failed" + and tmp_path_factory._given_basetemp is None + ): + if basetemp.is_dir(): + # We do a "best effort" to remove files, but it might not be possible due to some leaked resource, + # permissions, etc, in which case we ignore it. + rmtree(basetemp, ignore_errors=True) + + # Remove dead symlinks. + if basetemp.is_dir(): + cleanup_dead_symlinks(basetemp) + + +@hookimpl(wrapper=True, tryfirst=True) +def pytest_runtest_makereport( + item: Item, call +) -> Generator[None, TestReport, TestReport]: + rep = yield + assert rep.when is not None + empty: dict[str, bool] = {} + item.stash.setdefault(tmppath_result_key, empty)[rep.when] = rep.passed + return rep diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/tracemalloc.py b/Backend/venv/lib/python3.12/site-packages/_pytest/tracemalloc.py new file mode 100644 index 00000000..5d0b1985 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/_pytest/tracemalloc.py @@ -0,0 +1,24 @@ +from __future__ import annotations + + +def tracemalloc_message(source: object) -> str: + if source is None: + return "" + + try: + import tracemalloc + except ImportError: + return "" + + tb = tracemalloc.get_object_traceback(source) + if tb is not None: + formatted_tb = "\n".join(tb.format()) + # Use a leading new line to better separate the (large) output + # from the traceback to the previous warning text. + return f"\nObject allocated at:\n{formatted_tb}" + # No need for a leading new line. + url = "https://docs.pytest.org/en/stable/how-to/capture-warnings.html#resource-warnings" + return ( + "Enable tracemalloc to get traceback where the object was allocated.\n" + f"See {url} for more info." + ) diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/unittest.py b/Backend/venv/lib/python3.12/site-packages/_pytest/unittest.py new file mode 100644 index 00000000..7498f1b0 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/_pytest/unittest.py @@ -0,0 +1,614 @@ +# mypy: allow-untyped-defs +"""Discover and run std-library "unittest" style tests.""" + +from __future__ import annotations + +from collections.abc import Callable +from collections.abc import Generator +from collections.abc import Iterable +from collections.abc import Iterator +from enum import auto +from enum import Enum +import inspect +import sys +import traceback +import types +from typing import Any +from typing import TYPE_CHECKING +from unittest import TestCase + +import _pytest._code +from _pytest._code import ExceptionInfo +from _pytest.compat import assert_never +from _pytest.compat import is_async_function +from _pytest.config import hookimpl +from _pytest.fixtures import FixtureRequest +from _pytest.monkeypatch import MonkeyPatch +from _pytest.nodes import Collector +from _pytest.nodes import Item +from _pytest.outcomes import exit +from _pytest.outcomes import fail +from _pytest.outcomes import skip +from _pytest.outcomes import xfail +from _pytest.python import Class +from _pytest.python import Function +from _pytest.python import Module +from _pytest.runner import CallInfo +from _pytest.runner import check_interactive_exception +from _pytest.subtests import SubtestContext +from _pytest.subtests import SubtestReport + + +if sys.version_info[:2] < (3, 11): + from exceptiongroup import ExceptionGroup + +if TYPE_CHECKING: + from types import TracebackType + import unittest + + import twisted.trial.unittest + + +_SysExcInfoType = ( + tuple[type[BaseException], BaseException, types.TracebackType] + | tuple[None, None, None] +) + + +def pytest_pycollect_makeitem( + collector: Module | Class, name: str, obj: object +) -> UnitTestCase | None: + try: + # Has unittest been imported? + ut = sys.modules["unittest"] + # Is obj a subclass of unittest.TestCase? + # Type ignored because `ut` is an opaque module. + if not issubclass(obj, ut.TestCase): # type: ignore + return None + except Exception: + return None + # Is obj a concrete class? + # Abstract classes can't be instantiated so no point collecting them. + if inspect.isabstract(obj): + return None + # Yes, so let's collect it. + return UnitTestCase.from_parent(collector, name=name, obj=obj) + + +class UnitTestCase(Class): + # Marker for fixturemanger.getfixtureinfo() + # to declare that our children do not support funcargs. + nofuncargs = True + + def newinstance(self): + # TestCase __init__ takes the method (test) name. The TestCase + # constructor treats the name "runTest" as a special no-op, so it can be + # used when a dummy instance is needed. While unittest.TestCase has a + # default, some subclasses omit the default (#9610), so always supply + # it. + return self.obj("runTest") + + def collect(self) -> Iterable[Item | Collector]: + from unittest import TestLoader + + cls = self.obj + if not getattr(cls, "__test__", True): + return + + skipped = _is_skipped(cls) + if not skipped: + self._register_unittest_setup_method_fixture(cls) + self._register_unittest_setup_class_fixture(cls) + self._register_setup_class_fixture() + + self.session._fixturemanager.parsefactories(self.newinstance(), self.nodeid) + + loader = TestLoader() + foundsomething = False + for name in loader.getTestCaseNames(self.obj): + x = getattr(self.obj, name) + if not getattr(x, "__test__", True): + continue + yield TestCaseFunction.from_parent(self, name=name) + foundsomething = True + + if not foundsomething: + runtest = getattr(self.obj, "runTest", None) + if runtest is not None: + ut = sys.modules.get("twisted.trial.unittest", None) + if ut is None or runtest != ut.TestCase.runTest: + yield TestCaseFunction.from_parent(self, name="runTest") + + def _register_unittest_setup_class_fixture(self, cls: type) -> None: + """Register an auto-use fixture to invoke setUpClass and + tearDownClass (#517).""" + setup = getattr(cls, "setUpClass", None) + teardown = getattr(cls, "tearDownClass", None) + if setup is None and teardown is None: + return None + cleanup = getattr(cls, "doClassCleanups", lambda: None) + + def process_teardown_exceptions() -> None: + # tearDown_exceptions is a list set in the class containing exc_infos for errors during + # teardown for the class. + exc_infos = getattr(cls, "tearDown_exceptions", None) + if not exc_infos: + return + exceptions = [exc for (_, exc, _) in exc_infos] + # If a single exception, raise it directly as this provides a more readable + # error (hopefully this will improve in #12255). + if len(exceptions) == 1: + raise exceptions[0] + else: + raise ExceptionGroup("Unittest class cleanup errors", exceptions) + + def unittest_setup_class_fixture( + request: FixtureRequest, + ) -> Generator[None]: + cls = request.cls + if _is_skipped(cls): + reason = cls.__unittest_skip_why__ + raise skip.Exception(reason, _use_item_location=True) + if setup is not None: + try: + setup() + # unittest does not call the cleanup function for every BaseException, so we + # follow this here. + except Exception: + cleanup() + process_teardown_exceptions() + raise + yield + try: + if teardown is not None: + teardown() + finally: + cleanup() + process_teardown_exceptions() + + self.session._fixturemanager._register_fixture( + # Use a unique name to speed up lookup. + name=f"_unittest_setUpClass_fixture_{cls.__qualname__}", + func=unittest_setup_class_fixture, + nodeid=self.nodeid, + scope="class", + autouse=True, + ) + + def _register_unittest_setup_method_fixture(self, cls: type) -> None: + """Register an auto-use fixture to invoke setup_method and + teardown_method (#517).""" + setup = getattr(cls, "setup_method", None) + teardown = getattr(cls, "teardown_method", None) + if setup is None and teardown is None: + return None + + def unittest_setup_method_fixture( + request: FixtureRequest, + ) -> Generator[None]: + self = request.instance + if _is_skipped(self): + reason = self.__unittest_skip_why__ + raise skip.Exception(reason, _use_item_location=True) + if setup is not None: + setup(self, request.function) + yield + if teardown is not None: + teardown(self, request.function) + + self.session._fixturemanager._register_fixture( + # Use a unique name to speed up lookup. + name=f"_unittest_setup_method_fixture_{cls.__qualname__}", + func=unittest_setup_method_fixture, + nodeid=self.nodeid, + scope="function", + autouse=True, + ) + + +class TestCaseFunction(Function): + nofuncargs = True + failfast = False + _excinfo: list[_pytest._code.ExceptionInfo[BaseException]] | None = None + + def _getinstance(self): + assert isinstance(self.parent, UnitTestCase) + return self.parent.obj(self.name) + + # Backward compat for pytest-django; can be removed after pytest-django + # updates + some slack. + @property + def _testcase(self): + return self.instance + + def setup(self) -> None: + # A bound method to be called during teardown() if set (see 'runtest()'). + self._explicit_tearDown: Callable[[], None] | None = None + super().setup() + + def teardown(self) -> None: + if self._explicit_tearDown is not None: + self._explicit_tearDown() + self._explicit_tearDown = None + self._obj = None + del self._instance + super().teardown() + + def startTest(self, testcase: unittest.TestCase) -> None: + pass + + def _addexcinfo(self, rawexcinfo: _SysExcInfoType) -> None: + rawexcinfo = _handle_twisted_exc_info(rawexcinfo) + try: + excinfo = _pytest._code.ExceptionInfo[BaseException].from_exc_info( + rawexcinfo # type: ignore[arg-type] + ) + # Invoke the attributes to trigger storing the traceback + # trial causes some issue there. + _ = excinfo.value + _ = excinfo.traceback + except TypeError: + try: + try: + values = traceback.format_exception(*rawexcinfo) + values.insert( + 0, + "NOTE: Incompatible Exception Representation, " + "displaying natively:\n\n", + ) + fail("".join(values), pytrace=False) + except (fail.Exception, KeyboardInterrupt): + raise + except BaseException: + fail( + "ERROR: Unknown Incompatible Exception " + f"representation:\n{rawexcinfo!r}", + pytrace=False, + ) + except KeyboardInterrupt: + raise + except fail.Exception: + excinfo = _pytest._code.ExceptionInfo.from_current() + self.__dict__.setdefault("_excinfo", []).append(excinfo) + + def addError( + self, testcase: unittest.TestCase, rawexcinfo: _SysExcInfoType + ) -> None: + try: + if isinstance(rawexcinfo[1], exit.Exception): + exit(rawexcinfo[1].msg) + except TypeError: + pass + self._addexcinfo(rawexcinfo) + + def addFailure( + self, testcase: unittest.TestCase, rawexcinfo: _SysExcInfoType + ) -> None: + self._addexcinfo(rawexcinfo) + + def addSkip( + self, testcase: unittest.TestCase, reason: str, *, handle_subtests: bool = True + ) -> None: + from unittest.case import _SubTest # type: ignore[attr-defined] + + def add_skip() -> None: + try: + raise skip.Exception(reason, _use_item_location=True) + except skip.Exception: + self._addexcinfo(sys.exc_info()) + + if not handle_subtests: + add_skip() + return + + if isinstance(testcase, _SubTest): + add_skip() + if self._excinfo is not None: + exc_info = self._excinfo[-1] + self.addSubTest(testcase.test_case, testcase, exc_info) + else: + # For python < 3.11: the non-subtest skips have to be added by `add_skip` only after all subtest + # failures are processed by `_addSubTest`: `self.instance._outcome` has no attribute + # `skipped/errors` anymore. + # We also need to check if `self.instance._outcome` is `None` (this happens if the test + # class/method is decorated with `unittest.skip`, see pytest-dev/pytest-subtests#173). + if sys.version_info < (3, 11) and self.instance._outcome is not None: + subtest_errors = [ + x + for x, y in self.instance._outcome.errors + if isinstance(x, _SubTest) and y is not None + ] + if len(subtest_errors) == 0: + add_skip() + else: + add_skip() + + def addExpectedFailure( + self, + testcase: unittest.TestCase, + rawexcinfo: _SysExcInfoType, + reason: str = "", + ) -> None: + try: + xfail(str(reason)) + except xfail.Exception: + self._addexcinfo(sys.exc_info()) + + def addUnexpectedSuccess( + self, + testcase: unittest.TestCase, + reason: twisted.trial.unittest.Todo | None = None, + ) -> None: + msg = "Unexpected success" + if reason: + msg += f": {reason.reason}" + # Preserve unittest behaviour - fail the test. Explicitly not an XPASS. + try: + fail(msg, pytrace=False) + except fail.Exception: + self._addexcinfo(sys.exc_info()) + + def addSuccess(self, testcase: unittest.TestCase) -> None: + pass + + def stopTest(self, testcase: unittest.TestCase) -> None: + pass + + def addDuration(self, testcase: unittest.TestCase, elapsed: float) -> None: + pass + + def runtest(self) -> None: + from _pytest.debugging import maybe_wrap_pytest_function_for_tracing + + testcase = self.instance + assert testcase is not None + + maybe_wrap_pytest_function_for_tracing(self) + + # Let the unittest framework handle async functions. + if is_async_function(self.obj): + testcase(result=self) + else: + # When --pdb is given, we want to postpone calling tearDown() otherwise + # when entering the pdb prompt, tearDown() would have probably cleaned up + # instance variables, which makes it difficult to debug. + # Arguably we could always postpone tearDown(), but this changes the moment where the + # TestCase instance interacts with the results object, so better to only do it + # when absolutely needed. + # We need to consider if the test itself is skipped, or the whole class. + assert isinstance(self.parent, UnitTestCase) + skipped = _is_skipped(self.obj) or _is_skipped(self.parent.obj) + if self.config.getoption("usepdb") and not skipped: + self._explicit_tearDown = testcase.tearDown + setattr(testcase, "tearDown", lambda *args: None) + + # We need to update the actual bound method with self.obj, because + # wrap_pytest_function_for_tracing replaces self.obj by a wrapper. + setattr(testcase, self.name, self.obj) + try: + testcase(result=self) + finally: + delattr(testcase, self.name) + + def _traceback_filter( + self, excinfo: _pytest._code.ExceptionInfo[BaseException] + ) -> _pytest._code.Traceback: + traceback = super()._traceback_filter(excinfo) + ntraceback = traceback.filter( + lambda x: not x.frame.f_globals.get("__unittest"), + ) + if not ntraceback: + ntraceback = traceback + return ntraceback + + def addSubTest( + self, + test_case: Any, + test: TestCase, + exc_info: ExceptionInfo[BaseException] + | tuple[type[BaseException], BaseException, TracebackType] + | None, + ) -> None: + exception_info: ExceptionInfo[BaseException] | None + match exc_info: + case tuple(): + exception_info = ExceptionInfo(exc_info, _ispytest=True) + case ExceptionInfo() | None: + exception_info = exc_info + case unreachable: + assert_never(unreachable) + + call_info = CallInfo[None]( + None, + exception_info, + start=0, + stop=0, + duration=0, + when="call", + _ispytest=True, + ) + msg = test._message if isinstance(test._message, str) else None # type: ignore[attr-defined] + report = self.ihook.pytest_runtest_makereport(item=self, call=call_info) + sub_report = SubtestReport._new( + report, + SubtestContext(msg=msg, kwargs=dict(test.params)), # type: ignore[attr-defined] + captured_output=None, + captured_logs=None, + ) + self.ihook.pytest_runtest_logreport(report=sub_report) + if check_interactive_exception(call_info, sub_report): + self.ihook.pytest_exception_interact( + node=self, call=call_info, report=sub_report + ) + + # For python < 3.11: add non-subtest skips once all subtest failures are processed by # `_addSubTest`. + if sys.version_info < (3, 11): + from unittest.case import _SubTest # type: ignore[attr-defined] + + non_subtest_skip = [ + (x, y) + for x, y in self.instance._outcome.skipped + if not isinstance(x, _SubTest) + ] + subtest_errors = [ + (x, y) + for x, y in self.instance._outcome.errors + if isinstance(x, _SubTest) and y is not None + ] + # Check if we have non-subtest skips: if there are also sub failures, non-subtest skips are not treated in + # `_addSubTest` and have to be added using `add_skip` after all subtest failures are processed. + if len(non_subtest_skip) > 0 and len(subtest_errors) > 0: + # Make sure we have processed the last subtest failure + last_subset_error = subtest_errors[-1] + if exc_info is last_subset_error[-1]: + # Add non-subtest skips (as they could not be treated in `_addSkip`) + for testcase, reason in non_subtest_skip: + self.addSkip(testcase, reason, handle_subtests=False) + + +@hookimpl(tryfirst=True) +def pytest_runtest_makereport(item: Item, call: CallInfo[None]) -> None: + if isinstance(item, TestCaseFunction): + if item._excinfo: + call.excinfo = item._excinfo.pop(0) + try: + del call.result + except AttributeError: + pass + + # Convert unittest.SkipTest to pytest.skip. + # This covers explicit `raise unittest.SkipTest`. + unittest = sys.modules.get("unittest") + if unittest and call.excinfo and isinstance(call.excinfo.value, unittest.SkipTest): + excinfo = call.excinfo + call2 = CallInfo[None].from_call(lambda: skip(str(excinfo.value)), call.when) + call.excinfo = call2.excinfo + + +def _is_skipped(obj) -> bool: + """Return True if the given object has been marked with @unittest.skip.""" + return bool(getattr(obj, "__unittest_skip__", False)) + + +def pytest_configure() -> None: + """Register the TestCaseFunction class as an IReporter if twisted.trial is available.""" + if _get_twisted_version() is not TwistedVersion.NotInstalled: + from twisted.trial.itrial import IReporter + from zope.interface import classImplements + + classImplements(TestCaseFunction, IReporter) + + +class TwistedVersion(Enum): + """ + The Twisted version installed in the environment. + + We have different workarounds in place for different versions of Twisted. + """ + + # Twisted version 24 or prior. + Version24 = auto() + # Twisted version 25 or later. + Version25 = auto() + # Twisted version is not available. + NotInstalled = auto() + + +def _get_twisted_version() -> TwistedVersion: + # We need to check if "twisted.trial.unittest" is specifically present in sys.modules. + # This is because we intend to integrate with Trial only when it's actively running + # the test suite, but not needed when only other Twisted components are in use. + if "twisted.trial.unittest" not in sys.modules: + return TwistedVersion.NotInstalled + + import importlib.metadata + + import packaging.version + + version_str = importlib.metadata.version("twisted") + version = packaging.version.parse(version_str) + if version.major <= 24: + return TwistedVersion.Version24 + else: + return TwistedVersion.Version25 + + +# Name of the attribute in `twisted.python.Failure` instances that stores +# the `sys.exc_info()` tuple. +# See twisted.trial support in `pytest_runtest_protocol`. +TWISTED_RAW_EXCINFO_ATTR = "_twisted_raw_excinfo" + + +@hookimpl(wrapper=True) +def pytest_runtest_protocol(item: Item) -> Iterator[None]: + if _get_twisted_version() is TwistedVersion.Version24: + import twisted.python.failure as ut + + # Monkeypatch `Failure.__init__` to store the raw exception info. + original__init__ = ut.Failure.__init__ + + def store_raw_exception_info( + self, exc_value=None, exc_type=None, exc_tb=None, captureVars=None + ): # pragma: no cover + if exc_value is None: + raw_exc_info = sys.exc_info() + else: + if exc_type is None: + exc_type = type(exc_value) + if exc_tb is None: + exc_tb = sys.exc_info()[2] + raw_exc_info = (exc_type, exc_value, exc_tb) + setattr(self, TWISTED_RAW_EXCINFO_ATTR, tuple(raw_exc_info)) + try: + original__init__( + self, exc_value, exc_type, exc_tb, captureVars=captureVars + ) + except TypeError: # pragma: no cover + original__init__(self, exc_value, exc_type, exc_tb) + + with MonkeyPatch.context() as patcher: + patcher.setattr(ut.Failure, "__init__", store_raw_exception_info) + return (yield) + else: + return (yield) + + +def _handle_twisted_exc_info( + rawexcinfo: _SysExcInfoType | BaseException, +) -> _SysExcInfoType: + """ + Twisted passes a custom Failure instance to `addError()` instead of using `sys.exc_info()`. + Therefore, if `rawexcinfo` is a `Failure` instance, convert it into the equivalent `sys.exc_info()` tuple + as expected by pytest. + """ + twisted_version = _get_twisted_version() + if twisted_version is TwistedVersion.NotInstalled: + # Unfortunately, because we cannot import `twisted.python.failure` at the top of the file + # and use it in the signature, we need to use `type:ignore` here because we cannot narrow + # the type properly in the `if` statement above. + return rawexcinfo # type:ignore[return-value] + elif twisted_version is TwistedVersion.Version24: + # Twisted calls addError() passing its own classes (like `twisted.python.Failure`), which violates + # the `addError()` signature, so we extract the original `sys.exc_info()` tuple which is stored + # in the object. + if hasattr(rawexcinfo, TWISTED_RAW_EXCINFO_ATTR): + saved_exc_info = getattr(rawexcinfo, TWISTED_RAW_EXCINFO_ATTR) + # Delete the attribute from the original object to avoid leaks. + delattr(rawexcinfo, TWISTED_RAW_EXCINFO_ATTR) + return saved_exc_info # type:ignore[no-any-return] + return rawexcinfo # type:ignore[return-value] + elif twisted_version is TwistedVersion.Version25: + if isinstance(rawexcinfo, BaseException): + import twisted.python.failure + + if isinstance(rawexcinfo, twisted.python.failure.Failure): + tb = rawexcinfo.__traceback__ + if tb is None: + tb = sys.exc_info()[2] + return type(rawexcinfo.value), rawexcinfo.value, tb + + return rawexcinfo # type:ignore[return-value] + else: + # Ideally we would use assert_never() here, but it is not available in all Python versions + # we support, plus we do not require `type_extensions` currently. + assert False, f"Unexpected Twisted version: {twisted_version}" diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/unraisableexception.py b/Backend/venv/lib/python3.12/site-packages/_pytest/unraisableexception.py new file mode 100644 index 00000000..0faca36a --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/_pytest/unraisableexception.py @@ -0,0 +1,163 @@ +from __future__ import annotations + +import collections +from collections.abc import Callable +import functools +import gc +import sys +import traceback +from typing import NamedTuple +from typing import TYPE_CHECKING +import warnings + +from _pytest.config import Config +from _pytest.nodes import Item +from _pytest.stash import StashKey +from _pytest.tracemalloc import tracemalloc_message +import pytest + + +if TYPE_CHECKING: + pass + +if sys.version_info < (3, 11): + from exceptiongroup import ExceptionGroup + + +# This is a stash item and not a simple constant to allow pytester to override it. +gc_collect_iterations_key = StashKey[int]() + + +def gc_collect_harder(iterations: int) -> None: + for _ in range(iterations): + gc.collect() + + +class UnraisableMeta(NamedTuple): + msg: str + cause_msg: str + exc_value: BaseException | None + + +unraisable_exceptions: StashKey[collections.deque[UnraisableMeta | BaseException]] = ( + StashKey() +) + + +def collect_unraisable(config: Config) -> None: + pop_unraisable = config.stash[unraisable_exceptions].pop + errors: list[pytest.PytestUnraisableExceptionWarning | RuntimeError] = [] + meta = None + hook_error = None + try: + while True: + try: + meta = pop_unraisable() + except IndexError: + break + + if isinstance(meta, BaseException): + hook_error = RuntimeError("Failed to process unraisable exception") + hook_error.__cause__ = meta + errors.append(hook_error) + continue + + msg = meta.msg + try: + warnings.warn(pytest.PytestUnraisableExceptionWarning(msg)) + except pytest.PytestUnraisableExceptionWarning as e: + # This except happens when the warning is treated as an error (e.g. `-Werror`). + if meta.exc_value is not None: + # Exceptions have a better way to show the traceback, but + # warnings do not, so hide the traceback from the msg and + # set the cause so the traceback shows up in the right place. + e.args = (meta.cause_msg,) + e.__cause__ = meta.exc_value + errors.append(e) + + if len(errors) == 1: + raise errors[0] + if errors: + raise ExceptionGroup("multiple unraisable exception warnings", errors) + finally: + del errors, meta, hook_error + + +def cleanup( + *, config: Config, prev_hook: Callable[[sys.UnraisableHookArgs], object] +) -> None: + # A single collection doesn't necessarily collect everything. + # Constant determined experimentally by the Trio project. + gc_collect_iterations = config.stash.get(gc_collect_iterations_key, 5) + try: + try: + gc_collect_harder(gc_collect_iterations) + collect_unraisable(config) + finally: + sys.unraisablehook = prev_hook + finally: + del config.stash[unraisable_exceptions] + + +def unraisable_hook( + unraisable: sys.UnraisableHookArgs, + /, + *, + append: Callable[[UnraisableMeta | BaseException], object], +) -> None: + try: + # we need to compute these strings here as they might change after + # the unraisablehook finishes and before the metadata object is + # collected by a pytest hook + err_msg = ( + "Exception ignored in" if unraisable.err_msg is None else unraisable.err_msg + ) + summary = f"{err_msg}: {unraisable.object!r}" + traceback_message = "\n\n" + "".join( + traceback.format_exception( + unraisable.exc_type, + unraisable.exc_value, + unraisable.exc_traceback, + ) + ) + tracemalloc_tb = "\n" + tracemalloc_message(unraisable.object) + msg = summary + traceback_message + tracemalloc_tb + cause_msg = summary + tracemalloc_tb + + append( + UnraisableMeta( + msg=msg, + cause_msg=cause_msg, + exc_value=unraisable.exc_value, + ) + ) + except BaseException as e: + append(e) + # Raising this will cause the exception to be logged twice, once in our + # collect_unraisable and once by the unraisablehook calling machinery + # which is fine - this should never happen anyway and if it does + # it should probably be reported as a pytest bug. + raise + + +def pytest_configure(config: Config) -> None: + prev_hook = sys.unraisablehook + deque: collections.deque[UnraisableMeta | BaseException] = collections.deque() + config.stash[unraisable_exceptions] = deque + config.add_cleanup(functools.partial(cleanup, config=config, prev_hook=prev_hook)) + sys.unraisablehook = functools.partial(unraisable_hook, append=deque.append) + + +@pytest.hookimpl(trylast=True) +def pytest_runtest_setup(item: Item) -> None: + collect_unraisable(item.config) + + +@pytest.hookimpl(trylast=True) +def pytest_runtest_call(item: Item) -> None: + collect_unraisable(item.config) + + +@pytest.hookimpl(trylast=True) +def pytest_runtest_teardown(item: Item) -> None: + collect_unraisable(item.config) diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/warning_types.py b/Backend/venv/lib/python3.12/site-packages/_pytest/warning_types.py new file mode 100644 index 00000000..93071b4a --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/_pytest/warning_types.py @@ -0,0 +1,172 @@ +from __future__ import annotations + +import dataclasses +import inspect +from types import FunctionType +from typing import Any +from typing import final +from typing import Generic +from typing import TypeVar +import warnings + + +class PytestWarning(UserWarning): + """Base class for all warnings emitted by pytest.""" + + __module__ = "pytest" + + +@final +class PytestAssertRewriteWarning(PytestWarning): + """Warning emitted by the pytest assert rewrite module.""" + + __module__ = "pytest" + + +@final +class PytestCacheWarning(PytestWarning): + """Warning emitted by the cache plugin in various situations.""" + + __module__ = "pytest" + + +@final +class PytestConfigWarning(PytestWarning): + """Warning emitted for configuration issues.""" + + __module__ = "pytest" + + +@final +class PytestCollectionWarning(PytestWarning): + """Warning emitted when pytest is not able to collect a file or symbol in a module.""" + + __module__ = "pytest" + + +class PytestDeprecationWarning(PytestWarning, DeprecationWarning): + """Warning class for features that will be removed in a future version.""" + + __module__ = "pytest" + + +class PytestRemovedIn9Warning(PytestDeprecationWarning): + """Warning class for features that will be removed in pytest 9.""" + + __module__ = "pytest" + + +class PytestRemovedIn10Warning(PytestDeprecationWarning): + """Warning class for features that will be removed in pytest 10.""" + + __module__ = "pytest" + + +@final +class PytestExperimentalApiWarning(PytestWarning, FutureWarning): + """Warning category used to denote experiments in pytest. + + Use sparingly as the API might change or even be removed completely in a + future version. + """ + + __module__ = "pytest" + + @classmethod + def simple(cls, apiname: str) -> PytestExperimentalApiWarning: + return cls(f"{apiname} is an experimental api that may change over time") + + +@final +class PytestReturnNotNoneWarning(PytestWarning): + """ + Warning emitted when a test function returns a value other than ``None``. + + See :ref:`return-not-none` for details. + """ + + __module__ = "pytest" + + +@final +class PytestUnknownMarkWarning(PytestWarning): + """Warning emitted on use of unknown markers. + + See :ref:`mark` for details. + """ + + __module__ = "pytest" + + +@final +class PytestUnraisableExceptionWarning(PytestWarning): + """An unraisable exception was reported. + + Unraisable exceptions are exceptions raised in :meth:`__del__ ` + implementations and similar situations when the exception cannot be raised + as normal. + """ + + __module__ = "pytest" + + +@final +class PytestUnhandledThreadExceptionWarning(PytestWarning): + """An unhandled exception occurred in a :class:`~threading.Thread`. + + Such exceptions don't propagate normally. + """ + + __module__ = "pytest" + + +_W = TypeVar("_W", bound=PytestWarning) + + +@final +@dataclasses.dataclass +class UnformattedWarning(Generic[_W]): + """A warning meant to be formatted during runtime. + + This is used to hold warnings that need to format their message at runtime, + as opposed to a direct message. + """ + + category: type[_W] + template: str + + def format(self, **kwargs: Any) -> _W: + """Return an instance of the warning category, formatted with given kwargs.""" + return self.category(self.template.format(**kwargs)) + + +@final +class PytestFDWarning(PytestWarning): + """When the lsof plugin finds leaked fds.""" + + __module__ = "pytest" + + +def warn_explicit_for(method: FunctionType, message: PytestWarning) -> None: + """ + Issue the warning :param:`message` for the definition of the given :param:`method` + + this helps to log warnings for functions defined prior to finding an issue with them + (like hook wrappers being marked in a legacy mechanism) + """ + lineno = method.__code__.co_firstlineno + filename = inspect.getfile(method) + module = method.__module__ + mod_globals = method.__globals__ + try: + warnings.warn_explicit( + message, + type(message), + filename=filename, + module=module, + registry=mod_globals.setdefault("__warningregistry__", {}), + lineno=lineno, + ) + except Warning as w: + # If warnings are errors (e.g. -Werror), location information gets lost, so we add it to the message. + raise type(w)(f"{w}\n at {filename}:{lineno}") from None diff --git a/Backend/venv/lib/python3.12/site-packages/_pytest/warnings.py b/Backend/venv/lib/python3.12/site-packages/_pytest/warnings.py new file mode 100644 index 00000000..1dbf0025 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/_pytest/warnings.py @@ -0,0 +1,151 @@ +# mypy: allow-untyped-defs +from __future__ import annotations + +from collections.abc import Generator +from contextlib import contextmanager +from contextlib import ExitStack +import sys +from typing import Literal +import warnings + +from _pytest.config import apply_warning_filters +from _pytest.config import Config +from _pytest.config import parse_warning_filter +from _pytest.main import Session +from _pytest.nodes import Item +from _pytest.terminal import TerminalReporter +from _pytest.tracemalloc import tracemalloc_message +import pytest + + +@contextmanager +def catch_warnings_for_item( + config: Config, + ihook, + when: Literal["config", "collect", "runtest"], + item: Item | None, + *, + record: bool = True, +) -> Generator[None]: + """Context manager that catches warnings generated in the contained execution block. + + ``item`` can be None if we are not in the context of an item execution. + + Each warning captured triggers the ``pytest_warning_recorded`` hook. + """ + config_filters = config.getini("filterwarnings") + cmdline_filters = config.known_args_namespace.pythonwarnings or [] + with warnings.catch_warnings(record=record) as log: + if not sys.warnoptions: + # If user is not explicitly configuring warning filters, show deprecation warnings by default (#2908). + warnings.filterwarnings("always", category=DeprecationWarning) + warnings.filterwarnings("always", category=PendingDeprecationWarning) + + warnings.filterwarnings("error", category=pytest.PytestRemovedIn9Warning) + + apply_warning_filters(config_filters, cmdline_filters) + + # apply filters from "filterwarnings" marks + nodeid = "" if item is None else item.nodeid + if item is not None: + for mark in item.iter_markers(name="filterwarnings"): + for arg in mark.args: + warnings.filterwarnings(*parse_warning_filter(arg, escape=False)) + + try: + yield + finally: + if record: + # mypy can't infer that record=True means log is not None; help it. + assert log is not None + + for warning_message in log: + ihook.pytest_warning_recorded.call_historic( + kwargs=dict( + warning_message=warning_message, + nodeid=nodeid, + when=when, + location=None, + ) + ) + + +def warning_record_to_str(warning_message: warnings.WarningMessage) -> str: + """Convert a warnings.WarningMessage to a string.""" + return warnings.formatwarning( + str(warning_message.message), + warning_message.category, + warning_message.filename, + warning_message.lineno, + warning_message.line, + ) + tracemalloc_message(warning_message.source) + + +@pytest.hookimpl(wrapper=True, tryfirst=True) +def pytest_runtest_protocol(item: Item) -> Generator[None, object, object]: + with catch_warnings_for_item( + config=item.config, ihook=item.ihook, when="runtest", item=item + ): + return (yield) + + +@pytest.hookimpl(wrapper=True, tryfirst=True) +def pytest_collection(session: Session) -> Generator[None, object, object]: + config = session.config + with catch_warnings_for_item( + config=config, ihook=config.hook, when="collect", item=None + ): + return (yield) + + +@pytest.hookimpl(wrapper=True) +def pytest_terminal_summary( + terminalreporter: TerminalReporter, +) -> Generator[None]: + config = terminalreporter.config + with catch_warnings_for_item( + config=config, ihook=config.hook, when="config", item=None + ): + return (yield) + + +@pytest.hookimpl(wrapper=True) +def pytest_sessionfinish(session: Session) -> Generator[None]: + config = session.config + with catch_warnings_for_item( + config=config, ihook=config.hook, when="config", item=None + ): + return (yield) + + +@pytest.hookimpl(wrapper=True) +def pytest_load_initial_conftests( + early_config: Config, +) -> Generator[None]: + with catch_warnings_for_item( + config=early_config, ihook=early_config.hook, when="config", item=None + ): + return (yield) + + +def pytest_configure(config: Config) -> None: + with ExitStack() as stack: + stack.enter_context( + catch_warnings_for_item( + config=config, + ihook=config.hook, + when="config", + item=None, + # this disables recording because the terminalreporter has + # finished by the time it comes to reporting logged warnings + # from the end of config cleanup. So for now, this is only + # useful for setting a warning filter with an 'error' action. + record=False, + ) + ) + config.addinivalue_line( + "markers", + "filterwarnings(warning): add a warning filter to the given test. " + "see https://docs.pytest.org/en/stable/how-to/capture-warnings.html#pytest-mark-filterwarnings ", + ) + config.add_cleanup(stack.pop_all().close) diff --git a/Backend/venv/lib/python3.12/site-packages/anyio-3.7.1.dist-info/RECORD b/Backend/venv/lib/python3.12/site-packages/anyio-3.7.1.dist-info/RECORD index fd8454f0..b98083b2 100644 --- a/Backend/venv/lib/python3.12/site-packages/anyio-3.7.1.dist-info/RECORD +++ b/Backend/venv/lib/python3.12/site-packages/anyio-3.7.1.dist-info/RECORD @@ -2,6 +2,7 @@ anyio-3.7.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvF anyio-3.7.1.dist-info/LICENSE,sha256=U2GsncWPLvX9LpsJxoKXwX8ElQkJu8gCO9uC6s8iwrA,1081 anyio-3.7.1.dist-info/METADATA,sha256=mOhfXPB7qKVQh3dUtp2NgLysa10jHWeDBNnRg-93A_c,4708 anyio-3.7.1.dist-info/RECORD,, +anyio-3.7.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 anyio-3.7.1.dist-info/WHEEL,sha256=pkctZYzUS4AYVn6dJ-7367OJZivF2e8RA9b_ZBjif18,92 anyio-3.7.1.dist-info/entry_points.txt,sha256=_d6Yu6uiaZmNe0CydowirE9Cmg7zUL2g08tQpoS3Qvc,39 anyio-3.7.1.dist-info/top_level.txt,sha256=QglSMiWX8_5dpoVAEIHdEYzvqFMdSYWmCj6tYw2ITkQ,6 diff --git a/Backend/venv/lib/python3.12/site-packages/anyio-3.7.1.dist-info/REQUESTED b/Backend/venv/lib/python3.12/site-packages/anyio-3.7.1.dist-info/REQUESTED new file mode 100644 index 00000000..e69de29b diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/__pycache__/__init__.cpython-312-pytest-9.0.1.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/__pycache__/__init__.cpython-312-pytest-9.0.1.pyc new file mode 100644 index 00000000..90dc41d6 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/anyio/__pycache__/__init__.cpython-312-pytest-9.0.1.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/__pycache__/__init__.cpython-312.pyc index 17ad1202..eb02f314 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/anyio/__pycache__/__init__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/anyio/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/__pycache__/from_thread.cpython-312-pytest-9.0.1.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/__pycache__/from_thread.cpython-312-pytest-9.0.1.pyc new file mode 100644 index 00000000..b694e2de Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/anyio/__pycache__/from_thread.cpython-312-pytest-9.0.1.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/__pycache__/from_thread.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/__pycache__/from_thread.cpython-312.pyc index 3855481d..11032773 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/anyio/__pycache__/from_thread.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/anyio/__pycache__/from_thread.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/__pycache__/lowlevel.cpython-312-pytest-9.0.1.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/__pycache__/lowlevel.cpython-312-pytest-9.0.1.pyc new file mode 100644 index 00000000..b67c3cd7 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/anyio/__pycache__/lowlevel.cpython-312-pytest-9.0.1.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/__pycache__/lowlevel.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/__pycache__/lowlevel.cpython-312.pyc index 34cf225e..e85045a7 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/anyio/__pycache__/lowlevel.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/anyio/__pycache__/lowlevel.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/__pycache__/pytest_plugin.cpython-312-pytest-9.0.1.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/__pycache__/pytest_plugin.cpython-312-pytest-9.0.1.pyc new file mode 100644 index 00000000..c3948cbe Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/anyio/__pycache__/pytest_plugin.cpython-312-pytest-9.0.1.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/__pycache__/pytest_plugin.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/__pycache__/pytest_plugin.cpython-312.pyc index f2d8887b..4f100e4d 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/anyio/__pycache__/pytest_plugin.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/anyio/__pycache__/pytest_plugin.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/__pycache__/to_process.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/__pycache__/to_process.cpython-312.pyc index 833f2637..d8f4f3b9 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/anyio/__pycache__/to_process.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/anyio/__pycache__/to_process.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/__pycache__/to_thread.cpython-312-pytest-9.0.1.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/__pycache__/to_thread.cpython-312-pytest-9.0.1.pyc new file mode 100644 index 00000000..a7d90fbe Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/anyio/__pycache__/to_thread.cpython-312-pytest-9.0.1.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/__pycache__/to_thread.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/__pycache__/to_thread.cpython-312.pyc index da4466ec..24dcba9b 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/anyio/__pycache__/to_thread.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/anyio/__pycache__/to_thread.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/_backends/__pycache__/__init__.cpython-312-pytest-9.0.1.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/_backends/__pycache__/__init__.cpython-312-pytest-9.0.1.pyc new file mode 100644 index 00000000..2ed58217 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/anyio/_backends/__pycache__/__init__.cpython-312-pytest-9.0.1.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/_backends/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/_backends/__pycache__/__init__.cpython-312.pyc index 88d4ad55..2ed58217 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/anyio/_backends/__pycache__/__init__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/anyio/_backends/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/_backends/__pycache__/_asyncio.cpython-312-pytest-9.0.1.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/_backends/__pycache__/_asyncio.cpython-312-pytest-9.0.1.pyc new file mode 100644 index 00000000..e1ccda23 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/anyio/_backends/__pycache__/_asyncio.cpython-312-pytest-9.0.1.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/_backends/__pycache__/_asyncio.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/_backends/__pycache__/_asyncio.cpython-312.pyc index 19f93f91..4e8f26b3 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/anyio/_backends/__pycache__/_asyncio.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/anyio/_backends/__pycache__/_asyncio.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/_backends/__pycache__/_trio.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/_backends/__pycache__/_trio.cpython-312.pyc index 19edf9ad..815acc21 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/anyio/_backends/__pycache__/_trio.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/anyio/_backends/__pycache__/_trio.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/__init__.cpython-312-pytest-9.0.1.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/__init__.cpython-312-pytest-9.0.1.pyc new file mode 100644 index 00000000..78b221cf Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/__init__.cpython-312-pytest-9.0.1.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/__init__.cpython-312.pyc index fa6a3f8f..78b221cf 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/__init__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_compat.cpython-312-pytest-9.0.1.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_compat.cpython-312-pytest-9.0.1.pyc new file mode 100644 index 00000000..76e721ba Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_compat.cpython-312-pytest-9.0.1.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_compat.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_compat.cpython-312.pyc index 14389ba2..5fc25e08 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_compat.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_compat.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_eventloop.cpython-312-pytest-9.0.1.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_eventloop.cpython-312-pytest-9.0.1.pyc new file mode 100644 index 00000000..a0905f00 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_eventloop.cpython-312-pytest-9.0.1.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_eventloop.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_eventloop.cpython-312.pyc index 8e543245..ce7bb6b7 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_eventloop.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_eventloop.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_exceptions.cpython-312-pytest-9.0.1.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_exceptions.cpython-312-pytest-9.0.1.pyc new file mode 100644 index 00000000..d9d6a869 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_exceptions.cpython-312-pytest-9.0.1.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_exceptions.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_exceptions.cpython-312.pyc index 8665bb5c..40f6fd14 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_exceptions.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_exceptions.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_fileio.cpython-312-pytest-9.0.1.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_fileio.cpython-312-pytest-9.0.1.pyc new file mode 100644 index 00000000..92566c4f Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_fileio.cpython-312-pytest-9.0.1.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_fileio.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_fileio.cpython-312.pyc index 278cac6b..810a87ff 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_fileio.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_fileio.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_resources.cpython-312-pytest-9.0.1.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_resources.cpython-312-pytest-9.0.1.pyc new file mode 100644 index 00000000..bcccb4bb Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_resources.cpython-312-pytest-9.0.1.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_resources.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_resources.cpython-312.pyc index 170979dd..a3e340b4 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_resources.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_resources.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_signals.cpython-312-pytest-9.0.1.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_signals.cpython-312-pytest-9.0.1.pyc new file mode 100644 index 00000000..2c6ddb7e Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_signals.cpython-312-pytest-9.0.1.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_signals.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_signals.cpython-312.pyc index 4bc28dfc..a6aa1461 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_signals.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_signals.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_sockets.cpython-312-pytest-9.0.1.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_sockets.cpython-312-pytest-9.0.1.pyc new file mode 100644 index 00000000..85ee7f5b Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_sockets.cpython-312-pytest-9.0.1.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_sockets.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_sockets.cpython-312.pyc index 6d3aca41..72544100 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_sockets.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_sockets.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_streams.cpython-312-pytest-9.0.1.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_streams.cpython-312-pytest-9.0.1.pyc new file mode 100644 index 00000000..7ec8f9de Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_streams.cpython-312-pytest-9.0.1.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_streams.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_streams.cpython-312.pyc index 4e43f2ec..63b46bf3 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_streams.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_streams.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_subprocesses.cpython-312-pytest-9.0.1.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_subprocesses.cpython-312-pytest-9.0.1.pyc new file mode 100644 index 00000000..656373f2 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_subprocesses.cpython-312-pytest-9.0.1.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_subprocesses.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_subprocesses.cpython-312.pyc index db0365ec..6adfeea3 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_subprocesses.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_subprocesses.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_synchronization.cpython-312-pytest-9.0.1.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_synchronization.cpython-312-pytest-9.0.1.pyc new file mode 100644 index 00000000..faac890b Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_synchronization.cpython-312-pytest-9.0.1.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_synchronization.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_synchronization.cpython-312.pyc index 7742b8e8..05ab1255 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_synchronization.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_synchronization.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_tasks.cpython-312-pytest-9.0.1.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_tasks.cpython-312-pytest-9.0.1.pyc new file mode 100644 index 00000000..bb6564e1 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_tasks.cpython-312-pytest-9.0.1.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_tasks.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_tasks.cpython-312.pyc index 7440649b..471767bc 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_tasks.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_tasks.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_testing.cpython-312-pytest-9.0.1.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_testing.cpython-312-pytest-9.0.1.pyc new file mode 100644 index 00000000..31d01802 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_testing.cpython-312-pytest-9.0.1.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_testing.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_testing.cpython-312.pyc index 9656ffca..a31b4541 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_testing.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_testing.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_typedattr.cpython-312-pytest-9.0.1.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_typedattr.cpython-312-pytest-9.0.1.pyc new file mode 100644 index 00000000..7cdd2aa8 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_typedattr.cpython-312-pytest-9.0.1.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_typedattr.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_typedattr.cpython-312.pyc index e0477c89..cb74b210 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_typedattr.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_typedattr.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/__init__.cpython-312-pytest-9.0.1.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/__init__.cpython-312-pytest-9.0.1.pyc new file mode 100644 index 00000000..51da0524 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/__init__.cpython-312-pytest-9.0.1.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/__init__.cpython-312.pyc index 39c30784..1724b5b4 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/__init__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_resources.cpython-312-pytest-9.0.1.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_resources.cpython-312-pytest-9.0.1.pyc new file mode 100644 index 00000000..ad375c7b Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_resources.cpython-312-pytest-9.0.1.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_resources.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_resources.cpython-312.pyc index 994e724e..d43b3259 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_resources.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_resources.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_sockets.cpython-312-pytest-9.0.1.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_sockets.cpython-312-pytest-9.0.1.pyc new file mode 100644 index 00000000..62e4aa5a Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_sockets.cpython-312-pytest-9.0.1.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_sockets.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_sockets.cpython-312.pyc index 36bdc863..b0756337 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_sockets.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_sockets.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_streams.cpython-312-pytest-9.0.1.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_streams.cpython-312-pytest-9.0.1.pyc new file mode 100644 index 00000000..d1508fc6 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_streams.cpython-312-pytest-9.0.1.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_streams.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_streams.cpython-312.pyc index e2e8cde3..c6035583 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_streams.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_streams.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_subprocesses.cpython-312-pytest-9.0.1.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_subprocesses.cpython-312-pytest-9.0.1.pyc new file mode 100644 index 00000000..77f61ccb Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_subprocesses.cpython-312-pytest-9.0.1.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_subprocesses.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_subprocesses.cpython-312.pyc index 2002f8a1..0938df15 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_subprocesses.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_subprocesses.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_tasks.cpython-312-pytest-9.0.1.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_tasks.cpython-312-pytest-9.0.1.pyc new file mode 100644 index 00000000..5f809d2c Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_tasks.cpython-312-pytest-9.0.1.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_tasks.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_tasks.cpython-312.pyc index 2bb8cbae..de853864 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_tasks.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_tasks.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_testing.cpython-312-pytest-9.0.1.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_testing.cpython-312-pytest-9.0.1.pyc new file mode 100644 index 00000000..346e9df3 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_testing.cpython-312-pytest-9.0.1.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_testing.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_testing.cpython-312.pyc index 0736c2ca..7e9f0c8d 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_testing.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_testing.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/__init__.cpython-312-pytest-9.0.1.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/__init__.cpython-312-pytest-9.0.1.pyc new file mode 100644 index 00000000..acbc05d7 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/__init__.cpython-312-pytest-9.0.1.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/__init__.cpython-312.pyc index d80fde0c..acbc05d7 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/__init__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/buffered.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/buffered.cpython-312.pyc index b8f382c1..216e3abf 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/buffered.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/buffered.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/file.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/file.cpython-312.pyc index 7adf87bf..eb41e5b6 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/file.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/file.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/memory.cpython-312-pytest-9.0.1.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/memory.cpython-312-pytest-9.0.1.pyc new file mode 100644 index 00000000..9e57e9d2 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/memory.cpython-312-pytest-9.0.1.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/memory.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/memory.cpython-312.pyc index f31872f1..c442d585 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/memory.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/memory.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/stapled.cpython-312-pytest-9.0.1.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/stapled.cpython-312-pytest-9.0.1.pyc new file mode 100644 index 00000000..52c89f97 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/stapled.cpython-312-pytest-9.0.1.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/stapled.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/stapled.cpython-312.pyc index f01343e0..e34f74a5 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/stapled.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/stapled.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/text.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/text.cpython-312.pyc index dfe57d2d..e768cd8d 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/text.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/text.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/tls.cpython-312-pytest-9.0.1.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/tls.cpython-312-pytest-9.0.1.pyc new file mode 100644 index 00000000..5f27495c Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/tls.cpython-312-pytest-9.0.1.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/tls.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/tls.cpython-312.pyc index 6be85123..d69a02d9 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/tls.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/tls.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/certifi/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/certifi/__pycache__/__init__.cpython-312.pyc index 2eadcee7..eece9f2f 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/certifi/__pycache__/__init__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/certifi/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/certifi/__pycache__/__main__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/certifi/__pycache__/__main__.cpython-312.pyc index b1959811..3ba889e0 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/certifi/__pycache__/__main__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/certifi/__pycache__/__main__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/certifi/__pycache__/core.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/certifi/__pycache__/core.cpython-312.pyc index e77d8ea9..c50579ca 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/certifi/__pycache__/core.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/certifi/__pycache__/core.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/INSTALLER b/Backend/venv/lib/python3.12/site-packages/coverage-7.12.0.dist-info/INSTALLER similarity index 100% rename from Backend/venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/INSTALLER rename to Backend/venv/lib/python3.12/site-packages/coverage-7.12.0.dist-info/INSTALLER diff --git a/Backend/venv/lib/python3.12/site-packages/coverage-7.12.0.dist-info/METADATA b/Backend/venv/lib/python3.12/site-packages/coverage-7.12.0.dist-info/METADATA new file mode 100644 index 00000000..949a4344 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/coverage-7.12.0.dist-info/METADATA @@ -0,0 +1,221 @@ +Metadata-Version: 2.4 +Name: coverage +Version: 7.12.0 +Summary: Code coverage measurement for Python +Home-page: https://github.com/coveragepy/coveragepy +Author: Ned Batchelder and 245 others +Author-email: ned@nedbatchelder.com +License: Apache-2.0 +Project-URL: Documentation, https://coverage.readthedocs.io/en/7.12.0 +Project-URL: Funding, https://tidelift.com/subscription/pkg/pypi-coverage?utm_source=pypi-coverage&utm_medium=referral&utm_campaign=pypi +Project-URL: Issues, https://github.com/coveragepy/coveragepy/issues +Project-URL: Mastodon, https://hachyderm.io/@coveragepy +Project-URL: Mastodon (nedbat), https://hachyderm.io/@nedbat +Keywords: code coverage testing +Classifier: Environment :: Console +Classifier: Intended Audience :: Developers +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Programming Language :: Python :: 3.14 +Classifier: Programming Language :: Python :: 3.15 +Classifier: Programming Language :: Python :: Free Threading :: 3 - Stable +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Software Development :: Quality Assurance +Classifier: Topic :: Software Development :: Testing +Classifier: Development Status :: 5 - Production/Stable +Requires-Python: >=3.10 +Description-Content-Type: text/x-rst +License-File: LICENSE.txt +Provides-Extra: toml +Requires-Dist: tomli; python_full_version <= "3.11.0a6" and extra == "toml" +Dynamic: author +Dynamic: author-email +Dynamic: classifier +Dynamic: description +Dynamic: description-content-type +Dynamic: home-page +Dynamic: keywords +Dynamic: license +Dynamic: license-file +Dynamic: project-url +Dynamic: provides-extra +Dynamic: requires-python +Dynamic: summary + +.. Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0 +.. For details: https://github.com/coveragepy/coveragepy/blob/main/NOTICE.txt + +=========== +Coverage.py +=========== + +Code coverage measurement for Python. + +.. image:: https://raw.githubusercontent.com/vshymanskyy/StandWithUkraine/main/banner2-direct.svg + :target: https://vshymanskyy.github.io/StandWithUkraine + :alt: Stand with Ukraine + +------------- + +| |kit| |license| |versions| +| |test-status| |quality-status| |docs| |metacov| +| |tidelift| |sponsor| |stars| |mastodon-coveragepy| |mastodon-nedbat| + |bluesky-nedbat| + +Coverage.py measures code coverage, typically during test execution. It uses +the code analysis tools and tracing hooks provided in the Python standard +library to determine which lines are executable, and which have been executed. + +Coverage.py runs on these versions of Python: + +.. PYVERSIONS + +* Python 3.10 through 3.15 alpha, including free-threading. +* PyPy3 versions 3.10 and 3.11. + +Documentation is on `Read the Docs`_. Code repository and issue tracker are on +`GitHub`_. + +.. _Read the Docs: https://coverage.readthedocs.io/en/7.12.0/ +.. _GitHub: https://github.com/coveragepy/coveragepy + +**New in 7.x:** +``[run] patch`` setting; +``--save-signal`` option; +``[run] core`` setting; +``[run] source_dirs`` setting; +``Coverage.branch_stats()``; +multi-line exclusion patterns; +function/class reporting; +experimental support for sys.monitoring; +dropped support for Python up to 3.9; +added ``Coverage.collect()`` context manager; +improved data combining; +``[run] exclude_also`` setting; +``report --format=``; +type annotations. + +**New in 6.x:** +dropped support for Python 2.7, 3.5, and 3.6; +write data on SIGTERM; +added support for 3.10 match/case statements. + + +For Enterprise +-------------- + +.. |tideliftlogo| image:: https://nedbatchelder.com/pix/Tidelift_Logo_small.png + :alt: Tidelift + :target: https://tidelift.com/subscription/pkg/pypi-coverage?utm_source=pypi-coverage&utm_medium=referral&utm_campaign=readme + +.. list-table:: + :widths: 10 100 + + * - |tideliftlogo| + - `Available as part of the Tidelift Subscription. `_ + Coverage and thousands of other packages are working with + Tidelift to deliver one enterprise subscription that covers all of the open + source you use. If you want the flexibility of open source and the confidence + of commercial-grade software, this is for you. + `Learn more. `_ + + +Getting Started +--------------- + +Looking to run ``coverage`` on your test suite? See the `Quick Start section`_ +of the docs. + +.. _Quick Start section: https://coverage.readthedocs.io/en/7.12.0/#quick-start + + +Change history +-------------- + +The complete history of changes is on the `change history page`_. + +.. _change history page: https://coverage.readthedocs.io/en/7.12.0/changes.html + + +Code of Conduct +--------------- + +Everyone participating in the coverage.py project is expected to treat other +people with respect and to follow the guidelines articulated in the `Python +Community Code of Conduct`_. + +.. _Python Community Code of Conduct: https://www.python.org/psf/codeofconduct/ + + +Contributing +------------ + +Found a bug? Want to help improve the code or documentation? See the +`Contributing section`_ of the docs. + +.. _Contributing section: https://coverage.readthedocs.io/en/7.12.0/contributing.html + + +Security +-------- + +To report a security vulnerability, please use the `Tidelift security +contact`_. Tidelift will coordinate the fix and disclosure. + +.. _Tidelift security contact: https://tidelift.com/security + + +License +------- + +Licensed under the `Apache 2.0 License`_. For details, see `NOTICE.txt`_. + +.. _Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 +.. _NOTICE.txt: https://github.com/coveragepy/coveragepy/blob/main/NOTICE.txt + + +.. |test-status| image:: https://github.com/coveragepy/coveragepy/actions/workflows/testsuite.yml/badge.svg?branch=main&event=push + :target: https://github.com/coveragepy/coveragepy/actions/workflows/testsuite.yml + :alt: Test suite status +.. |quality-status| image:: https://github.com/coveragepy/coveragepy/actions/workflows/quality.yml/badge.svg?branch=main&event=push + :target: https://github.com/coveragepy/coveragepy/actions/workflows/quality.yml + :alt: Quality check status +.. |docs| image:: https://readthedocs.org/projects/coverage/badge/?version=latest&style=flat + :target: https://coverage.readthedocs.io/en/7.12.0/ + :alt: Documentation +.. |kit| image:: https://img.shields.io/pypi/v/coverage + :target: https://pypi.org/project/coverage/ + :alt: PyPI status +.. |versions| image:: https://img.shields.io/pypi/pyversions/coverage.svg?logo=python&logoColor=FBE072 + :target: https://pypi.org/project/coverage/ + :alt: Python versions supported +.. |license| image:: https://img.shields.io/pypi/l/coverage.svg + :target: https://github.com/coveragepy/coveragepy/blob/main/LICENSE.txt + :alt: License +.. |metacov| image:: https://img.shields.io/endpoint?url=https://gist.githubusercontent.com/nedbat/8c6980f77988a327348f9b02bbaf67f5/raw/metacov.json + :target: https://coveragepy.github.io/metacov-reports/latest.html + :alt: Coverage reports +.. |tidelift| image:: https://tidelift.com/badges/package/pypi/coverage + :target: https://tidelift.com/subscription/pkg/pypi-coverage?utm_source=pypi-coverage&utm_medium=referral&utm_campaign=readme + :alt: Tidelift +.. |stars| image:: https://img.shields.io/github/stars/coveragepy/coveragepy.svg?logo=github&style=flat + :target: https://github.com/coveragepy/coveragepy/stargazers + :alt: GitHub stars +.. |mastodon-nedbat| image:: https://img.shields.io/badge/dynamic/json?style=flat&labelColor=450657&logo=mastodon&logoColor=ffffff&label=@nedbat&query=followers_count&url=https%3A%2F%2Fhachyderm.io%2Fapi%2Fv1%2Faccounts%2Flookup%3Facct=nedbat + :target: https://hachyderm.io/@nedbat + :alt: nedbat on Mastodon +.. |mastodon-coveragepy| image:: https://img.shields.io/badge/dynamic/json?style=flat&labelColor=450657&logo=mastodon&logoColor=ffffff&label=@coveragepy&query=followers_count&url=https%3A%2F%2Fhachyderm.io%2Fapi%2Fv1%2Faccounts%2Flookup%3Facct=coveragepy + :target: https://hachyderm.io/@coveragepy + :alt: coveragepy on Mastodon +.. |bluesky-nedbat| image:: https://img.shields.io/badge/dynamic/json?style=flat&color=96a3b0&labelColor=3686f7&logo=icloud&logoColor=white&label=@nedbat&url=https%3A%2F%2Fpublic.api.bsky.app%2Fxrpc%2Fapp.bsky.actor.getProfile%3Factor=nedbat.com&query=followersCount + :target: https://bsky.app/profile/nedbat.com + :alt: nedbat on Bluesky +.. |sponsor| image:: https://img.shields.io/badge/%E2%9D%A4-Sponsor%20me-brightgreen?style=flat&logo=GitHub + :target: https://github.com/sponsors/nedbat + :alt: Sponsor me on GitHub diff --git a/Backend/venv/lib/python3.12/site-packages/coverage-7.12.0.dist-info/RECORD b/Backend/venv/lib/python3.12/site-packages/coverage-7.12.0.dist-info/RECORD new file mode 100644 index 00000000..57c45d30 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/coverage-7.12.0.dist-info/RECORD @@ -0,0 +1,106 @@ +../../../bin/coverage,sha256=-a_nmUwoLehtytdxHAGo0yFcyFc5cF3jJ6fLex4VOy8,227 +../../../bin/coverage-3.12,sha256=-a_nmUwoLehtytdxHAGo0yFcyFc5cF3jJ6fLex4VOy8,227 +../../../bin/coverage3,sha256=-a_nmUwoLehtytdxHAGo0yFcyFc5cF3jJ6fLex4VOy8,227 +coverage-7.12.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +coverage-7.12.0.dist-info/METADATA,sha256=PGD12wCXYeynkpPbVge35bwwiNBgbO6zkLCeeAozPuE,9074 +coverage-7.12.0.dist-info/RECORD,, +coverage-7.12.0.dist-info/WHEEL,sha256=mX4U4odf6w47aVjwZUmTYd1MF9BbrhVLKlaWSvZwHEk,186 +coverage-7.12.0.dist-info/entry_points.txt,sha256=s7x_4Bg6sI_AjEov0yLrWDOVR__vCWpFoIGw-MZk2qA,123 +coverage-7.12.0.dist-info/licenses/LICENSE.txt,sha256=DVQuDIgE45qn836wDaWnYhSdxoLXgpRRKH4RuTjpRZQ,10174 +coverage-7.12.0.dist-info/top_level.txt,sha256=BjhyiIvusb5OJkqCXjRncTF3soKF-mDOby-hxkWwwv0,9 +coverage/__init__.py,sha256=deRlSPNGXQa-6Mr9q3FpSXvS51dc-xpHxdLS58TDE3k,1065 +coverage/__main__.py,sha256=rAq5mnzJvTfjnZxufsY-YoKZkHM81vdhkUsAmOU4wt8,297 +coverage/__pycache__/__init__.cpython-312.pyc,, +coverage/__pycache__/__main__.cpython-312.pyc,, +coverage/__pycache__/annotate.cpython-312.pyc,, +coverage/__pycache__/bytecode.cpython-312.pyc,, +coverage/__pycache__/cmdline.cpython-312.pyc,, +coverage/__pycache__/collector.cpython-312.pyc,, +coverage/__pycache__/config.cpython-312.pyc,, +coverage/__pycache__/context.cpython-312.pyc,, +coverage/__pycache__/control.cpython-312.pyc,, +coverage/__pycache__/core.cpython-312.pyc,, +coverage/__pycache__/data.cpython-312.pyc,, +coverage/__pycache__/debug.cpython-312.pyc,, +coverage/__pycache__/disposition.cpython-312.pyc,, +coverage/__pycache__/env.cpython-312.pyc,, +coverage/__pycache__/exceptions.cpython-312.pyc,, +coverage/__pycache__/execfile.cpython-312.pyc,, +coverage/__pycache__/files.cpython-312.pyc,, +coverage/__pycache__/html.cpython-312.pyc,, +coverage/__pycache__/inorout.cpython-312.pyc,, +coverage/__pycache__/jsonreport.cpython-312.pyc,, +coverage/__pycache__/lcovreport.cpython-312.pyc,, +coverage/__pycache__/misc.cpython-312.pyc,, +coverage/__pycache__/multiproc.cpython-312.pyc,, +coverage/__pycache__/numbits.cpython-312.pyc,, +coverage/__pycache__/parser.cpython-312.pyc,, +coverage/__pycache__/patch.cpython-312.pyc,, +coverage/__pycache__/phystokens.cpython-312.pyc,, +coverage/__pycache__/plugin.cpython-312.pyc,, +coverage/__pycache__/plugin_support.cpython-312.pyc,, +coverage/__pycache__/python.cpython-312.pyc,, +coverage/__pycache__/pytracer.cpython-312.pyc,, +coverage/__pycache__/regions.cpython-312.pyc,, +coverage/__pycache__/report.cpython-312.pyc,, +coverage/__pycache__/report_core.cpython-312.pyc,, +coverage/__pycache__/results.cpython-312.pyc,, +coverage/__pycache__/sqldata.cpython-312.pyc,, +coverage/__pycache__/sqlitedb.cpython-312.pyc,, +coverage/__pycache__/sysmon.cpython-312.pyc,, +coverage/__pycache__/templite.cpython-312.pyc,, +coverage/__pycache__/tomlconfig.cpython-312.pyc,, +coverage/__pycache__/types.cpython-312.pyc,, +coverage/__pycache__/version.cpython-312.pyc,, +coverage/__pycache__/xmlreport.cpython-312.pyc,, +coverage/annotate.py,sha256=vI_P4Qj9W7OqdJaMJyvSp57hvT6ljCsnEf5ZyfaKvkM,3751 +coverage/bytecode.py,sha256=n_4YzE8Gas37i0mRgwvbTgu4v6yfekCh4qWZ7YVq0tk,6666 +coverage/cmdline.py,sha256=t7l_LoWAUhUuEmMogiVEAOducHdrudqmwezrZxVhaYE,36819 +coverage/collector.py,sha256=doMi0mv8Z-zDY3kf7NJifLjH3Kz7QuXr3o9aSrHzMTQ,18541 +coverage/config.py,sha256=tXVjZ0EwLI9oxEcRCVJZRiDktCXoUpD1d7L0JNvBM5Y,25964 +coverage/context.py,sha256=Ef1NlMuuD5g2Z3vJhK9fr6yg_NxOYTJmGTACRLU1uno,2434 +coverage/control.py,sha256=ZH_GxR7uc9uJBOkj1IKv0xsJP-63g3JuEDe21QQHz88,54818 +coverage/core.py,sha256=wQG--Xm1Hvyt_jYO7VgfP-CT2vE1o4zIbFI2CtfTsXw,5404 +coverage/data.py,sha256=b-4KXkMlpocqS-T_HHa1LlPOxnz-I35OpcH4ztfvYP4,8127 +coverage/debug.py,sha256=5f1JSbSVeQnulJTOu0E_kelo29cih5A9gS2o27OX3h8,21753 +coverage/disposition.py,sha256=T6p5yH1b6dnnsXq7YI9nbP8UAqFk6V9PyFOivkV5Qr8,1897 +coverage/env.py,sha256=_0HqQJiQIeY44ziVwiMohYIaox0btbc2fLoJMnSt1RI,4972 +coverage/exceptions.py,sha256=VD6utQATQ5GRIAK0SPJyOlL2och54qRfVD9vlt9EElA,1640 +coverage/execfile.py,sha256=IL3TzwhAxiMLs6QwUvF-HK-A2Scjgh8GjkpOKhcWFtY,11993 +coverage/files.py,sha256=WrI3dw_zEMG1YNS6674vFP9TPoUXvXg0itxVSWeOoNc,19357 +coverage/html.py,sha256=hQVh56hKJcF_v0Di02Gd-ov22_MCYF29f3A-0lHwZmg,31524 +coverage/htmlfiles/coverage_html.js,sha256=c3j5ad-4xXqf3u_aBPpiNhoHYn2mTYsKmercXP2XW3M,25450 +coverage/htmlfiles/favicon_32.png,sha256=vIEA-odDwRvSQ-syWfSwEnWGUWEv2b-Tv4tzTRfwJWE,1732 +coverage/htmlfiles/index.html,sha256=8_Resfvt6vgAJs82-dF5gtKCjkPp8by8PMff4eWmMkU,8702 +coverage/htmlfiles/keybd_closed.png,sha256=fZv4rmY3DkNJtPQjrFJ5UBOE5DdNof3mdeCZWC7TOoo,9004 +coverage/htmlfiles/pyfile.html,sha256=iBCg74uV_XEcCTP2F02daNZYlIuzzh0vhsZi35zkpGU,6508 +coverage/htmlfiles/style.css,sha256=vkP6XozR7sM-MQotUpXV03kyobzBvl8rquzommeX58A,15941 +coverage/htmlfiles/style.scss,sha256=fzKmMXr61ZvtnpvtgbDeA9kSnfNmbFFY7jYmmiP9SxI,21291 +coverage/inorout.py,sha256=jFUz-I_g50G_8vGR935hWuwXdrNPep3nfnwVYmAVYEQ,24345 +coverage/jsonreport.py,sha256=mpNhylwzkx3bitcEKOSrvYtP0uAzR4rz1Ofq-JHiwcs,7333 +coverage/lcovreport.py,sha256=Q9-g1QS7dUO-9ckqxecRs1_18yfxQPcHw-Mu8y2XU1U,7874 +coverage/misc.py,sha256=iPEV4g_HsXhQmpuzuN1CPy82uF2YLjKOleMnWgXJyDA,11291 +coverage/multiproc.py,sha256=Y1AeYjch8pD4Zb5HjoW51IVz5yeLDY5-ipIOOk-Adyk,4175 +coverage/numbits.py,sha256=4B171qTbHyZ0WDnYGjGo456_PC2jDZSe22F9KCrmZ4Q,4673 +coverage/parser.py,sha256=Toq-DgeZBl_ZawrKpWPDkfAYbLxMLQPJwi4UlWMPDjY,46313 +coverage/patch.py,sha256=oEcBoH6lcSAB9Y0jxuk3ZbFWqJI05R76fJtMHtsp8PY,5567 +coverage/phystokens.py,sha256=Z7chMvCxuYMu2Wj9Oc1vlO8dV1OOqZXWfzALsWfEC7s,7450 +coverage/plugin.py,sha256=4fF3Bq9JVZIIxPUI-DCUGUqeGNzvYldJnqMvP6JlsKY,21509 +coverage/plugin_support.py,sha256=X0I5D3q7MX2IsZZUBBkJJ6lP3guF-zNo_2oTu1XyePs,10444 +coverage/py.typed,sha256=_B1ZXy5hKJZ2Zo3jWSXjqy1SO3rnLdZsUULnKGTplfc,72 +coverage/python.py,sha256=X6-ypbCYnJoihsSj1M5thX_kFRyHjN37ajOHQWo5F1E,8753 +coverage/pytracer.py,sha256=RBs6GaQQHSEau-eSiuyjIyS64W5Jg61GRVRO2wo1rZA,15322 +coverage/regions.py,sha256=hIf6Hly1Zsfl4hPA2FQgDtDT6Lcv0fLlFCXmc94WOlY,4500 +coverage/report.py,sha256=QrMfQaXfghss5g301vp7BudGLaQXD_eaKrFLFFC4Ixg,10818 +coverage/report_core.py,sha256=xyhYM93YVQUu05fDpPkG1raDtTQyQbzPUjIwrlUf-ns,4044 +coverage/results.py,sha256=I1SBr07T-EtHdgmOBYj7ivydla7QskEVzqBPiHjrojQ,17242 +coverage/sqldata.py,sha256=ba76NYiiMWNwfsFWxTrrgjSVKVvA9vw42x4cZv-c_dU,45528 +coverage/sqlitedb.py,sha256=R6A4B4D0LQNqMUPZTE4W66hC7V5OenHN9hRWb_pXJSg,10024 +coverage/sysmon.py,sha256=ZDJfR27XU6jSOH4LtrKU6bVqgQxtt2CZHnoz7gJBhnA,19729 +coverage/templite.py,sha256=OXquHdxqm3SWGY4GnGb6SjCUNa40_yXmzzY3hnV6zNU,11307 +coverage/tomlconfig.py,sha256=9KXQWUPpnqkmRYFhyJMS8ZOfJ4bG69EsskVXm_NT7j8,7558 +coverage/tracer.cpython-312-x86_64-linux-gnu.so,sha256=c-WzBMl8koEIhlFYJyUBC3b4PnB6Wl_lAgcgbLrBnSg,129792 +coverage/tracer.pyi,sha256=53ZiaWNz6q6qWiEZWFMHeLLkbg6-4oBAzNJ2i1-hA-g,1207 +coverage/types.py,sha256=nOLSWf-CMhaa9h7SMTcueFsklX0vuLdXdmsqzp2Tuqg,5600 +coverage/version.py,sha256=y8jqxYmM1nutdtr6SU9gzKhY_7sW5jywPrrQq_HS86Q,1094 +coverage/xmlreport.py,sha256=ayeJ8DEqIX6f9pdRFJvULIGA4WJ8wDR9BEjoKLdK1PA,9871 diff --git a/Backend/venv/lib/python3.12/site-packages/coverage-7.12.0.dist-info/WHEEL b/Backend/venv/lib/python3.12/site-packages/coverage-7.12.0.dist-info/WHEEL new file mode 100644 index 00000000..9921a02f --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/coverage-7.12.0.dist-info/WHEEL @@ -0,0 +1,7 @@ +Wheel-Version: 1.0 +Generator: setuptools (80.9.0) +Root-Is-Purelib: false +Tag: cp312-cp312-manylinux_2_5_x86_64 +Tag: cp312-cp312-manylinux1_x86_64 +Tag: cp312-cp312-manylinux_2_28_x86_64 + diff --git a/Backend/venv/lib/python3.12/site-packages/coverage-7.12.0.dist-info/entry_points.txt b/Backend/venv/lib/python3.12/site-packages/coverage-7.12.0.dist-info/entry_points.txt new file mode 100644 index 00000000..242b4774 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/coverage-7.12.0.dist-info/entry_points.txt @@ -0,0 +1,4 @@ +[console_scripts] +coverage = coverage.cmdline:main +coverage-3.12 = coverage.cmdline:main +coverage3 = coverage.cmdline:main diff --git a/Backend/venv/lib/python3.12/site-packages/coverage-7.12.0.dist-info/licenses/LICENSE.txt b/Backend/venv/lib/python3.12/site-packages/coverage-7.12.0.dist-info/licenses/LICENSE.txt new file mode 100644 index 00000000..f433b1a5 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/coverage-7.12.0.dist-info/licenses/LICENSE.txt @@ -0,0 +1,177 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS diff --git a/Backend/venv/lib/python3.12/site-packages/coverage-7.12.0.dist-info/top_level.txt b/Backend/venv/lib/python3.12/site-packages/coverage-7.12.0.dist-info/top_level.txt new file mode 100644 index 00000000..4ebc8aea --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/coverage-7.12.0.dist-info/top_level.txt @@ -0,0 +1 @@ +coverage diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/__pycache__/fernet.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/__pycache__/fernet.cpython-312.pyc index 24170122..6ce20e0e 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/cryptography/__pycache__/fernet.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/cryptography/__pycache__/fernet.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/__init__.cpython-312.pyc index cdd51588..bc3daee1 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/__init__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/pbkdf2.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/pbkdf2.cpython-312.pyc index 1dfcf126..016a0c6a 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/pbkdf2.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/pbkdf2.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/fastapi/__pycache__/testclient.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/fastapi/__pycache__/testclient.cpython-312.pyc index 51ff2d06..fcc219a0 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/fastapi/__pycache__/testclient.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/fastapi/__pycache__/testclient.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/h11-0.14.0.dist-info/INSTALLER b/Backend/venv/lib/python3.12/site-packages/h11-0.14.0.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/h11-0.14.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/Backend/venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/licenses/LICENSE.txt b/Backend/venv/lib/python3.12/site-packages/h11-0.14.0.dist-info/LICENSE.txt similarity index 100% rename from Backend/venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/licenses/LICENSE.txt rename to Backend/venv/lib/python3.12/site-packages/h11-0.14.0.dist-info/LICENSE.txt diff --git a/Backend/venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/METADATA b/Backend/venv/lib/python3.12/site-packages/h11-0.14.0.dist-info/METADATA similarity index 95% rename from Backend/venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/METADATA rename to Backend/venv/lib/python3.12/site-packages/h11-0.14.0.dist-info/METADATA index 8a2f6390..cf12a82f 100644 --- a/Backend/venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/METADATA +++ b/Backend/venv/lib/python3.12/site-packages/h11-0.14.0.dist-info/METADATA @@ -1,6 +1,6 @@ -Metadata-Version: 2.4 +Metadata-Version: 2.1 Name: h11 -Version: 0.16.0 +Version: 0.14.0 Summary: A pure-Python, bring-your-own-I/O implementation of HTTP/1.1 Home-page: https://github.com/python-hyper/h11 Author: Nathaniel J. Smith @@ -13,24 +13,15 @@ Classifier: Programming Language :: Python :: Implementation :: CPython Classifier: Programming Language :: Python :: Implementation :: PyPy Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.7 Classifier: Programming Language :: Python :: 3.8 Classifier: Programming Language :: Python :: 3.9 Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 Classifier: Topic :: Internet :: WWW/HTTP Classifier: Topic :: System :: Networking -Requires-Python: >=3.8 +Requires-Python: >=3.7 License-File: LICENSE.txt -Dynamic: author -Dynamic: author-email -Dynamic: classifier -Dynamic: description -Dynamic: home-page -Dynamic: license -Dynamic: license-file -Dynamic: requires-python -Dynamic: summary +Requires-Dist: typing-extensions ; python_version < "3.8" h11 === @@ -146,7 +137,7 @@ library. It has a test suite with 100.0% coverage for both statements and branches. -Currently it supports Python 3 (testing on 3.8-3.12) and PyPy 3. +Currently it supports Python 3 (testing on 3.7-3.10) and PyPy 3. The last Python 2-compatible version was h11 0.11.x. (Originally it had a Cython wrapper for `http-parser `_ and a beautiful nested state diff --git a/Backend/venv/lib/python3.12/site-packages/h11-0.14.0.dist-info/RECORD b/Backend/venv/lib/python3.12/site-packages/h11-0.14.0.dist-info/RECORD new file mode 100644 index 00000000..a63f6ccf --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/h11-0.14.0.dist-info/RECORD @@ -0,0 +1,52 @@ +h11-0.14.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +h11-0.14.0.dist-info/LICENSE.txt,sha256=N9tbuFkm2yikJ6JYZ_ELEjIAOuob5pzLhRE4rbjm82E,1124 +h11-0.14.0.dist-info/METADATA,sha256=B7pZ0m7WBXNs17vl6hUH9bJTL9s37DaGvY31w7jNxSg,8175 +h11-0.14.0.dist-info/RECORD,, +h11-0.14.0.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92 +h11-0.14.0.dist-info/top_level.txt,sha256=F7dC4jl3zeh8TGHEPaWJrMbeuoWbS379Gwdi-Yvdcis,4 +h11/__init__.py,sha256=iO1KzkSO42yZ6ffg-VMgbx_ZVTWGUY00nRYEWn-s3kY,1507 +h11/__pycache__/__init__.cpython-312.pyc,, +h11/__pycache__/_abnf.cpython-312.pyc,, +h11/__pycache__/_connection.cpython-312.pyc,, +h11/__pycache__/_events.cpython-312.pyc,, +h11/__pycache__/_headers.cpython-312.pyc,, +h11/__pycache__/_readers.cpython-312.pyc,, +h11/__pycache__/_receivebuffer.cpython-312.pyc,, +h11/__pycache__/_state.cpython-312.pyc,, +h11/__pycache__/_util.cpython-312.pyc,, +h11/__pycache__/_version.cpython-312.pyc,, +h11/__pycache__/_writers.cpython-312.pyc,, +h11/_abnf.py,sha256=ybixr0xsupnkA6GFAyMubuXF6Tc1lb_hF890NgCsfNc,4815 +h11/_connection.py,sha256=eS2sorMD0zKLCFiB9lW9W9F_Nzny2tjHa4e6s1ujr1c,26539 +h11/_events.py,sha256=LEfuvg1AbhHaVRwxCd0I-pFn9-ezUOaoL8o2Kvy1PBA,11816 +h11/_headers.py,sha256=RqB8cd8CN0blYPzcLe5qeCh-phv6D1U_CHj4hs67lgQ,10230 +h11/_readers.py,sha256=EbSed0jzwVUiD1nOPAeUcVE4Flf3wXkxfb8c06-OTBM,8383 +h11/_receivebuffer.py,sha256=xrspsdsNgWFxRfQcTXxR8RrdjRXXTK0Io5cQYWpJ1Ws,5252 +h11/_state.py,sha256=k1VL6SDbaPkSrZ-49ewCXDpuiUS69_46YhbWjuV1qEY,13300 +h11/_util.py,sha256=LWkkjXyJaFlAy6Lt39w73UStklFT5ovcvo0TkY7RYuk,4888 +h11/_version.py,sha256=LVyTdiZRzIIEv79UyOgbM5iUrJUllEzlCWaJEYBY1zc,686 +h11/_writers.py,sha256=oFKm6PtjeHfbj4RLX7VB7KDc1gIY53gXG3_HR9ltmTA,5081 +h11/py.typed,sha256=sow9soTwP9T_gEAQSVh7Gb8855h04Nwmhs2We-JRgZM,7 +h11/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +h11/tests/__pycache__/__init__.cpython-312.pyc,, +h11/tests/__pycache__/helpers.cpython-312.pyc,, +h11/tests/__pycache__/test_against_stdlib_http.cpython-312.pyc,, +h11/tests/__pycache__/test_connection.cpython-312.pyc,, +h11/tests/__pycache__/test_events.cpython-312.pyc,, +h11/tests/__pycache__/test_headers.cpython-312.pyc,, +h11/tests/__pycache__/test_helpers.cpython-312.pyc,, +h11/tests/__pycache__/test_io.cpython-312.pyc,, +h11/tests/__pycache__/test_receivebuffer.cpython-312.pyc,, +h11/tests/__pycache__/test_state.cpython-312.pyc,, +h11/tests/__pycache__/test_util.cpython-312.pyc,, +h11/tests/data/test-file,sha256=ZJ03Rqs98oJw29OHzJg7LlMzyGQaRAY0r3AqBeM2wVU,65 +h11/tests/helpers.py,sha256=a1EVG_p7xU4wRsa3tMPTRxuaKCmretok9sxXWvqfmQA,3355 +h11/tests/test_against_stdlib_http.py,sha256=cojCHgHXFQ8gWhNlEEwl3trmOpN-5uDukRoHnElqo3A,3995 +h11/tests/test_connection.py,sha256=ZbPLDPclKvjgjAhgk-WlCPBaf17c4XUIV2tpaW08jOI,38720 +h11/tests/test_events.py,sha256=LPVLbcV-NvPNK9fW3rraR6Bdpz1hAlsWubMtNaJ5gHg,4657 +h11/tests/test_headers.py,sha256=qd8T1Zenuz5GbD6wklSJ5G8VS7trrYgMV0jT-SMvqg8,5612 +h11/tests/test_helpers.py,sha256=kAo0CEM4LGqmyyP2ZFmhsyq3UFJqoFfAbzu3hbWreRM,794 +h11/tests/test_io.py,sha256=uCZVnjarkRBkudfC1ij-KSCQ71XWJhnkgkgWWkKgYPQ,16386 +h11/tests/test_receivebuffer.py,sha256=3jGbeJM36Akqg_pAhPb7XzIn2NS6RhPg-Ryg8Eu6ytk,3454 +h11/tests/test_state.py,sha256=rqll9WqFsJPE0zSrtCn9LH659mPKsDeXZ-DwXwleuBQ,8928 +h11/tests/test_util.py,sha256=VO5L4nSFe4pgtSwKuv6u_6l0H7UeizF5WKuHTWreg70,2970 diff --git a/Backend/venv/lib/python3.12/site-packages/h11-0.14.0.dist-info/WHEEL b/Backend/venv/lib/python3.12/site-packages/h11-0.14.0.dist-info/WHEEL new file mode 100644 index 00000000..5bad85fd --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/h11-0.14.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/Backend/venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/top_level.txt b/Backend/venv/lib/python3.12/site-packages/h11-0.14.0.dist-info/top_level.txt similarity index 100% rename from Backend/venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/top_level.txt rename to Backend/venv/lib/python3.12/site-packages/h11-0.14.0.dist-info/top_level.txt diff --git a/Backend/venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/RECORD b/Backend/venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/RECORD deleted file mode 100644 index a8f8e63f..00000000 --- a/Backend/venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/RECORD +++ /dev/null @@ -1,29 +0,0 @@ -h11-0.16.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -h11-0.16.0.dist-info/METADATA,sha256=KPMmCYrAn8unm48YD5YIfIQf4kViFct7hyqcfVzRnWQ,8348 -h11-0.16.0.dist-info/RECORD,, -h11-0.16.0.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91 -h11-0.16.0.dist-info/licenses/LICENSE.txt,sha256=N9tbuFkm2yikJ6JYZ_ELEjIAOuob5pzLhRE4rbjm82E,1124 -h11-0.16.0.dist-info/top_level.txt,sha256=F7dC4jl3zeh8TGHEPaWJrMbeuoWbS379Gwdi-Yvdcis,4 -h11/__init__.py,sha256=iO1KzkSO42yZ6ffg-VMgbx_ZVTWGUY00nRYEWn-s3kY,1507 -h11/__pycache__/__init__.cpython-312.pyc,, -h11/__pycache__/_abnf.cpython-312.pyc,, -h11/__pycache__/_connection.cpython-312.pyc,, -h11/__pycache__/_events.cpython-312.pyc,, -h11/__pycache__/_headers.cpython-312.pyc,, -h11/__pycache__/_readers.cpython-312.pyc,, -h11/__pycache__/_receivebuffer.cpython-312.pyc,, -h11/__pycache__/_state.cpython-312.pyc,, -h11/__pycache__/_util.cpython-312.pyc,, -h11/__pycache__/_version.cpython-312.pyc,, -h11/__pycache__/_writers.cpython-312.pyc,, -h11/_abnf.py,sha256=ybixr0xsupnkA6GFAyMubuXF6Tc1lb_hF890NgCsfNc,4815 -h11/_connection.py,sha256=k9YRVf6koZqbttBW36xSWaJpWdZwa-xQVU9AHEo9DuI,26863 -h11/_events.py,sha256=I97aXoal1Wu7dkL548BANBUCkOIbe-x5CioYA9IBY14,11792 -h11/_headers.py,sha256=P7D-lBNxHwdLZPLimmYwrPG-9ZkjElvvJZJdZAgSP-4,10412 -h11/_readers.py,sha256=a4RypORUCC3d0q_kxPuBIM7jTD8iLt5X91TH0FsduN4,8590 -h11/_receivebuffer.py,sha256=xrspsdsNgWFxRfQcTXxR8RrdjRXXTK0Io5cQYWpJ1Ws,5252 -h11/_state.py,sha256=_5LG_BGR8FCcFQeBPH-TMHgm_-B-EUcWCnQof_9XjFE,13231 -h11/_util.py,sha256=LWkkjXyJaFlAy6Lt39w73UStklFT5ovcvo0TkY7RYuk,4888 -h11/_version.py,sha256=GVSsbPSPDcOuF6ptfIiXnVJoaEm3ygXbMnqlr_Giahw,686 -h11/_writers.py,sha256=oFKm6PtjeHfbj4RLX7VB7KDc1gIY53gXG3_HR9ltmTA,5081 -h11/py.typed,sha256=sow9soTwP9T_gEAQSVh7Gb8855h04Nwmhs2We-JRgZM,7 diff --git a/Backend/venv/lib/python3.12/site-packages/h11/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/h11/__pycache__/__init__.cpython-312.pyc index 434809b6..4a3f03b2 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/h11/__pycache__/__init__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/h11/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/h11/__pycache__/_abnf.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/h11/__pycache__/_abnf.cpython-312.pyc index ce3acaf0..307efa92 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/h11/__pycache__/_abnf.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/h11/__pycache__/_abnf.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/h11/__pycache__/_connection.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/h11/__pycache__/_connection.cpython-312.pyc index b8b85114..98a0b898 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/h11/__pycache__/_connection.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/h11/__pycache__/_connection.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/h11/__pycache__/_events.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/h11/__pycache__/_events.cpython-312.pyc index b22f438a..ed8b0b5f 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/h11/__pycache__/_events.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/h11/__pycache__/_events.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/h11/__pycache__/_headers.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/h11/__pycache__/_headers.cpython-312.pyc index 4bd147d5..9bf9871b 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/h11/__pycache__/_headers.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/h11/__pycache__/_headers.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/h11/__pycache__/_readers.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/h11/__pycache__/_readers.cpython-312.pyc index f0ab5d3f..749eb35d 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/h11/__pycache__/_readers.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/h11/__pycache__/_readers.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/h11/__pycache__/_receivebuffer.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/h11/__pycache__/_receivebuffer.cpython-312.pyc index cbb4df58..212f53a1 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/h11/__pycache__/_receivebuffer.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/h11/__pycache__/_receivebuffer.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/h11/__pycache__/_state.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/h11/__pycache__/_state.cpython-312.pyc index a9e2c4a2..6e8e60f3 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/h11/__pycache__/_state.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/h11/__pycache__/_state.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/h11/__pycache__/_util.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/h11/__pycache__/_util.cpython-312.pyc index e6f09ca9..409a1fc9 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/h11/__pycache__/_util.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/h11/__pycache__/_util.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/h11/__pycache__/_version.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/h11/__pycache__/_version.cpython-312.pyc index 5e296636..e18a8f40 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/h11/__pycache__/_version.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/h11/__pycache__/_version.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/h11/__pycache__/_writers.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/h11/__pycache__/_writers.cpython-312.pyc index a802c867..f4ca341c 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/h11/__pycache__/_writers.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/h11/__pycache__/_writers.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/h11/_connection.py b/Backend/venv/lib/python3.12/site-packages/h11/_connection.py index e37d82a8..d1752707 100644 --- a/Backend/venv/lib/python3.12/site-packages/h11/_connection.py +++ b/Backend/venv/lib/python3.12/site-packages/h11/_connection.py @@ -1,17 +1,6 @@ # This contains the main Connection class. Everything in h11 revolves around # this. -from typing import ( - Any, - Callable, - cast, - Dict, - List, - Optional, - overload, - Tuple, - Type, - Union, -) +from typing import Any, Callable, cast, Dict, List, Optional, Tuple, Type, Union from ._events import ( ConnectionClosed, @@ -68,7 +57,6 @@ class PAUSED(Sentinel, metaclass=Sentinel): # - Apache: <8 KiB per line> DEFAULT_MAX_INCOMPLETE_EVENT_SIZE = 16 * 1024 - # RFC 7230's rules for connection lifecycles: # - If either side says they want to close the connection, then the connection # must close. @@ -172,7 +160,7 @@ class Connection: self._max_incomplete_event_size = max_incomplete_event_size # State and role tracking if our_role not in (CLIENT, SERVER): - raise ValueError(f"expected CLIENT or SERVER, not {our_role!r}") + raise ValueError("expected CLIENT or SERVER, not {!r}".format(our_role)) self.our_role = our_role self.their_role: Type[Sentinel] if our_role is CLIENT: @@ -428,7 +416,7 @@ class Connection: # return that event, and then the state will change and we'll # get called again to generate the actual ConnectionClosed(). if hasattr(self._reader, "read_eof"): - event = self._reader.read_eof() + event = self._reader.read_eof() # type: ignore[attr-defined] else: event = ConnectionClosed() if event is None: @@ -500,20 +488,6 @@ class Connection: else: raise - @overload - def send(self, event: ConnectionClosed) -> None: - ... - - @overload - def send( - self, event: Union[Request, InformationalResponse, Response, Data, EndOfMessage] - ) -> bytes: - ... - - @overload - def send(self, event: Event) -> Optional[bytes]: - ... - def send(self, event: Event) -> Optional[bytes]: """Convert a high-level event into bytes that can be sent to the peer, while updating our internal state machine. diff --git a/Backend/venv/lib/python3.12/site-packages/h11/_events.py b/Backend/venv/lib/python3.12/site-packages/h11/_events.py index ca1c3adb..075bf8a4 100644 --- a/Backend/venv/lib/python3.12/site-packages/h11/_events.py +++ b/Backend/venv/lib/python3.12/site-packages/h11/_events.py @@ -7,8 +7,8 @@ import re from abc import ABC -from dataclasses import dataclass -from typing import List, Tuple, Union +from dataclasses import dataclass, field +from typing import Any, cast, Dict, List, Tuple, Union from ._abnf import method, request_target from ._headers import Headers, normalize_and_validate diff --git a/Backend/venv/lib/python3.12/site-packages/h11/_headers.py b/Backend/venv/lib/python3.12/site-packages/h11/_headers.py index 31da3e2b..b97d020b 100644 --- a/Backend/venv/lib/python3.12/site-packages/h11/_headers.py +++ b/Backend/venv/lib/python3.12/site-packages/h11/_headers.py @@ -12,8 +12,6 @@ try: except ImportError: from typing_extensions import Literal # type: ignore -CONTENT_LENGTH_MAX_DIGITS = 20 # allow up to 1 billion TB - 1 - # Facts # ----- @@ -175,8 +173,6 @@ def normalize_and_validate( raise LocalProtocolError("conflicting Content-Length headers") value = lengths.pop() validate(_content_length_re, value, "bad Content-Length") - if len(value) > CONTENT_LENGTH_MAX_DIGITS: - raise LocalProtocolError("bad Content-Length") if seen_content_length is None: seen_content_length = value new_headers.append((raw_name, name, value)) diff --git a/Backend/venv/lib/python3.12/site-packages/h11/_readers.py b/Backend/venv/lib/python3.12/site-packages/h11/_readers.py index 576804cc..08a9574d 100644 --- a/Backend/venv/lib/python3.12/site-packages/h11/_readers.py +++ b/Backend/venv/lib/python3.12/site-packages/h11/_readers.py @@ -148,9 +148,10 @@ chunk_header_re = re.compile(chunk_header.encode("ascii")) class ChunkedReader: def __init__(self) -> None: self._bytes_in_chunk = 0 - # After reading a chunk, we have to throw away the trailing \r\n. - # This tracks the bytes that we need to match and throw away. - self._bytes_to_discard = b"" + # After reading a chunk, we have to throw away the trailing \r\n; if + # this is >0 then we discard that many bytes before resuming regular + # de-chunkification. + self._bytes_to_discard = 0 self._reading_trailer = False def __call__(self, buf: ReceiveBuffer) -> Union[Data, EndOfMessage, None]: @@ -159,19 +160,15 @@ class ChunkedReader: if lines is None: return None return EndOfMessage(headers=list(_decode_header_lines(lines))) - if self._bytes_to_discard: - data = buf.maybe_extract_at_most(len(self._bytes_to_discard)) + if self._bytes_to_discard > 0: + data = buf.maybe_extract_at_most(self._bytes_to_discard) if data is None: return None - if data != self._bytes_to_discard[: len(data)]: - raise LocalProtocolError( - f"malformed chunk footer: {data!r} (expected {self._bytes_to_discard!r})" - ) - self._bytes_to_discard = self._bytes_to_discard[len(data) :] - if self._bytes_to_discard: + self._bytes_to_discard -= len(data) + if self._bytes_to_discard > 0: return None # else, fall through and read some more - assert self._bytes_to_discard == b"" + assert self._bytes_to_discard == 0 if self._bytes_in_chunk == 0: # We need to refill our chunk count chunk_header = buf.maybe_extract_next_line() @@ -197,7 +194,7 @@ class ChunkedReader: return None self._bytes_in_chunk -= len(data) if self._bytes_in_chunk == 0: - self._bytes_to_discard = b"\r\n" + self._bytes_to_discard = 2 chunk_end = True else: chunk_end = False diff --git a/Backend/venv/lib/python3.12/site-packages/h11/_state.py b/Backend/venv/lib/python3.12/site-packages/h11/_state.py index 3ad444b0..3593430a 100644 --- a/Backend/venv/lib/python3.12/site-packages/h11/_state.py +++ b/Backend/venv/lib/python3.12/site-packages/h11/_state.py @@ -283,7 +283,9 @@ class ConnectionState: assert role is SERVER if server_switch_event not in self.pending_switch_proposals: raise LocalProtocolError( - "Received server _SWITCH_UPGRADE event without a pending proposal" + "Received server {} event without a pending proposal".format( + server_switch_event + ) ) _event_type = (event_type, server_switch_event) if server_switch_event is None and _event_type is Response: @@ -356,7 +358,7 @@ class ConnectionState: def start_next_cycle(self) -> None: if self.states != {CLIENT: DONE, SERVER: DONE}: raise LocalProtocolError( - f"not in a reusable state. self.states={self.states}" + "not in a reusable state. self.states={}".format(self.states) ) # Can't reach DONE/DONE with any of these active, but still, let's be # sure. diff --git a/Backend/venv/lib/python3.12/site-packages/h11/_version.py b/Backend/venv/lib/python3.12/site-packages/h11/_version.py index 76e7327b..4c891130 100644 --- a/Backend/venv/lib/python3.12/site-packages/h11/_version.py +++ b/Backend/venv/lib/python3.12/site-packages/h11/_version.py @@ -13,4 +13,4 @@ # want. (Contrast with the special suffix 1.0.0.dev, which sorts *before* # 1.0.0.) -__version__ = "0.16.0" +__version__ = "0.14.0" diff --git a/Backend/venv/lib/python3.12/site-packages/h11/tests/__init__.py b/Backend/venv/lib/python3.12/site-packages/h11/tests/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/Backend/venv/lib/python3.12/site-packages/h11/tests/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/h11/tests/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..004e72f3 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/h11/tests/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/h11/tests/__pycache__/helpers.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/h11/tests/__pycache__/helpers.cpython-312.pyc new file mode 100644 index 00000000..119bd908 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/h11/tests/__pycache__/helpers.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/h11/tests/__pycache__/test_against_stdlib_http.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/h11/tests/__pycache__/test_against_stdlib_http.cpython-312.pyc new file mode 100644 index 00000000..a6ad3b93 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/h11/tests/__pycache__/test_against_stdlib_http.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/h11/tests/__pycache__/test_connection.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/h11/tests/__pycache__/test_connection.cpython-312.pyc new file mode 100644 index 00000000..8df8751f Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/h11/tests/__pycache__/test_connection.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/h11/tests/__pycache__/test_events.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/h11/tests/__pycache__/test_events.cpython-312.pyc new file mode 100644 index 00000000..31377192 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/h11/tests/__pycache__/test_events.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/h11/tests/__pycache__/test_headers.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/h11/tests/__pycache__/test_headers.cpython-312.pyc new file mode 100644 index 00000000..b145abf5 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/h11/tests/__pycache__/test_headers.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/h11/tests/__pycache__/test_helpers.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/h11/tests/__pycache__/test_helpers.cpython-312.pyc new file mode 100644 index 00000000..76f96902 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/h11/tests/__pycache__/test_helpers.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/h11/tests/__pycache__/test_io.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/h11/tests/__pycache__/test_io.cpython-312.pyc new file mode 100644 index 00000000..735cff37 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/h11/tests/__pycache__/test_io.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/h11/tests/__pycache__/test_receivebuffer.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/h11/tests/__pycache__/test_receivebuffer.cpython-312.pyc new file mode 100644 index 00000000..a9dc533f Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/h11/tests/__pycache__/test_receivebuffer.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/h11/tests/__pycache__/test_state.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/h11/tests/__pycache__/test_state.cpython-312.pyc new file mode 100644 index 00000000..3f71cb3b Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/h11/tests/__pycache__/test_state.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/h11/tests/__pycache__/test_util.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/h11/tests/__pycache__/test_util.cpython-312.pyc new file mode 100644 index 00000000..ceb6ae5c Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/h11/tests/__pycache__/test_util.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/h11/tests/data/test-file b/Backend/venv/lib/python3.12/site-packages/h11/tests/data/test-file new file mode 100644 index 00000000..d0be0a6c --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/h11/tests/data/test-file @@ -0,0 +1 @@ +92b12bc045050b55b848d37167a1a63947c364579889ce1d39788e45e9fac9e5 diff --git a/Backend/venv/lib/python3.12/site-packages/h11/tests/helpers.py b/Backend/venv/lib/python3.12/site-packages/h11/tests/helpers.py new file mode 100644 index 00000000..571be444 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/h11/tests/helpers.py @@ -0,0 +1,101 @@ +from typing import cast, List, Type, Union, ValuesView + +from .._connection import Connection, NEED_DATA, PAUSED +from .._events import ( + ConnectionClosed, + Data, + EndOfMessage, + Event, + InformationalResponse, + Request, + Response, +) +from .._state import CLIENT, CLOSED, DONE, MUST_CLOSE, SERVER +from .._util import Sentinel + +try: + from typing import Literal +except ImportError: + from typing_extensions import Literal # type: ignore + + +def get_all_events(conn: Connection) -> List[Event]: + got_events = [] + while True: + event = conn.next_event() + if event in (NEED_DATA, PAUSED): + break + event = cast(Event, event) + got_events.append(event) + if type(event) is ConnectionClosed: + break + return got_events + + +def receive_and_get(conn: Connection, data: bytes) -> List[Event]: + conn.receive_data(data) + return get_all_events(conn) + + +# Merges adjacent Data events, converts payloads to bytestrings, and removes +# chunk boundaries. +def normalize_data_events(in_events: List[Event]) -> List[Event]: + out_events: List[Event] = [] + for event in in_events: + if type(event) is Data: + event = Data(data=bytes(event.data), chunk_start=False, chunk_end=False) + if out_events and type(out_events[-1]) is type(event) is Data: + out_events[-1] = Data( + data=out_events[-1].data + event.data, + chunk_start=out_events[-1].chunk_start, + chunk_end=out_events[-1].chunk_end, + ) + else: + out_events.append(event) + return out_events + + +# Given that we want to write tests that push some events through a Connection +# and check that its state updates appropriately... we might as make a habit +# of pushing them through two Connections with a fake network link in +# between. +class ConnectionPair: + def __init__(self) -> None: + self.conn = {CLIENT: Connection(CLIENT), SERVER: Connection(SERVER)} + self.other = {CLIENT: SERVER, SERVER: CLIENT} + + @property + def conns(self) -> ValuesView[Connection]: + return self.conn.values() + + # expect="match" if expect=send_events; expect=[...] to say what expected + def send( + self, + role: Type[Sentinel], + send_events: Union[List[Event], Event], + expect: Union[List[Event], Event, Literal["match"]] = "match", + ) -> bytes: + if not isinstance(send_events, list): + send_events = [send_events] + data = b"" + closed = False + for send_event in send_events: + new_data = self.conn[role].send(send_event) + if new_data is None: + closed = True + else: + data += new_data + # send uses b"" to mean b"", and None to mean closed + # receive uses b"" to mean closed, and None to mean "try again" + # so we have to translate between the two conventions + if data: + self.conn[self.other[role]].receive_data(data) + if closed: + self.conn[self.other[role]].receive_data(b"") + got_events = get_all_events(self.conn[self.other[role]]) + if expect == "match": + expect = send_events + if not isinstance(expect, list): + expect = [expect] + assert got_events == expect + return data diff --git a/Backend/venv/lib/python3.12/site-packages/h11/tests/test_against_stdlib_http.py b/Backend/venv/lib/python3.12/site-packages/h11/tests/test_against_stdlib_http.py new file mode 100644 index 00000000..d2ee1314 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/h11/tests/test_against_stdlib_http.py @@ -0,0 +1,115 @@ +import json +import os.path +import socket +import socketserver +import threading +from contextlib import closing, contextmanager +from http.server import SimpleHTTPRequestHandler +from typing import Callable, Generator +from urllib.request import urlopen + +import h11 + + +@contextmanager +def socket_server( + handler: Callable[..., socketserver.BaseRequestHandler] +) -> Generator[socketserver.TCPServer, None, None]: + httpd = socketserver.TCPServer(("127.0.0.1", 0), handler) + thread = threading.Thread( + target=httpd.serve_forever, kwargs={"poll_interval": 0.01} + ) + thread.daemon = True + try: + thread.start() + yield httpd + finally: + httpd.shutdown() + + +test_file_path = os.path.join(os.path.dirname(__file__), "data/test-file") +with open(test_file_path, "rb") as f: + test_file_data = f.read() + + +class SingleMindedRequestHandler(SimpleHTTPRequestHandler): + def translate_path(self, path: str) -> str: + return test_file_path + + +def test_h11_as_client() -> None: + with socket_server(SingleMindedRequestHandler) as httpd: + with closing(socket.create_connection(httpd.server_address)) as s: + c = h11.Connection(h11.CLIENT) + + s.sendall( + c.send( # type: ignore[arg-type] + h11.Request( + method="GET", target="/foo", headers=[("Host", "localhost")] + ) + ) + ) + s.sendall(c.send(h11.EndOfMessage())) # type: ignore[arg-type] + + data = bytearray() + while True: + event = c.next_event() + print(event) + if event is h11.NEED_DATA: + # Use a small read buffer to make things more challenging + # and exercise more paths :-) + c.receive_data(s.recv(10)) + continue + if type(event) is h11.Response: + assert event.status_code == 200 + if type(event) is h11.Data: + data += event.data + if type(event) is h11.EndOfMessage: + break + assert bytes(data) == test_file_data + + +class H11RequestHandler(socketserver.BaseRequestHandler): + def handle(self) -> None: + with closing(self.request) as s: + c = h11.Connection(h11.SERVER) + request = None + while True: + event = c.next_event() + if event is h11.NEED_DATA: + # Use a small read buffer to make things more challenging + # and exercise more paths :-) + c.receive_data(s.recv(10)) + continue + if type(event) is h11.Request: + request = event + if type(event) is h11.EndOfMessage: + break + assert request is not None + info = json.dumps( + { + "method": request.method.decode("ascii"), + "target": request.target.decode("ascii"), + "headers": { + name.decode("ascii"): value.decode("ascii") + for (name, value) in request.headers + }, + } + ) + s.sendall(c.send(h11.Response(status_code=200, headers=[]))) # type: ignore[arg-type] + s.sendall(c.send(h11.Data(data=info.encode("ascii")))) + s.sendall(c.send(h11.EndOfMessage())) + + +def test_h11_as_server() -> None: + with socket_server(H11RequestHandler) as httpd: + host, port = httpd.server_address + url = "http://{}:{}/some-path".format(host, port) + with closing(urlopen(url)) as f: + assert f.getcode() == 200 + data = f.read() + info = json.loads(data.decode("ascii")) + print(info) + assert info["method"] == "GET" + assert info["target"] == "/some-path" + assert "urllib" in info["headers"]["user-agent"] diff --git a/Backend/venv/lib/python3.12/site-packages/h11/tests/test_connection.py b/Backend/venv/lib/python3.12/site-packages/h11/tests/test_connection.py new file mode 100644 index 00000000..73a27b98 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/h11/tests/test_connection.py @@ -0,0 +1,1122 @@ +from typing import Any, cast, Dict, List, Optional, Tuple, Type + +import pytest + +from .._connection import _body_framing, _keep_alive, Connection, NEED_DATA, PAUSED +from .._events import ( + ConnectionClosed, + Data, + EndOfMessage, + Event, + InformationalResponse, + Request, + Response, +) +from .._state import ( + CLIENT, + CLOSED, + DONE, + ERROR, + IDLE, + MIGHT_SWITCH_PROTOCOL, + MUST_CLOSE, + SEND_BODY, + SEND_RESPONSE, + SERVER, + SWITCHED_PROTOCOL, +) +from .._util import LocalProtocolError, RemoteProtocolError, Sentinel +from .helpers import ConnectionPair, get_all_events, receive_and_get + + +def test__keep_alive() -> None: + assert _keep_alive( + Request(method="GET", target="/", headers=[("Host", "Example.com")]) + ) + assert not _keep_alive( + Request( + method="GET", + target="/", + headers=[("Host", "Example.com"), ("Connection", "close")], + ) + ) + assert not _keep_alive( + Request( + method="GET", + target="/", + headers=[("Host", "Example.com"), ("Connection", "a, b, cLOse, foo")], + ) + ) + assert not _keep_alive( + Request(method="GET", target="/", headers=[], http_version="1.0") # type: ignore[arg-type] + ) + + assert _keep_alive(Response(status_code=200, headers=[])) # type: ignore[arg-type] + assert not _keep_alive(Response(status_code=200, headers=[("Connection", "close")])) + assert not _keep_alive( + Response(status_code=200, headers=[("Connection", "a, b, cLOse, foo")]) + ) + assert not _keep_alive(Response(status_code=200, headers=[], http_version="1.0")) # type: ignore[arg-type] + + +def test__body_framing() -> None: + def headers(cl: Optional[int], te: bool) -> List[Tuple[str, str]]: + headers = [] + if cl is not None: + headers.append(("Content-Length", str(cl))) + if te: + headers.append(("Transfer-Encoding", "chunked")) + return headers + + def resp( + status_code: int = 200, cl: Optional[int] = None, te: bool = False + ) -> Response: + return Response(status_code=status_code, headers=headers(cl, te)) + + def req(cl: Optional[int] = None, te: bool = False) -> Request: + h = headers(cl, te) + h += [("Host", "example.com")] + return Request(method="GET", target="/", headers=h) + + # Special cases where the headers are ignored: + for kwargs in [{}, {"cl": 100}, {"te": True}, {"cl": 100, "te": True}]: + kwargs = cast(Dict[str, Any], kwargs) + for meth, r in [ + (b"HEAD", resp(**kwargs)), + (b"GET", resp(status_code=204, **kwargs)), + (b"GET", resp(status_code=304, **kwargs)), + ]: + assert _body_framing(meth, r) == ("content-length", (0,)) + + # Transfer-encoding + for kwargs in [{"te": True}, {"cl": 100, "te": True}]: + kwargs = cast(Dict[str, Any], kwargs) + for meth, r in [(None, req(**kwargs)), (b"GET", resp(**kwargs))]: # type: ignore + assert _body_framing(meth, r) == ("chunked", ()) + + # Content-Length + for meth, r in [(None, req(cl=100)), (b"GET", resp(cl=100))]: # type: ignore + assert _body_framing(meth, r) == ("content-length", (100,)) + + # No headers + assert _body_framing(None, req()) == ("content-length", (0,)) # type: ignore + assert _body_framing(b"GET", resp()) == ("http/1.0", ()) + + +def test_Connection_basics_and_content_length() -> None: + with pytest.raises(ValueError): + Connection("CLIENT") # type: ignore + + p = ConnectionPair() + assert p.conn[CLIENT].our_role is CLIENT + assert p.conn[CLIENT].their_role is SERVER + assert p.conn[SERVER].our_role is SERVER + assert p.conn[SERVER].their_role is CLIENT + + data = p.send( + CLIENT, + Request( + method="GET", + target="/", + headers=[("Host", "example.com"), ("Content-Length", "10")], + ), + ) + assert data == ( + b"GET / HTTP/1.1\r\n" b"Host: example.com\r\n" b"Content-Length: 10\r\n\r\n" + ) + + for conn in p.conns: + assert conn.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE} + assert p.conn[CLIENT].our_state is SEND_BODY + assert p.conn[CLIENT].their_state is SEND_RESPONSE + assert p.conn[SERVER].our_state is SEND_RESPONSE + assert p.conn[SERVER].their_state is SEND_BODY + + assert p.conn[CLIENT].their_http_version is None + assert p.conn[SERVER].their_http_version == b"1.1" + + data = p.send(SERVER, InformationalResponse(status_code=100, headers=[])) # type: ignore[arg-type] + assert data == b"HTTP/1.1 100 \r\n\r\n" + + data = p.send(SERVER, Response(status_code=200, headers=[("Content-Length", "11")])) + assert data == b"HTTP/1.1 200 \r\nContent-Length: 11\r\n\r\n" + + for conn in p.conns: + assert conn.states == {CLIENT: SEND_BODY, SERVER: SEND_BODY} + + assert p.conn[CLIENT].their_http_version == b"1.1" + assert p.conn[SERVER].their_http_version == b"1.1" + + data = p.send(CLIENT, Data(data=b"12345")) + assert data == b"12345" + data = p.send( + CLIENT, Data(data=b"67890"), expect=[Data(data=b"67890"), EndOfMessage()] + ) + assert data == b"67890" + data = p.send(CLIENT, EndOfMessage(), expect=[]) + assert data == b"" + + for conn in p.conns: + assert conn.states == {CLIENT: DONE, SERVER: SEND_BODY} + + data = p.send(SERVER, Data(data=b"1234567890")) + assert data == b"1234567890" + data = p.send(SERVER, Data(data=b"1"), expect=[Data(data=b"1"), EndOfMessage()]) + assert data == b"1" + data = p.send(SERVER, EndOfMessage(), expect=[]) + assert data == b"" + + for conn in p.conns: + assert conn.states == {CLIENT: DONE, SERVER: DONE} + + +def test_chunked() -> None: + p = ConnectionPair() + + p.send( + CLIENT, + Request( + method="GET", + target="/", + headers=[("Host", "example.com"), ("Transfer-Encoding", "chunked")], + ), + ) + data = p.send(CLIENT, Data(data=b"1234567890", chunk_start=True, chunk_end=True)) + assert data == b"a\r\n1234567890\r\n" + data = p.send(CLIENT, Data(data=b"abcde", chunk_start=True, chunk_end=True)) + assert data == b"5\r\nabcde\r\n" + data = p.send(CLIENT, Data(data=b""), expect=[]) + assert data == b"" + data = p.send(CLIENT, EndOfMessage(headers=[("hello", "there")])) + assert data == b"0\r\nhello: there\r\n\r\n" + + p.send( + SERVER, Response(status_code=200, headers=[("Transfer-Encoding", "chunked")]) + ) + p.send(SERVER, Data(data=b"54321", chunk_start=True, chunk_end=True)) + p.send(SERVER, Data(data=b"12345", chunk_start=True, chunk_end=True)) + p.send(SERVER, EndOfMessage()) + + for conn in p.conns: + assert conn.states == {CLIENT: DONE, SERVER: DONE} + + +def test_chunk_boundaries() -> None: + conn = Connection(our_role=SERVER) + + request = ( + b"POST / HTTP/1.1\r\n" + b"Host: example.com\r\n" + b"Transfer-Encoding: chunked\r\n" + b"\r\n" + ) + conn.receive_data(request) + assert conn.next_event() == Request( + method="POST", + target="/", + headers=[("Host", "example.com"), ("Transfer-Encoding", "chunked")], + ) + assert conn.next_event() is NEED_DATA + + conn.receive_data(b"5\r\nhello\r\n") + assert conn.next_event() == Data(data=b"hello", chunk_start=True, chunk_end=True) + + conn.receive_data(b"5\r\nhel") + assert conn.next_event() == Data(data=b"hel", chunk_start=True, chunk_end=False) + + conn.receive_data(b"l") + assert conn.next_event() == Data(data=b"l", chunk_start=False, chunk_end=False) + + conn.receive_data(b"o\r\n") + assert conn.next_event() == Data(data=b"o", chunk_start=False, chunk_end=True) + + conn.receive_data(b"5\r\nhello") + assert conn.next_event() == Data(data=b"hello", chunk_start=True, chunk_end=True) + + conn.receive_data(b"\r\n") + assert conn.next_event() == NEED_DATA + + conn.receive_data(b"0\r\n\r\n") + assert conn.next_event() == EndOfMessage() + + +def test_client_talking_to_http10_server() -> None: + c = Connection(CLIENT) + c.send(Request(method="GET", target="/", headers=[("Host", "example.com")])) + c.send(EndOfMessage()) + assert c.our_state is DONE + # No content-length, so Http10 framing for body + assert receive_and_get(c, b"HTTP/1.0 200 OK\r\n\r\n") == [ + Response(status_code=200, headers=[], http_version="1.0", reason=b"OK") # type: ignore[arg-type] + ] + assert c.our_state is MUST_CLOSE + assert receive_and_get(c, b"12345") == [Data(data=b"12345")] + assert receive_and_get(c, b"67890") == [Data(data=b"67890")] + assert receive_and_get(c, b"") == [EndOfMessage(), ConnectionClosed()] + assert c.their_state is CLOSED + + +def test_server_talking_to_http10_client() -> None: + c = Connection(SERVER) + # No content-length, so no body + # NB: no host header + assert receive_and_get(c, b"GET / HTTP/1.0\r\n\r\n") == [ + Request(method="GET", target="/", headers=[], http_version="1.0"), # type: ignore[arg-type] + EndOfMessage(), + ] + assert c.their_state is MUST_CLOSE + + # We automatically Connection: close back at them + assert ( + c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type] + == b"HTTP/1.1 200 \r\nConnection: close\r\n\r\n" + ) + + assert c.send(Data(data=b"12345")) == b"12345" + assert c.send(EndOfMessage()) == b"" + assert c.our_state is MUST_CLOSE + + # Check that it works if they do send Content-Length + c = Connection(SERVER) + # NB: no host header + assert receive_and_get(c, b"POST / HTTP/1.0\r\nContent-Length: 10\r\n\r\n1") == [ + Request( + method="POST", + target="/", + headers=[("Content-Length", "10")], + http_version="1.0", + ), + Data(data=b"1"), + ] + assert receive_and_get(c, b"234567890") == [Data(data=b"234567890"), EndOfMessage()] + assert c.their_state is MUST_CLOSE + assert receive_and_get(c, b"") == [ConnectionClosed()] + + +def test_automatic_transfer_encoding_in_response() -> None: + # Check that in responses, the user can specify either Transfer-Encoding: + # chunked or no framing at all, and in both cases we automatically select + # the right option depending on whether the peer speaks HTTP/1.0 or + # HTTP/1.1 + for user_headers in [ + [("Transfer-Encoding", "chunked")], + [], + # In fact, this even works if Content-Length is set, + # because if both are set then Transfer-Encoding wins + [("Transfer-Encoding", "chunked"), ("Content-Length", "100")], + ]: + user_headers = cast(List[Tuple[str, str]], user_headers) + p = ConnectionPair() + p.send( + CLIENT, + [ + Request(method="GET", target="/", headers=[("Host", "example.com")]), + EndOfMessage(), + ], + ) + # When speaking to HTTP/1.1 client, all of the above cases get + # normalized to Transfer-Encoding: chunked + p.send( + SERVER, + Response(status_code=200, headers=user_headers), + expect=Response( + status_code=200, headers=[("Transfer-Encoding", "chunked")] + ), + ) + + # When speaking to HTTP/1.0 client, all of the above cases get + # normalized to no-framing-headers + c = Connection(SERVER) + receive_and_get(c, b"GET / HTTP/1.0\r\n\r\n") + assert ( + c.send(Response(status_code=200, headers=user_headers)) + == b"HTTP/1.1 200 \r\nConnection: close\r\n\r\n" + ) + assert c.send(Data(data=b"12345")) == b"12345" + + +def test_automagic_connection_close_handling() -> None: + p = ConnectionPair() + # If the user explicitly sets Connection: close, then we notice and + # respect it + p.send( + CLIENT, + [ + Request( + method="GET", + target="/", + headers=[("Host", "example.com"), ("Connection", "close")], + ), + EndOfMessage(), + ], + ) + for conn in p.conns: + assert conn.states[CLIENT] is MUST_CLOSE + # And if the client sets it, the server automatically echoes it back + p.send( + SERVER, + # no header here... + [Response(status_code=204, headers=[]), EndOfMessage()], # type: ignore[arg-type] + # ...but oh look, it arrived anyway + expect=[ + Response(status_code=204, headers=[("connection", "close")]), + EndOfMessage(), + ], + ) + for conn in p.conns: + assert conn.states == {CLIENT: MUST_CLOSE, SERVER: MUST_CLOSE} + + +def test_100_continue() -> None: + def setup() -> ConnectionPair: + p = ConnectionPair() + p.send( + CLIENT, + Request( + method="GET", + target="/", + headers=[ + ("Host", "example.com"), + ("Content-Length", "100"), + ("Expect", "100-continue"), + ], + ), + ) + for conn in p.conns: + assert conn.client_is_waiting_for_100_continue + assert not p.conn[CLIENT].they_are_waiting_for_100_continue + assert p.conn[SERVER].they_are_waiting_for_100_continue + return p + + # Disabled by 100 Continue + p = setup() + p.send(SERVER, InformationalResponse(status_code=100, headers=[])) # type: ignore[arg-type] + for conn in p.conns: + assert not conn.client_is_waiting_for_100_continue + assert not conn.they_are_waiting_for_100_continue + + # Disabled by a real response + p = setup() + p.send( + SERVER, Response(status_code=200, headers=[("Transfer-Encoding", "chunked")]) + ) + for conn in p.conns: + assert not conn.client_is_waiting_for_100_continue + assert not conn.they_are_waiting_for_100_continue + + # Disabled by the client going ahead and sending stuff anyway + p = setup() + p.send(CLIENT, Data(data=b"12345")) + for conn in p.conns: + assert not conn.client_is_waiting_for_100_continue + assert not conn.they_are_waiting_for_100_continue + + +def test_max_incomplete_event_size_countermeasure() -> None: + # Infinitely long headers are definitely not okay + c = Connection(SERVER) + c.receive_data(b"GET / HTTP/1.0\r\nEndless: ") + assert c.next_event() is NEED_DATA + with pytest.raises(RemoteProtocolError): + while True: + c.receive_data(b"a" * 1024) + c.next_event() + + # Checking that the same header is accepted / rejected depending on the + # max_incomplete_event_size setting: + c = Connection(SERVER, max_incomplete_event_size=5000) + c.receive_data(b"GET / HTTP/1.0\r\nBig: ") + c.receive_data(b"a" * 4000) + c.receive_data(b"\r\n\r\n") + assert get_all_events(c) == [ + Request( + method="GET", target="/", http_version="1.0", headers=[("big", "a" * 4000)] + ), + EndOfMessage(), + ] + + c = Connection(SERVER, max_incomplete_event_size=4000) + c.receive_data(b"GET / HTTP/1.0\r\nBig: ") + c.receive_data(b"a" * 4000) + with pytest.raises(RemoteProtocolError): + c.next_event() + + # Temporarily exceeding the size limit is fine, as long as its done with + # complete events: + c = Connection(SERVER, max_incomplete_event_size=5000) + c.receive_data(b"GET / HTTP/1.0\r\nContent-Length: 10000") + c.receive_data(b"\r\n\r\n" + b"a" * 10000) + assert get_all_events(c) == [ + Request( + method="GET", + target="/", + http_version="1.0", + headers=[("Content-Length", "10000")], + ), + Data(data=b"a" * 10000), + EndOfMessage(), + ] + + c = Connection(SERVER, max_incomplete_event_size=100) + # Two pipelined requests to create a way-too-big receive buffer... but + # it's fine because we're not checking + c.receive_data( + b"GET /1 HTTP/1.1\r\nHost: a\r\n\r\n" + b"GET /2 HTTP/1.1\r\nHost: b\r\n\r\n" + b"X" * 1000 + ) + assert get_all_events(c) == [ + Request(method="GET", target="/1", headers=[("host", "a")]), + EndOfMessage(), + ] + # Even more data comes in, still no problem + c.receive_data(b"X" * 1000) + # We can respond and reuse to get the second pipelined request + c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type] + c.send(EndOfMessage()) + c.start_next_cycle() + assert get_all_events(c) == [ + Request(method="GET", target="/2", headers=[("host", "b")]), + EndOfMessage(), + ] + # But once we unpause and try to read the next message, and find that it's + # incomplete and the buffer is *still* way too large, then *that's* a + # problem: + c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type] + c.send(EndOfMessage()) + c.start_next_cycle() + with pytest.raises(RemoteProtocolError): + c.next_event() + + +def test_reuse_simple() -> None: + p = ConnectionPair() + p.send( + CLIENT, + [Request(method="GET", target="/", headers=[("Host", "a")]), EndOfMessage()], + ) + p.send( + SERVER, + [ + Response(status_code=200, headers=[(b"transfer-encoding", b"chunked")]), + EndOfMessage(), + ], + ) + for conn in p.conns: + assert conn.states == {CLIENT: DONE, SERVER: DONE} + conn.start_next_cycle() + + p.send( + CLIENT, + [ + Request(method="DELETE", target="/foo", headers=[("Host", "a")]), + EndOfMessage(), + ], + ) + p.send( + SERVER, + [ + Response(status_code=404, headers=[(b"transfer-encoding", b"chunked")]), + EndOfMessage(), + ], + ) + + +def test_pipelining() -> None: + # Client doesn't support pipelining, so we have to do this by hand + c = Connection(SERVER) + assert c.next_event() is NEED_DATA + # 3 requests all bunched up + c.receive_data( + b"GET /1 HTTP/1.1\r\nHost: a.com\r\nContent-Length: 5\r\n\r\n" + b"12345" + b"GET /2 HTTP/1.1\r\nHost: a.com\r\nContent-Length: 5\r\n\r\n" + b"67890" + b"GET /3 HTTP/1.1\r\nHost: a.com\r\n\r\n" + ) + assert get_all_events(c) == [ + Request( + method="GET", + target="/1", + headers=[("Host", "a.com"), ("Content-Length", "5")], + ), + Data(data=b"12345"), + EndOfMessage(), + ] + assert c.their_state is DONE + assert c.our_state is SEND_RESPONSE + + assert c.next_event() is PAUSED + + c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type] + c.send(EndOfMessage()) + assert c.their_state is DONE + assert c.our_state is DONE + + c.start_next_cycle() + + assert get_all_events(c) == [ + Request( + method="GET", + target="/2", + headers=[("Host", "a.com"), ("Content-Length", "5")], + ), + Data(data=b"67890"), + EndOfMessage(), + ] + assert c.next_event() is PAUSED + c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type] + c.send(EndOfMessage()) + c.start_next_cycle() + + assert get_all_events(c) == [ + Request(method="GET", target="/3", headers=[("Host", "a.com")]), + EndOfMessage(), + ] + # Doesn't pause this time, no trailing data + assert c.next_event() is NEED_DATA + c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type] + c.send(EndOfMessage()) + + # Arrival of more data triggers pause + assert c.next_event() is NEED_DATA + c.receive_data(b"SADF") + assert c.next_event() is PAUSED + assert c.trailing_data == (b"SADF", False) + # If EOF arrives while paused, we don't see that either: + c.receive_data(b"") + assert c.trailing_data == (b"SADF", True) + assert c.next_event() is PAUSED + c.receive_data(b"") + assert c.next_event() is PAUSED + # Can't call receive_data with non-empty buf after closing it + with pytest.raises(RuntimeError): + c.receive_data(b"FDSA") + + +def test_protocol_switch() -> None: + for (req, deny, accept) in [ + ( + Request( + method="CONNECT", + target="example.com:443", + headers=[("Host", "foo"), ("Content-Length", "1")], + ), + Response(status_code=404, headers=[(b"transfer-encoding", b"chunked")]), + Response(status_code=200, headers=[(b"transfer-encoding", b"chunked")]), + ), + ( + Request( + method="GET", + target="/", + headers=[("Host", "foo"), ("Content-Length", "1"), ("Upgrade", "a, b")], + ), + Response(status_code=200, headers=[(b"transfer-encoding", b"chunked")]), + InformationalResponse(status_code=101, headers=[("Upgrade", "a")]), + ), + ( + Request( + method="CONNECT", + target="example.com:443", + headers=[("Host", "foo"), ("Content-Length", "1"), ("Upgrade", "a, b")], + ), + Response(status_code=404, headers=[(b"transfer-encoding", b"chunked")]), + # Accept CONNECT, not upgrade + Response(status_code=200, headers=[(b"transfer-encoding", b"chunked")]), + ), + ( + Request( + method="CONNECT", + target="example.com:443", + headers=[("Host", "foo"), ("Content-Length", "1"), ("Upgrade", "a, b")], + ), + Response(status_code=404, headers=[(b"transfer-encoding", b"chunked")]), + # Accept Upgrade, not CONNECT + InformationalResponse(status_code=101, headers=[("Upgrade", "b")]), + ), + ]: + + def setup() -> ConnectionPair: + p = ConnectionPair() + p.send(CLIENT, req) + # No switch-related state change stuff yet; the client has to + # finish the request before that kicks in + for conn in p.conns: + assert conn.states[CLIENT] is SEND_BODY + p.send(CLIENT, [Data(data=b"1"), EndOfMessage()]) + for conn in p.conns: + assert conn.states[CLIENT] is MIGHT_SWITCH_PROTOCOL + assert p.conn[SERVER].next_event() is PAUSED + return p + + # Test deny case + p = setup() + p.send(SERVER, deny) + for conn in p.conns: + assert conn.states == {CLIENT: DONE, SERVER: SEND_BODY} + p.send(SERVER, EndOfMessage()) + # Check that re-use is still allowed after a denial + for conn in p.conns: + conn.start_next_cycle() + + # Test accept case + p = setup() + p.send(SERVER, accept) + for conn in p.conns: + assert conn.states == {CLIENT: SWITCHED_PROTOCOL, SERVER: SWITCHED_PROTOCOL} + conn.receive_data(b"123") + assert conn.next_event() is PAUSED + conn.receive_data(b"456") + assert conn.next_event() is PAUSED + assert conn.trailing_data == (b"123456", False) + + # Pausing in might-switch, then recovery + # (weird artificial case where the trailing data actually is valid + # HTTP for some reason, because this makes it easier to test the state + # logic) + p = setup() + sc = p.conn[SERVER] + sc.receive_data(b"GET / HTTP/1.0\r\n\r\n") + assert sc.next_event() is PAUSED + assert sc.trailing_data == (b"GET / HTTP/1.0\r\n\r\n", False) + sc.send(deny) + assert sc.next_event() is PAUSED + sc.send(EndOfMessage()) + sc.start_next_cycle() + assert get_all_events(sc) == [ + Request(method="GET", target="/", headers=[], http_version="1.0"), # type: ignore[arg-type] + EndOfMessage(), + ] + + # When we're DONE, have no trailing data, and the connection gets + # closed, we report ConnectionClosed(). When we're in might-switch or + # switched, we don't. + p = setup() + sc = p.conn[SERVER] + sc.receive_data(b"") + assert sc.next_event() is PAUSED + assert sc.trailing_data == (b"", True) + p.send(SERVER, accept) + assert sc.next_event() is PAUSED + + p = setup() + sc = p.conn[SERVER] + sc.receive_data(b"") + assert sc.next_event() is PAUSED + sc.send(deny) + assert sc.next_event() == ConnectionClosed() + + # You can't send after switching protocols, or while waiting for a + # protocol switch + p = setup() + with pytest.raises(LocalProtocolError): + p.conn[CLIENT].send( + Request(method="GET", target="/", headers=[("Host", "a")]) + ) + p = setup() + p.send(SERVER, accept) + with pytest.raises(LocalProtocolError): + p.conn[SERVER].send(Data(data=b"123")) + + +def test_close_simple() -> None: + # Just immediately closing a new connection without anything having + # happened yet. + for (who_shot_first, who_shot_second) in [(CLIENT, SERVER), (SERVER, CLIENT)]: + + def setup() -> ConnectionPair: + p = ConnectionPair() + p.send(who_shot_first, ConnectionClosed()) + for conn in p.conns: + assert conn.states == { + who_shot_first: CLOSED, + who_shot_second: MUST_CLOSE, + } + return p + + # You can keep putting b"" into a closed connection, and you keep + # getting ConnectionClosed() out: + p = setup() + assert p.conn[who_shot_second].next_event() == ConnectionClosed() + assert p.conn[who_shot_second].next_event() == ConnectionClosed() + p.conn[who_shot_second].receive_data(b"") + assert p.conn[who_shot_second].next_event() == ConnectionClosed() + # Second party can close... + p = setup() + p.send(who_shot_second, ConnectionClosed()) + for conn in p.conns: + assert conn.our_state is CLOSED + assert conn.their_state is CLOSED + # But trying to receive new data on a closed connection is a + # RuntimeError (not ProtocolError, because the problem here isn't + # violation of HTTP, it's violation of physics) + p = setup() + with pytest.raises(RuntimeError): + p.conn[who_shot_second].receive_data(b"123") + # And receiving new data on a MUST_CLOSE connection is a ProtocolError + p = setup() + p.conn[who_shot_first].receive_data(b"GET") + with pytest.raises(RemoteProtocolError): + p.conn[who_shot_first].next_event() + + +def test_close_different_states() -> None: + req = [ + Request(method="GET", target="/foo", headers=[("Host", "a")]), + EndOfMessage(), + ] + resp = [ + Response(status_code=200, headers=[(b"transfer-encoding", b"chunked")]), + EndOfMessage(), + ] + + # Client before request + p = ConnectionPair() + p.send(CLIENT, ConnectionClosed()) + for conn in p.conns: + assert conn.states == {CLIENT: CLOSED, SERVER: MUST_CLOSE} + + # Client after request + p = ConnectionPair() + p.send(CLIENT, req) + p.send(CLIENT, ConnectionClosed()) + for conn in p.conns: + assert conn.states == {CLIENT: CLOSED, SERVER: SEND_RESPONSE} + + # Server after request -> not allowed + p = ConnectionPair() + p.send(CLIENT, req) + with pytest.raises(LocalProtocolError): + p.conn[SERVER].send(ConnectionClosed()) + p.conn[CLIENT].receive_data(b"") + with pytest.raises(RemoteProtocolError): + p.conn[CLIENT].next_event() + + # Server after response + p = ConnectionPair() + p.send(CLIENT, req) + p.send(SERVER, resp) + p.send(SERVER, ConnectionClosed()) + for conn in p.conns: + assert conn.states == {CLIENT: MUST_CLOSE, SERVER: CLOSED} + + # Both after closing (ConnectionClosed() is idempotent) + p = ConnectionPair() + p.send(CLIENT, req) + p.send(SERVER, resp) + p.send(CLIENT, ConnectionClosed()) + p.send(SERVER, ConnectionClosed()) + p.send(CLIENT, ConnectionClosed()) + p.send(SERVER, ConnectionClosed()) + + # In the middle of sending -> not allowed + p = ConnectionPair() + p.send( + CLIENT, + Request( + method="GET", target="/", headers=[("Host", "a"), ("Content-Length", "10")] + ), + ) + with pytest.raises(LocalProtocolError): + p.conn[CLIENT].send(ConnectionClosed()) + p.conn[SERVER].receive_data(b"") + with pytest.raises(RemoteProtocolError): + p.conn[SERVER].next_event() + + +# Receive several requests and then client shuts down their side of the +# connection; we can respond to each +def test_pipelined_close() -> None: + c = Connection(SERVER) + # 2 requests then a close + c.receive_data( + b"GET /1 HTTP/1.1\r\nHost: a.com\r\nContent-Length: 5\r\n\r\n" + b"12345" + b"GET /2 HTTP/1.1\r\nHost: a.com\r\nContent-Length: 5\r\n\r\n" + b"67890" + ) + c.receive_data(b"") + assert get_all_events(c) == [ + Request( + method="GET", + target="/1", + headers=[("host", "a.com"), ("content-length", "5")], + ), + Data(data=b"12345"), + EndOfMessage(), + ] + assert c.states[CLIENT] is DONE + c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type] + c.send(EndOfMessage()) + assert c.states[SERVER] is DONE + c.start_next_cycle() + assert get_all_events(c) == [ + Request( + method="GET", + target="/2", + headers=[("host", "a.com"), ("content-length", "5")], + ), + Data(data=b"67890"), + EndOfMessage(), + ConnectionClosed(), + ] + assert c.states == {CLIENT: CLOSED, SERVER: SEND_RESPONSE} + c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type] + c.send(EndOfMessage()) + assert c.states == {CLIENT: CLOSED, SERVER: MUST_CLOSE} + c.send(ConnectionClosed()) + assert c.states == {CLIENT: CLOSED, SERVER: CLOSED} + + +def test_sendfile() -> None: + class SendfilePlaceholder: + def __len__(self) -> int: + return 10 + + placeholder = SendfilePlaceholder() + + def setup( + header: Tuple[str, str], http_version: str + ) -> Tuple[Connection, Optional[List[bytes]]]: + c = Connection(SERVER) + receive_and_get( + c, "GET / HTTP/{}\r\nHost: a\r\n\r\n".format(http_version).encode("ascii") + ) + headers = [] + if header: + headers.append(header) + c.send(Response(status_code=200, headers=headers)) + return c, c.send_with_data_passthrough(Data(data=placeholder)) # type: ignore + + c, data = setup(("Content-Length", "10"), "1.1") + assert data == [placeholder] # type: ignore + # Raises an error if the connection object doesn't think we've sent + # exactly 10 bytes + c.send(EndOfMessage()) + + _, data = setup(("Transfer-Encoding", "chunked"), "1.1") + assert placeholder in data # type: ignore + data[data.index(placeholder)] = b"x" * 10 # type: ignore + assert b"".join(data) == b"a\r\nxxxxxxxxxx\r\n" # type: ignore + + c, data = setup(None, "1.0") # type: ignore + assert data == [placeholder] # type: ignore + assert c.our_state is SEND_BODY + + +def test_errors() -> None: + # After a receive error, you can't receive + for role in [CLIENT, SERVER]: + c = Connection(our_role=role) + c.receive_data(b"gibberish\r\n\r\n") + with pytest.raises(RemoteProtocolError): + c.next_event() + # Now any attempt to receive continues to raise + assert c.their_state is ERROR + assert c.our_state is not ERROR + print(c._cstate.states) + with pytest.raises(RemoteProtocolError): + c.next_event() + # But we can still yell at the client for sending us gibberish + if role is SERVER: + assert ( + c.send(Response(status_code=400, headers=[])) # type: ignore[arg-type] + == b"HTTP/1.1 400 \r\nConnection: close\r\n\r\n" + ) + + # After an error sending, you can no longer send + # (This is especially important for things like content-length errors, + # where there's complex internal state being modified) + def conn(role: Type[Sentinel]) -> Connection: + c = Connection(our_role=role) + if role is SERVER: + # Put it into the state where it *could* send a response... + receive_and_get(c, b"GET / HTTP/1.0\r\n\r\n") + assert c.our_state is SEND_RESPONSE + return c + + for role in [CLIENT, SERVER]: + if role is CLIENT: + # This HTTP/1.0 request won't be detected as bad until after we go + # through the state machine and hit the writing code + good = Request(method="GET", target="/", headers=[("Host", "example.com")]) + bad = Request( + method="GET", + target="/", + headers=[("Host", "example.com")], + http_version="1.0", + ) + elif role is SERVER: + good = Response(status_code=200, headers=[]) # type: ignore[arg-type,assignment] + bad = Response(status_code=200, headers=[], http_version="1.0") # type: ignore[arg-type,assignment] + # Make sure 'good' actually is good + c = conn(role) + c.send(good) + assert c.our_state is not ERROR + # Do that again, but this time sending 'bad' first + c = conn(role) + with pytest.raises(LocalProtocolError): + c.send(bad) + assert c.our_state is ERROR + assert c.their_state is not ERROR + # Now 'good' is not so good + with pytest.raises(LocalProtocolError): + c.send(good) + + # And check send_failed() too + c = conn(role) + c.send_failed() + assert c.our_state is ERROR + assert c.their_state is not ERROR + # This is idempotent + c.send_failed() + assert c.our_state is ERROR + assert c.their_state is not ERROR + + +def test_idle_receive_nothing() -> None: + # At one point this incorrectly raised an error + for role in [CLIENT, SERVER]: + c = Connection(role) + assert c.next_event() is NEED_DATA + + +def test_connection_drop() -> None: + c = Connection(SERVER) + c.receive_data(b"GET /") + assert c.next_event() is NEED_DATA + c.receive_data(b"") + with pytest.raises(RemoteProtocolError): + c.next_event() + + +def test_408_request_timeout() -> None: + # Should be able to send this spontaneously as a server without seeing + # anything from client + p = ConnectionPair() + p.send(SERVER, Response(status_code=408, headers=[(b"connection", b"close")])) + + +# This used to raise IndexError +def test_empty_request() -> None: + c = Connection(SERVER) + c.receive_data(b"\r\n") + with pytest.raises(RemoteProtocolError): + c.next_event() + + +# This used to raise IndexError +def test_empty_response() -> None: + c = Connection(CLIENT) + c.send(Request(method="GET", target="/", headers=[("Host", "a")])) + c.receive_data(b"\r\n") + with pytest.raises(RemoteProtocolError): + c.next_event() + + +@pytest.mark.parametrize( + "data", + [ + b"\x00", + b"\x20", + b"\x16\x03\x01\x00\xa5", # Typical start of a TLS Client Hello + ], +) +def test_early_detection_of_invalid_request(data: bytes) -> None: + c = Connection(SERVER) + # Early detection should occur before even receiving a `\r\n` + c.receive_data(data) + with pytest.raises(RemoteProtocolError): + c.next_event() + + +@pytest.mark.parametrize( + "data", + [ + b"\x00", + b"\x20", + b"\x16\x03\x03\x00\x31", # Typical start of a TLS Server Hello + ], +) +def test_early_detection_of_invalid_response(data: bytes) -> None: + c = Connection(CLIENT) + # Early detection should occur before even receiving a `\r\n` + c.receive_data(data) + with pytest.raises(RemoteProtocolError): + c.next_event() + + +# This used to give different headers for HEAD and GET. +# The correct way to handle HEAD is to put whatever headers we *would* have +# put if it were a GET -- even though we know that for HEAD, those headers +# will be ignored. +def test_HEAD_framing_headers() -> None: + def setup(method: bytes, http_version: bytes) -> Connection: + c = Connection(SERVER) + c.receive_data( + method + b" / HTTP/" + http_version + b"\r\n" + b"Host: example.com\r\n\r\n" + ) + assert type(c.next_event()) is Request + assert type(c.next_event()) is EndOfMessage + return c + + for method in [b"GET", b"HEAD"]: + # No Content-Length, HTTP/1.1 peer, should use chunked + c = setup(method, b"1.1") + assert ( + c.send(Response(status_code=200, headers=[])) == b"HTTP/1.1 200 \r\n" # type: ignore[arg-type] + b"Transfer-Encoding: chunked\r\n\r\n" + ) + + # No Content-Length, HTTP/1.0 peer, frame with connection: close + c = setup(method, b"1.0") + assert ( + c.send(Response(status_code=200, headers=[])) == b"HTTP/1.1 200 \r\n" # type: ignore[arg-type] + b"Connection: close\r\n\r\n" + ) + + # Content-Length + Transfer-Encoding, TE wins + c = setup(method, b"1.1") + assert ( + c.send( + Response( + status_code=200, + headers=[ + ("Content-Length", "100"), + ("Transfer-Encoding", "chunked"), + ], + ) + ) + == b"HTTP/1.1 200 \r\n" + b"Transfer-Encoding: chunked\r\n\r\n" + ) + + +def test_special_exceptions_for_lost_connection_in_message_body() -> None: + c = Connection(SERVER) + c.receive_data( + b"POST / HTTP/1.1\r\n" b"Host: example.com\r\n" b"Content-Length: 100\r\n\r\n" + ) + assert type(c.next_event()) is Request + assert c.next_event() is NEED_DATA + c.receive_data(b"12345") + assert c.next_event() == Data(data=b"12345") + c.receive_data(b"") + with pytest.raises(RemoteProtocolError) as excinfo: + c.next_event() + assert "received 5 bytes" in str(excinfo.value) + assert "expected 100" in str(excinfo.value) + + c = Connection(SERVER) + c.receive_data( + b"POST / HTTP/1.1\r\n" + b"Host: example.com\r\n" + b"Transfer-Encoding: chunked\r\n\r\n" + ) + assert type(c.next_event()) is Request + assert c.next_event() is NEED_DATA + c.receive_data(b"8\r\n012345") + assert c.next_event().data == b"012345" # type: ignore + c.receive_data(b"") + with pytest.raises(RemoteProtocolError) as excinfo: + c.next_event() + assert "incomplete chunked read" in str(excinfo.value) diff --git a/Backend/venv/lib/python3.12/site-packages/h11/tests/test_events.py b/Backend/venv/lib/python3.12/site-packages/h11/tests/test_events.py new file mode 100644 index 00000000..bc6c3137 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/h11/tests/test_events.py @@ -0,0 +1,150 @@ +from http import HTTPStatus + +import pytest + +from .. import _events +from .._events import ( + ConnectionClosed, + Data, + EndOfMessage, + Event, + InformationalResponse, + Request, + Response, +) +from .._util import LocalProtocolError + + +def test_events() -> None: + with pytest.raises(LocalProtocolError): + # Missing Host: + req = Request( + method="GET", target="/", headers=[("a", "b")], http_version="1.1" + ) + # But this is okay (HTTP/1.0) + req = Request(method="GET", target="/", headers=[("a", "b")], http_version="1.0") + # fields are normalized + assert req.method == b"GET" + assert req.target == b"/" + assert req.headers == [(b"a", b"b")] + assert req.http_version == b"1.0" + + # This is also okay -- has a Host (with weird capitalization, which is ok) + req = Request( + method="GET", + target="/", + headers=[("a", "b"), ("hOSt", "example.com")], + http_version="1.1", + ) + # we normalize header capitalization + assert req.headers == [(b"a", b"b"), (b"host", b"example.com")] + + # Multiple host is bad too + with pytest.raises(LocalProtocolError): + req = Request( + method="GET", + target="/", + headers=[("Host", "a"), ("Host", "a")], + http_version="1.1", + ) + # Even for HTTP/1.0 + with pytest.raises(LocalProtocolError): + req = Request( + method="GET", + target="/", + headers=[("Host", "a"), ("Host", "a")], + http_version="1.0", + ) + + # Header values are validated + for bad_char in "\x00\r\n\f\v": + with pytest.raises(LocalProtocolError): + req = Request( + method="GET", + target="/", + headers=[("Host", "a"), ("Foo", "asd" + bad_char)], + http_version="1.0", + ) + + # But for compatibility we allow non-whitespace control characters, even + # though they're forbidden by the spec. + Request( + method="GET", + target="/", + headers=[("Host", "a"), ("Foo", "asd\x01\x02\x7f")], + http_version="1.0", + ) + + # Request target is validated + for bad_byte in b"\x00\x20\x7f\xee": + target = bytearray(b"/") + target.append(bad_byte) + with pytest.raises(LocalProtocolError): + Request( + method="GET", target=target, headers=[("Host", "a")], http_version="1.1" + ) + + # Request method is validated + with pytest.raises(LocalProtocolError): + Request( + method="GET / HTTP/1.1", + target=target, + headers=[("Host", "a")], + http_version="1.1", + ) + + ir = InformationalResponse(status_code=100, headers=[("Host", "a")]) + assert ir.status_code == 100 + assert ir.headers == [(b"host", b"a")] + assert ir.http_version == b"1.1" + + with pytest.raises(LocalProtocolError): + InformationalResponse(status_code=200, headers=[("Host", "a")]) + + resp = Response(status_code=204, headers=[], http_version="1.0") # type: ignore[arg-type] + assert resp.status_code == 204 + assert resp.headers == [] + assert resp.http_version == b"1.0" + + with pytest.raises(LocalProtocolError): + resp = Response(status_code=100, headers=[], http_version="1.0") # type: ignore[arg-type] + + with pytest.raises(LocalProtocolError): + Response(status_code="100", headers=[], http_version="1.0") # type: ignore[arg-type] + + with pytest.raises(LocalProtocolError): + InformationalResponse(status_code=b"100", headers=[], http_version="1.0") # type: ignore[arg-type] + + d = Data(data=b"asdf") + assert d.data == b"asdf" + + eom = EndOfMessage() + assert eom.headers == [] + + cc = ConnectionClosed() + assert repr(cc) == "ConnectionClosed()" + + +def test_intenum_status_code() -> None: + # https://github.com/python-hyper/h11/issues/72 + + r = Response(status_code=HTTPStatus.OK, headers=[], http_version="1.0") # type: ignore[arg-type] + assert r.status_code == HTTPStatus.OK + assert type(r.status_code) is not type(HTTPStatus.OK) + assert type(r.status_code) is int + + +def test_header_casing() -> None: + r = Request( + method="GET", + target="/", + headers=[("Host", "example.org"), ("Connection", "keep-alive")], + http_version="1.1", + ) + assert len(r.headers) == 2 + assert r.headers[0] == (b"host", b"example.org") + assert r.headers == [(b"host", b"example.org"), (b"connection", b"keep-alive")] + assert r.headers.raw_items() == [ + (b"Host", b"example.org"), + (b"Connection", b"keep-alive"), + ] diff --git a/Backend/venv/lib/python3.12/site-packages/h11/tests/test_headers.py b/Backend/venv/lib/python3.12/site-packages/h11/tests/test_headers.py new file mode 100644 index 00000000..ba53d088 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/h11/tests/test_headers.py @@ -0,0 +1,157 @@ +import pytest + +from .._events import Request +from .._headers import ( + get_comma_header, + has_expect_100_continue, + Headers, + normalize_and_validate, + set_comma_header, +) +from .._util import LocalProtocolError + + +def test_normalize_and_validate() -> None: + assert normalize_and_validate([("foo", "bar")]) == [(b"foo", b"bar")] + assert normalize_and_validate([(b"foo", b"bar")]) == [(b"foo", b"bar")] + + # no leading/trailing whitespace in names + with pytest.raises(LocalProtocolError): + normalize_and_validate([(b"foo ", "bar")]) + with pytest.raises(LocalProtocolError): + normalize_and_validate([(b" foo", "bar")]) + + # no weird characters in names + with pytest.raises(LocalProtocolError) as excinfo: + normalize_and_validate([(b"foo bar", b"baz")]) + assert "foo bar" in str(excinfo.value) + with pytest.raises(LocalProtocolError): + normalize_and_validate([(b"foo\x00bar", b"baz")]) + # Not even 8-bit characters: + with pytest.raises(LocalProtocolError): + normalize_and_validate([(b"foo\xffbar", b"baz")]) + # And not even the control characters we allow in values: + with pytest.raises(LocalProtocolError): + normalize_and_validate([(b"foo\x01bar", b"baz")]) + + # no return or NUL characters in values + with pytest.raises(LocalProtocolError) as excinfo: + normalize_and_validate([("foo", "bar\rbaz")]) + assert "bar\\rbaz" in str(excinfo.value) + with pytest.raises(LocalProtocolError): + normalize_and_validate([("foo", "bar\nbaz")]) + with pytest.raises(LocalProtocolError): + normalize_and_validate([("foo", "bar\x00baz")]) + # no leading/trailing whitespace + with pytest.raises(LocalProtocolError): + normalize_and_validate([("foo", "barbaz ")]) + with pytest.raises(LocalProtocolError): + normalize_and_validate([("foo", " barbaz")]) + with pytest.raises(LocalProtocolError): + normalize_and_validate([("foo", "barbaz\t")]) + with pytest.raises(LocalProtocolError): + normalize_and_validate([("foo", "\tbarbaz")]) + + # content-length + assert normalize_and_validate([("Content-Length", "1")]) == [ + (b"content-length", b"1") + ] + with pytest.raises(LocalProtocolError): + normalize_and_validate([("Content-Length", "asdf")]) + with pytest.raises(LocalProtocolError): + normalize_and_validate([("Content-Length", "1x")]) + with pytest.raises(LocalProtocolError): + normalize_and_validate([("Content-Length", "1"), ("Content-Length", "2")]) + assert normalize_and_validate( + [("Content-Length", "0"), ("Content-Length", "0")] + ) == [(b"content-length", b"0")] + assert normalize_and_validate([("Content-Length", "0 , 0")]) == [ + (b"content-length", b"0") + ] + with pytest.raises(LocalProtocolError): + normalize_and_validate( + [("Content-Length", "1"), ("Content-Length", "1"), ("Content-Length", "2")] + ) + with pytest.raises(LocalProtocolError): + normalize_and_validate([("Content-Length", "1 , 1,2")]) + + # transfer-encoding + assert normalize_and_validate([("Transfer-Encoding", "chunked")]) == [ + (b"transfer-encoding", b"chunked") + ] + assert normalize_and_validate([("Transfer-Encoding", "cHuNkEd")]) == [ + (b"transfer-encoding", b"chunked") + ] + with pytest.raises(LocalProtocolError) as excinfo: + normalize_and_validate([("Transfer-Encoding", "gzip")]) + assert excinfo.value.error_status_hint == 501 # Not Implemented + with pytest.raises(LocalProtocolError) as excinfo: + normalize_and_validate( + [("Transfer-Encoding", "chunked"), ("Transfer-Encoding", "gzip")] + ) + assert excinfo.value.error_status_hint == 501 # Not Implemented + + +def test_get_set_comma_header() -> None: + headers = normalize_and_validate( + [ + ("Connection", "close"), + ("whatever", "something"), + ("connectiON", "fOo,, , BAR"), + ] + ) + + assert get_comma_header(headers, b"connection") == [b"close", b"foo", b"bar"] + + headers = set_comma_header(headers, b"newthing", ["a", "b"]) # type: ignore + + with pytest.raises(LocalProtocolError): + set_comma_header(headers, b"newthing", [" a", "b"]) # type: ignore + + assert headers == [ + (b"connection", b"close"), + (b"whatever", b"something"), + (b"connection", b"fOo,, , BAR"), + (b"newthing", b"a"), + (b"newthing", b"b"), + ] + + headers = set_comma_header(headers, b"whatever", ["different thing"]) # type: ignore + + assert headers == [ + (b"connection", b"close"), + (b"connection", b"fOo,, , BAR"), + (b"newthing", b"a"), + (b"newthing", b"b"), + (b"whatever", b"different thing"), + ] + + +def test_has_100_continue() -> None: + assert has_expect_100_continue( + Request( + method="GET", + target="/", + headers=[("Host", "example.com"), ("Expect", "100-continue")], + ) + ) + assert not has_expect_100_continue( + Request(method="GET", target="/", headers=[("Host", "example.com")]) + ) + # Case insensitive + assert has_expect_100_continue( + Request( + method="GET", + target="/", + headers=[("Host", "example.com"), ("Expect", "100-Continue")], + ) + ) + # Doesn't work in HTTP/1.0 + assert not has_expect_100_continue( + Request( + method="GET", + target="/", + headers=[("Host", "example.com"), ("Expect", "100-continue")], + http_version="1.0", + ) + ) diff --git a/Backend/venv/lib/python3.12/site-packages/h11/tests/test_helpers.py b/Backend/venv/lib/python3.12/site-packages/h11/tests/test_helpers.py new file mode 100644 index 00000000..c329c767 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/h11/tests/test_helpers.py @@ -0,0 +1,32 @@ +from .._events import ( + ConnectionClosed, + Data, + EndOfMessage, + Event, + InformationalResponse, + Request, + Response, +) +from .helpers import normalize_data_events + + +def test_normalize_data_events() -> None: + assert normalize_data_events( + [ + Data(data=bytearray(b"1")), + Data(data=b"2"), + Response(status_code=200, headers=[]), # type: ignore[arg-type] + Data(data=b"3"), + Data(data=b"4"), + EndOfMessage(), + Data(data=b"5"), + Data(data=b"6"), + Data(data=b"7"), + ] + ) == [ + Data(data=b"12"), + Response(status_code=200, headers=[]), # type: ignore[arg-type] + Data(data=b"34"), + EndOfMessage(), + Data(data=b"567"), + ] diff --git a/Backend/venv/lib/python3.12/site-packages/h11/tests/test_io.py b/Backend/venv/lib/python3.12/site-packages/h11/tests/test_io.py new file mode 100644 index 00000000..2b47c0ea --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/h11/tests/test_io.py @@ -0,0 +1,572 @@ +from typing import Any, Callable, Generator, List + +import pytest + +from .._events import ( + ConnectionClosed, + Data, + EndOfMessage, + Event, + InformationalResponse, + Request, + Response, +) +from .._headers import Headers, normalize_and_validate +from .._readers import ( + _obsolete_line_fold, + ChunkedReader, + ContentLengthReader, + Http10Reader, + READERS, +) +from .._receivebuffer import ReceiveBuffer +from .._state import ( + CLIENT, + CLOSED, + DONE, + IDLE, + MIGHT_SWITCH_PROTOCOL, + MUST_CLOSE, + SEND_BODY, + SEND_RESPONSE, + SERVER, + SWITCHED_PROTOCOL, +) +from .._util import LocalProtocolError +from .._writers import ( + ChunkedWriter, + ContentLengthWriter, + Http10Writer, + write_any_response, + write_headers, + write_request, + WRITERS, +) +from .helpers import normalize_data_events + +SIMPLE_CASES = [ + ( + (CLIENT, IDLE), + Request( + method="GET", + target="/a", + headers=[("Host", "foo"), ("Connection", "close")], + ), + b"GET /a HTTP/1.1\r\nHost: foo\r\nConnection: close\r\n\r\n", + ), + ( + (SERVER, SEND_RESPONSE), + Response(status_code=200, headers=[("Connection", "close")], reason=b"OK"), + b"HTTP/1.1 200 OK\r\nConnection: close\r\n\r\n", + ), + ( + (SERVER, SEND_RESPONSE), + Response(status_code=200, headers=[], reason=b"OK"), # type: ignore[arg-type] + b"HTTP/1.1 200 OK\r\n\r\n", + ), + ( + (SERVER, SEND_RESPONSE), + InformationalResponse( + status_code=101, headers=[("Upgrade", "websocket")], reason=b"Upgrade" + ), + b"HTTP/1.1 101 Upgrade\r\nUpgrade: websocket\r\n\r\n", + ), + ( + (SERVER, SEND_RESPONSE), + InformationalResponse(status_code=101, headers=[], reason=b"Upgrade"), # type: ignore[arg-type] + b"HTTP/1.1 101 Upgrade\r\n\r\n", + ), +] + + +def dowrite(writer: Callable[..., None], obj: Any) -> bytes: + got_list: List[bytes] = [] + writer(obj, got_list.append) + return b"".join(got_list) + + +def tw(writer: Any, obj: Any, expected: Any) -> None: + got = dowrite(writer, obj) + assert got == expected + + +def makebuf(data: bytes) -> ReceiveBuffer: + buf = ReceiveBuffer() + buf += data + return buf + + +def tr(reader: Any, data: bytes, expected: Any) -> None: + def check(got: Any) -> None: + assert got == expected + # Headers should always be returned as bytes, not e.g. bytearray + # https://github.com/python-hyper/wsproto/pull/54#issuecomment-377709478 + for name, value in getattr(got, "headers", []): + assert type(name) is bytes + assert type(value) is bytes + + # Simple: consume whole thing + buf = makebuf(data) + check(reader(buf)) + assert not buf + + # Incrementally growing buffer + buf = ReceiveBuffer() + for i in range(len(data)): + assert reader(buf) is None + buf += data[i : i + 1] + check(reader(buf)) + + # Trailing data + buf = makebuf(data) + buf += b"trailing" + check(reader(buf)) + assert bytes(buf) == b"trailing" + + +def test_writers_simple() -> None: + for ((role, state), event, binary) in SIMPLE_CASES: + tw(WRITERS[role, state], event, binary) + + +def test_readers_simple() -> None: + for ((role, state), event, binary) in SIMPLE_CASES: + tr(READERS[role, state], binary, event) + + +def test_writers_unusual() -> None: + # Simple test of the write_headers utility routine + tw( + write_headers, + normalize_and_validate([("foo", "bar"), ("baz", "quux")]), + b"foo: bar\r\nbaz: quux\r\n\r\n", + ) + tw(write_headers, Headers([]), b"\r\n") + + # We understand HTTP/1.0, but we don't speak it + with pytest.raises(LocalProtocolError): + tw( + write_request, + Request( + method="GET", + target="/", + headers=[("Host", "foo"), ("Connection", "close")], + http_version="1.0", + ), + None, + ) + with pytest.raises(LocalProtocolError): + tw( + write_any_response, + Response( + status_code=200, headers=[("Connection", "close")], http_version="1.0" + ), + None, + ) + + +def test_readers_unusual() -> None: + # Reading HTTP/1.0 + tr( + READERS[CLIENT, IDLE], + b"HEAD /foo HTTP/1.0\r\nSome: header\r\n\r\n", + Request( + method="HEAD", + target="/foo", + headers=[("Some", "header")], + http_version="1.0", + ), + ) + + # check no-headers, since it's only legal with HTTP/1.0 + tr( + READERS[CLIENT, IDLE], + b"HEAD /foo HTTP/1.0\r\n\r\n", + Request(method="HEAD", target="/foo", headers=[], http_version="1.0"), # type: ignore[arg-type] + ) + + tr( + READERS[SERVER, SEND_RESPONSE], + b"HTTP/1.0 200 OK\r\nSome: header\r\n\r\n", + Response( + status_code=200, + headers=[("Some", "header")], + http_version="1.0", + reason=b"OK", + ), + ) + + # single-character header values (actually disallowed by the ABNF in RFC + # 7230 -- this is a bug in the standard that we originally copied...) + tr( + READERS[SERVER, SEND_RESPONSE], + b"HTTP/1.0 200 OK\r\n" b"Foo: a a a a a \r\n\r\n", + Response( + status_code=200, + headers=[("Foo", "a a a a a")], + http_version="1.0", + reason=b"OK", + ), + ) + + # Empty headers -- also legal + tr( + READERS[SERVER, SEND_RESPONSE], + b"HTTP/1.0 200 OK\r\n" b"Foo:\r\n\r\n", + Response( + status_code=200, headers=[("Foo", "")], http_version="1.0", reason=b"OK" + ), + ) + + tr( + READERS[SERVER, SEND_RESPONSE], + b"HTTP/1.0 200 OK\r\n" b"Foo: \t \t \r\n\r\n", + Response( + status_code=200, headers=[("Foo", "")], http_version="1.0", reason=b"OK" + ), + ) + + # Tolerate broken servers that leave off the response code + tr( + READERS[SERVER, SEND_RESPONSE], + b"HTTP/1.0 200\r\n" b"Foo: bar\r\n\r\n", + Response( + status_code=200, headers=[("Foo", "bar")], http_version="1.0", reason=b"" + ), + ) + + # Tolerate headers line endings (\r\n and \n) + # \n\r\b between headers and body + tr( + READERS[SERVER, SEND_RESPONSE], + b"HTTP/1.1 200 OK\r\nSomeHeader: val\n\r\n", + Response( + status_code=200, + headers=[("SomeHeader", "val")], + http_version="1.1", + reason="OK", + ), + ) + + # delimited only with \n + tr( + READERS[SERVER, SEND_RESPONSE], + b"HTTP/1.1 200 OK\nSomeHeader1: val1\nSomeHeader2: val2\n\n", + Response( + status_code=200, + headers=[("SomeHeader1", "val1"), ("SomeHeader2", "val2")], + http_version="1.1", + reason="OK", + ), + ) + + # mixed \r\n and \n + tr( + READERS[SERVER, SEND_RESPONSE], + b"HTTP/1.1 200 OK\r\nSomeHeader1: val1\nSomeHeader2: val2\n\r\n", + Response( + status_code=200, + headers=[("SomeHeader1", "val1"), ("SomeHeader2", "val2")], + http_version="1.1", + reason="OK", + ), + ) + + # obsolete line folding + tr( + READERS[CLIENT, IDLE], + b"HEAD /foo HTTP/1.1\r\n" + b"Host: example.com\r\n" + b"Some: multi-line\r\n" + b" header\r\n" + b"\tnonsense\r\n" + b" \t \t\tI guess\r\n" + b"Connection: close\r\n" + b"More-nonsense: in the\r\n" + b" last header \r\n\r\n", + Request( + method="HEAD", + target="/foo", + headers=[ + ("Host", "example.com"), + ("Some", "multi-line header nonsense I guess"), + ("Connection", "close"), + ("More-nonsense", "in the last header"), + ], + ), + ) + + with pytest.raises(LocalProtocolError): + tr( + READERS[CLIENT, IDLE], + b"HEAD /foo HTTP/1.1\r\n" b" folded: line\r\n\r\n", + None, + ) + + with pytest.raises(LocalProtocolError): + tr( + READERS[CLIENT, IDLE], + b"HEAD /foo HTTP/1.1\r\n" b"foo : line\r\n\r\n", + None, + ) + with pytest.raises(LocalProtocolError): + tr( + READERS[CLIENT, IDLE], + b"HEAD /foo HTTP/1.1\r\n" b"foo\t: line\r\n\r\n", + None, + ) + with pytest.raises(LocalProtocolError): + tr( + READERS[CLIENT, IDLE], + b"HEAD /foo HTTP/1.1\r\n" b"foo\t: line\r\n\r\n", + None, + ) + with pytest.raises(LocalProtocolError): + tr(READERS[CLIENT, IDLE], b"HEAD /foo HTTP/1.1\r\n" b": line\r\n\r\n", None) + + +def test__obsolete_line_fold_bytes() -> None: + # _obsolete_line_fold has a defensive cast to bytearray, which is + # necessary to protect against O(n^2) behavior in case anyone ever passes + # in regular bytestrings... but right now we never pass in regular + # bytestrings. so this test just exists to get some coverage on that + # defensive cast. + assert list(_obsolete_line_fold([b"aaa", b"bbb", b" ccc", b"ddd"])) == [ + b"aaa", + bytearray(b"bbb ccc"), + b"ddd", + ] + + +def _run_reader_iter( + reader: Any, buf: bytes, do_eof: bool +) -> Generator[Any, None, None]: + while True: + event = reader(buf) + if event is None: + break + yield event + # body readers have undefined behavior after returning EndOfMessage, + # because this changes the state so they don't get called again + if type(event) is EndOfMessage: + break + if do_eof: + assert not buf + yield reader.read_eof() + + +def _run_reader(*args: Any) -> List[Event]: + events = list(_run_reader_iter(*args)) + return normalize_data_events(events) + + +def t_body_reader(thunk: Any, data: bytes, expected: Any, do_eof: bool = False) -> None: + # Simple: consume whole thing + print("Test 1") + buf = makebuf(data) + assert _run_reader(thunk(), buf, do_eof) == expected + + # Incrementally growing buffer + print("Test 2") + reader = thunk() + buf = ReceiveBuffer() + events = [] + for i in range(len(data)): + events += _run_reader(reader, buf, False) + buf += data[i : i + 1] + events += _run_reader(reader, buf, do_eof) + assert normalize_data_events(events) == expected + + is_complete = any(type(event) is EndOfMessage for event in expected) + if is_complete and not do_eof: + buf = makebuf(data + b"trailing") + assert _run_reader(thunk(), buf, False) == expected + + +def test_ContentLengthReader() -> None: + t_body_reader(lambda: ContentLengthReader(0), b"", [EndOfMessage()]) + + t_body_reader( + lambda: ContentLengthReader(10), + b"0123456789", + [Data(data=b"0123456789"), EndOfMessage()], + ) + + +def test_Http10Reader() -> None: + t_body_reader(Http10Reader, b"", [EndOfMessage()], do_eof=True) + t_body_reader(Http10Reader, b"asdf", [Data(data=b"asdf")], do_eof=False) + t_body_reader( + Http10Reader, b"asdf", [Data(data=b"asdf"), EndOfMessage()], do_eof=True + ) + + +def test_ChunkedReader() -> None: + t_body_reader(ChunkedReader, b"0\r\n\r\n", [EndOfMessage()]) + + t_body_reader( + ChunkedReader, + b"0\r\nSome: header\r\n\r\n", + [EndOfMessage(headers=[("Some", "header")])], + ) + + t_body_reader( + ChunkedReader, + b"5\r\n01234\r\n" + + b"10\r\n0123456789abcdef\r\n" + + b"0\r\n" + + b"Some: header\r\n\r\n", + [ + Data(data=b"012340123456789abcdef"), + EndOfMessage(headers=[("Some", "header")]), + ], + ) + + t_body_reader( + ChunkedReader, + b"5\r\n01234\r\n" + b"10\r\n0123456789abcdef\r\n" + b"0\r\n\r\n", + [Data(data=b"012340123456789abcdef"), EndOfMessage()], + ) + + # handles upper and lowercase hex + t_body_reader( + ChunkedReader, + b"aA\r\n" + b"x" * 0xAA + b"\r\n" + b"0\r\n\r\n", + [Data(data=b"x" * 0xAA), EndOfMessage()], + ) + + # refuses arbitrarily long chunk integers + with pytest.raises(LocalProtocolError): + # Technically this is legal HTTP/1.1, but we refuse to process chunk + # sizes that don't fit into 20 characters of hex + t_body_reader(ChunkedReader, b"9" * 100 + b"\r\nxxx", [Data(data=b"xxx")]) + + # refuses garbage in the chunk count + with pytest.raises(LocalProtocolError): + t_body_reader(ChunkedReader, b"10\x00\r\nxxx", None) + + # handles (and discards) "chunk extensions" omg wtf + t_body_reader( + ChunkedReader, + b"5; hello=there\r\n" + + b"xxxxx" + + b"\r\n" + + b'0; random="junk"; some=more; canbe=lonnnnngg\r\n\r\n', + [Data(data=b"xxxxx"), EndOfMessage()], + ) + + t_body_reader( + ChunkedReader, + b"5 \r\n01234\r\n" + b"0\r\n\r\n", + [Data(data=b"01234"), EndOfMessage()], + ) + + +def test_ContentLengthWriter() -> None: + w = ContentLengthWriter(5) + assert dowrite(w, Data(data=b"123")) == b"123" + assert dowrite(w, Data(data=b"45")) == b"45" + assert dowrite(w, EndOfMessage()) == b"" + + w = ContentLengthWriter(5) + with pytest.raises(LocalProtocolError): + dowrite(w, Data(data=b"123456")) + + w = ContentLengthWriter(5) + dowrite(w, Data(data=b"123")) + with pytest.raises(LocalProtocolError): + dowrite(w, Data(data=b"456")) + + w = ContentLengthWriter(5) + dowrite(w, Data(data=b"123")) + with pytest.raises(LocalProtocolError): + dowrite(w, EndOfMessage()) + + w = ContentLengthWriter(5) + dowrite(w, Data(data=b"123")) == b"123" + dowrite(w, Data(data=b"45")) == b"45" + with pytest.raises(LocalProtocolError): + dowrite(w, EndOfMessage(headers=[("Etag", "asdf")])) + + +def test_ChunkedWriter() -> None: + w = ChunkedWriter() + assert dowrite(w, Data(data=b"aaa")) == b"3\r\naaa\r\n" + assert dowrite(w, Data(data=b"a" * 20)) == b"14\r\n" + b"a" * 20 + b"\r\n" + + assert dowrite(w, Data(data=b"")) == b"" + + assert dowrite(w, EndOfMessage()) == b"0\r\n\r\n" + + assert ( + dowrite(w, EndOfMessage(headers=[("Etag", "asdf"), ("a", "b")])) + == b"0\r\nEtag: asdf\r\na: b\r\n\r\n" + ) + + +def test_Http10Writer() -> None: + w = Http10Writer() + assert dowrite(w, Data(data=b"1234")) == b"1234" + assert dowrite(w, EndOfMessage()) == b"" + + with pytest.raises(LocalProtocolError): + dowrite(w, EndOfMessage(headers=[("Etag", "asdf")])) + + +def test_reject_garbage_after_request_line() -> None: + with pytest.raises(LocalProtocolError): + tr(READERS[SERVER, SEND_RESPONSE], b"HTTP/1.0 200 OK\x00xxxx\r\n\r\n", None) + + +def test_reject_garbage_after_response_line() -> None: + with pytest.raises(LocalProtocolError): + tr( + READERS[CLIENT, IDLE], + b"HEAD /foo HTTP/1.1 xxxxxx\r\n" b"Host: a\r\n\r\n", + None, + ) + + +def test_reject_garbage_in_header_line() -> None: + with pytest.raises(LocalProtocolError): + tr( + READERS[CLIENT, IDLE], + b"HEAD /foo HTTP/1.1\r\n" b"Host: foo\x00bar\r\n\r\n", + None, + ) + + +def test_reject_non_vchar_in_path() -> None: + for bad_char in b"\x00\x20\x7f\xee": + message = bytearray(b"HEAD /") + message.append(bad_char) + message.extend(b" HTTP/1.1\r\nHost: foobar\r\n\r\n") + with pytest.raises(LocalProtocolError): + tr(READERS[CLIENT, IDLE], message, None) + + +# https://github.com/python-hyper/h11/issues/57 +def test_allow_some_garbage_in_cookies() -> None: + tr( + READERS[CLIENT, IDLE], + b"HEAD /foo HTTP/1.1\r\n" + b"Host: foo\r\n" + b"Set-Cookie: ___utmvafIumyLc=kUd\x01UpAt; path=/; Max-Age=900\r\n" + b"\r\n", + Request( + method="HEAD", + target="/foo", + headers=[ + ("Host", "foo"), + ("Set-Cookie", "___utmvafIumyLc=kUd\x01UpAt; path=/; Max-Age=900"), + ], + ), + ) + + +def test_host_comes_first() -> None: + tw( + write_headers, + normalize_and_validate([("foo", "bar"), ("Host", "example.com")]), + b"Host: example.com\r\nfoo: bar\r\n\r\n", + ) diff --git a/Backend/venv/lib/python3.12/site-packages/h11/tests/test_receivebuffer.py b/Backend/venv/lib/python3.12/site-packages/h11/tests/test_receivebuffer.py new file mode 100644 index 00000000..21a3870b --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/h11/tests/test_receivebuffer.py @@ -0,0 +1,135 @@ +import re +from typing import Tuple + +import pytest + +from .._receivebuffer import ReceiveBuffer + + +def test_receivebuffer() -> None: + b = ReceiveBuffer() + assert not b + assert len(b) == 0 + assert bytes(b) == b"" + + b += b"123" + assert b + assert len(b) == 3 + assert bytes(b) == b"123" + + assert bytes(b) == b"123" + + assert b.maybe_extract_at_most(2) == b"12" + assert b + assert len(b) == 1 + assert bytes(b) == b"3" + + assert bytes(b) == b"3" + + assert b.maybe_extract_at_most(10) == b"3" + assert bytes(b) == b"" + + assert b.maybe_extract_at_most(10) is None + assert not b + + ################################################################ + # maybe_extract_until_next + ################################################################ + + b += b"123\n456\r\n789\r\n" + + assert b.maybe_extract_next_line() == b"123\n456\r\n" + assert bytes(b) == b"789\r\n" + + assert b.maybe_extract_next_line() == b"789\r\n" + assert bytes(b) == b"" + + b += b"12\r" + assert b.maybe_extract_next_line() is None + assert bytes(b) == b"12\r" + + b += b"345\n\r" + assert b.maybe_extract_next_line() is None + assert bytes(b) == b"12\r345\n\r" + + # here we stopped at the middle of b"\r\n" delimiter + + b += b"\n6789aaa123\r\n" + assert b.maybe_extract_next_line() == b"12\r345\n\r\n" + assert b.maybe_extract_next_line() == b"6789aaa123\r\n" + assert b.maybe_extract_next_line() is None + assert bytes(b) == b"" + + ################################################################ + # maybe_extract_lines + ################################################################ + + b += b"123\r\na: b\r\nfoo:bar\r\n\r\ntrailing" + lines = b.maybe_extract_lines() + assert lines == [b"123", b"a: b", b"foo:bar"] + assert bytes(b) == b"trailing" + + assert b.maybe_extract_lines() is None + + b += b"\r\n\r" + assert b.maybe_extract_lines() is None + + assert b.maybe_extract_at_most(100) == b"trailing\r\n\r" + assert not b + + # Empty body case (as happens at the end of chunked encoding if there are + # no trailing headers, e.g.) + b += b"\r\ntrailing" + assert b.maybe_extract_lines() == [] + assert bytes(b) == b"trailing" + + +@pytest.mark.parametrize( + "data", + [ + pytest.param( + ( + b"HTTP/1.1 200 OK\r\n", + b"Content-type: text/plain\r\n", + b"Connection: close\r\n", + b"\r\n", + b"Some body", + ), + id="with_crlf_delimiter", + ), + pytest.param( + ( + b"HTTP/1.1 200 OK\n", + b"Content-type: text/plain\n", + b"Connection: close\n", + b"\n", + b"Some body", + ), + id="with_lf_only_delimiter", + ), + pytest.param( + ( + b"HTTP/1.1 200 OK\n", + b"Content-type: text/plain\r\n", + b"Connection: close\n", + b"\n", + b"Some body", + ), + id="with_mixed_crlf_and_lf", + ), + ], +) +def test_receivebuffer_for_invalid_delimiter(data: Tuple[bytes]) -> None: + b = ReceiveBuffer() + + for line in data: + b += line + + lines = b.maybe_extract_lines() + + assert lines == [ + b"HTTP/1.1 200 OK", + b"Content-type: text/plain", + b"Connection: close", + ] + assert bytes(b) == b"Some body" diff --git a/Backend/venv/lib/python3.12/site-packages/h11/tests/test_state.py b/Backend/venv/lib/python3.12/site-packages/h11/tests/test_state.py new file mode 100644 index 00000000..bc974e63 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/h11/tests/test_state.py @@ -0,0 +1,271 @@ +import pytest + +from .._events import ( + ConnectionClosed, + Data, + EndOfMessage, + Event, + InformationalResponse, + Request, + Response, +) +from .._state import ( + _SWITCH_CONNECT, + _SWITCH_UPGRADE, + CLIENT, + CLOSED, + ConnectionState, + DONE, + IDLE, + MIGHT_SWITCH_PROTOCOL, + MUST_CLOSE, + SEND_BODY, + SEND_RESPONSE, + SERVER, + SWITCHED_PROTOCOL, +) +from .._util import LocalProtocolError + + +def test_ConnectionState() -> None: + cs = ConnectionState() + + # Basic event-triggered transitions + + assert cs.states == {CLIENT: IDLE, SERVER: IDLE} + + cs.process_event(CLIENT, Request) + # The SERVER-Request special case: + assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE} + + # Illegal transitions raise an error and nothing happens + with pytest.raises(LocalProtocolError): + cs.process_event(CLIENT, Request) + assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE} + + cs.process_event(SERVER, InformationalResponse) + assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE} + + cs.process_event(SERVER, Response) + assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_BODY} + + cs.process_event(CLIENT, EndOfMessage) + cs.process_event(SERVER, EndOfMessage) + assert cs.states == {CLIENT: DONE, SERVER: DONE} + + # State-triggered transition + + cs.process_event(SERVER, ConnectionClosed) + assert cs.states == {CLIENT: MUST_CLOSE, SERVER: CLOSED} + + +def test_ConnectionState_keep_alive() -> None: + # keep_alive = False + cs = ConnectionState() + cs.process_event(CLIENT, Request) + cs.process_keep_alive_disabled() + cs.process_event(CLIENT, EndOfMessage) + assert cs.states == {CLIENT: MUST_CLOSE, SERVER: SEND_RESPONSE} + + cs.process_event(SERVER, Response) + cs.process_event(SERVER, EndOfMessage) + assert cs.states == {CLIENT: MUST_CLOSE, SERVER: MUST_CLOSE} + + +def test_ConnectionState_keep_alive_in_DONE() -> None: + # Check that if keep_alive is disabled when the CLIENT is already in DONE, + # then this is sufficient to immediately trigger the DONE -> MUST_CLOSE + # transition + cs = ConnectionState() + cs.process_event(CLIENT, Request) + cs.process_event(CLIENT, EndOfMessage) + assert cs.states[CLIENT] is DONE + cs.process_keep_alive_disabled() + assert cs.states[CLIENT] is MUST_CLOSE + + +def test_ConnectionState_switch_denied() -> None: + for switch_type in (_SWITCH_CONNECT, _SWITCH_UPGRADE): + for deny_early in (True, False): + cs = ConnectionState() + cs.process_client_switch_proposal(switch_type) + cs.process_event(CLIENT, Request) + cs.process_event(CLIENT, Data) + assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE} + + assert switch_type in cs.pending_switch_proposals + + if deny_early: + # before client reaches DONE + cs.process_event(SERVER, Response) + assert not cs.pending_switch_proposals + + cs.process_event(CLIENT, EndOfMessage) + + if deny_early: + assert cs.states == {CLIENT: DONE, SERVER: SEND_BODY} + else: + assert cs.states == { + CLIENT: MIGHT_SWITCH_PROTOCOL, + SERVER: SEND_RESPONSE, + } + + cs.process_event(SERVER, InformationalResponse) + assert cs.states == { + CLIENT: MIGHT_SWITCH_PROTOCOL, + SERVER: SEND_RESPONSE, + } + + cs.process_event(SERVER, Response) + assert cs.states == {CLIENT: DONE, SERVER: SEND_BODY} + assert not cs.pending_switch_proposals + + +_response_type_for_switch = { + _SWITCH_UPGRADE: InformationalResponse, + _SWITCH_CONNECT: Response, + None: Response, +} + + +def test_ConnectionState_protocol_switch_accepted() -> None: + for switch_event in [_SWITCH_UPGRADE, _SWITCH_CONNECT]: + cs = ConnectionState() + cs.process_client_switch_proposal(switch_event) + cs.process_event(CLIENT, Request) + cs.process_event(CLIENT, Data) + assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE} + + cs.process_event(CLIENT, EndOfMessage) + assert cs.states == {CLIENT: MIGHT_SWITCH_PROTOCOL, SERVER: SEND_RESPONSE} + + cs.process_event(SERVER, InformationalResponse) + assert cs.states == {CLIENT: MIGHT_SWITCH_PROTOCOL, SERVER: SEND_RESPONSE} + + cs.process_event(SERVER, _response_type_for_switch[switch_event], switch_event) + assert cs.states == {CLIENT: SWITCHED_PROTOCOL, SERVER: SWITCHED_PROTOCOL} + + +def test_ConnectionState_double_protocol_switch() -> None: + # CONNECT + Upgrade is legal! Very silly, but legal. So we support + # it. Because sometimes doing the silly thing is easier than not. + for server_switch in [None, _SWITCH_UPGRADE, _SWITCH_CONNECT]: + cs = ConnectionState() + cs.process_client_switch_proposal(_SWITCH_UPGRADE) + cs.process_client_switch_proposal(_SWITCH_CONNECT) + cs.process_event(CLIENT, Request) + cs.process_event(CLIENT, EndOfMessage) + assert cs.states == {CLIENT: MIGHT_SWITCH_PROTOCOL, SERVER: SEND_RESPONSE} + cs.process_event( + SERVER, _response_type_for_switch[server_switch], server_switch + ) + if server_switch is None: + assert cs.states == {CLIENT: DONE, SERVER: SEND_BODY} + else: + assert cs.states == {CLIENT: SWITCHED_PROTOCOL, SERVER: SWITCHED_PROTOCOL} + + +def test_ConnectionState_inconsistent_protocol_switch() -> None: + for client_switches, server_switch in [ + ([], _SWITCH_CONNECT), + ([], _SWITCH_UPGRADE), + ([_SWITCH_UPGRADE], _SWITCH_CONNECT), + ([_SWITCH_CONNECT], _SWITCH_UPGRADE), + ]: + cs = ConnectionState() + for client_switch in client_switches: # type: ignore[attr-defined] + cs.process_client_switch_proposal(client_switch) + cs.process_event(CLIENT, Request) + with pytest.raises(LocalProtocolError): + cs.process_event(SERVER, Response, server_switch) + + +def test_ConnectionState_keepalive_protocol_switch_interaction() -> None: + # keep_alive=False + pending_switch_proposals + cs = ConnectionState() + cs.process_client_switch_proposal(_SWITCH_UPGRADE) + cs.process_event(CLIENT, Request) + cs.process_keep_alive_disabled() + cs.process_event(CLIENT, Data) + assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE} + + # the protocol switch "wins" + cs.process_event(CLIENT, EndOfMessage) + assert cs.states == {CLIENT: MIGHT_SWITCH_PROTOCOL, SERVER: SEND_RESPONSE} + + # but when the server denies the request, keep_alive comes back into play + cs.process_event(SERVER, Response) + assert cs.states == {CLIENT: MUST_CLOSE, SERVER: SEND_BODY} + + +def test_ConnectionState_reuse() -> None: + cs = ConnectionState() + + with pytest.raises(LocalProtocolError): + cs.start_next_cycle() + + cs.process_event(CLIENT, Request) + cs.process_event(CLIENT, EndOfMessage) + + with pytest.raises(LocalProtocolError): + cs.start_next_cycle() + + cs.process_event(SERVER, Response) + cs.process_event(SERVER, EndOfMessage) + + cs.start_next_cycle() + assert cs.states == {CLIENT: IDLE, SERVER: IDLE} + + # No keepalive + + cs.process_event(CLIENT, Request) + cs.process_keep_alive_disabled() + cs.process_event(CLIENT, EndOfMessage) + cs.process_event(SERVER, Response) + cs.process_event(SERVER, EndOfMessage) + + with pytest.raises(LocalProtocolError): + cs.start_next_cycle() + + # One side closed + + cs = ConnectionState() + cs.process_event(CLIENT, Request) + cs.process_event(CLIENT, EndOfMessage) + cs.process_event(CLIENT, ConnectionClosed) + cs.process_event(SERVER, Response) + cs.process_event(SERVER, EndOfMessage) + + with pytest.raises(LocalProtocolError): + cs.start_next_cycle() + + # Succesful protocol switch + + cs = ConnectionState() + cs.process_client_switch_proposal(_SWITCH_UPGRADE) + cs.process_event(CLIENT, Request) + cs.process_event(CLIENT, EndOfMessage) + cs.process_event(SERVER, InformationalResponse, _SWITCH_UPGRADE) + + with pytest.raises(LocalProtocolError): + cs.start_next_cycle() + + # Failed protocol switch + + cs = ConnectionState() + cs.process_client_switch_proposal(_SWITCH_UPGRADE) + cs.process_event(CLIENT, Request) + cs.process_event(CLIENT, EndOfMessage) + cs.process_event(SERVER, Response) + cs.process_event(SERVER, EndOfMessage) + + cs.start_next_cycle() + assert cs.states == {CLIENT: IDLE, SERVER: IDLE} + + +def test_server_request_is_illegal() -> None: + # There used to be a bug in how we handled the Request special case that + # made this allowed... + cs = ConnectionState() + with pytest.raises(LocalProtocolError): + cs.process_event(SERVER, Request) diff --git a/Backend/venv/lib/python3.12/site-packages/h11/tests/test_util.py b/Backend/venv/lib/python3.12/site-packages/h11/tests/test_util.py new file mode 100644 index 00000000..79bc0951 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/h11/tests/test_util.py @@ -0,0 +1,112 @@ +import re +import sys +import traceback +from typing import NoReturn + +import pytest + +from .._util import ( + bytesify, + LocalProtocolError, + ProtocolError, + RemoteProtocolError, + Sentinel, + validate, +) + + +def test_ProtocolError() -> None: + with pytest.raises(TypeError): + ProtocolError("abstract base class") + + +def test_LocalProtocolError() -> None: + try: + raise LocalProtocolError("foo") + except LocalProtocolError as e: + assert str(e) == "foo" + assert e.error_status_hint == 400 + + try: + raise LocalProtocolError("foo", error_status_hint=418) + except LocalProtocolError as e: + assert str(e) == "foo" + assert e.error_status_hint == 418 + + def thunk() -> NoReturn: + raise LocalProtocolError("a", error_status_hint=420) + + try: + try: + thunk() + except LocalProtocolError as exc1: + orig_traceback = "".join(traceback.format_tb(sys.exc_info()[2])) + exc1._reraise_as_remote_protocol_error() + except RemoteProtocolError as exc2: + assert type(exc2) is RemoteProtocolError + assert exc2.args == ("a",) + assert exc2.error_status_hint == 420 + new_traceback = "".join(traceback.format_tb(sys.exc_info()[2])) + assert new_traceback.endswith(orig_traceback) + + +def test_validate() -> None: + my_re = re.compile(rb"(?P[0-9]+)\.(?P[0-9]+)") + with pytest.raises(LocalProtocolError): + validate(my_re, b"0.") + + groups = validate(my_re, b"0.1") + assert groups == {"group1": b"0", "group2": b"1"} + + # successful partial matches are an error - must match whole string + with pytest.raises(LocalProtocolError): + validate(my_re, b"0.1xx") + with pytest.raises(LocalProtocolError): + validate(my_re, b"0.1\n") + + +def test_validate_formatting() -> None: + my_re = re.compile(rb"foo") + + with pytest.raises(LocalProtocolError) as excinfo: + validate(my_re, b"", "oops") + assert "oops" in str(excinfo.value) + + with pytest.raises(LocalProtocolError) as excinfo: + validate(my_re, b"", "oops {}") + assert "oops {}" in str(excinfo.value) + + with pytest.raises(LocalProtocolError) as excinfo: + validate(my_re, b"", "oops {} xx", 10) + assert "oops 10 xx" in str(excinfo.value) + + +def test_make_sentinel() -> None: + class S(Sentinel, metaclass=Sentinel): + pass + + assert repr(S) == "S" + assert S == S + assert type(S).__name__ == "S" + assert S in {S} + assert type(S) is S + + class S2(Sentinel, metaclass=Sentinel): + pass + + assert repr(S2) == "S2" + assert S != S2 + assert S not in {S2} + assert type(S) is not type(S2) + + +def test_bytesify() -> None: + assert bytesify(b"123") == b"123" + assert bytesify(bytearray(b"123")) == b"123" + assert bytesify("123") == b"123" + + with pytest.raises(UnicodeEncodeError): + bytesify("\u1234") + + with pytest.raises(TypeError): + bytesify(10) diff --git a/Backend/venv/lib/python3.12/site-packages/httpcore-0.17.3.dist-info/INSTALLER b/Backend/venv/lib/python3.12/site-packages/httpcore-0.17.3.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/httpcore-0.17.3.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/Backend/venv/lib/python3.12/site-packages/httpcore-0.17.3.dist-info/LICENSE.md b/Backend/venv/lib/python3.12/site-packages/httpcore-0.17.3.dist-info/LICENSE.md new file mode 100644 index 00000000..311b2b56 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/httpcore-0.17.3.dist-info/LICENSE.md @@ -0,0 +1,27 @@ +Copyright © 2020, [Encode OSS Ltd](https://www.encode.io/). +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +* Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/Backend/venv/lib/python3.12/site-packages/httpcore-0.17.3.dist-info/METADATA b/Backend/venv/lib/python3.12/site-packages/httpcore-0.17.3.dist-info/METADATA new file mode 100644 index 00000000..3bcd8aee --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/httpcore-0.17.3.dist-info/METADATA @@ -0,0 +1,542 @@ +Metadata-Version: 2.1 +Name: httpcore +Version: 0.17.3 +Summary: A minimal low-level HTTP client. +Home-page: https://github.com/encode/httpcore +Author: Tom Christie +Author-email: tom@tomchristie.com +License: BSD +Project-URL: Documentation, https://www.encode.io/httpcore +Project-URL: Source, https://github.com/encode/httpcore +Classifier: Development Status :: 3 - Alpha +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Topic :: Internet :: WWW/HTTP +Classifier: Framework :: AsyncIO +Classifier: Framework :: Trio +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3 :: Only +Requires-Python: >=3.7 +Description-Content-Type: text/markdown +License-File: LICENSE.md +Requires-Dist: h11 (<0.15,>=0.13) +Requires-Dist: sniffio (==1.*) +Requires-Dist: anyio (<5.0,>=3.0) +Requires-Dist: certifi +Provides-Extra: http2 +Requires-Dist: h2 (<5,>=3) ; extra == 'http2' +Provides-Extra: socks +Requires-Dist: socksio (==1.*) ; extra == 'socks' + +# HTTP Core + +[![Test Suite](https://github.com/encode/httpcore/workflows/Test%20Suite/badge.svg)](https://github.com/encode/httpcore/actions) +[![Package version](https://badge.fury.io/py/httpcore.svg)](https://pypi.org/project/httpcore/) + +> *Do one thing, and do it well.* + +The HTTP Core package provides a minimal low-level HTTP client, which does +one thing only. Sending HTTP requests. + +It does not provide any high level model abstractions over the API, +does not handle redirects, multipart uploads, building authentication headers, +transparent HTTP caching, URL parsing, session cookie handling, +content or charset decoding, handling JSON, environment based configuration +defaults, or any of that Jazz. + +Some things HTTP Core does do: + +* Sending HTTP requests. +* Thread-safe / task-safe connection pooling. +* HTTP(S) proxy & SOCKS proxy support. +* Supports HTTP/1.1 and HTTP/2. +* Provides both sync and async interfaces. +* Async backend support for `asyncio` and `trio`. + +## Requirements + +Python 3.7+ + +## Installation + +For HTTP/1.1 only support, install with: + +```shell +$ pip install httpcore +``` + +For HTTP/1.1 and HTTP/2 support, install with: + +```shell +$ pip install httpcore[http2] +``` + +For SOCKS proxy support, install with: + +```shell +$ pip install httpcore[socks] +``` + +# Sending requests + +Send an HTTP request: + +```python +import httpcore + +response = httpcore.request("GET", "https://www.example.com/") + +print(response) +# +print(response.status) +# 200 +print(response.headers) +# [(b'Accept-Ranges', b'bytes'), (b'Age', b'557328'), (b'Cache-Control', b'max-age=604800'), ...] +print(response.content) +# b'\n\n\nExample Domain\n\n\n ...' +``` + +The top-level `httpcore.request()` function is provided for convenience. In practice whenever you're working with `httpcore` you'll want to use the connection pooling functionality that it provides. + +```python +import httpcore + +http = httpcore.ConnectionPool() +response = http.request("GET", "https://www.example.com/") +``` + +Once you're ready to get going, [head over to the documentation](https://www.encode.io/httpcore/). + +## Motivation + +You *probably* don't want to be using HTTP Core directly. It might make sense if +you're writing something like a proxy service in Python, and you just want +something at the lowest possible level, but more typically you'll want to use +a higher level client library, such as `httpx`. + +The motivation for `httpcore` is: + +* To provide a reusable low-level client library, that other packages can then build on top of. +* To provide a *really clear interface split* between the networking code and client logic, + so that each is easier to understand and reason about in isolation. + + +# Changelog + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). + +## 0.17.3 (5th July 2023) + +- Support async cancellations, ensuring that the connection pool is left in a clean state when cancellations occur. (#726) +- The networking backend interface has [been added to the public API](https://www.encode.io/httpcore/network-backends). Some classes which were previously private implementation detail are now part of the top-level public API. (#699) +- Graceful handling of HTTP/2 GoAway frames, with requests being transparently retried on a new connection. (#730) +- Add exceptions when a synchronous `trace callback` is passed to an asynchronous request or an asynchronous `trace callback` is passed to a synchronous request. (#717) + +## 0.17.2 (May 23th, 2023) + +- Add `socket_options` argument to `ConnectionPool` and `HTTProxy` classes. (#668) +- Improve logging with per-module logger names. (#690) +- Add `sni_hostname` request extension. (#696) +- Resolve race condition during import of `anyio` package. (#692) +- Enable TCP_NODELAY for all synchronous sockets. (#651) + +## 0.17.1 (May 17th, 2023) + +- If 'retries' is set, then allow retries if an SSL handshake error occurs. (#669) +- Improve correctness of tracebacks on network exceptions, by raising properly chained exceptions. (#678) +- Prevent connection-hanging behaviour when HTTP/2 connections are closed by a server-sent 'GoAway' frame. (#679) +- Fix edge-case exception when removing requests from the connection pool. (#680) +- Fix pool timeout edge-case. (#688) + +## 0.17.0 (March 16th, 2023) + +- Add DEBUG level logging. (#648) +- Respect HTTP/2 max concurrent streams when settings updates are sent by server. (#652) +- Increase the allowable HTTP header size to 100kB. (#647) +- Add `retries` option to SOCKS proxy classes. (#643) + +## 0.16.3 (December 20th, 2022) + +- Allow `ws` and `wss` schemes. Allows us to properly support websocket upgrade connections. (#625) +- Forwarding HTTP proxies use a connection-per-remote-host. Required by some proxy implementations. (#637) +- Don't raise `RuntimeError` when closing a connection pool with active connections. Removes some error cases when cancellations are used. (#631) +- Lazy import `anyio`, so that it's no longer a hard dependancy, and isn't imported if unused. (#639) + +## 0.16.2 (November 25th, 2022) + +- Revert 'Fix async cancellation behaviour', which introduced race conditions. (#627) +- Raise `RuntimeError` if attempting to us UNIX domain sockets on Windows. (#619) + +## 0.16.1 (November 17th, 2022) + +- Fix HTTP/1.1 interim informational responses, such as "100 Continue". (#605) + +## 0.16.0 (October 11th, 2022) + +- Support HTTP/1.1 informational responses. (#581) +- Fix async cancellation behaviour. (#580) +- Support `h11` 0.14. (#579) + +## 0.15.0 (May 17th, 2022) + +- Drop Python 3.6 support (#535) +- Ensure HTTP proxy CONNECT requests include `timeout` configuration. (#506) +- Switch to explicit `typing.Optional` for type hints. (#513) +- For `trio` map OSError exceptions to `ConnectError`. (#543) + +## 0.14.7 (February 4th, 2022) + +- Requests which raise a PoolTimeout need to be removed from the pool queue. (#502) +- Fix AttributeError that happened when Socks5Connection were terminated. (#501) + +## 0.14.6 (February 1st, 2022) + +- Fix SOCKS support for `http://` URLs. (#492) +- Resolve race condition around exceptions during streaming a response. (#491) + +## 0.14.5 (January 18th, 2022) + +- SOCKS proxy support. (#478) +- Add proxy_auth argument to HTTPProxy. (#481) +- Improve error message on 'RemoteProtocolError' exception when server disconnects without sending a response. (#479) + +## 0.14.4 (January 5th, 2022) + +- Support HTTP/2 on HTTPS tunnelling proxies. (#468) +- Fix proxy headers missing on HTTP forwarding. (#456) +- Only instantiate SSL context if required. (#457) +- More robust HTTP/2 handling. (#253, #439, #440, #441) + +## 0.14.3 (November 17th, 2021) + +- Fix race condition when removing closed connections from the pool. (#437) + +## 0.14.2 (November 16th, 2021) + +- Failed connections no longer remain in the pool. (Pull #433) + +## 0.14.1 (November 12th, 2021) + +- `max_connections` becomes optional. (Pull #429) +- `certifi` is now included in the install dependancies. (Pull #428) +- `h2` is now strictly optional. (Pull #428) + +## 0.14.0 (November 11th, 2021) + +The 0.14 release is a complete reworking of `httpcore`, comprehensively addressing some underlying issues in the connection pooling, as well as substantially redesigning the API to be more user friendly. + +Some of the lower-level API design also makes the components more easily testable in isolation, and the package now has 100% test coverage. + +See [discussion #419](https://github.com/encode/httpcore/discussions/419) for a little more background. + +There's some other neat bits in there too, such as the "trace" extension, which gives a hook into inspecting the internal events that occur during the request/response cycle. This extension is needed for the HTTPX cli, in order to... + +* Log the point at which the connection is established, and the IP/port on which it is made. +* Determine if the outgoing request should log as HTTP/1.1 or HTTP/2, rather than having to assume it's HTTP/2 if the --http2 flag was passed. (Which may not actually be true.) +* Log SSL version info / certificate info. + +Note that `curio` support is not currently available in 0.14.0. If you're using `httpcore` with `curio` please get in touch, so we can assess if we ought to prioritize it as a feature or not. + +## 0.13.7 (September 13th, 2021) + +- Fix broken error messaging when URL scheme is missing, or a non HTTP(S) scheme is used. (Pull #403) + +## 0.13.6 (June 15th, 2021) + +### Fixed + +- Close sockets when read or write timeouts occur. (Pull #365) + +## 0.13.5 (June 14th, 2021) + +### Fixed + +- Resolved niggles with AnyIO EOF behaviours. (Pull #358, #362) + +## 0.13.4 (June 9th, 2021) + +### Added + +- Improved error messaging when URL scheme is missing, or a non HTTP(S) scheme is used. (Pull #354) + +### Fixed + +- Switched to `anyio` as the default backend implementation when running with `asyncio`. Resolves some awkward [TLS timeout issues](https://github.com/encode/httpx/discussions/1511). + +## 0.13.3 (May 6th, 2021) + +### Added + +- Support HTTP/2 prior knowledge, using `httpcore.SyncConnectionPool(http1=False)`. (Pull #333) + +### Fixed + +- Handle cases where environment does not provide `select.poll` support. (Pull #331) + +## 0.13.2 (April 29th, 2021) + +### Added + +- Improve error message for specific case of `RemoteProtocolError` where server disconnects without sending a response. (Pull #313) + +## 0.13.1 (April 28th, 2021) + +### Fixed + +- More resiliant testing for closed connections. (Pull #311) +- Don't raise exceptions on ungraceful connection closes. (Pull #310) + +## 0.13.0 (April 21st, 2021) + +The 0.13 release updates the core API in order to match the HTTPX Transport API, +introduced in HTTPX 0.18 onwards. + +An example of making requests with the new interface is: + +```python +with httpcore.SyncConnectionPool() as http: + status_code, headers, stream, extensions = http.handle_request( + method=b'GET', + url=(b'https', b'example.org', 443, b'/'), + headers=[(b'host', b'example.org'), (b'user-agent', b'httpcore')] + stream=httpcore.ByteStream(b''), + extensions={} + ) + body = stream.read() + print(status_code, body) +``` + +### Changed + +- The `.request()` method is now `handle_request()`. (Pull #296) +- The `.arequest()` method is now `.handle_async_request()`. (Pull #296) +- The `headers` argument is no longer optional. (Pull #296) +- The `stream` argument is no longer optional. (Pull #296) +- The `ext` argument is now named `extensions`, and is no longer optional. (Pull #296) +- The `"reason"` extension keyword is now named `"reason_phrase"`. (Pull #296) +- The `"reason_phrase"` and `"http_version"` extensions now use byte strings for their values. (Pull #296) +- The `httpcore.PlainByteStream()` class becomes `httpcore.ByteStream()`. (Pull #296) + +### Added + +- Streams now support a `.read()` interface. (Pull #296) + +### Fixed + +- Task cancellation no longer leaks connections from the connection pool. (Pull #305) + +## 0.12.3 (December 7th, 2020) + +### Fixed + +- Abort SSL connections on close rather than waiting for remote EOF when using `asyncio`. (Pull #167) +- Fix exception raised in case of connect timeouts when using the `anyio` backend. (Pull #236) +- Fix `Host` header precedence for `:authority` in HTTP/2. (Pull #241, #243) +- Handle extra edge case when detecting for socket readability when using `asyncio`. (Pull #242, #244) +- Fix `asyncio` SSL warning when using proxy tunneling. (Pull #249) + +## 0.12.2 (November 20th, 2020) + +### Fixed + +- Properly wrap connect errors on the asyncio backend. (Pull #235) +- Fix `ImportError` occurring on Python 3.9 when using the HTTP/1.1 sync client in a multithreaded context. (Pull #237) + +## 0.12.1 (November 7th, 2020) + +### Added + +- Add connect retries. (Pull #221) + +### Fixed + +- Tweak detection of dropped connections, resolving an issue with open files limits on Linux. (Pull #185) +- Avoid leaking connections when establishing an HTTP tunnel to a proxy has failed. (Pull #223) +- Properly wrap OS errors when using `trio`. (Pull #225) + +## 0.12.0 (October 6th, 2020) + +### Changed + +- HTTP header casing is now preserved, rather than always sent in lowercase. (#216 and python-hyper/h11#104) + +### Added + +- Add Python 3.9 to officially supported versions. + +### Fixed + +- Gracefully handle a stdlib asyncio bug when a connection is closed while it is in a paused-for-reading state. (#201) + +## 0.11.1 (September 28nd, 2020) + +### Fixed + +- Add await to async semaphore release() coroutine (#197) +- Drop incorrect curio classifier (#192) + +## 0.11.0 (September 22nd, 2020) + +The Transport API with 0.11.0 has a couple of significant changes. + +Firstly we've moved changed the request interface in order to allow extensions, which will later enable us to support features +such as trailing headers, HTTP/2 server push, and CONNECT/Upgrade connections. + +The interface changes from: + +```python +def request(method, url, headers, stream, timeout): + return (http_version, status_code, reason, headers, stream) +``` + +To instead including an optional dictionary of extensions on the request and response: + +```python +def request(method, url, headers, stream, ext): + return (status_code, headers, stream, ext) +``` + +Having an open-ended extensions point will allow us to add later support for various optional features, that wouldn't otherwise be supported without these API changes. + +In particular: + +* Trailing headers support. +* HTTP/2 Server Push +* sendfile. +* Exposing raw connection on CONNECT, Upgrade, HTTP/2 bi-di streaming. +* Exposing debug information out of the API, including template name, template context. + +Currently extensions are limited to: + +* request: `timeout` - Optional. Timeout dictionary. +* response: `http_version` - Optional. Include the HTTP version used on the response. +* response: `reason` - Optional. Include the reason phrase used on the response. Only valid with HTTP/1.*. + +See https://github.com/encode/httpx/issues/1274#issuecomment-694884553 for the history behind this. + +Secondly, the async version of `request` is now namespaced as `arequest`. + +This allows concrete transports to support both sync and async implementations on the same class. + +### Added + +- Add curio support. (Pull #168) +- Add anyio support, with `backend="anyio"`. (Pull #169) + +### Changed + +- Update the Transport API to use 'ext' for optional extensions. (Pull #190) +- Update the Transport API to use `.request` and `.arequest` so implementations can support both sync and async. (Pull #189) + +## 0.10.2 (August 20th, 2020) + +### Added + +- Added Unix Domain Socket support. (Pull #139) + +### Fixed + +- Always include the port on proxy CONNECT requests. (Pull #154) +- Fix `max_keepalive_connections` configuration. (Pull #153) +- Fixes behaviour in HTTP/1.1 where server disconnects can be used to signal the end of the response body. (Pull #164) + +## 0.10.1 (August 7th, 2020) + +- Include `max_keepalive_connections` on `AsyncHTTPProxy`/`SyncHTTPProxy` classes. + +## 0.10.0 (August 7th, 2020) + +The most notable change in the 0.10.0 release is that HTTP/2 support is now fully optional. + +Use either `pip install httpcore` for HTTP/1.1 support only, or `pip install httpcore[http2]` for HTTP/1.1 and HTTP/2 support. + +### Added + +- HTTP/2 support becomes optional. (Pull #121, #130) +- Add `local_address=...` support. (Pull #100, #134) +- Add `PlainByteStream`, `IteratorByteStream`, `AsyncIteratorByteStream`. The `AsyncByteSteam` and `SyncByteStream` classes are now pure interface classes. (#133) +- Add `LocalProtocolError`, `RemoteProtocolError` exceptions. (Pull #129) +- Add `UnsupportedProtocol` exception. (Pull #128) +- Add `.get_connection_info()` method. (Pull #102, #137) +- Add better TRACE logs. (Pull #101) + +### Changed + +- `max_keepalive` is deprecated in favour of `max_keepalive_connections`. (Pull #140) + +### Fixed + +- Improve handling of server disconnects. (Pull #112) + +## 0.9.1 (May 27th, 2020) + +### Fixed + +- Proper host resolution for sync case, including IPv6 support. (Pull #97) +- Close outstanding connections when connection pool is closed. (Pull #98) + +## 0.9.0 (May 21th, 2020) + +### Changed + +- URL port becomes an `Optional[int]` instead of `int`. (Pull #92) + +### Fixed + +- Honor HTTP/2 max concurrent streams settings. (Pull #89, #90) +- Remove incorrect debug log. (Pull #83) + +## 0.8.4 (May 11th, 2020) + +### Added + +- Logging via HTTPCORE_LOG_LEVEL and HTTPX_LOG_LEVEL environment variables +and TRACE level logging. (Pull #79) + +### Fixed + +- Reuse of connections on HTTP/2 in close concurrency situations. (Pull #81) + +## 0.8.3 (May 6rd, 2020) + +### Fixed + +- Include `Host` and `Accept` headers on proxy "CONNECT" requests. +- De-duplicate any headers also contained in proxy_headers. +- HTTP/2 flag not being passed down to proxy connections. + +## 0.8.2 (May 3rd, 2020) + +### Fixed + +- Fix connections using proxy forwarding requests not being added to the +connection pool properly. (Pull #70) + +## 0.8.1 (April 30th, 2020) + +### Changed + +- Allow inherintance of both `httpcore.AsyncByteStream`, `httpcore.SyncByteStream` without type conflicts. + +## 0.8.0 (April 30th, 2020) + +### Fixed + +- Fixed tunnel proxy support. + +### Added + +- New `TimeoutException` base class. + +## 0.7.0 (March 5th, 2020) + +- First integration with HTTPX. diff --git a/Backend/venv/lib/python3.12/site-packages/httpcore-0.17.3.dist-info/RECORD b/Backend/venv/lib/python3.12/site-packages/httpcore-0.17.3.dist-info/RECORD new file mode 100644 index 00000000..8f8da5d5 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/httpcore-0.17.3.dist-info/RECORD @@ -0,0 +1,69 @@ +httpcore-0.17.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +httpcore-0.17.3.dist-info/LICENSE.md,sha256=_ctZFUx0y6uhahEkL3dAvqnyPW_rVUeRfYxflKgDkqU,1518 +httpcore-0.17.3.dist-info/METADATA,sha256=FXYdgFJ2kxh_T0yVw4qIdD031yF4wtYjTlU0TLrNjIk,18594 +httpcore-0.17.3.dist-info/RECORD,, +httpcore-0.17.3.dist-info/WHEEL,sha256=pkctZYzUS4AYVn6dJ-7367OJZivF2e8RA9b_ZBjif18,92 +httpcore-0.17.3.dist-info/top_level.txt,sha256=kYeSB6l1hBNp7JwgSwLajcsxRlrSCVKOhYKSkdgx798,59 +httpcore/__init__.py,sha256=Dza2gJlD90bgsFlu61Fo9RpTqTj7-mxGdJVA1X-MG_U,3338 +httpcore/__pycache__/__init__.cpython-312.pyc,, +httpcore/__pycache__/_api.cpython-312.pyc,, +httpcore/__pycache__/_exceptions.cpython-312.pyc,, +httpcore/__pycache__/_models.cpython-312.pyc,, +httpcore/__pycache__/_ssl.cpython-312.pyc,, +httpcore/__pycache__/_synchronization.cpython-312.pyc,, +httpcore/__pycache__/_trace.cpython-312.pyc,, +httpcore/__pycache__/_utils.cpython-312.pyc,, +httpcore/_api.py,sha256=IBR18qZQ8ETcghJXC1Gd-30WuKYRS0EyF2eC80_OBQ8,3167 +httpcore/_async/__init__.py,sha256=EWdl2v4thnAHzJpqjU4h2a8DUiGAvNiWrkii9pfhTf0,1221 +httpcore/_async/__pycache__/__init__.cpython-312.pyc,, +httpcore/_async/__pycache__/connection.cpython-312.pyc,, +httpcore/_async/__pycache__/connection_pool.cpython-312.pyc,, +httpcore/_async/__pycache__/http11.cpython-312.pyc,, +httpcore/_async/__pycache__/http2.cpython-312.pyc,, +httpcore/_async/__pycache__/http_proxy.cpython-312.pyc,, +httpcore/_async/__pycache__/interfaces.cpython-312.pyc,, +httpcore/_async/__pycache__/socks_proxy.cpython-312.pyc,, +httpcore/_async/connection.py,sha256=0LKFUXPkxusvJAUyHSJpy4mMkgf71BtOjtlaMBL4sUs,8420 +httpcore/_async/connection_pool.py,sha256=hj1viqcWZivNmoRu-QZjyuOvAFx3-Ae2rMpuK6OZhEM,14305 +httpcore/_async/http11.py,sha256=z58glbEF4YrDM03KVHkuNXNRpAQaJQ4qyblapA-mk4o,11968 +httpcore/_async/http2.py,sha256=KXwWZxZ-43vxIWzr1aTLErhaCodDzFr-XAvzc4fUb10,23879 +httpcore/_async/http_proxy.py,sha256=6jdp87k6_iNCAaM7bJF8wOw_4mX_xrXGU_c4qDjJxLk,13999 +httpcore/_async/interfaces.py,sha256=J2iq9rs7x3nKS6iCfntjHY0Woast6V_HuXuE8rs3HmA,4486 +httpcore/_async/socks_proxy.py,sha256=7tFg_GuAL6WoV5-emaBaiDEmZBHdVODaQXd7nkOoGC8,13810 +httpcore/_backends/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +httpcore/_backends/__pycache__/__init__.cpython-312.pyc,, +httpcore/_backends/__pycache__/anyio.cpython-312.pyc,, +httpcore/_backends/__pycache__/auto.cpython-312.pyc,, +httpcore/_backends/__pycache__/base.cpython-312.pyc,, +httpcore/_backends/__pycache__/mock.cpython-312.pyc,, +httpcore/_backends/__pycache__/sync.cpython-312.pyc,, +httpcore/_backends/__pycache__/trio.cpython-312.pyc,, +httpcore/_backends/anyio.py,sha256=mU8gtunBSLxESGkU0Iy1ZMgumDlAeMkwBjFE3kZiCnc,5208 +httpcore/_backends/auto.py,sha256=8r0ipGxSwXoCb_xKQAyRwL1UzfXVbO4Ee2y8vYQv3Ic,1654 +httpcore/_backends/base.py,sha256=Qsb8b_PSiVP1ldHHGXHxQzJ1Qlzj2r8KR9KQeANkSbE,3218 +httpcore/_backends/mock.py,sha256=S4IADhC6kE22ge_jR_WHlEUkD6QAsXnwz26DSWZLcG4,4179 +httpcore/_backends/sync.py,sha256=Q2skeGyuAt6ETqPjZkiw-iUU0zh_nFXvCFkrsT-Y9GI,4444 +httpcore/_backends/trio.py,sha256=INOeHEkA8pO6AsSqjColWcayM0FQSyGi1hpaQghjrCs,6078 +httpcore/_exceptions.py,sha256=7zb3KNiG0qmfUNIdFgdaUSbn2Pu3oztghi6Vg7i-LJU,1185 +httpcore/_models.py,sha256=1aM8l5D3CbP5QKXCBsdzAWVCHSm0t7UVrCNVTaXUPI8,16343 +httpcore/_ssl.py,sha256=srqmSNU4iOUvWF-SrJvb8G_YEbHFELOXQOwdDIBTS9c,187 +httpcore/_sync/__init__.py,sha256=JBDIgXt5la1LCJ1sLQeKhjKFpLnpNr8Svs6z2ni3fgg,1141 +httpcore/_sync/__pycache__/__init__.cpython-312.pyc,, +httpcore/_sync/__pycache__/connection.cpython-312.pyc,, +httpcore/_sync/__pycache__/connection_pool.cpython-312.pyc,, +httpcore/_sync/__pycache__/http11.cpython-312.pyc,, +httpcore/_sync/__pycache__/http2.cpython-312.pyc,, +httpcore/_sync/__pycache__/http_proxy.cpython-312.pyc,, +httpcore/_sync/__pycache__/interfaces.cpython-312.pyc,, +httpcore/_sync/__pycache__/socks_proxy.cpython-312.pyc,, +httpcore/_sync/connection.py,sha256=8IOzYLwK8_GuUPz9fF3z0EARb-ueGeKW6ZDXRPdNluQ,8209 +httpcore/_sync/connection_pool.py,sha256=1iwYLdiq3pi9LBvpMZ8O8gWdb56qqPlm6rp35zeORBQ,13928 +httpcore/_sync/http11.py,sha256=FTg8wAzMu1kSDjCQqQUXIslJ90aFrWnO6eL459K8SYs,11629 +httpcore/_sync/http2.py,sha256=lkpHesGkrwzIA4oHLyClJf5IAwRLcaAFMnmffAahAK4,23343 +httpcore/_sync/http_proxy.py,sha256=PcTIz3XuYT3rKvdaruAtH5W7EQvjofOcUHTv9YXiOc0,13761 +httpcore/_sync/interfaces.py,sha256=EM4PTf-rgkclzisFcrTyx1G8FwraoffE8rbckOznX_o,4365 +httpcore/_sync/socks_proxy.py,sha256=BLRF27DHvsfpdZ7WVzK3Ba3vxN6zk0iD_3xRCzDt-2Q,13595 +httpcore/_synchronization.py,sha256=_d_vHqylvzm1Jh58_0G7i-1VwCg3Gu39Cgd4nWASvP0,8751 +httpcore/_trace.py,sha256=akf5PsWVq3rZjqmXniomU59OY37K7JHoeNDCQ4GU84E,3954 +httpcore/_utils.py,sha256=9QPh5ib4JilWX4dBCC_XO6wdBY4b0kbUGgfV3QfBANc,1525 +httpcore/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/Backend/venv/lib/python3.12/site-packages/httpcore-0.17.3.dist-info/WHEEL b/Backend/venv/lib/python3.12/site-packages/httpcore-0.17.3.dist-info/WHEEL new file mode 100644 index 00000000..1f37c02f --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/httpcore-0.17.3.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.40.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/Backend/venv/lib/python3.12/site-packages/httpcore-0.17.3.dist-info/top_level.txt b/Backend/venv/lib/python3.12/site-packages/httpcore-0.17.3.dist-info/top_level.txt new file mode 100644 index 00000000..613e4350 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/httpcore-0.17.3.dist-info/top_level.txt @@ -0,0 +1,4 @@ +httpcore +httpcore/_async +httpcore/_backends +httpcore/_sync diff --git a/Backend/venv/lib/python3.12/site-packages/httpcore/__init__.py b/Backend/venv/lib/python3.12/site-packages/httpcore/__init__.py new file mode 100644 index 00000000..da95f8d0 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/httpcore/__init__.py @@ -0,0 +1,139 @@ +from ._api import request, stream +from ._async import ( + AsyncConnectionInterface, + AsyncConnectionPool, + AsyncHTTP2Connection, + AsyncHTTP11Connection, + AsyncHTTPConnection, + AsyncHTTPProxy, + AsyncSOCKSProxy, +) +from ._backends.base import ( + SOCKET_OPTION, + AsyncNetworkBackend, + AsyncNetworkStream, + NetworkBackend, + NetworkStream, +) +from ._backends.mock import AsyncMockBackend, AsyncMockStream, MockBackend, MockStream +from ._backends.sync import SyncBackend +from ._exceptions import ( + ConnectError, + ConnectionNotAvailable, + ConnectTimeout, + LocalProtocolError, + NetworkError, + PoolTimeout, + ProtocolError, + ProxyError, + ReadError, + ReadTimeout, + RemoteProtocolError, + TimeoutException, + UnsupportedProtocol, + WriteError, + WriteTimeout, +) +from ._models import URL, Origin, Request, Response +from ._ssl import default_ssl_context +from ._sync import ( + ConnectionInterface, + ConnectionPool, + HTTP2Connection, + HTTP11Connection, + HTTPConnection, + HTTPProxy, + SOCKSProxy, +) + +# The 'httpcore.AnyIOBackend' class is conditional on 'anyio' being installed. +try: + from ._backends.anyio import AnyIOBackend +except ImportError: # pragma: nocover + + class AnyIOBackend: # type: ignore + def __init__(self, *args, **kwargs): # type: ignore + msg = ( + "Attempted to use 'httpcore.AnyIOBackend' but 'anyio' is not installed." + ) + raise RuntimeError(msg) + + +# The 'httpcore.TrioBackend' class is conditional on 'trio' being installed. +try: + from ._backends.trio import TrioBackend +except ImportError: # pragma: nocover + + class TrioBackend: # type: ignore + def __init__(self, *args, **kwargs): # type: ignore + msg = "Attempted to use 'httpcore.TrioBackend' but 'trio' is not installed." + raise RuntimeError(msg) + + +__all__ = [ + # top-level requests + "request", + "stream", + # models + "Origin", + "URL", + "Request", + "Response", + # async + "AsyncHTTPConnection", + "AsyncConnectionPool", + "AsyncHTTPProxy", + "AsyncHTTP11Connection", + "AsyncHTTP2Connection", + "AsyncConnectionInterface", + "AsyncSOCKSProxy", + # sync + "HTTPConnection", + "ConnectionPool", + "HTTPProxy", + "HTTP11Connection", + "HTTP2Connection", + "ConnectionInterface", + "SOCKSProxy", + # network backends, implementations + "SyncBackend", + "AnyIOBackend", + "TrioBackend", + # network backends, mock implementations + "AsyncMockBackend", + "AsyncMockStream", + "MockBackend", + "MockStream", + # network backends, interface + "AsyncNetworkStream", + "AsyncNetworkBackend", + "NetworkStream", + "NetworkBackend", + # util + "default_ssl_context", + "SOCKET_OPTION", + # exceptions + "ConnectionNotAvailable", + "ProxyError", + "ProtocolError", + "LocalProtocolError", + "RemoteProtocolError", + "UnsupportedProtocol", + "TimeoutException", + "PoolTimeout", + "ConnectTimeout", + "ReadTimeout", + "WriteTimeout", + "NetworkError", + "ConnectError", + "ReadError", + "WriteError", +] + +__version__ = "0.17.3" + + +__locals = locals() +for __name in __all__: + if not __name.startswith("__"): + setattr(__locals[__name], "__module__", "httpcore") # noqa diff --git a/Backend/venv/lib/python3.12/site-packages/httpcore/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/httpcore/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..41b78124 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/httpcore/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/httpcore/__pycache__/_api.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/httpcore/__pycache__/_api.cpython-312.pyc new file mode 100644 index 00000000..ce707b9e Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/httpcore/__pycache__/_api.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/httpcore/__pycache__/_exceptions.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/httpcore/__pycache__/_exceptions.cpython-312.pyc new file mode 100644 index 00000000..220706e0 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/httpcore/__pycache__/_exceptions.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/httpcore/__pycache__/_models.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/httpcore/__pycache__/_models.cpython-312.pyc new file mode 100644 index 00000000..dfa3391a Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/httpcore/__pycache__/_models.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/httpcore/__pycache__/_ssl.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/httpcore/__pycache__/_ssl.cpython-312.pyc new file mode 100644 index 00000000..990abe20 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/httpcore/__pycache__/_ssl.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/httpcore/__pycache__/_synchronization.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/httpcore/__pycache__/_synchronization.cpython-312.pyc new file mode 100644 index 00000000..344ea370 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/httpcore/__pycache__/_synchronization.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/httpcore/__pycache__/_trace.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/httpcore/__pycache__/_trace.cpython-312.pyc new file mode 100644 index 00000000..87b0cd0a Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/httpcore/__pycache__/_trace.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/httpcore/__pycache__/_utils.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/httpcore/__pycache__/_utils.cpython-312.pyc new file mode 100644 index 00000000..fb5150d8 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/httpcore/__pycache__/_utils.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/httpcore/_api.py b/Backend/venv/lib/python3.12/site-packages/httpcore/_api.py new file mode 100644 index 00000000..854235f5 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/httpcore/_api.py @@ -0,0 +1,92 @@ +from contextlib import contextmanager +from typing import Iterator, Optional, Union + +from ._models import URL, Extensions, HeaderTypes, Response +from ._sync.connection_pool import ConnectionPool + + +def request( + method: Union[bytes, str], + url: Union[URL, bytes, str], + *, + headers: HeaderTypes = None, + content: Union[bytes, Iterator[bytes], None] = None, + extensions: Optional[Extensions] = None, +) -> Response: + """ + Sends an HTTP request, returning the response. + + ``` + response = httpcore.request("GET", "https://www.example.com/") + ``` + + Arguments: + method: The HTTP method for the request. Typically one of `"GET"`, + `"OPTIONS"`, `"HEAD"`, `"POST"`, `"PUT"`, `"PATCH"`, or `"DELETE"`. + url: The URL of the HTTP request. Either as an instance of `httpcore.URL`, + or as str/bytes. + headers: The HTTP request headers. Either as a dictionary of str/bytes, + or as a list of two-tuples of str/bytes. + content: The content of the request body. Either as bytes, + or as a bytes iterator. + extensions: A dictionary of optional extra information included on the request. + Possible keys include `"timeout"`. + + Returns: + An instance of `httpcore.Response`. + """ + with ConnectionPool() as pool: + return pool.request( + method=method, + url=url, + headers=headers, + content=content, + extensions=extensions, + ) + + +@contextmanager +def stream( + method: Union[bytes, str], + url: Union[URL, bytes, str], + *, + headers: HeaderTypes = None, + content: Union[bytes, Iterator[bytes], None] = None, + extensions: Optional[Extensions] = None, +) -> Iterator[Response]: + """ + Sends an HTTP request, returning the response within a content manager. + + ``` + with httpcore.stream("GET", "https://www.example.com/") as response: + ... + ``` + + When using the `stream()` function, the body of the response will not be + automatically read. If you want to access the response body you should + either use `content = response.read()`, or `for chunk in response.iter_content()`. + + Arguments: + method: The HTTP method for the request. Typically one of `"GET"`, + `"OPTIONS"`, `"HEAD"`, `"POST"`, `"PUT"`, `"PATCH"`, or `"DELETE"`. + url: The URL of the HTTP request. Either as an instance of `httpcore.URL`, + or as str/bytes. + headers: The HTTP request headers. Either as a dictionary of str/bytes, + or as a list of two-tuples of str/bytes. + content: The content of the request body. Either as bytes, + or as a bytes iterator. + extensions: A dictionary of optional extra information included on the request. + Possible keys include `"timeout"`. + + Returns: + An instance of `httpcore.Response`. + """ + with ConnectionPool() as pool: + with pool.stream( + method=method, + url=url, + headers=headers, + content=content, + extensions=extensions, + ) as response: + yield response diff --git a/Backend/venv/lib/python3.12/site-packages/httpcore/_async/__init__.py b/Backend/venv/lib/python3.12/site-packages/httpcore/_async/__init__.py new file mode 100644 index 00000000..88dc7f01 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/httpcore/_async/__init__.py @@ -0,0 +1,39 @@ +from .connection import AsyncHTTPConnection +from .connection_pool import AsyncConnectionPool +from .http11 import AsyncHTTP11Connection +from .http_proxy import AsyncHTTPProxy +from .interfaces import AsyncConnectionInterface + +try: + from .http2 import AsyncHTTP2Connection +except ImportError: # pragma: nocover + + class AsyncHTTP2Connection: # type: ignore + def __init__(self, *args, **kwargs) -> None: # type: ignore + raise RuntimeError( + "Attempted to use http2 support, but the `h2` package is not " + "installed. Use 'pip install httpcore[http2]'." + ) + + +try: + from .socks_proxy import AsyncSOCKSProxy +except ImportError: # pragma: nocover + + class AsyncSOCKSProxy: # type: ignore + def __init__(self, *args, **kwargs) -> None: # type: ignore + raise RuntimeError( + "Attempted to use SOCKS support, but the `socksio` package is not " + "installed. Use 'pip install httpcore[socks]'." + ) + + +__all__ = [ + "AsyncHTTPConnection", + "AsyncConnectionPool", + "AsyncHTTPProxy", + "AsyncHTTP11Connection", + "AsyncHTTP2Connection", + "AsyncConnectionInterface", + "AsyncSOCKSProxy", +] diff --git a/Backend/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..56197452 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/connection.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/connection.cpython-312.pyc new file mode 100644 index 00000000..e302b0c2 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/connection.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/connection_pool.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/connection_pool.cpython-312.pyc new file mode 100644 index 00000000..cf79b745 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/connection_pool.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/http11.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/http11.cpython-312.pyc new file mode 100644 index 00000000..b6b05422 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/http11.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/http2.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/http2.cpython-312.pyc new file mode 100644 index 00000000..43bf257a Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/http2.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/http_proxy.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/http_proxy.cpython-312.pyc new file mode 100644 index 00000000..bc0ba44e Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/http_proxy.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/interfaces.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/interfaces.cpython-312.pyc new file mode 100644 index 00000000..d508b399 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/interfaces.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/socks_proxy.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/socks_proxy.cpython-312.pyc new file mode 100644 index 00000000..8c2ecb4b Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/socks_proxy.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/httpcore/_async/connection.py b/Backend/venv/lib/python3.12/site-packages/httpcore/_async/connection.py new file mode 100644 index 00000000..9014ab95 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/httpcore/_async/connection.py @@ -0,0 +1,215 @@ +import itertools +import logging +import ssl +from types import TracebackType +from typing import Iterable, Iterator, Optional, Type + +from .._backends.auto import AutoBackend +from .._backends.base import SOCKET_OPTION, AsyncNetworkBackend, AsyncNetworkStream +from .._exceptions import ConnectError, ConnectionNotAvailable, ConnectTimeout +from .._models import Origin, Request, Response +from .._ssl import default_ssl_context +from .._synchronization import AsyncLock +from .._trace import Trace +from .http11 import AsyncHTTP11Connection +from .interfaces import AsyncConnectionInterface + +RETRIES_BACKOFF_FACTOR = 0.5 # 0s, 0.5s, 1s, 2s, 4s, etc. + + +logger = logging.getLogger("httpcore.connection") + + +def exponential_backoff(factor: float) -> Iterator[float]: + yield 0 + for n in itertools.count(2): + yield factor * (2 ** (n - 2)) + + +class AsyncHTTPConnection(AsyncConnectionInterface): + def __init__( + self, + origin: Origin, + ssl_context: Optional[ssl.SSLContext] = None, + keepalive_expiry: Optional[float] = None, + http1: bool = True, + http2: bool = False, + retries: int = 0, + local_address: Optional[str] = None, + uds: Optional[str] = None, + network_backend: Optional[AsyncNetworkBackend] = None, + socket_options: Optional[Iterable[SOCKET_OPTION]] = None, + ) -> None: + self._origin = origin + self._ssl_context = ssl_context + self._keepalive_expiry = keepalive_expiry + self._http1 = http1 + self._http2 = http2 + self._retries = retries + self._local_address = local_address + self._uds = uds + + self._network_backend: AsyncNetworkBackend = ( + AutoBackend() if network_backend is None else network_backend + ) + self._connection: Optional[AsyncConnectionInterface] = None + self._connect_failed: bool = False + self._request_lock = AsyncLock() + self._socket_options = socket_options + + async def handle_async_request(self, request: Request) -> Response: + if not self.can_handle_request(request.url.origin): + raise RuntimeError( + f"Attempted to send request to {request.url.origin} on connection to {self._origin}" + ) + + async with self._request_lock: + if self._connection is None: + try: + stream = await self._connect(request) + + ssl_object = stream.get_extra_info("ssl_object") + http2_negotiated = ( + ssl_object is not None + and ssl_object.selected_alpn_protocol() == "h2" + ) + if http2_negotiated or (self._http2 and not self._http1): + from .http2 import AsyncHTTP2Connection + + self._connection = AsyncHTTP2Connection( + origin=self._origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + else: + self._connection = AsyncHTTP11Connection( + origin=self._origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + except Exception as exc: + self._connect_failed = True + raise exc + elif not self._connection.is_available(): + raise ConnectionNotAvailable() + + return await self._connection.handle_async_request(request) + + async def _connect(self, request: Request) -> AsyncNetworkStream: + timeouts = request.extensions.get("timeout", {}) + sni_hostname = request.extensions.get("sni_hostname", None) + timeout = timeouts.get("connect", None) + + retries_left = self._retries + delays = exponential_backoff(factor=RETRIES_BACKOFF_FACTOR) + + while True: + try: + if self._uds is None: + kwargs = { + "host": self._origin.host.decode("ascii"), + "port": self._origin.port, + "local_address": self._local_address, + "timeout": timeout, + "socket_options": self._socket_options, + } + async with Trace("connect_tcp", logger, request, kwargs) as trace: + stream = await self._network_backend.connect_tcp(**kwargs) + trace.return_value = stream + else: + kwargs = { + "path": self._uds, + "timeout": timeout, + "socket_options": self._socket_options, + } + async with Trace( + "connect_unix_socket", logger, request, kwargs + ) as trace: + stream = await self._network_backend.connect_unix_socket( + **kwargs + ) + trace.return_value = stream + + if self._origin.scheme == b"https": + ssl_context = ( + default_ssl_context() + if self._ssl_context is None + else self._ssl_context + ) + alpn_protocols = ["http/1.1", "h2"] if self._http2 else ["http/1.1"] + ssl_context.set_alpn_protocols(alpn_protocols) + + kwargs = { + "ssl_context": ssl_context, + "server_hostname": sni_hostname + or self._origin.host.decode("ascii"), + "timeout": timeout, + } + async with Trace("start_tls", logger, request, kwargs) as trace: + stream = await stream.start_tls(**kwargs) + trace.return_value = stream + return stream + except (ConnectError, ConnectTimeout): + if retries_left <= 0: + raise + retries_left -= 1 + delay = next(delays) + async with Trace("retry", logger, request, kwargs) as trace: + await self._network_backend.sleep(delay) + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._origin + + async def aclose(self) -> None: + if self._connection is not None: + async with Trace("close", logger, None, {}): + await self._connection.aclose() + + def is_available(self) -> bool: + if self._connection is None: + # If HTTP/2 support is enabled, and the resulting connection could + # end up as HTTP/2 then we should indicate the connection as being + # available to service multiple requests. + return ( + self._http2 + and (self._origin.scheme == b"https" or not self._http1) + and not self._connect_failed + ) + return self._connection.is_available() + + def has_expired(self) -> bool: + if self._connection is None: + return self._connect_failed + return self._connection.has_expired() + + def is_idle(self) -> bool: + if self._connection is None: + return self._connect_failed + return self._connection.is_idle() + + def is_closed(self) -> bool: + if self._connection is None: + return self._connect_failed + return self._connection.is_closed() + + def info(self) -> str: + if self._connection is None: + return "CONNECTION FAILED" if self._connect_failed else "CONNECTING" + return self._connection.info() + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.info()}]>" + + # These context managers are not used in the standard flow, but are + # useful for testing or working with connection instances directly. + + async def __aenter__(self) -> "AsyncHTTPConnection": + return self + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]] = None, + exc_value: Optional[BaseException] = None, + traceback: Optional[TracebackType] = None, + ) -> None: + await self.aclose() diff --git a/Backend/venv/lib/python3.12/site-packages/httpcore/_async/connection_pool.py b/Backend/venv/lib/python3.12/site-packages/httpcore/_async/connection_pool.py new file mode 100644 index 00000000..ddc0510e --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/httpcore/_async/connection_pool.py @@ -0,0 +1,356 @@ +import ssl +import sys +from types import TracebackType +from typing import AsyncIterable, AsyncIterator, Iterable, List, Optional, Type + +from .._backends.auto import AutoBackend +from .._backends.base import SOCKET_OPTION, AsyncNetworkBackend +from .._exceptions import ConnectionNotAvailable, UnsupportedProtocol +from .._models import Origin, Request, Response +from .._synchronization import AsyncEvent, AsyncLock, AsyncShieldCancellation +from .connection import AsyncHTTPConnection +from .interfaces import AsyncConnectionInterface, AsyncRequestInterface + + +class RequestStatus: + def __init__(self, request: Request): + self.request = request + self.connection: Optional[AsyncConnectionInterface] = None + self._connection_acquired = AsyncEvent() + + def set_connection(self, connection: AsyncConnectionInterface) -> None: + assert self.connection is None + self.connection = connection + self._connection_acquired.set() + + def unset_connection(self) -> None: + assert self.connection is not None + self.connection = None + self._connection_acquired = AsyncEvent() + + async def wait_for_connection( + self, timeout: Optional[float] = None + ) -> AsyncConnectionInterface: + if self.connection is None: + await self._connection_acquired.wait(timeout=timeout) + assert self.connection is not None + return self.connection + + +class AsyncConnectionPool(AsyncRequestInterface): + """ + A connection pool for making HTTP requests. + """ + + def __init__( + self, + ssl_context: Optional[ssl.SSLContext] = None, + max_connections: Optional[int] = 10, + max_keepalive_connections: Optional[int] = None, + keepalive_expiry: Optional[float] = None, + http1: bool = True, + http2: bool = False, + retries: int = 0, + local_address: Optional[str] = None, + uds: Optional[str] = None, + network_backend: Optional[AsyncNetworkBackend] = None, + socket_options: Optional[Iterable[SOCKET_OPTION]] = None, + ) -> None: + """ + A connection pool for making HTTP requests. + + Parameters: + ssl_context: An SSL context to use for verifying connections. + If not specified, the default `httpcore.default_ssl_context()` + will be used. + max_connections: The maximum number of concurrent HTTP connections that + the pool should allow. Any attempt to send a request on a pool that + would exceed this amount will block until a connection is available. + max_keepalive_connections: The maximum number of idle HTTP connections + that will be maintained in the pool. + keepalive_expiry: The duration in seconds that an idle HTTP connection + may be maintained for before being expired from the pool. + http1: A boolean indicating if HTTP/1.1 requests should be supported + by the connection pool. Defaults to True. + http2: A boolean indicating if HTTP/2 requests should be supported by + the connection pool. Defaults to False. + retries: The maximum number of retries when trying to establish a + connection. + local_address: Local address to connect from. Can also be used to connect + using a particular address family. Using `local_address="0.0.0.0"` + will connect using an `AF_INET` address (IPv4), while using + `local_address="::"` will connect using an `AF_INET6` address (IPv6). + uds: Path to a Unix Domain Socket to use instead of TCP sockets. + network_backend: A backend instance to use for handling network I/O. + socket_options: Socket options that have to be included + in the TCP socket when the connection was established. + """ + self._ssl_context = ssl_context + + self._max_connections = ( + sys.maxsize if max_connections is None else max_connections + ) + self._max_keepalive_connections = ( + sys.maxsize + if max_keepalive_connections is None + else max_keepalive_connections + ) + self._max_keepalive_connections = min( + self._max_connections, self._max_keepalive_connections + ) + + self._keepalive_expiry = keepalive_expiry + self._http1 = http1 + self._http2 = http2 + self._retries = retries + self._local_address = local_address + self._uds = uds + + self._pool: List[AsyncConnectionInterface] = [] + self._requests: List[RequestStatus] = [] + self._pool_lock = AsyncLock() + self._network_backend = ( + AutoBackend() if network_backend is None else network_backend + ) + self._socket_options = socket_options + + def create_connection(self, origin: Origin) -> AsyncConnectionInterface: + return AsyncHTTPConnection( + origin=origin, + ssl_context=self._ssl_context, + keepalive_expiry=self._keepalive_expiry, + http1=self._http1, + http2=self._http2, + retries=self._retries, + local_address=self._local_address, + uds=self._uds, + network_backend=self._network_backend, + socket_options=self._socket_options, + ) + + @property + def connections(self) -> List[AsyncConnectionInterface]: + """ + Return a list of the connections currently in the pool. + + For example: + + ```python + >>> pool.connections + [ + , + , + , + ] + ``` + """ + return list(self._pool) + + async def _attempt_to_acquire_connection(self, status: RequestStatus) -> bool: + """ + Attempt to provide a connection that can handle the given origin. + """ + origin = status.request.url.origin + + # If there are queued requests in front of us, then don't acquire a + # connection. We handle requests strictly in order. + waiting = [s for s in self._requests if s.connection is None] + if waiting and waiting[0] is not status: + return False + + # Reuse an existing connection if one is currently available. + for idx, connection in enumerate(self._pool): + if connection.can_handle_request(origin) and connection.is_available(): + self._pool.pop(idx) + self._pool.insert(0, connection) + status.set_connection(connection) + return True + + # If the pool is currently full, attempt to close one idle connection. + if len(self._pool) >= self._max_connections: + for idx, connection in reversed(list(enumerate(self._pool))): + if connection.is_idle(): + await connection.aclose() + self._pool.pop(idx) + break + + # If the pool is still full, then we cannot acquire a connection. + if len(self._pool) >= self._max_connections: + return False + + # Otherwise create a new connection. + connection = self.create_connection(origin) + self._pool.insert(0, connection) + status.set_connection(connection) + return True + + async def _close_expired_connections(self) -> None: + """ + Clean up the connection pool by closing off any connections that have expired. + """ + # Close any connections that have expired their keep-alive time. + for idx, connection in reversed(list(enumerate(self._pool))): + if connection.has_expired(): + await connection.aclose() + self._pool.pop(idx) + + # If the pool size exceeds the maximum number of allowed keep-alive connections, + # then close off idle connections as required. + pool_size = len(self._pool) + for idx, connection in reversed(list(enumerate(self._pool))): + if connection.is_idle() and pool_size > self._max_keepalive_connections: + await connection.aclose() + self._pool.pop(idx) + pool_size -= 1 + + async def handle_async_request(self, request: Request) -> Response: + """ + Send an HTTP request, and return an HTTP response. + + This is the core implementation that is called into by `.request()` or `.stream()`. + """ + scheme = request.url.scheme.decode() + if scheme == "": + raise UnsupportedProtocol( + "Request URL is missing an 'http://' or 'https://' protocol." + ) + if scheme not in ("http", "https", "ws", "wss"): + raise UnsupportedProtocol( + f"Request URL has an unsupported protocol '{scheme}://'." + ) + + status = RequestStatus(request) + + async with self._pool_lock: + self._requests.append(status) + await self._close_expired_connections() + await self._attempt_to_acquire_connection(status) + + while True: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("pool", None) + try: + connection = await status.wait_for_connection(timeout=timeout) + except BaseException as exc: + # If we timeout here, or if the task is cancelled, then make + # sure to remove the request from the queue before bubbling + # up the exception. + async with self._pool_lock: + # Ensure only remove when task exists. + if status in self._requests: + self._requests.remove(status) + raise exc + + try: + response = await connection.handle_async_request(request) + except ConnectionNotAvailable: + # The ConnectionNotAvailable exception is a special case, that + # indicates we need to retry the request on a new connection. + # + # The most common case where this can occur is when multiple + # requests are queued waiting for a single connection, which + # might end up as an HTTP/2 connection, but which actually ends + # up as HTTP/1.1. + async with self._pool_lock: + # Maintain our position in the request queue, but reset the + # status so that the request becomes queued again. + status.unset_connection() + await self._attempt_to_acquire_connection(status) + except BaseException as exc: + with AsyncShieldCancellation(): + await self.response_closed(status) + raise exc + else: + break + + # When we return the response, we wrap the stream in a special class + # that handles notifying the connection pool once the response + # has been released. + assert isinstance(response.stream, AsyncIterable) + return Response( + status=response.status, + headers=response.headers, + content=ConnectionPoolByteStream(response.stream, self, status), + extensions=response.extensions, + ) + + async def response_closed(self, status: RequestStatus) -> None: + """ + This method acts as a callback once the request/response cycle is complete. + + It is called into from the `ConnectionPoolByteStream.aclose()` method. + """ + assert status.connection is not None + connection = status.connection + + async with self._pool_lock: + # Update the state of the connection pool. + if status in self._requests: + self._requests.remove(status) + + if connection.is_closed() and connection in self._pool: + self._pool.remove(connection) + + # Since we've had a response closed, it's possible we'll now be able + # to service one or more requests that are currently pending. + for status in self._requests: + if status.connection is None: + acquired = await self._attempt_to_acquire_connection(status) + # If we could not acquire a connection for a queued request + # then we don't need to check anymore requests that are + # queued later behind it. + if not acquired: + break + + # Housekeeping. + await self._close_expired_connections() + + async def aclose(self) -> None: + """ + Close any connections in the pool. + """ + async with self._pool_lock: + for connection in self._pool: + await connection.aclose() + self._pool = [] + self._requests = [] + + async def __aenter__(self) -> "AsyncConnectionPool": + return self + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]] = None, + exc_value: Optional[BaseException] = None, + traceback: Optional[TracebackType] = None, + ) -> None: + await self.aclose() + + +class ConnectionPoolByteStream: + """ + A wrapper around the response byte stream, that additionally handles + notifying the connection pool when the response has been closed. + """ + + def __init__( + self, + stream: AsyncIterable[bytes], + pool: AsyncConnectionPool, + status: RequestStatus, + ) -> None: + self._stream = stream + self._pool = pool + self._status = status + + async def __aiter__(self) -> AsyncIterator[bytes]: + async for part in self._stream: + yield part + + async def aclose(self) -> None: + try: + if hasattr(self._stream, "aclose"): + await self._stream.aclose() + finally: + with AsyncShieldCancellation(): + await self._pool.response_closed(self._status) diff --git a/Backend/venv/lib/python3.12/site-packages/httpcore/_async/http11.py b/Backend/venv/lib/python3.12/site-packages/httpcore/_async/http11.py new file mode 100644 index 00000000..7ad36642 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/httpcore/_async/http11.py @@ -0,0 +1,331 @@ +import enum +import logging +import time +from types import TracebackType +from typing import ( + AsyncIterable, + AsyncIterator, + List, + Optional, + Tuple, + Type, + Union, + cast, +) + +import h11 + +from .._backends.base import AsyncNetworkStream +from .._exceptions import ( + ConnectionNotAvailable, + LocalProtocolError, + RemoteProtocolError, + map_exceptions, +) +from .._models import Origin, Request, Response +from .._synchronization import AsyncLock, AsyncShieldCancellation +from .._trace import Trace +from .interfaces import AsyncConnectionInterface + +logger = logging.getLogger("httpcore.http11") + + +# A subset of `h11.Event` types supported by `_send_event` +H11SendEvent = Union[ + h11.Request, + h11.Data, + h11.EndOfMessage, +] + + +class HTTPConnectionState(enum.IntEnum): + NEW = 0 + ACTIVE = 1 + IDLE = 2 + CLOSED = 3 + + +class AsyncHTTP11Connection(AsyncConnectionInterface): + READ_NUM_BYTES = 64 * 1024 + MAX_INCOMPLETE_EVENT_SIZE = 100 * 1024 + + def __init__( + self, + origin: Origin, + stream: AsyncNetworkStream, + keepalive_expiry: Optional[float] = None, + ) -> None: + self._origin = origin + self._network_stream = stream + self._keepalive_expiry: Optional[float] = keepalive_expiry + self._expire_at: Optional[float] = None + self._state = HTTPConnectionState.NEW + self._state_lock = AsyncLock() + self._request_count = 0 + self._h11_state = h11.Connection( + our_role=h11.CLIENT, + max_incomplete_event_size=self.MAX_INCOMPLETE_EVENT_SIZE, + ) + + async def handle_async_request(self, request: Request) -> Response: + if not self.can_handle_request(request.url.origin): + raise RuntimeError( + f"Attempted to send request to {request.url.origin} on connection " + f"to {self._origin}" + ) + + async with self._state_lock: + if self._state in (HTTPConnectionState.NEW, HTTPConnectionState.IDLE): + self._request_count += 1 + self._state = HTTPConnectionState.ACTIVE + self._expire_at = None + else: + raise ConnectionNotAvailable() + + try: + kwargs = {"request": request} + async with Trace("send_request_headers", logger, request, kwargs) as trace: + await self._send_request_headers(**kwargs) + async with Trace("send_request_body", logger, request, kwargs) as trace: + await self._send_request_body(**kwargs) + async with Trace( + "receive_response_headers", logger, request, kwargs + ) as trace: + ( + http_version, + status, + reason_phrase, + headers, + ) = await self._receive_response_headers(**kwargs) + trace.return_value = ( + http_version, + status, + reason_phrase, + headers, + ) + + return Response( + status=status, + headers=headers, + content=HTTP11ConnectionByteStream(self, request), + extensions={ + "http_version": http_version, + "reason_phrase": reason_phrase, + "network_stream": self._network_stream, + }, + ) + except BaseException as exc: + with AsyncShieldCancellation(): + async with Trace("response_closed", logger, request) as trace: + await self._response_closed() + raise exc + + # Sending the request... + + async def _send_request_headers(self, request: Request) -> None: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("write", None) + + with map_exceptions({h11.LocalProtocolError: LocalProtocolError}): + event = h11.Request( + method=request.method, + target=request.url.target, + headers=request.headers, + ) + await self._send_event(event, timeout=timeout) + + async def _send_request_body(self, request: Request) -> None: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("write", None) + + assert isinstance(request.stream, AsyncIterable) + async for chunk in request.stream: + event = h11.Data(data=chunk) + await self._send_event(event, timeout=timeout) + + await self._send_event(h11.EndOfMessage(), timeout=timeout) + + async def _send_event( + self, event: h11.Event, timeout: Optional[float] = None + ) -> None: + bytes_to_send = self._h11_state.send(event) + if bytes_to_send is not None: + await self._network_stream.write(bytes_to_send, timeout=timeout) + + # Receiving the response... + + async def _receive_response_headers( + self, request: Request + ) -> Tuple[bytes, int, bytes, List[Tuple[bytes, bytes]]]: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("read", None) + + while True: + event = await self._receive_event(timeout=timeout) + if isinstance(event, h11.Response): + break + if ( + isinstance(event, h11.InformationalResponse) + and event.status_code == 101 + ): + break + + http_version = b"HTTP/" + event.http_version + + # h11 version 0.11+ supports a `raw_items` interface to get the + # raw header casing, rather than the enforced lowercase headers. + headers = event.headers.raw_items() + + return http_version, event.status_code, event.reason, headers + + async def _receive_response_body(self, request: Request) -> AsyncIterator[bytes]: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("read", None) + + while True: + event = await self._receive_event(timeout=timeout) + if isinstance(event, h11.Data): + yield bytes(event.data) + elif isinstance(event, (h11.EndOfMessage, h11.PAUSED)): + break + + async def _receive_event( + self, timeout: Optional[float] = None + ) -> Union[h11.Event, Type[h11.PAUSED]]: + while True: + with map_exceptions({h11.RemoteProtocolError: RemoteProtocolError}): + event = self._h11_state.next_event() + + if event is h11.NEED_DATA: + data = await self._network_stream.read( + self.READ_NUM_BYTES, timeout=timeout + ) + + # If we feed this case through h11 we'll raise an exception like: + # + # httpcore.RemoteProtocolError: can't handle event type + # ConnectionClosed when role=SERVER and state=SEND_RESPONSE + # + # Which is accurate, but not very informative from an end-user + # perspective. Instead we handle this case distinctly and treat + # it as a ConnectError. + if data == b"" and self._h11_state.their_state == h11.SEND_RESPONSE: + msg = "Server disconnected without sending a response." + raise RemoteProtocolError(msg) + + self._h11_state.receive_data(data) + else: + # mypy fails to narrow the type in the above if statement above + return cast(Union[h11.Event, Type[h11.PAUSED]], event) + + async def _response_closed(self) -> None: + async with self._state_lock: + if ( + self._h11_state.our_state is h11.DONE + and self._h11_state.their_state is h11.DONE + ): + self._state = HTTPConnectionState.IDLE + self._h11_state.start_next_cycle() + if self._keepalive_expiry is not None: + now = time.monotonic() + self._expire_at = now + self._keepalive_expiry + else: + await self.aclose() + + # Once the connection is no longer required... + + async def aclose(self) -> None: + # Note that this method unilaterally closes the connection, and does + # not have any kind of locking in place around it. + self._state = HTTPConnectionState.CLOSED + await self._network_stream.aclose() + + # The AsyncConnectionInterface methods provide information about the state of + # the connection, allowing for a connection pooling implementation to + # determine when to reuse and when to close the connection... + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._origin + + def is_available(self) -> bool: + # Note that HTTP/1.1 connections in the "NEW" state are not treated as + # being "available". The control flow which created the connection will + # be able to send an outgoing request, but the connection will not be + # acquired from the connection pool for any other request. + return self._state == HTTPConnectionState.IDLE + + def has_expired(self) -> bool: + now = time.monotonic() + keepalive_expired = self._expire_at is not None and now > self._expire_at + + # If the HTTP connection is idle but the socket is readable, then the + # only valid state is that the socket is about to return b"", indicating + # a server-initiated disconnect. + server_disconnected = ( + self._state == HTTPConnectionState.IDLE + and self._network_stream.get_extra_info("is_readable") + ) + + return keepalive_expired or server_disconnected + + def is_idle(self) -> bool: + return self._state == HTTPConnectionState.IDLE + + def is_closed(self) -> bool: + return self._state == HTTPConnectionState.CLOSED + + def info(self) -> str: + origin = str(self._origin) + return ( + f"{origin!r}, HTTP/1.1, {self._state.name}, " + f"Request Count: {self._request_count}" + ) + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + origin = str(self._origin) + return ( + f"<{class_name} [{origin!r}, {self._state.name}, " + f"Request Count: {self._request_count}]>" + ) + + # These context managers are not used in the standard flow, but are + # useful for testing or working with connection instances directly. + + async def __aenter__(self) -> "AsyncHTTP11Connection": + return self + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]] = None, + exc_value: Optional[BaseException] = None, + traceback: Optional[TracebackType] = None, + ) -> None: + await self.aclose() + + +class HTTP11ConnectionByteStream: + def __init__(self, connection: AsyncHTTP11Connection, request: Request) -> None: + self._connection = connection + self._request = request + self._closed = False + + async def __aiter__(self) -> AsyncIterator[bytes]: + kwargs = {"request": self._request} + try: + async with Trace("receive_response_body", logger, self._request, kwargs): + async for chunk in self._connection._receive_response_body(**kwargs): + yield chunk + except BaseException as exc: + # If we get an exception while streaming the response, + # we want to close the response (and possibly the connection) + # before raising that exception. + with AsyncShieldCancellation(): + await self.aclose() + raise exc + + async def aclose(self) -> None: + if not self._closed: + self._closed = True + async with Trace("response_closed", logger, self._request): + await self._connection._response_closed() diff --git a/Backend/venv/lib/python3.12/site-packages/httpcore/_async/http2.py b/Backend/venv/lib/python3.12/site-packages/httpcore/_async/http2.py new file mode 100644 index 00000000..8dc776ff --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/httpcore/_async/http2.py @@ -0,0 +1,589 @@ +import enum +import logging +import time +import types +import typing + +import h2.config +import h2.connection +import h2.events +import h2.exceptions +import h2.settings + +from .._backends.base import AsyncNetworkStream +from .._exceptions import ( + ConnectionNotAvailable, + LocalProtocolError, + RemoteProtocolError, +) +from .._models import Origin, Request, Response +from .._synchronization import AsyncLock, AsyncSemaphore, AsyncShieldCancellation +from .._trace import Trace +from .interfaces import AsyncConnectionInterface + +logger = logging.getLogger("httpcore.http2") + + +def has_body_headers(request: Request) -> bool: + return any( + k.lower() == b"content-length" or k.lower() == b"transfer-encoding" + for k, v in request.headers + ) + + +class HTTPConnectionState(enum.IntEnum): + ACTIVE = 1 + IDLE = 2 + CLOSED = 3 + + +class AsyncHTTP2Connection(AsyncConnectionInterface): + READ_NUM_BYTES = 64 * 1024 + CONFIG = h2.config.H2Configuration(validate_inbound_headers=False) + + def __init__( + self, + origin: Origin, + stream: AsyncNetworkStream, + keepalive_expiry: typing.Optional[float] = None, + ): + self._origin = origin + self._network_stream = stream + self._keepalive_expiry: typing.Optional[float] = keepalive_expiry + self._h2_state = h2.connection.H2Connection(config=self.CONFIG) + self._state = HTTPConnectionState.IDLE + self._expire_at: typing.Optional[float] = None + self._request_count = 0 + self._init_lock = AsyncLock() + self._state_lock = AsyncLock() + self._read_lock = AsyncLock() + self._write_lock = AsyncLock() + self._sent_connection_init = False + self._used_all_stream_ids = False + self._connection_error = False + + # Mapping from stream ID to response stream events. + self._events: typing.Dict[ + int, + typing.Union[ + h2.events.ResponseReceived, + h2.events.DataReceived, + h2.events.StreamEnded, + h2.events.StreamReset, + ], + ] = {} + + # Connection terminated events are stored as state since + # we need to handle them for all streams. + self._connection_terminated: typing.Optional[ + h2.events.ConnectionTerminated + ] = None + + self._read_exception: typing.Optional[Exception] = None + self._write_exception: typing.Optional[Exception] = None + + async def handle_async_request(self, request: Request) -> Response: + if not self.can_handle_request(request.url.origin): + # This cannot occur in normal operation, since the connection pool + # will only send requests on connections that handle them. + # It's in place simply for resilience as a guard against incorrect + # usage, for anyone working directly with httpcore connections. + raise RuntimeError( + f"Attempted to send request to {request.url.origin} on connection " + f"to {self._origin}" + ) + + async with self._state_lock: + if self._state in (HTTPConnectionState.ACTIVE, HTTPConnectionState.IDLE): + self._request_count += 1 + self._expire_at = None + self._state = HTTPConnectionState.ACTIVE + else: + raise ConnectionNotAvailable() + + async with self._init_lock: + if not self._sent_connection_init: + try: + kwargs = {"request": request} + async with Trace("send_connection_init", logger, request, kwargs): + await self._send_connection_init(**kwargs) + except BaseException as exc: + with AsyncShieldCancellation(): + await self.aclose() + raise exc + + self._sent_connection_init = True + + # Initially start with just 1 until the remote server provides + # its max_concurrent_streams value + self._max_streams = 1 + + local_settings_max_streams = ( + self._h2_state.local_settings.max_concurrent_streams + ) + self._max_streams_semaphore = AsyncSemaphore(local_settings_max_streams) + + for _ in range(local_settings_max_streams - self._max_streams): + await self._max_streams_semaphore.acquire() + + await self._max_streams_semaphore.acquire() + + try: + stream_id = self._h2_state.get_next_available_stream_id() + self._events[stream_id] = [] + except h2.exceptions.NoAvailableStreamIDError: # pragma: nocover + self._used_all_stream_ids = True + self._request_count -= 1 + raise ConnectionNotAvailable() + + try: + kwargs = {"request": request, "stream_id": stream_id} + async with Trace("send_request_headers", logger, request, kwargs): + await self._send_request_headers(request=request, stream_id=stream_id) + async with Trace("send_request_body", logger, request, kwargs): + await self._send_request_body(request=request, stream_id=stream_id) + async with Trace( + "receive_response_headers", logger, request, kwargs + ) as trace: + status, headers = await self._receive_response( + request=request, stream_id=stream_id + ) + trace.return_value = (status, headers) + + return Response( + status=status, + headers=headers, + content=HTTP2ConnectionByteStream(self, request, stream_id=stream_id), + extensions={ + "http_version": b"HTTP/2", + "network_stream": self._network_stream, + "stream_id": stream_id, + }, + ) + except BaseException as exc: # noqa: PIE786 + with AsyncShieldCancellation(): + kwargs = {"stream_id": stream_id} + async with Trace("response_closed", logger, request, kwargs): + await self._response_closed(stream_id=stream_id) + + if isinstance(exc, h2.exceptions.ProtocolError): + # One case where h2 can raise a protocol error is when a + # closed frame has been seen by the state machine. + # + # This happens when one stream is reading, and encounters + # a GOAWAY event. Other flows of control may then raise + # a protocol error at any point they interact with the 'h2_state'. + # + # In this case we'll have stored the event, and should raise + # it as a RemoteProtocolError. + if self._connection_terminated: # pragma: nocover + raise RemoteProtocolError(self._connection_terminated) + # If h2 raises a protocol error in some other state then we + # must somehow have made a protocol violation. + raise LocalProtocolError(exc) # pragma: nocover + + raise exc + + async def _send_connection_init(self, request: Request) -> None: + """ + The HTTP/2 connection requires some initial setup before we can start + using individual request/response streams on it. + """ + # Need to set these manually here instead of manipulating via + # __setitem__() otherwise the H2Connection will emit SettingsUpdate + # frames in addition to sending the undesired defaults. + self._h2_state.local_settings = h2.settings.Settings( + client=True, + initial_values={ + # Disable PUSH_PROMISE frames from the server since we don't do anything + # with them for now. Maybe when we support caching? + h2.settings.SettingCodes.ENABLE_PUSH: 0, + # These two are taken from h2 for safe defaults + h2.settings.SettingCodes.MAX_CONCURRENT_STREAMS: 100, + h2.settings.SettingCodes.MAX_HEADER_LIST_SIZE: 65536, + }, + ) + + # Some websites (*cough* Yahoo *cough*) balk at this setting being + # present in the initial handshake since it's not defined in the original + # RFC despite the RFC mandating ignoring settings you don't know about. + del self._h2_state.local_settings[ + h2.settings.SettingCodes.ENABLE_CONNECT_PROTOCOL + ] + + self._h2_state.initiate_connection() + self._h2_state.increment_flow_control_window(2**24) + await self._write_outgoing_data(request) + + # Sending the request... + + async def _send_request_headers(self, request: Request, stream_id: int) -> None: + """ + Send the request headers to a given stream ID. + """ + end_stream = not has_body_headers(request) + + # In HTTP/2 the ':authority' pseudo-header is used instead of 'Host'. + # In order to gracefully handle HTTP/1.1 and HTTP/2 we always require + # HTTP/1.1 style headers, and map them appropriately if we end up on + # an HTTP/2 connection. + authority = [v for k, v in request.headers if k.lower() == b"host"][0] + + headers = [ + (b":method", request.method), + (b":authority", authority), + (b":scheme", request.url.scheme), + (b":path", request.url.target), + ] + [ + (k.lower(), v) + for k, v in request.headers + if k.lower() + not in ( + b"host", + b"transfer-encoding", + ) + ] + + self._h2_state.send_headers(stream_id, headers, end_stream=end_stream) + self._h2_state.increment_flow_control_window(2**24, stream_id=stream_id) + await self._write_outgoing_data(request) + + async def _send_request_body(self, request: Request, stream_id: int) -> None: + """ + Iterate over the request body sending it to a given stream ID. + """ + if not has_body_headers(request): + return + + assert isinstance(request.stream, typing.AsyncIterable) + async for data in request.stream: + await self._send_stream_data(request, stream_id, data) + await self._send_end_stream(request, stream_id) + + async def _send_stream_data( + self, request: Request, stream_id: int, data: bytes + ) -> None: + """ + Send a single chunk of data in one or more data frames. + """ + while data: + max_flow = await self._wait_for_outgoing_flow(request, stream_id) + chunk_size = min(len(data), max_flow) + chunk, data = data[:chunk_size], data[chunk_size:] + self._h2_state.send_data(stream_id, chunk) + await self._write_outgoing_data(request) + + async def _send_end_stream(self, request: Request, stream_id: int) -> None: + """ + Send an empty data frame on on a given stream ID with the END_STREAM flag set. + """ + self._h2_state.end_stream(stream_id) + await self._write_outgoing_data(request) + + # Receiving the response... + + async def _receive_response( + self, request: Request, stream_id: int + ) -> typing.Tuple[int, typing.List[typing.Tuple[bytes, bytes]]]: + """ + Return the response status code and headers for a given stream ID. + """ + while True: + event = await self._receive_stream_event(request, stream_id) + if isinstance(event, h2.events.ResponseReceived): + break + + status_code = 200 + headers = [] + for k, v in event.headers: + if k == b":status": + status_code = int(v.decode("ascii", errors="ignore")) + elif not k.startswith(b":"): + headers.append((k, v)) + + return (status_code, headers) + + async def _receive_response_body( + self, request: Request, stream_id: int + ) -> typing.AsyncIterator[bytes]: + """ + Iterator that returns the bytes of the response body for a given stream ID. + """ + while True: + event = await self._receive_stream_event(request, stream_id) + if isinstance(event, h2.events.DataReceived): + amount = event.flow_controlled_length + self._h2_state.acknowledge_received_data(amount, stream_id) + await self._write_outgoing_data(request) + yield event.data + elif isinstance(event, h2.events.StreamEnded): + break + + async def _receive_stream_event( + self, request: Request, stream_id: int + ) -> typing.Union[ + h2.events.ResponseReceived, h2.events.DataReceived, h2.events.StreamEnded + ]: + """ + Return the next available event for a given stream ID. + + Will read more data from the network if required. + """ + while not self._events.get(stream_id): + await self._receive_events(request, stream_id) + event = self._events[stream_id].pop(0) + if isinstance(event, h2.events.StreamReset): + raise RemoteProtocolError(event) + return event + + async def _receive_events( + self, request: Request, stream_id: typing.Optional[int] = None + ) -> None: + """ + Read some data from the network until we see one or more events + for a given stream ID. + """ + async with self._read_lock: + if self._connection_terminated is not None: + last_stream_id = self._connection_terminated.last_stream_id + if stream_id and last_stream_id and stream_id > last_stream_id: + self._request_count -= 1 + raise ConnectionNotAvailable() + raise RemoteProtocolError(self._connection_terminated) + + # This conditional is a bit icky. We don't want to block reading if we've + # actually got an event to return for a given stream. We need to do that + # check *within* the atomic read lock. Though it also need to be optional, + # because when we call it from `_wait_for_outgoing_flow` we *do* want to + # block until we've available flow control, event when we have events + # pending for the stream ID we're attempting to send on. + if stream_id is None or not self._events.get(stream_id): + events = await self._read_incoming_data(request) + for event in events: + if isinstance(event, h2.events.RemoteSettingsChanged): + async with Trace( + "receive_remote_settings", logger, request + ) as trace: + await self._receive_remote_settings_change(event) + trace.return_value = event + + elif isinstance( + event, + ( + h2.events.ResponseReceived, + h2.events.DataReceived, + h2.events.StreamEnded, + h2.events.StreamReset, + ), + ): + if event.stream_id in self._events: + self._events[event.stream_id].append(event) + + elif isinstance(event, h2.events.ConnectionTerminated): + self._connection_terminated = event + + await self._write_outgoing_data(request) + + async def _receive_remote_settings_change(self, event: h2.events.Event) -> None: + max_concurrent_streams = event.changed_settings.get( + h2.settings.SettingCodes.MAX_CONCURRENT_STREAMS + ) + if max_concurrent_streams: + new_max_streams = min( + max_concurrent_streams.new_value, + self._h2_state.local_settings.max_concurrent_streams, + ) + if new_max_streams and new_max_streams != self._max_streams: + while new_max_streams > self._max_streams: + await self._max_streams_semaphore.release() + self._max_streams += 1 + while new_max_streams < self._max_streams: + await self._max_streams_semaphore.acquire() + self._max_streams -= 1 + + async def _response_closed(self, stream_id: int) -> None: + await self._max_streams_semaphore.release() + del self._events[stream_id] + async with self._state_lock: + if self._connection_terminated and not self._events: + await self.aclose() + + elif self._state == HTTPConnectionState.ACTIVE and not self._events: + self._state = HTTPConnectionState.IDLE + if self._keepalive_expiry is not None: + now = time.monotonic() + self._expire_at = now + self._keepalive_expiry + if self._used_all_stream_ids: # pragma: nocover + await self.aclose() + + async def aclose(self) -> None: + # Note that this method unilaterally closes the connection, and does + # not have any kind of locking in place around it. + self._h2_state.close_connection() + self._state = HTTPConnectionState.CLOSED + await self._network_stream.aclose() + + # Wrappers around network read/write operations... + + async def _read_incoming_data( + self, request: Request + ) -> typing.List[h2.events.Event]: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("read", None) + + if self._read_exception is not None: + raise self._read_exception # pragma: nocover + + try: + data = await self._network_stream.read(self.READ_NUM_BYTES, timeout) + if data == b"": + raise RemoteProtocolError("Server disconnected") + except Exception as exc: + # If we get a network error we should: + # + # 1. Save the exception and just raise it immediately on any future reads. + # (For example, this means that a single read timeout or disconnect will + # immediately close all pending streams. Without requiring multiple + # sequential timeouts.) + # 2. Mark the connection as errored, so that we don't accept any other + # incoming requests. + self._read_exception = exc + self._connection_error = True + raise exc + + events: typing.List[h2.events.Event] = self._h2_state.receive_data(data) + + return events + + async def _write_outgoing_data(self, request: Request) -> None: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("write", None) + + async with self._write_lock: + data_to_send = self._h2_state.data_to_send() + + if self._write_exception is not None: + raise self._write_exception # pragma: nocover + + try: + await self._network_stream.write(data_to_send, timeout) + except Exception as exc: # pragma: nocover + # If we get a network error we should: + # + # 1. Save the exception and just raise it immediately on any future write. + # (For example, this means that a single write timeout or disconnect will + # immediately close all pending streams. Without requiring multiple + # sequential timeouts.) + # 2. Mark the connection as errored, so that we don't accept any other + # incoming requests. + self._write_exception = exc + self._connection_error = True + raise exc + + # Flow control... + + async def _wait_for_outgoing_flow(self, request: Request, stream_id: int) -> int: + """ + Returns the maximum allowable outgoing flow for a given stream. + + If the allowable flow is zero, then waits on the network until + WindowUpdated frames have increased the flow rate. + https://tools.ietf.org/html/rfc7540#section-6.9 + """ + local_flow: int = self._h2_state.local_flow_control_window(stream_id) + max_frame_size: int = self._h2_state.max_outbound_frame_size + flow = min(local_flow, max_frame_size) + while flow == 0: + await self._receive_events(request) + local_flow = self._h2_state.local_flow_control_window(stream_id) + max_frame_size = self._h2_state.max_outbound_frame_size + flow = min(local_flow, max_frame_size) + return flow + + # Interface for connection pooling... + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._origin + + def is_available(self) -> bool: + return ( + self._state != HTTPConnectionState.CLOSED + and not self._connection_error + and not self._used_all_stream_ids + and not ( + self._h2_state.state_machine.state + == h2.connection.ConnectionState.CLOSED + ) + ) + + def has_expired(self) -> bool: + now = time.monotonic() + return self._expire_at is not None and now > self._expire_at + + def is_idle(self) -> bool: + return self._state == HTTPConnectionState.IDLE + + def is_closed(self) -> bool: + return self._state == HTTPConnectionState.CLOSED + + def info(self) -> str: + origin = str(self._origin) + return ( + f"{origin!r}, HTTP/2, {self._state.name}, " + f"Request Count: {self._request_count}" + ) + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + origin = str(self._origin) + return ( + f"<{class_name} [{origin!r}, {self._state.name}, " + f"Request Count: {self._request_count}]>" + ) + + # These context managers are not used in the standard flow, but are + # useful for testing or working with connection instances directly. + + async def __aenter__(self) -> "AsyncHTTP2Connection": + return self + + async def __aexit__( + self, + exc_type: typing.Optional[typing.Type[BaseException]] = None, + exc_value: typing.Optional[BaseException] = None, + traceback: typing.Optional[types.TracebackType] = None, + ) -> None: + await self.aclose() + + +class HTTP2ConnectionByteStream: + def __init__( + self, connection: AsyncHTTP2Connection, request: Request, stream_id: int + ) -> None: + self._connection = connection + self._request = request + self._stream_id = stream_id + self._closed = False + + async def __aiter__(self) -> typing.AsyncIterator[bytes]: + kwargs = {"request": self._request, "stream_id": self._stream_id} + try: + async with Trace("receive_response_body", logger, self._request, kwargs): + async for chunk in self._connection._receive_response_body( + request=self._request, stream_id=self._stream_id + ): + yield chunk + except BaseException as exc: + # If we get an exception while streaming the response, + # we want to close the response (and possibly the connection) + # before raising that exception. + with AsyncShieldCancellation(): + await self.aclose() + raise exc + + async def aclose(self) -> None: + if not self._closed: + self._closed = True + kwargs = {"stream_id": self._stream_id} + async with Trace("response_closed", logger, self._request, kwargs): + await self._connection._response_closed(stream_id=self._stream_id) diff --git a/Backend/venv/lib/python3.12/site-packages/httpcore/_async/http_proxy.py b/Backend/venv/lib/python3.12/site-packages/httpcore/_async/http_proxy.py new file mode 100644 index 00000000..62f51097 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/httpcore/_async/http_proxy.py @@ -0,0 +1,350 @@ +import logging +import ssl +from base64 import b64encode +from typing import Iterable, List, Mapping, Optional, Sequence, Tuple, Union + +from .._backends.base import SOCKET_OPTION, AsyncNetworkBackend +from .._exceptions import ProxyError +from .._models import ( + URL, + Origin, + Request, + Response, + enforce_bytes, + enforce_headers, + enforce_url, +) +from .._ssl import default_ssl_context +from .._synchronization import AsyncLock +from .._trace import Trace +from .connection import AsyncHTTPConnection +from .connection_pool import AsyncConnectionPool +from .http11 import AsyncHTTP11Connection +from .interfaces import AsyncConnectionInterface + +HeadersAsSequence = Sequence[Tuple[Union[bytes, str], Union[bytes, str]]] +HeadersAsMapping = Mapping[Union[bytes, str], Union[bytes, str]] + + +logger = logging.getLogger("httpcore.proxy") + + +def merge_headers( + default_headers: Optional[Sequence[Tuple[bytes, bytes]]] = None, + override_headers: Optional[Sequence[Tuple[bytes, bytes]]] = None, +) -> List[Tuple[bytes, bytes]]: + """ + Append default_headers and override_headers, de-duplicating if a key exists + in both cases. + """ + default_headers = [] if default_headers is None else list(default_headers) + override_headers = [] if override_headers is None else list(override_headers) + has_override = set(key.lower() for key, value in override_headers) + default_headers = [ + (key, value) + for key, value in default_headers + if key.lower() not in has_override + ] + return default_headers + override_headers + + +def build_auth_header(username: bytes, password: bytes) -> bytes: + userpass = username + b":" + password + return b"Basic " + b64encode(userpass) + + +class AsyncHTTPProxy(AsyncConnectionPool): + """ + A connection pool that sends requests via an HTTP proxy. + """ + + def __init__( + self, + proxy_url: Union[URL, bytes, str], + proxy_auth: Optional[Tuple[Union[bytes, str], Union[bytes, str]]] = None, + proxy_headers: Union[HeadersAsMapping, HeadersAsSequence, None] = None, + ssl_context: Optional[ssl.SSLContext] = None, + max_connections: Optional[int] = 10, + max_keepalive_connections: Optional[int] = None, + keepalive_expiry: Optional[float] = None, + http1: bool = True, + http2: bool = False, + retries: int = 0, + local_address: Optional[str] = None, + uds: Optional[str] = None, + network_backend: Optional[AsyncNetworkBackend] = None, + socket_options: Optional[Iterable[SOCKET_OPTION]] = None, + ) -> None: + """ + A connection pool for making HTTP requests. + + Parameters: + proxy_url: The URL to use when connecting to the proxy server. + For example `"http://127.0.0.1:8080/"`. + proxy_auth: Any proxy authentication as a two-tuple of + (username, password). May be either bytes or ascii-only str. + proxy_headers: Any HTTP headers to use for the proxy requests. + For example `{"Proxy-Authorization": "Basic :"}`. + ssl_context: An SSL context to use for verifying connections. + If not specified, the default `httpcore.default_ssl_context()` + will be used. + max_connections: The maximum number of concurrent HTTP connections that + the pool should allow. Any attempt to send a request on a pool that + would exceed this amount will block until a connection is available. + max_keepalive_connections: The maximum number of idle HTTP connections + that will be maintained in the pool. + keepalive_expiry: The duration in seconds that an idle HTTP connection + may be maintained for before being expired from the pool. + http1: A boolean indicating if HTTP/1.1 requests should be supported + by the connection pool. Defaults to True. + http2: A boolean indicating if HTTP/2 requests should be supported by + the connection pool. Defaults to False. + retries: The maximum number of retries when trying to establish + a connection. + local_address: Local address to connect from. Can also be used to + connect using a particular address family. Using + `local_address="0.0.0.0"` will connect using an `AF_INET` address + (IPv4), while using `local_address="::"` will connect using an + `AF_INET6` address (IPv6). + uds: Path to a Unix Domain Socket to use instead of TCP sockets. + network_backend: A backend instance to use for handling network I/O. + """ + super().__init__( + ssl_context=ssl_context, + max_connections=max_connections, + max_keepalive_connections=max_keepalive_connections, + keepalive_expiry=keepalive_expiry, + http1=http1, + http2=http2, + network_backend=network_backend, + retries=retries, + local_address=local_address, + uds=uds, + socket_options=socket_options, + ) + self._ssl_context = ssl_context + self._proxy_url = enforce_url(proxy_url, name="proxy_url") + self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") + if proxy_auth is not None: + username = enforce_bytes(proxy_auth[0], name="proxy_auth") + password = enforce_bytes(proxy_auth[1], name="proxy_auth") + authorization = build_auth_header(username, password) + self._proxy_headers = [ + (b"Proxy-Authorization", authorization) + ] + self._proxy_headers + + def create_connection(self, origin: Origin) -> AsyncConnectionInterface: + if origin.scheme == b"http": + return AsyncForwardHTTPConnection( + proxy_origin=self._proxy_url.origin, + proxy_headers=self._proxy_headers, + remote_origin=origin, + keepalive_expiry=self._keepalive_expiry, + network_backend=self._network_backend, + ) + return AsyncTunnelHTTPConnection( + proxy_origin=self._proxy_url.origin, + proxy_headers=self._proxy_headers, + remote_origin=origin, + ssl_context=self._ssl_context, + keepalive_expiry=self._keepalive_expiry, + http1=self._http1, + http2=self._http2, + network_backend=self._network_backend, + ) + + +class AsyncForwardHTTPConnection(AsyncConnectionInterface): + def __init__( + self, + proxy_origin: Origin, + remote_origin: Origin, + proxy_headers: Union[HeadersAsMapping, HeadersAsSequence, None] = None, + keepalive_expiry: Optional[float] = None, + network_backend: Optional[AsyncNetworkBackend] = None, + socket_options: Optional[Iterable[SOCKET_OPTION]] = None, + ) -> None: + self._connection = AsyncHTTPConnection( + origin=proxy_origin, + keepalive_expiry=keepalive_expiry, + network_backend=network_backend, + socket_options=socket_options, + ) + self._proxy_origin = proxy_origin + self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") + self._remote_origin = remote_origin + + async def handle_async_request(self, request: Request) -> Response: + headers = merge_headers(self._proxy_headers, request.headers) + url = URL( + scheme=self._proxy_origin.scheme, + host=self._proxy_origin.host, + port=self._proxy_origin.port, + target=bytes(request.url), + ) + proxy_request = Request( + method=request.method, + url=url, + headers=headers, + content=request.stream, + extensions=request.extensions, + ) + return await self._connection.handle_async_request(proxy_request) + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._remote_origin + + async def aclose(self) -> None: + await self._connection.aclose() + + def info(self) -> str: + return self._connection.info() + + def is_available(self) -> bool: + return self._connection.is_available() + + def has_expired(self) -> bool: + return self._connection.has_expired() + + def is_idle(self) -> bool: + return self._connection.is_idle() + + def is_closed(self) -> bool: + return self._connection.is_closed() + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.info()}]>" + + +class AsyncTunnelHTTPConnection(AsyncConnectionInterface): + def __init__( + self, + proxy_origin: Origin, + remote_origin: Origin, + ssl_context: Optional[ssl.SSLContext] = None, + proxy_headers: Optional[Sequence[Tuple[bytes, bytes]]] = None, + keepalive_expiry: Optional[float] = None, + http1: bool = True, + http2: bool = False, + network_backend: Optional[AsyncNetworkBackend] = None, + socket_options: Optional[Iterable[SOCKET_OPTION]] = None, + ) -> None: + self._connection: AsyncConnectionInterface = AsyncHTTPConnection( + origin=proxy_origin, + keepalive_expiry=keepalive_expiry, + network_backend=network_backend, + socket_options=socket_options, + ) + self._proxy_origin = proxy_origin + self._remote_origin = remote_origin + self._ssl_context = ssl_context + self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") + self._keepalive_expiry = keepalive_expiry + self._http1 = http1 + self._http2 = http2 + self._connect_lock = AsyncLock() + self._connected = False + + async def handle_async_request(self, request: Request) -> Response: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("connect", None) + + async with self._connect_lock: + if not self._connected: + target = b"%b:%d" % (self._remote_origin.host, self._remote_origin.port) + + connect_url = URL( + scheme=self._proxy_origin.scheme, + host=self._proxy_origin.host, + port=self._proxy_origin.port, + target=target, + ) + connect_headers = merge_headers( + [(b"Host", target), (b"Accept", b"*/*")], self._proxy_headers + ) + connect_request = Request( + method=b"CONNECT", + url=connect_url, + headers=connect_headers, + extensions=request.extensions, + ) + connect_response = await self._connection.handle_async_request( + connect_request + ) + + if connect_response.status < 200 or connect_response.status > 299: + reason_bytes = connect_response.extensions.get("reason_phrase", b"") + reason_str = reason_bytes.decode("ascii", errors="ignore") + msg = "%d %s" % (connect_response.status, reason_str) + await self._connection.aclose() + raise ProxyError(msg) + + stream = connect_response.extensions["network_stream"] + + # Upgrade the stream to SSL + ssl_context = ( + default_ssl_context() + if self._ssl_context is None + else self._ssl_context + ) + alpn_protocols = ["http/1.1", "h2"] if self._http2 else ["http/1.1"] + ssl_context.set_alpn_protocols(alpn_protocols) + + kwargs = { + "ssl_context": ssl_context, + "server_hostname": self._remote_origin.host.decode("ascii"), + "timeout": timeout, + } + async with Trace("start_tls", logger, request, kwargs) as trace: + stream = await stream.start_tls(**kwargs) + trace.return_value = stream + + # Determine if we should be using HTTP/1.1 or HTTP/2 + ssl_object = stream.get_extra_info("ssl_object") + http2_negotiated = ( + ssl_object is not None + and ssl_object.selected_alpn_protocol() == "h2" + ) + + # Create the HTTP/1.1 or HTTP/2 connection + if http2_negotiated or (self._http2 and not self._http1): + from .http2 import AsyncHTTP2Connection + + self._connection = AsyncHTTP2Connection( + origin=self._remote_origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + else: + self._connection = AsyncHTTP11Connection( + origin=self._remote_origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + + self._connected = True + return await self._connection.handle_async_request(request) + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._remote_origin + + async def aclose(self) -> None: + await self._connection.aclose() + + def info(self) -> str: + return self._connection.info() + + def is_available(self) -> bool: + return self._connection.is_available() + + def has_expired(self) -> bool: + return self._connection.has_expired() + + def is_idle(self) -> bool: + return self._connection.is_idle() + + def is_closed(self) -> bool: + return self._connection.is_closed() + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.info()}]>" diff --git a/Backend/venv/lib/python3.12/site-packages/httpcore/_async/interfaces.py b/Backend/venv/lib/python3.12/site-packages/httpcore/_async/interfaces.py new file mode 100644 index 00000000..c998dd27 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/httpcore/_async/interfaces.py @@ -0,0 +1,135 @@ +from contextlib import asynccontextmanager +from typing import AsyncIterator, Optional, Union + +from .._models import ( + URL, + Extensions, + HeaderTypes, + Origin, + Request, + Response, + enforce_bytes, + enforce_headers, + enforce_url, + include_request_headers, +) + + +class AsyncRequestInterface: + async def request( + self, + method: Union[bytes, str], + url: Union[URL, bytes, str], + *, + headers: HeaderTypes = None, + content: Union[bytes, AsyncIterator[bytes], None] = None, + extensions: Optional[Extensions] = None, + ) -> Response: + # Strict type checking on our parameters. + method = enforce_bytes(method, name="method") + url = enforce_url(url, name="url") + headers = enforce_headers(headers, name="headers") + + # Include Host header, and optionally Content-Length or Transfer-Encoding. + headers = include_request_headers(headers, url=url, content=content) + + request = Request( + method=method, + url=url, + headers=headers, + content=content, + extensions=extensions, + ) + response = await self.handle_async_request(request) + try: + await response.aread() + finally: + await response.aclose() + return response + + @asynccontextmanager + async def stream( + self, + method: Union[bytes, str], + url: Union[URL, bytes, str], + *, + headers: HeaderTypes = None, + content: Union[bytes, AsyncIterator[bytes], None] = None, + extensions: Optional[Extensions] = None, + ) -> AsyncIterator[Response]: + # Strict type checking on our parameters. + method = enforce_bytes(method, name="method") + url = enforce_url(url, name="url") + headers = enforce_headers(headers, name="headers") + + # Include Host header, and optionally Content-Length or Transfer-Encoding. + headers = include_request_headers(headers, url=url, content=content) + + request = Request( + method=method, + url=url, + headers=headers, + content=content, + extensions=extensions, + ) + response = await self.handle_async_request(request) + try: + yield response + finally: + await response.aclose() + + async def handle_async_request(self, request: Request) -> Response: + raise NotImplementedError() # pragma: nocover + + +class AsyncConnectionInterface(AsyncRequestInterface): + async def aclose(self) -> None: + raise NotImplementedError() # pragma: nocover + + def info(self) -> str: + raise NotImplementedError() # pragma: nocover + + def can_handle_request(self, origin: Origin) -> bool: + raise NotImplementedError() # pragma: nocover + + def is_available(self) -> bool: + """ + Return `True` if the connection is currently able to accept an + outgoing request. + + An HTTP/1.1 connection will only be available if it is currently idle. + + An HTTP/2 connection will be available so long as the stream ID space is + not yet exhausted, and the connection is not in an error state. + + While the connection is being established we may not yet know if it is going + to result in an HTTP/1.1 or HTTP/2 connection. The connection should be + treated as being available, but might ultimately raise `NewConnectionRequired` + required exceptions if multiple requests are attempted over a connection + that ends up being established as HTTP/1.1. + """ + raise NotImplementedError() # pragma: nocover + + def has_expired(self) -> bool: + """ + Return `True` if the connection is in a state where it should be closed. + + This either means that the connection is idle and it has passed the + expiry time on its keep-alive, or that server has sent an EOF. + """ + raise NotImplementedError() # pragma: nocover + + def is_idle(self) -> bool: + """ + Return `True` if the connection is currently idle. + """ + raise NotImplementedError() # pragma: nocover + + def is_closed(self) -> bool: + """ + Return `True` if the connection has been closed. + + Used when a response is closed to determine if the connection may be + returned to the connection pool or not. + """ + raise NotImplementedError() # pragma: nocover diff --git a/Backend/venv/lib/python3.12/site-packages/httpcore/_async/socks_proxy.py b/Backend/venv/lib/python3.12/site-packages/httpcore/_async/socks_proxy.py new file mode 100644 index 00000000..f12cb373 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/httpcore/_async/socks_proxy.py @@ -0,0 +1,340 @@ +import logging +import ssl +import typing + +from socksio import socks5 + +from .._backends.auto import AutoBackend +from .._backends.base import AsyncNetworkBackend, AsyncNetworkStream +from .._exceptions import ConnectionNotAvailable, ProxyError +from .._models import URL, Origin, Request, Response, enforce_bytes, enforce_url +from .._ssl import default_ssl_context +from .._synchronization import AsyncLock +from .._trace import Trace +from .connection_pool import AsyncConnectionPool +from .http11 import AsyncHTTP11Connection +from .interfaces import AsyncConnectionInterface + +logger = logging.getLogger("httpcore.socks") + + +AUTH_METHODS = { + b"\x00": "NO AUTHENTICATION REQUIRED", + b"\x01": "GSSAPI", + b"\x02": "USERNAME/PASSWORD", + b"\xff": "NO ACCEPTABLE METHODS", +} + +REPLY_CODES = { + b"\x00": "Succeeded", + b"\x01": "General SOCKS server failure", + b"\x02": "Connection not allowed by ruleset", + b"\x03": "Network unreachable", + b"\x04": "Host unreachable", + b"\x05": "Connection refused", + b"\x06": "TTL expired", + b"\x07": "Command not supported", + b"\x08": "Address type not supported", +} + + +async def _init_socks5_connection( + stream: AsyncNetworkStream, + *, + host: bytes, + port: int, + auth: typing.Optional[typing.Tuple[bytes, bytes]] = None, +) -> None: + conn = socks5.SOCKS5Connection() + + # Auth method request + auth_method = ( + socks5.SOCKS5AuthMethod.NO_AUTH_REQUIRED + if auth is None + else socks5.SOCKS5AuthMethod.USERNAME_PASSWORD + ) + conn.send(socks5.SOCKS5AuthMethodsRequest([auth_method])) + outgoing_bytes = conn.data_to_send() + await stream.write(outgoing_bytes) + + # Auth method response + incoming_bytes = await stream.read(max_bytes=4096) + response = conn.receive_data(incoming_bytes) + assert isinstance(response, socks5.SOCKS5AuthReply) + if response.method != auth_method: + requested = AUTH_METHODS.get(auth_method, "UNKNOWN") + responded = AUTH_METHODS.get(response.method, "UNKNOWN") + raise ProxyError( + f"Requested {requested} from proxy server, but got {responded}." + ) + + if response.method == socks5.SOCKS5AuthMethod.USERNAME_PASSWORD: + # Username/password request + assert auth is not None + username, password = auth + conn.send(socks5.SOCKS5UsernamePasswordRequest(username, password)) + outgoing_bytes = conn.data_to_send() + await stream.write(outgoing_bytes) + + # Username/password response + incoming_bytes = await stream.read(max_bytes=4096) + response = conn.receive_data(incoming_bytes) + assert isinstance(response, socks5.SOCKS5UsernamePasswordReply) + if not response.success: + raise ProxyError("Invalid username/password") + + # Connect request + conn.send( + socks5.SOCKS5CommandRequest.from_address( + socks5.SOCKS5Command.CONNECT, (host, port) + ) + ) + outgoing_bytes = conn.data_to_send() + await stream.write(outgoing_bytes) + + # Connect response + incoming_bytes = await stream.read(max_bytes=4096) + response = conn.receive_data(incoming_bytes) + assert isinstance(response, socks5.SOCKS5Reply) + if response.reply_code != socks5.SOCKS5ReplyCode.SUCCEEDED: + reply_code = REPLY_CODES.get(response.reply_code, "UNKOWN") + raise ProxyError(f"Proxy Server could not connect: {reply_code}.") + + +class AsyncSOCKSProxy(AsyncConnectionPool): + """ + A connection pool that sends requests via an HTTP proxy. + """ + + def __init__( + self, + proxy_url: typing.Union[URL, bytes, str], + proxy_auth: typing.Optional[ + typing.Tuple[typing.Union[bytes, str], typing.Union[bytes, str]] + ] = None, + ssl_context: typing.Optional[ssl.SSLContext] = None, + max_connections: typing.Optional[int] = 10, + max_keepalive_connections: typing.Optional[int] = None, + keepalive_expiry: typing.Optional[float] = None, + http1: bool = True, + http2: bool = False, + retries: int = 0, + network_backend: typing.Optional[AsyncNetworkBackend] = None, + ) -> None: + """ + A connection pool for making HTTP requests. + + Parameters: + proxy_url: The URL to use when connecting to the proxy server. + For example `"http://127.0.0.1:8080/"`. + ssl_context: An SSL context to use for verifying connections. + If not specified, the default `httpcore.default_ssl_context()` + will be used. + max_connections: The maximum number of concurrent HTTP connections that + the pool should allow. Any attempt to send a request on a pool that + would exceed this amount will block until a connection is available. + max_keepalive_connections: The maximum number of idle HTTP connections + that will be maintained in the pool. + keepalive_expiry: The duration in seconds that an idle HTTP connection + may be maintained for before being expired from the pool. + http1: A boolean indicating if HTTP/1.1 requests should be supported + by the connection pool. Defaults to True. + http2: A boolean indicating if HTTP/2 requests should be supported by + the connection pool. Defaults to False. + retries: The maximum number of retries when trying to establish + a connection. + local_address: Local address to connect from. Can also be used to + connect using a particular address family. Using + `local_address="0.0.0.0"` will connect using an `AF_INET` address + (IPv4), while using `local_address="::"` will connect using an + `AF_INET6` address (IPv6). + uds: Path to a Unix Domain Socket to use instead of TCP sockets. + network_backend: A backend instance to use for handling network I/O. + """ + super().__init__( + ssl_context=ssl_context, + max_connections=max_connections, + max_keepalive_connections=max_keepalive_connections, + keepalive_expiry=keepalive_expiry, + http1=http1, + http2=http2, + network_backend=network_backend, + retries=retries, + ) + self._ssl_context = ssl_context + self._proxy_url = enforce_url(proxy_url, name="proxy_url") + if proxy_auth is not None: + username, password = proxy_auth + username_bytes = enforce_bytes(username, name="proxy_auth") + password_bytes = enforce_bytes(password, name="proxy_auth") + self._proxy_auth: typing.Optional[typing.Tuple[bytes, bytes]] = ( + username_bytes, + password_bytes, + ) + else: + self._proxy_auth = None + + def create_connection(self, origin: Origin) -> AsyncConnectionInterface: + return AsyncSocks5Connection( + proxy_origin=self._proxy_url.origin, + remote_origin=origin, + proxy_auth=self._proxy_auth, + ssl_context=self._ssl_context, + keepalive_expiry=self._keepalive_expiry, + http1=self._http1, + http2=self._http2, + network_backend=self._network_backend, + ) + + +class AsyncSocks5Connection(AsyncConnectionInterface): + def __init__( + self, + proxy_origin: Origin, + remote_origin: Origin, + proxy_auth: typing.Optional[typing.Tuple[bytes, bytes]] = None, + ssl_context: typing.Optional[ssl.SSLContext] = None, + keepalive_expiry: typing.Optional[float] = None, + http1: bool = True, + http2: bool = False, + network_backend: typing.Optional[AsyncNetworkBackend] = None, + ) -> None: + self._proxy_origin = proxy_origin + self._remote_origin = remote_origin + self._proxy_auth = proxy_auth + self._ssl_context = ssl_context + self._keepalive_expiry = keepalive_expiry + self._http1 = http1 + self._http2 = http2 + + self._network_backend: AsyncNetworkBackend = ( + AutoBackend() if network_backend is None else network_backend + ) + self._connect_lock = AsyncLock() + self._connection: typing.Optional[AsyncConnectionInterface] = None + self._connect_failed = False + + async def handle_async_request(self, request: Request) -> Response: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("connect", None) + + async with self._connect_lock: + if self._connection is None: + try: + # Connect to the proxy + kwargs = { + "host": self._proxy_origin.host.decode("ascii"), + "port": self._proxy_origin.port, + "timeout": timeout, + } + with Trace("connect_tcp", logger, request, kwargs) as trace: + stream = await self._network_backend.connect_tcp(**kwargs) + trace.return_value = stream + + # Connect to the remote host using socks5 + kwargs = { + "stream": stream, + "host": self._remote_origin.host.decode("ascii"), + "port": self._remote_origin.port, + "auth": self._proxy_auth, + } + with Trace( + "setup_socks5_connection", logger, request, kwargs + ) as trace: + await _init_socks5_connection(**kwargs) + trace.return_value = stream + + # Upgrade the stream to SSL + if self._remote_origin.scheme == b"https": + ssl_context = ( + default_ssl_context() + if self._ssl_context is None + else self._ssl_context + ) + alpn_protocols = ( + ["http/1.1", "h2"] if self._http2 else ["http/1.1"] + ) + ssl_context.set_alpn_protocols(alpn_protocols) + + kwargs = { + "ssl_context": ssl_context, + "server_hostname": self._remote_origin.host.decode("ascii"), + "timeout": timeout, + } + async with Trace("start_tls", logger, request, kwargs) as trace: + stream = await stream.start_tls(**kwargs) + trace.return_value = stream + + # Determine if we should be using HTTP/1.1 or HTTP/2 + ssl_object = stream.get_extra_info("ssl_object") + http2_negotiated = ( + ssl_object is not None + and ssl_object.selected_alpn_protocol() == "h2" + ) + + # Create the HTTP/1.1 or HTTP/2 connection + if http2_negotiated or ( + self._http2 and not self._http1 + ): # pragma: nocover + from .http2 import AsyncHTTP2Connection + + self._connection = AsyncHTTP2Connection( + origin=self._remote_origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + else: + self._connection = AsyncHTTP11Connection( + origin=self._remote_origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + except Exception as exc: + self._connect_failed = True + raise exc + elif not self._connection.is_available(): # pragma: nocover + raise ConnectionNotAvailable() + + return await self._connection.handle_async_request(request) + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._remote_origin + + async def aclose(self) -> None: + if self._connection is not None: + await self._connection.aclose() + + def is_available(self) -> bool: + if self._connection is None: # pragma: nocover + # If HTTP/2 support is enabled, and the resulting connection could + # end up as HTTP/2 then we should indicate the connection as being + # available to service multiple requests. + return ( + self._http2 + and (self._remote_origin.scheme == b"https" or not self._http1) + and not self._connect_failed + ) + return self._connection.is_available() + + def has_expired(self) -> bool: + if self._connection is None: # pragma: nocover + return self._connect_failed + return self._connection.has_expired() + + def is_idle(self) -> bool: + if self._connection is None: # pragma: nocover + return self._connect_failed + return self._connection.is_idle() + + def is_closed(self) -> bool: + if self._connection is None: # pragma: nocover + return self._connect_failed + return self._connection.is_closed() + + def info(self) -> str: + if self._connection is None: # pragma: nocover + return "CONNECTION FAILED" if self._connect_failed else "CONNECTING" + return self._connection.info() + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.info()}]>" diff --git a/Backend/venv/lib/python3.12/site-packages/httpcore/_backends/__init__.py b/Backend/venv/lib/python3.12/site-packages/httpcore/_backends/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/Backend/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..ee995e85 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/anyio.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/anyio.cpython-312.pyc new file mode 100644 index 00000000..4049a7f8 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/anyio.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/auto.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/auto.cpython-312.pyc new file mode 100644 index 00000000..bfdc76a6 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/auto.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/base.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/base.cpython-312.pyc new file mode 100644 index 00000000..2565cc96 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/base.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/mock.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/mock.cpython-312.pyc new file mode 100644 index 00000000..cd0d7e03 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/mock.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/sync.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/sync.cpython-312.pyc new file mode 100644 index 00000000..e45951f4 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/sync.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/trio.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/trio.cpython-312.pyc new file mode 100644 index 00000000..3f42306a Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/trio.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/httpcore/_backends/anyio.py b/Backend/venv/lib/python3.12/site-packages/httpcore/_backends/anyio.py new file mode 100644 index 00000000..1ed5228d --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/httpcore/_backends/anyio.py @@ -0,0 +1,145 @@ +import ssl +import typing + +import anyio + +from .._exceptions import ( + ConnectError, + ConnectTimeout, + ReadError, + ReadTimeout, + WriteError, + WriteTimeout, + map_exceptions, +) +from .._utils import is_socket_readable +from .base import SOCKET_OPTION, AsyncNetworkBackend, AsyncNetworkStream + + +class AnyIOStream(AsyncNetworkStream): + def __init__(self, stream: anyio.abc.ByteStream) -> None: + self._stream = stream + + async def read( + self, max_bytes: int, timeout: typing.Optional[float] = None + ) -> bytes: + exc_map = { + TimeoutError: ReadTimeout, + anyio.BrokenResourceError: ReadError, + anyio.ClosedResourceError: ReadError, + } + with map_exceptions(exc_map): + with anyio.fail_after(timeout): + try: + return await self._stream.receive(max_bytes=max_bytes) + except anyio.EndOfStream: # pragma: nocover + return b"" + + async def write( + self, buffer: bytes, timeout: typing.Optional[float] = None + ) -> None: + if not buffer: + return + + exc_map = { + TimeoutError: WriteTimeout, + anyio.BrokenResourceError: WriteError, + anyio.ClosedResourceError: WriteError, + } + with map_exceptions(exc_map): + with anyio.fail_after(timeout): + await self._stream.send(item=buffer) + + async def aclose(self) -> None: + await self._stream.aclose() + + async def start_tls( + self, + ssl_context: ssl.SSLContext, + server_hostname: typing.Optional[str] = None, + timeout: typing.Optional[float] = None, + ) -> AsyncNetworkStream: + exc_map = { + TimeoutError: ConnectTimeout, + anyio.BrokenResourceError: ConnectError, + } + with map_exceptions(exc_map): + try: + with anyio.fail_after(timeout): + ssl_stream = await anyio.streams.tls.TLSStream.wrap( + self._stream, + ssl_context=ssl_context, + hostname=server_hostname, + standard_compatible=False, + server_side=False, + ) + except Exception as exc: # pragma: nocover + await self.aclose() + raise exc + return AnyIOStream(ssl_stream) + + def get_extra_info(self, info: str) -> typing.Any: + if info == "ssl_object": + return self._stream.extra(anyio.streams.tls.TLSAttribute.ssl_object, None) + if info == "client_addr": + return self._stream.extra(anyio.abc.SocketAttribute.local_address, None) + if info == "server_addr": + return self._stream.extra(anyio.abc.SocketAttribute.remote_address, None) + if info == "socket": + return self._stream.extra(anyio.abc.SocketAttribute.raw_socket, None) + if info == "is_readable": + sock = self._stream.extra(anyio.abc.SocketAttribute.raw_socket, None) + return is_socket_readable(sock) + return None + + +class AnyIOBackend(AsyncNetworkBackend): + async def connect_tcp( + self, + host: str, + port: int, + timeout: typing.Optional[float] = None, + local_address: typing.Optional[str] = None, + socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + ) -> AsyncNetworkStream: + if socket_options is None: + socket_options = [] # pragma: no cover + exc_map = { + TimeoutError: ConnectTimeout, + OSError: ConnectError, + anyio.BrokenResourceError: ConnectError, + } + with map_exceptions(exc_map): + with anyio.fail_after(timeout): + stream: anyio.abc.ByteStream = await anyio.connect_tcp( + remote_host=host, + remote_port=port, + local_host=local_address, + ) + # By default TCP sockets opened in `asyncio` include TCP_NODELAY. + for option in socket_options: + stream._raw_socket.setsockopt(*option) # type: ignore[attr-defined] # pragma: no cover + return AnyIOStream(stream) + + async def connect_unix_socket( + self, + path: str, + timeout: typing.Optional[float] = None, + socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + ) -> AsyncNetworkStream: # pragma: nocover + if socket_options is None: + socket_options = [] + exc_map = { + TimeoutError: ConnectTimeout, + OSError: ConnectError, + anyio.BrokenResourceError: ConnectError, + } + with map_exceptions(exc_map): + with anyio.fail_after(timeout): + stream: anyio.abc.ByteStream = await anyio.connect_unix(path) + for option in socket_options: + stream._raw_socket.setsockopt(*option) # type: ignore[attr-defined] # pragma: no cover + return AnyIOStream(stream) + + async def sleep(self, seconds: float) -> None: + await anyio.sleep(seconds) # pragma: nocover diff --git a/Backend/venv/lib/python3.12/site-packages/httpcore/_backends/auto.py b/Backend/venv/lib/python3.12/site-packages/httpcore/_backends/auto.py new file mode 100644 index 00000000..b612ba07 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/httpcore/_backends/auto.py @@ -0,0 +1,52 @@ +import typing +from typing import Optional + +import sniffio + +from .base import SOCKET_OPTION, AsyncNetworkBackend, AsyncNetworkStream + + +class AutoBackend(AsyncNetworkBackend): + async def _init_backend(self) -> None: + if not (hasattr(self, "_backend")): + backend = sniffio.current_async_library() + if backend == "trio": + from .trio import TrioBackend + + self._backend: AsyncNetworkBackend = TrioBackend() + else: + from .anyio import AnyIOBackend + + self._backend = AnyIOBackend() + + async def connect_tcp( + self, + host: str, + port: int, + timeout: Optional[float] = None, + local_address: Optional[str] = None, + socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + ) -> AsyncNetworkStream: + await self._init_backend() + return await self._backend.connect_tcp( + host, + port, + timeout=timeout, + local_address=local_address, + socket_options=socket_options, + ) + + async def connect_unix_socket( + self, + path: str, + timeout: Optional[float] = None, + socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + ) -> AsyncNetworkStream: # pragma: nocover + await self._init_backend() + return await self._backend.connect_unix_socket( + path, timeout=timeout, socket_options=socket_options + ) + + async def sleep(self, seconds: float) -> None: # pragma: nocover + await self._init_backend() + return await self._backend.sleep(seconds) diff --git a/Backend/venv/lib/python3.12/site-packages/httpcore/_backends/base.py b/Backend/venv/lib/python3.12/site-packages/httpcore/_backends/base.py new file mode 100644 index 00000000..6cadedb5 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/httpcore/_backends/base.py @@ -0,0 +1,103 @@ +import ssl +import time +import typing + +SOCKET_OPTION = typing.Union[ + typing.Tuple[int, int, int], + typing.Tuple[int, int, typing.Union[bytes, bytearray]], + typing.Tuple[int, int, None, int], +] + + +class NetworkStream: + def read(self, max_bytes: int, timeout: typing.Optional[float] = None) -> bytes: + raise NotImplementedError() # pragma: nocover + + def write(self, buffer: bytes, timeout: typing.Optional[float] = None) -> None: + raise NotImplementedError() # pragma: nocover + + def close(self) -> None: + raise NotImplementedError() # pragma: nocover + + def start_tls( + self, + ssl_context: ssl.SSLContext, + server_hostname: typing.Optional[str] = None, + timeout: typing.Optional[float] = None, + ) -> "NetworkStream": + raise NotImplementedError() # pragma: nocover + + def get_extra_info(self, info: str) -> typing.Any: + return None # pragma: nocover + + +class NetworkBackend: + def connect_tcp( + self, + host: str, + port: int, + timeout: typing.Optional[float] = None, + local_address: typing.Optional[str] = None, + socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + ) -> NetworkStream: + raise NotImplementedError() # pragma: nocover + + def connect_unix_socket( + self, + path: str, + timeout: typing.Optional[float] = None, + socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + ) -> NetworkStream: + raise NotImplementedError() # pragma: nocover + + def sleep(self, seconds: float) -> None: + time.sleep(seconds) # pragma: nocover + + +class AsyncNetworkStream: + async def read( + self, max_bytes: int, timeout: typing.Optional[float] = None + ) -> bytes: + raise NotImplementedError() # pragma: nocover + + async def write( + self, buffer: bytes, timeout: typing.Optional[float] = None + ) -> None: + raise NotImplementedError() # pragma: nocover + + async def aclose(self) -> None: + raise NotImplementedError() # pragma: nocover + + async def start_tls( + self, + ssl_context: ssl.SSLContext, + server_hostname: typing.Optional[str] = None, + timeout: typing.Optional[float] = None, + ) -> "AsyncNetworkStream": + raise NotImplementedError() # pragma: nocover + + def get_extra_info(self, info: str) -> typing.Any: + return None # pragma: nocover + + +class AsyncNetworkBackend: + async def connect_tcp( + self, + host: str, + port: int, + timeout: typing.Optional[float] = None, + local_address: typing.Optional[str] = None, + socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + ) -> AsyncNetworkStream: + raise NotImplementedError() # pragma: nocover + + async def connect_unix_socket( + self, + path: str, + timeout: typing.Optional[float] = None, + socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + ) -> AsyncNetworkStream: + raise NotImplementedError() # pragma: nocover + + async def sleep(self, seconds: float) -> None: + raise NotImplementedError() # pragma: nocover diff --git a/Backend/venv/lib/python3.12/site-packages/httpcore/_backends/mock.py b/Backend/venv/lib/python3.12/site-packages/httpcore/_backends/mock.py new file mode 100644 index 00000000..f7aefebf --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/httpcore/_backends/mock.py @@ -0,0 +1,142 @@ +import ssl +import typing +from typing import Optional + +from .._exceptions import ReadError +from .base import ( + SOCKET_OPTION, + AsyncNetworkBackend, + AsyncNetworkStream, + NetworkBackend, + NetworkStream, +) + + +class MockSSLObject: + def __init__(self, http2: bool): + self._http2 = http2 + + def selected_alpn_protocol(self) -> str: + return "h2" if self._http2 else "http/1.1" + + +class MockStream(NetworkStream): + def __init__(self, buffer: typing.List[bytes], http2: bool = False) -> None: + self._buffer = buffer + self._http2 = http2 + self._closed = False + + def read(self, max_bytes: int, timeout: Optional[float] = None) -> bytes: + if self._closed: + raise ReadError("Connection closed") + if not self._buffer: + return b"" + return self._buffer.pop(0) + + def write(self, buffer: bytes, timeout: Optional[float] = None) -> None: + pass + + def close(self) -> None: + self._closed = True + + def start_tls( + self, + ssl_context: ssl.SSLContext, + server_hostname: Optional[str] = None, + timeout: Optional[float] = None, + ) -> NetworkStream: + return self + + def get_extra_info(self, info: str) -> typing.Any: + return MockSSLObject(http2=self._http2) if info == "ssl_object" else None + + def __repr__(self) -> str: + return "" + + +class MockBackend(NetworkBackend): + def __init__(self, buffer: typing.List[bytes], http2: bool = False) -> None: + self._buffer = buffer + self._http2 = http2 + + def connect_tcp( + self, + host: str, + port: int, + timeout: Optional[float] = None, + local_address: Optional[str] = None, + socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + ) -> NetworkStream: + return MockStream(list(self._buffer), http2=self._http2) + + def connect_unix_socket( + self, + path: str, + timeout: Optional[float] = None, + socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + ) -> NetworkStream: + return MockStream(list(self._buffer), http2=self._http2) + + def sleep(self, seconds: float) -> None: + pass + + +class AsyncMockStream(AsyncNetworkStream): + def __init__(self, buffer: typing.List[bytes], http2: bool = False) -> None: + self._buffer = buffer + self._http2 = http2 + self._closed = False + + async def read(self, max_bytes: int, timeout: Optional[float] = None) -> bytes: + if self._closed: + raise ReadError("Connection closed") + if not self._buffer: + return b"" + return self._buffer.pop(0) + + async def write(self, buffer: bytes, timeout: Optional[float] = None) -> None: + pass + + async def aclose(self) -> None: + self._closed = True + + async def start_tls( + self, + ssl_context: ssl.SSLContext, + server_hostname: Optional[str] = None, + timeout: Optional[float] = None, + ) -> AsyncNetworkStream: + return self + + def get_extra_info(self, info: str) -> typing.Any: + return MockSSLObject(http2=self._http2) if info == "ssl_object" else None + + def __repr__(self) -> str: + return "" + + +class AsyncMockBackend(AsyncNetworkBackend): + def __init__(self, buffer: typing.List[bytes], http2: bool = False) -> None: + self._buffer = buffer + self._http2 = http2 + + async def connect_tcp( + self, + host: str, + port: int, + timeout: Optional[float] = None, + local_address: Optional[str] = None, + socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + ) -> AsyncNetworkStream: + return AsyncMockStream(list(self._buffer), http2=self._http2) + + async def connect_unix_socket( + self, + path: str, + timeout: Optional[float] = None, + socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + ) -> AsyncNetworkStream: + return AsyncMockStream(list(self._buffer), http2=self._http2) + + async def sleep(self, seconds: float) -> None: + pass diff --git a/Backend/venv/lib/python3.12/site-packages/httpcore/_backends/sync.py b/Backend/venv/lib/python3.12/site-packages/httpcore/_backends/sync.py new file mode 100644 index 00000000..a4c85f04 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/httpcore/_backends/sync.py @@ -0,0 +1,133 @@ +import socket +import ssl +import sys +import typing + +from .._exceptions import ( + ConnectError, + ConnectTimeout, + ExceptionMapping, + ReadError, + ReadTimeout, + WriteError, + WriteTimeout, + map_exceptions, +) +from .._utils import is_socket_readable +from .base import SOCKET_OPTION, NetworkBackend, NetworkStream + + +class SyncStream(NetworkStream): + def __init__(self, sock: socket.socket) -> None: + self._sock = sock + + def read(self, max_bytes: int, timeout: typing.Optional[float] = None) -> bytes: + exc_map: ExceptionMapping = {socket.timeout: ReadTimeout, OSError: ReadError} + with map_exceptions(exc_map): + self._sock.settimeout(timeout) + return self._sock.recv(max_bytes) + + def write(self, buffer: bytes, timeout: typing.Optional[float] = None) -> None: + if not buffer: + return + + exc_map: ExceptionMapping = {socket.timeout: WriteTimeout, OSError: WriteError} + with map_exceptions(exc_map): + while buffer: + self._sock.settimeout(timeout) + n = self._sock.send(buffer) + buffer = buffer[n:] + + def close(self) -> None: + self._sock.close() + + def start_tls( + self, + ssl_context: ssl.SSLContext, + server_hostname: typing.Optional[str] = None, + timeout: typing.Optional[float] = None, + ) -> NetworkStream: + exc_map: ExceptionMapping = { + socket.timeout: ConnectTimeout, + OSError: ConnectError, + } + with map_exceptions(exc_map): + try: + self._sock.settimeout(timeout) + sock = ssl_context.wrap_socket( + self._sock, server_hostname=server_hostname + ) + except Exception as exc: # pragma: nocover + self.close() + raise exc + return SyncStream(sock) + + def get_extra_info(self, info: str) -> typing.Any: + if info == "ssl_object" and isinstance(self._sock, ssl.SSLSocket): + return self._sock._sslobj # type: ignore + if info == "client_addr": + return self._sock.getsockname() + if info == "server_addr": + return self._sock.getpeername() + if info == "socket": + return self._sock + if info == "is_readable": + return is_socket_readable(self._sock) + return None + + +class SyncBackend(NetworkBackend): + def connect_tcp( + self, + host: str, + port: int, + timeout: typing.Optional[float] = None, + local_address: typing.Optional[str] = None, + socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + ) -> NetworkStream: + # Note that we automatically include `TCP_NODELAY` + # in addition to any other custom socket options. + if socket_options is None: + socket_options = [] # pragma: no cover + address = (host, port) + source_address = None if local_address is None else (local_address, 0) + exc_map: ExceptionMapping = { + socket.timeout: ConnectTimeout, + OSError: ConnectError, + } + + with map_exceptions(exc_map): + sock = socket.create_connection( + address, + timeout, + source_address=source_address, + ) + for option in socket_options: + sock.setsockopt(*option) # pragma: no cover + sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + return SyncStream(sock) + + def connect_unix_socket( + self, + path: str, + timeout: typing.Optional[float] = None, + socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + ) -> NetworkStream: # pragma: nocover + if sys.platform == "win32": + raise RuntimeError( + "Attempted to connect to a UNIX socket on a Windows system." + ) + if socket_options is None: + socket_options = [] + + exc_map: ExceptionMapping = { + socket.timeout: ConnectTimeout, + OSError: ConnectError, + } + with map_exceptions(exc_map): + sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) + for option in socket_options: + sock.setsockopt(*option) + sock.settimeout(timeout) + sock.connect(path) + return SyncStream(sock) diff --git a/Backend/venv/lib/python3.12/site-packages/httpcore/_backends/trio.py b/Backend/venv/lib/python3.12/site-packages/httpcore/_backends/trio.py new file mode 100644 index 00000000..b1626d28 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/httpcore/_backends/trio.py @@ -0,0 +1,161 @@ +import ssl +import typing + +import trio + +from .._exceptions import ( + ConnectError, + ConnectTimeout, + ExceptionMapping, + ReadError, + ReadTimeout, + WriteError, + WriteTimeout, + map_exceptions, +) +from .base import SOCKET_OPTION, AsyncNetworkBackend, AsyncNetworkStream + + +class TrioStream(AsyncNetworkStream): + def __init__(self, stream: trio.abc.Stream) -> None: + self._stream = stream + + async def read( + self, max_bytes: int, timeout: typing.Optional[float] = None + ) -> bytes: + timeout_or_inf = float("inf") if timeout is None else timeout + exc_map: ExceptionMapping = { + trio.TooSlowError: ReadTimeout, + trio.BrokenResourceError: ReadError, + trio.ClosedResourceError: ReadError, + } + with map_exceptions(exc_map): + with trio.fail_after(timeout_or_inf): + data: bytes = await self._stream.receive_some(max_bytes=max_bytes) + return data + + async def write( + self, buffer: bytes, timeout: typing.Optional[float] = None + ) -> None: + if not buffer: + return + + timeout_or_inf = float("inf") if timeout is None else timeout + exc_map: ExceptionMapping = { + trio.TooSlowError: WriteTimeout, + trio.BrokenResourceError: WriteError, + trio.ClosedResourceError: WriteError, + } + with map_exceptions(exc_map): + with trio.fail_after(timeout_or_inf): + await self._stream.send_all(data=buffer) + + async def aclose(self) -> None: + await self._stream.aclose() + + async def start_tls( + self, + ssl_context: ssl.SSLContext, + server_hostname: typing.Optional[str] = None, + timeout: typing.Optional[float] = None, + ) -> AsyncNetworkStream: + timeout_or_inf = float("inf") if timeout is None else timeout + exc_map: ExceptionMapping = { + trio.TooSlowError: ConnectTimeout, + trio.BrokenResourceError: ConnectError, + } + ssl_stream = trio.SSLStream( + self._stream, + ssl_context=ssl_context, + server_hostname=server_hostname, + https_compatible=True, + server_side=False, + ) + with map_exceptions(exc_map): + try: + with trio.fail_after(timeout_or_inf): + await ssl_stream.do_handshake() + except Exception as exc: # pragma: nocover + await self.aclose() + raise exc + return TrioStream(ssl_stream) + + def get_extra_info(self, info: str) -> typing.Any: + if info == "ssl_object" and isinstance(self._stream, trio.SSLStream): + # Type checkers cannot see `_ssl_object` attribute because trio._ssl.SSLStream uses __getattr__/__setattr__. + # Tracked at https://github.com/python-trio/trio/issues/542 + return self._stream._ssl_object # type: ignore[attr-defined] + if info == "client_addr": + return self._get_socket_stream().socket.getsockname() + if info == "server_addr": + return self._get_socket_stream().socket.getpeername() + if info == "socket": + stream = self._stream + while isinstance(stream, trio.SSLStream): + stream = stream.transport_stream + assert isinstance(stream, trio.SocketStream) + return stream.socket + if info == "is_readable": + socket = self.get_extra_info("socket") + return socket.is_readable() + return None + + def _get_socket_stream(self) -> trio.SocketStream: + stream = self._stream + while isinstance(stream, trio.SSLStream): + stream = stream.transport_stream + assert isinstance(stream, trio.SocketStream) + return stream + + +class TrioBackend(AsyncNetworkBackend): + async def connect_tcp( + self, + host: str, + port: int, + timeout: typing.Optional[float] = None, + local_address: typing.Optional[str] = None, + socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + ) -> AsyncNetworkStream: + # By default for TCP sockets, trio enables TCP_NODELAY. + # https://trio.readthedocs.io/en/stable/reference-io.html#trio.SocketStream + if socket_options is None: + socket_options = [] # pragma: no cover + timeout_or_inf = float("inf") if timeout is None else timeout + exc_map: ExceptionMapping = { + trio.TooSlowError: ConnectTimeout, + trio.BrokenResourceError: ConnectError, + OSError: ConnectError, + } + with map_exceptions(exc_map): + with trio.fail_after(timeout_or_inf): + stream: trio.abc.Stream = await trio.open_tcp_stream( + host=host, port=port, local_address=local_address + ) + for option in socket_options: + stream.setsockopt(*option) # type: ignore[attr-defined] # pragma: no cover + return TrioStream(stream) + + async def connect_unix_socket( + self, + path: str, + timeout: typing.Optional[float] = None, + socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + ) -> AsyncNetworkStream: # pragma: nocover + if socket_options is None: + socket_options = [] + timeout_or_inf = float("inf") if timeout is None else timeout + exc_map: ExceptionMapping = { + trio.TooSlowError: ConnectTimeout, + trio.BrokenResourceError: ConnectError, + OSError: ConnectError, + } + with map_exceptions(exc_map): + with trio.fail_after(timeout_or_inf): + stream: trio.abc.Stream = await trio.open_unix_socket(path) + for option in socket_options: + stream.setsockopt(*option) # type: ignore[attr-defined] # pragma: no cover + return TrioStream(stream) + + async def sleep(self, seconds: float) -> None: + await trio.sleep(seconds) # pragma: nocover diff --git a/Backend/venv/lib/python3.12/site-packages/httpcore/_exceptions.py b/Backend/venv/lib/python3.12/site-packages/httpcore/_exceptions.py new file mode 100644 index 00000000..81e7fc61 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/httpcore/_exceptions.py @@ -0,0 +1,81 @@ +import contextlib +from typing import Iterator, Mapping, Type + +ExceptionMapping = Mapping[Type[Exception], Type[Exception]] + + +@contextlib.contextmanager +def map_exceptions(map: ExceptionMapping) -> Iterator[None]: + try: + yield + except Exception as exc: # noqa: PIE786 + for from_exc, to_exc in map.items(): + if isinstance(exc, from_exc): + raise to_exc(exc) from exc + raise # pragma: nocover + + +class ConnectionNotAvailable(Exception): + pass + + +class ProxyError(Exception): + pass + + +class UnsupportedProtocol(Exception): + pass + + +class ProtocolError(Exception): + pass + + +class RemoteProtocolError(ProtocolError): + pass + + +class LocalProtocolError(ProtocolError): + pass + + +# Timeout errors + + +class TimeoutException(Exception): + pass + + +class PoolTimeout(TimeoutException): + pass + + +class ConnectTimeout(TimeoutException): + pass + + +class ReadTimeout(TimeoutException): + pass + + +class WriteTimeout(TimeoutException): + pass + + +# Network errors + + +class NetworkError(Exception): + pass + + +class ConnectError(NetworkError): + pass + + +class ReadError(NetworkError): + pass + + +class WriteError(NetworkError): + pass diff --git a/Backend/venv/lib/python3.12/site-packages/httpcore/_models.py b/Backend/venv/lib/python3.12/site-packages/httpcore/_models.py new file mode 100644 index 00000000..e15305ee --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/httpcore/_models.py @@ -0,0 +1,483 @@ +from typing import ( + Any, + AsyncIterable, + AsyncIterator, + Iterable, + Iterator, + List, + Mapping, + Optional, + Sequence, + Tuple, + Union, +) +from urllib.parse import urlparse + +# Functions for typechecking... + + +HeadersAsSequence = Sequence[Tuple[Union[bytes, str], Union[bytes, str]]] +HeadersAsMapping = Mapping[Union[bytes, str], Union[bytes, str]] +HeaderTypes = Union[HeadersAsSequence, HeadersAsMapping, None] + +Extensions = Mapping[str, Any] + + +def enforce_bytes(value: Union[bytes, str], *, name: str) -> bytes: + """ + Any arguments that are ultimately represented as bytes can be specified + either as bytes or as strings. + + However we enforce that any string arguments must only contain characters in + the plain ASCII range. chr(0)...chr(127). If you need to use characters + outside that range then be precise, and use a byte-wise argument. + """ + if isinstance(value, str): + try: + return value.encode("ascii") + except UnicodeEncodeError: + raise TypeError(f"{name} strings may not include unicode characters.") + elif isinstance(value, bytes): + return value + + seen_type = type(value).__name__ + raise TypeError(f"{name} must be bytes or str, but got {seen_type}.") + + +def enforce_url(value: Union["URL", bytes, str], *, name: str) -> "URL": + """ + Type check for URL parameters. + """ + if isinstance(value, (bytes, str)): + return URL(value) + elif isinstance(value, URL): + return value + + seen_type = type(value).__name__ + raise TypeError(f"{name} must be a URL, bytes, or str, but got {seen_type}.") + + +def enforce_headers( + value: Union[HeadersAsMapping, HeadersAsSequence, None] = None, *, name: str +) -> List[Tuple[bytes, bytes]]: + """ + Convienence function that ensure all items in request or response headers + are either bytes or strings in the plain ASCII range. + """ + if value is None: + return [] + elif isinstance(value, Mapping): + return [ + ( + enforce_bytes(k, name="header name"), + enforce_bytes(v, name="header value"), + ) + for k, v in value.items() + ] + elif isinstance(value, Sequence): + return [ + ( + enforce_bytes(k, name="header name"), + enforce_bytes(v, name="header value"), + ) + for k, v in value + ] + + seen_type = type(value).__name__ + raise TypeError( + f"{name} must be a mapping or sequence of two-tuples, but got {seen_type}." + ) + + +def enforce_stream( + value: Union[bytes, Iterable[bytes], AsyncIterable[bytes], None], *, name: str +) -> Union[Iterable[bytes], AsyncIterable[bytes]]: + if value is None: + return ByteStream(b"") + elif isinstance(value, bytes): + return ByteStream(value) + return value + + +# * https://tools.ietf.org/html/rfc3986#section-3.2.3 +# * https://url.spec.whatwg.org/#url-miscellaneous +# * https://url.spec.whatwg.org/#scheme-state +DEFAULT_PORTS = { + b"ftp": 21, + b"http": 80, + b"https": 443, + b"ws": 80, + b"wss": 443, +} + + +def include_request_headers( + headers: List[Tuple[bytes, bytes]], + *, + url: "URL", + content: Union[None, bytes, Iterable[bytes], AsyncIterable[bytes]], +) -> List[Tuple[bytes, bytes]]: + headers_set = set(k.lower() for k, v in headers) + + if b"host" not in headers_set: + default_port = DEFAULT_PORTS.get(url.scheme) + if url.port is None or url.port == default_port: + header_value = url.host + else: + header_value = b"%b:%d" % (url.host, url.port) + headers = [(b"Host", header_value)] + headers + + if ( + content is not None + and b"content-length" not in headers_set + and b"transfer-encoding" not in headers_set + ): + if isinstance(content, bytes): + content_length = str(len(content)).encode("ascii") + headers += [(b"Content-Length", content_length)] + else: + headers += [(b"Transfer-Encoding", b"chunked")] # pragma: nocover + + return headers + + +# Interfaces for byte streams... + + +class ByteStream: + """ + A container for non-streaming content, and that supports both sync and async + stream iteration. + """ + + def __init__(self, content: bytes) -> None: + self._content = content + + def __iter__(self) -> Iterator[bytes]: + yield self._content + + async def __aiter__(self) -> AsyncIterator[bytes]: + yield self._content + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{len(self._content)} bytes]>" + + +class Origin: + def __init__(self, scheme: bytes, host: bytes, port: int) -> None: + self.scheme = scheme + self.host = host + self.port = port + + def __eq__(self, other: Any) -> bool: + return ( + isinstance(other, Origin) + and self.scheme == other.scheme + and self.host == other.host + and self.port == other.port + ) + + def __str__(self) -> str: + scheme = self.scheme.decode("ascii") + host = self.host.decode("ascii") + port = str(self.port) + return f"{scheme}://{host}:{port}" + + +class URL: + """ + Represents the URL against which an HTTP request may be made. + + The URL may either be specified as a plain string, for convienence: + + ```python + url = httpcore.URL("https://www.example.com/") + ``` + + Or be constructed with explicitily pre-parsed components: + + ```python + url = httpcore.URL(scheme=b'https', host=b'www.example.com', port=None, target=b'/') + ``` + + Using this second more explicit style allows integrations that are using + `httpcore` to pass through URLs that have already been parsed in order to use + libraries such as `rfc-3986` rather than relying on the stdlib. It also ensures + that URL parsing is treated identically at both the networking level and at any + higher layers of abstraction. + + The four components are important here, as they allow the URL to be precisely + specified in a pre-parsed format. They also allow certain types of request to + be created that could not otherwise be expressed. + + For example, an HTTP request to `http://www.example.com/` forwarded via a proxy + at `http://localhost:8080`... + + ```python + # Constructs an HTTP request with a complete URL as the target: + # GET https://www.example.com/ HTTP/1.1 + url = httpcore.URL( + scheme=b'http', + host=b'localhost', + port=8080, + target=b'https://www.example.com/' + ) + request = httpcore.Request( + method="GET", + url=url + ) + ``` + + Another example is constructing an `OPTIONS *` request... + + ```python + # Constructs an 'OPTIONS *' HTTP request: + # OPTIONS * HTTP/1.1 + url = httpcore.URL(scheme=b'https', host=b'www.example.com', target=b'*') + request = httpcore.Request(method="OPTIONS", url=url) + ``` + + This kind of request is not possible to formulate with a URL string, + because the `/` delimiter is always used to demark the target from the + host/port portion of the URL. + + For convenience, string-like arguments may be specified either as strings or + as bytes. However, once a request is being issue over-the-wire, the URL + components are always ultimately required to be a bytewise representation. + + In order to avoid any ambiguity over character encodings, when strings are used + as arguments, they must be strictly limited to the ASCII range `chr(0)`-`chr(127)`. + If you require a bytewise representation that is outside this range you must + handle the character encoding directly, and pass a bytes instance. + """ + + def __init__( + self, + url: Union[bytes, str] = "", + *, + scheme: Union[bytes, str] = b"", + host: Union[bytes, str] = b"", + port: Optional[int] = None, + target: Union[bytes, str] = b"", + ) -> None: + """ + Parameters: + url: The complete URL as a string or bytes. + scheme: The URL scheme as a string or bytes. + Typically either `"http"` or `"https"`. + host: The URL host as a string or bytes. Such as `"www.example.com"`. + port: The port to connect to. Either an integer or `None`. + target: The target of the HTTP request. Such as `"/items?search=red"`. + """ + if url: + parsed = urlparse(enforce_bytes(url, name="url")) + self.scheme = parsed.scheme + self.host = parsed.hostname or b"" + self.port = parsed.port + self.target = (parsed.path or b"/") + ( + b"?" + parsed.query if parsed.query else b"" + ) + else: + self.scheme = enforce_bytes(scheme, name="scheme") + self.host = enforce_bytes(host, name="host") + self.port = port + self.target = enforce_bytes(target, name="target") + + @property + def origin(self) -> Origin: + default_port = { + b"http": 80, + b"https": 443, + b"ws": 80, + b"wss": 443, + b"socks5": 1080, + }[self.scheme] + return Origin( + scheme=self.scheme, host=self.host, port=self.port or default_port + ) + + def __eq__(self, other: Any) -> bool: + return ( + isinstance(other, URL) + and other.scheme == self.scheme + and other.host == self.host + and other.port == self.port + and other.target == self.target + ) + + def __bytes__(self) -> bytes: + if self.port is None: + return b"%b://%b%b" % (self.scheme, self.host, self.target) + return b"%b://%b:%d%b" % (self.scheme, self.host, self.port, self.target) + + def __repr__(self) -> str: + return ( + f"{self.__class__.__name__}(scheme={self.scheme!r}, " + f"host={self.host!r}, port={self.port!r}, target={self.target!r})" + ) + + +class Request: + """ + An HTTP request. + """ + + def __init__( + self, + method: Union[bytes, str], + url: Union[URL, bytes, str], + *, + headers: HeaderTypes = None, + content: Union[bytes, Iterable[bytes], AsyncIterable[bytes], None] = None, + extensions: Optional[Extensions] = None, + ) -> None: + """ + Parameters: + method: The HTTP request method, either as a string or bytes. + For example: `GET`. + url: The request URL, either as a `URL` instance, or as a string or bytes. + For example: `"https://www.example.com".` + headers: The HTTP request headers. + content: The content of the response body. + extensions: A dictionary of optional extra information included on + the request. Possible keys include `"timeout"`, and `"trace"`. + """ + self.method: bytes = enforce_bytes(method, name="method") + self.url: URL = enforce_url(url, name="url") + self.headers: List[Tuple[bytes, bytes]] = enforce_headers( + headers, name="headers" + ) + self.stream: Union[Iterable[bytes], AsyncIterable[bytes]] = enforce_stream( + content, name="content" + ) + self.extensions = {} if extensions is None else extensions + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.method!r}]>" + + +class Response: + """ + An HTTP response. + """ + + def __init__( + self, + status: int, + *, + headers: HeaderTypes = None, + content: Union[bytes, Iterable[bytes], AsyncIterable[bytes], None] = None, + extensions: Optional[Extensions] = None, + ) -> None: + """ + Parameters: + status: The HTTP status code of the response. For example `200`. + headers: The HTTP response headers. + content: The content of the response body. + extensions: A dictionary of optional extra information included on + the responseself.Possible keys include `"http_version"`, + `"reason_phrase"`, and `"network_stream"`. + """ + self.status: int = status + self.headers: List[Tuple[bytes, bytes]] = enforce_headers( + headers, name="headers" + ) + self.stream: Union[Iterable[bytes], AsyncIterable[bytes]] = enforce_stream( + content, name="content" + ) + self.extensions = {} if extensions is None else extensions + + self._stream_consumed = False + + @property + def content(self) -> bytes: + if not hasattr(self, "_content"): + if isinstance(self.stream, Iterable): + raise RuntimeError( + "Attempted to access 'response.content' on a streaming response. " + "Call 'response.read()' first." + ) + else: + raise RuntimeError( + "Attempted to access 'response.content' on a streaming response. " + "Call 'await response.aread()' first." + ) + return self._content + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.status}]>" + + # Sync interface... + + def read(self) -> bytes: + if not isinstance(self.stream, Iterable): # pragma: nocover + raise RuntimeError( + "Attempted to read an asynchronous response using 'response.read()'. " + "You should use 'await response.aread()' instead." + ) + if not hasattr(self, "_content"): + self._content = b"".join([part for part in self.iter_stream()]) + return self._content + + def iter_stream(self) -> Iterator[bytes]: + if not isinstance(self.stream, Iterable): # pragma: nocover + raise RuntimeError( + "Attempted to stream an asynchronous response using 'for ... in " + "response.iter_stream()'. " + "You should use 'async for ... in response.aiter_stream()' instead." + ) + if self._stream_consumed: + raise RuntimeError( + "Attempted to call 'for ... in response.iter_stream()' more than once." + ) + self._stream_consumed = True + for chunk in self.stream: + yield chunk + + def close(self) -> None: + if not isinstance(self.stream, Iterable): # pragma: nocover + raise RuntimeError( + "Attempted to close an asynchronous response using 'response.close()'. " + "You should use 'await response.aclose()' instead." + ) + if hasattr(self.stream, "close"): + self.stream.close() + + # Async interface... + + async def aread(self) -> bytes: + if not isinstance(self.stream, AsyncIterable): # pragma: nocover + raise RuntimeError( + "Attempted to read an synchronous response using " + "'await response.aread()'. " + "You should use 'response.read()' instead." + ) + if not hasattr(self, "_content"): + self._content = b"".join([part async for part in self.aiter_stream()]) + return self._content + + async def aiter_stream(self) -> AsyncIterator[bytes]: + if not isinstance(self.stream, AsyncIterable): # pragma: nocover + raise RuntimeError( + "Attempted to stream an synchronous response using 'async for ... in " + "response.aiter_stream()'. " + "You should use 'for ... in response.iter_stream()' instead." + ) + if self._stream_consumed: + raise RuntimeError( + "Attempted to call 'async for ... in response.aiter_stream()' " + "more than once." + ) + self._stream_consumed = True + async for chunk in self.stream: + yield chunk + + async def aclose(self) -> None: + if not isinstance(self.stream, AsyncIterable): # pragma: nocover + raise RuntimeError( + "Attempted to close a synchronous response using " + "'await response.aclose()'. " + "You should use 'response.close()' instead." + ) + if hasattr(self.stream, "aclose"): + await self.stream.aclose() diff --git a/Backend/venv/lib/python3.12/site-packages/httpcore/_ssl.py b/Backend/venv/lib/python3.12/site-packages/httpcore/_ssl.py new file mode 100644 index 00000000..c99c5a67 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/httpcore/_ssl.py @@ -0,0 +1,9 @@ +import ssl + +import certifi + + +def default_ssl_context() -> ssl.SSLContext: + context = ssl.create_default_context() + context.load_verify_locations(certifi.where()) + return context diff --git a/Backend/venv/lib/python3.12/site-packages/httpcore/_sync/__init__.py b/Backend/venv/lib/python3.12/site-packages/httpcore/_sync/__init__.py new file mode 100644 index 00000000..b476d76d --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/httpcore/_sync/__init__.py @@ -0,0 +1,39 @@ +from .connection import HTTPConnection +from .connection_pool import ConnectionPool +from .http11 import HTTP11Connection +from .http_proxy import HTTPProxy +from .interfaces import ConnectionInterface + +try: + from .http2 import HTTP2Connection +except ImportError: # pragma: nocover + + class HTTP2Connection: # type: ignore + def __init__(self, *args, **kwargs) -> None: # type: ignore + raise RuntimeError( + "Attempted to use http2 support, but the `h2` package is not " + "installed. Use 'pip install httpcore[http2]'." + ) + + +try: + from .socks_proxy import SOCKSProxy +except ImportError: # pragma: nocover + + class SOCKSProxy: # type: ignore + def __init__(self, *args, **kwargs) -> None: # type: ignore + raise RuntimeError( + "Attempted to use SOCKS support, but the `socksio` package is not " + "installed. Use 'pip install httpcore[socks]'." + ) + + +__all__ = [ + "HTTPConnection", + "ConnectionPool", + "HTTPProxy", + "HTTP11Connection", + "HTTP2Connection", + "ConnectionInterface", + "SOCKSProxy", +] diff --git a/Backend/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..fffafcf0 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/connection.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/connection.cpython-312.pyc new file mode 100644 index 00000000..85f0649f Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/connection.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/connection_pool.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/connection_pool.cpython-312.pyc new file mode 100644 index 00000000..0e8a0d66 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/connection_pool.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/http11.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/http11.cpython-312.pyc new file mode 100644 index 00000000..15e33741 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/http11.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/http2.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/http2.cpython-312.pyc new file mode 100644 index 00000000..bedddcbe Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/http2.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/http_proxy.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/http_proxy.cpython-312.pyc new file mode 100644 index 00000000..8e076fb9 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/http_proxy.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/interfaces.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/interfaces.cpython-312.pyc new file mode 100644 index 00000000..77621796 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/interfaces.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/socks_proxy.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/socks_proxy.cpython-312.pyc new file mode 100644 index 00000000..edfd31ef Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/socks_proxy.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/httpcore/_sync/connection.py b/Backend/venv/lib/python3.12/site-packages/httpcore/_sync/connection.py new file mode 100644 index 00000000..39b8b97e --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/httpcore/_sync/connection.py @@ -0,0 +1,215 @@ +import itertools +import logging +import ssl +from types import TracebackType +from typing import Iterable, Iterator, Optional, Type + +from .._backends.sync import SyncBackend +from .._backends.base import SOCKET_OPTION, NetworkBackend, NetworkStream +from .._exceptions import ConnectError, ConnectionNotAvailable, ConnectTimeout +from .._models import Origin, Request, Response +from .._ssl import default_ssl_context +from .._synchronization import Lock +from .._trace import Trace +from .http11 import HTTP11Connection +from .interfaces import ConnectionInterface + +RETRIES_BACKOFF_FACTOR = 0.5 # 0s, 0.5s, 1s, 2s, 4s, etc. + + +logger = logging.getLogger("httpcore.connection") + + +def exponential_backoff(factor: float) -> Iterator[float]: + yield 0 + for n in itertools.count(2): + yield factor * (2 ** (n - 2)) + + +class HTTPConnection(ConnectionInterface): + def __init__( + self, + origin: Origin, + ssl_context: Optional[ssl.SSLContext] = None, + keepalive_expiry: Optional[float] = None, + http1: bool = True, + http2: bool = False, + retries: int = 0, + local_address: Optional[str] = None, + uds: Optional[str] = None, + network_backend: Optional[NetworkBackend] = None, + socket_options: Optional[Iterable[SOCKET_OPTION]] = None, + ) -> None: + self._origin = origin + self._ssl_context = ssl_context + self._keepalive_expiry = keepalive_expiry + self._http1 = http1 + self._http2 = http2 + self._retries = retries + self._local_address = local_address + self._uds = uds + + self._network_backend: NetworkBackend = ( + SyncBackend() if network_backend is None else network_backend + ) + self._connection: Optional[ConnectionInterface] = None + self._connect_failed: bool = False + self._request_lock = Lock() + self._socket_options = socket_options + + def handle_request(self, request: Request) -> Response: + if not self.can_handle_request(request.url.origin): + raise RuntimeError( + f"Attempted to send request to {request.url.origin} on connection to {self._origin}" + ) + + with self._request_lock: + if self._connection is None: + try: + stream = self._connect(request) + + ssl_object = stream.get_extra_info("ssl_object") + http2_negotiated = ( + ssl_object is not None + and ssl_object.selected_alpn_protocol() == "h2" + ) + if http2_negotiated or (self._http2 and not self._http1): + from .http2 import HTTP2Connection + + self._connection = HTTP2Connection( + origin=self._origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + else: + self._connection = HTTP11Connection( + origin=self._origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + except Exception as exc: + self._connect_failed = True + raise exc + elif not self._connection.is_available(): + raise ConnectionNotAvailable() + + return self._connection.handle_request(request) + + def _connect(self, request: Request) -> NetworkStream: + timeouts = request.extensions.get("timeout", {}) + sni_hostname = request.extensions.get("sni_hostname", None) + timeout = timeouts.get("connect", None) + + retries_left = self._retries + delays = exponential_backoff(factor=RETRIES_BACKOFF_FACTOR) + + while True: + try: + if self._uds is None: + kwargs = { + "host": self._origin.host.decode("ascii"), + "port": self._origin.port, + "local_address": self._local_address, + "timeout": timeout, + "socket_options": self._socket_options, + } + with Trace("connect_tcp", logger, request, kwargs) as trace: + stream = self._network_backend.connect_tcp(**kwargs) + trace.return_value = stream + else: + kwargs = { + "path": self._uds, + "timeout": timeout, + "socket_options": self._socket_options, + } + with Trace( + "connect_unix_socket", logger, request, kwargs + ) as trace: + stream = self._network_backend.connect_unix_socket( + **kwargs + ) + trace.return_value = stream + + if self._origin.scheme == b"https": + ssl_context = ( + default_ssl_context() + if self._ssl_context is None + else self._ssl_context + ) + alpn_protocols = ["http/1.1", "h2"] if self._http2 else ["http/1.1"] + ssl_context.set_alpn_protocols(alpn_protocols) + + kwargs = { + "ssl_context": ssl_context, + "server_hostname": sni_hostname + or self._origin.host.decode("ascii"), + "timeout": timeout, + } + with Trace("start_tls", logger, request, kwargs) as trace: + stream = stream.start_tls(**kwargs) + trace.return_value = stream + return stream + except (ConnectError, ConnectTimeout): + if retries_left <= 0: + raise + retries_left -= 1 + delay = next(delays) + with Trace("retry", logger, request, kwargs) as trace: + self._network_backend.sleep(delay) + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._origin + + def close(self) -> None: + if self._connection is not None: + with Trace("close", logger, None, {}): + self._connection.close() + + def is_available(self) -> bool: + if self._connection is None: + # If HTTP/2 support is enabled, and the resulting connection could + # end up as HTTP/2 then we should indicate the connection as being + # available to service multiple requests. + return ( + self._http2 + and (self._origin.scheme == b"https" or not self._http1) + and not self._connect_failed + ) + return self._connection.is_available() + + def has_expired(self) -> bool: + if self._connection is None: + return self._connect_failed + return self._connection.has_expired() + + def is_idle(self) -> bool: + if self._connection is None: + return self._connect_failed + return self._connection.is_idle() + + def is_closed(self) -> bool: + if self._connection is None: + return self._connect_failed + return self._connection.is_closed() + + def info(self) -> str: + if self._connection is None: + return "CONNECTION FAILED" if self._connect_failed else "CONNECTING" + return self._connection.info() + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.info()}]>" + + # These context managers are not used in the standard flow, but are + # useful for testing or working with connection instances directly. + + def __enter__(self) -> "HTTPConnection": + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]] = None, + exc_value: Optional[BaseException] = None, + traceback: Optional[TracebackType] = None, + ) -> None: + self.close() diff --git a/Backend/venv/lib/python3.12/site-packages/httpcore/_sync/connection_pool.py b/Backend/venv/lib/python3.12/site-packages/httpcore/_sync/connection_pool.py new file mode 100644 index 00000000..dbcaff1f --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/httpcore/_sync/connection_pool.py @@ -0,0 +1,356 @@ +import ssl +import sys +from types import TracebackType +from typing import Iterable, Iterator, Iterable, List, Optional, Type + +from .._backends.sync import SyncBackend +from .._backends.base import SOCKET_OPTION, NetworkBackend +from .._exceptions import ConnectionNotAvailable, UnsupportedProtocol +from .._models import Origin, Request, Response +from .._synchronization import Event, Lock, ShieldCancellation +from .connection import HTTPConnection +from .interfaces import ConnectionInterface, RequestInterface + + +class RequestStatus: + def __init__(self, request: Request): + self.request = request + self.connection: Optional[ConnectionInterface] = None + self._connection_acquired = Event() + + def set_connection(self, connection: ConnectionInterface) -> None: + assert self.connection is None + self.connection = connection + self._connection_acquired.set() + + def unset_connection(self) -> None: + assert self.connection is not None + self.connection = None + self._connection_acquired = Event() + + def wait_for_connection( + self, timeout: Optional[float] = None + ) -> ConnectionInterface: + if self.connection is None: + self._connection_acquired.wait(timeout=timeout) + assert self.connection is not None + return self.connection + + +class ConnectionPool(RequestInterface): + """ + A connection pool for making HTTP requests. + """ + + def __init__( + self, + ssl_context: Optional[ssl.SSLContext] = None, + max_connections: Optional[int] = 10, + max_keepalive_connections: Optional[int] = None, + keepalive_expiry: Optional[float] = None, + http1: bool = True, + http2: bool = False, + retries: int = 0, + local_address: Optional[str] = None, + uds: Optional[str] = None, + network_backend: Optional[NetworkBackend] = None, + socket_options: Optional[Iterable[SOCKET_OPTION]] = None, + ) -> None: + """ + A connection pool for making HTTP requests. + + Parameters: + ssl_context: An SSL context to use for verifying connections. + If not specified, the default `httpcore.default_ssl_context()` + will be used. + max_connections: The maximum number of concurrent HTTP connections that + the pool should allow. Any attempt to send a request on a pool that + would exceed this amount will block until a connection is available. + max_keepalive_connections: The maximum number of idle HTTP connections + that will be maintained in the pool. + keepalive_expiry: The duration in seconds that an idle HTTP connection + may be maintained for before being expired from the pool. + http1: A boolean indicating if HTTP/1.1 requests should be supported + by the connection pool. Defaults to True. + http2: A boolean indicating if HTTP/2 requests should be supported by + the connection pool. Defaults to False. + retries: The maximum number of retries when trying to establish a + connection. + local_address: Local address to connect from. Can also be used to connect + using a particular address family. Using `local_address="0.0.0.0"` + will connect using an `AF_INET` address (IPv4), while using + `local_address="::"` will connect using an `AF_INET6` address (IPv6). + uds: Path to a Unix Domain Socket to use instead of TCP sockets. + network_backend: A backend instance to use for handling network I/O. + socket_options: Socket options that have to be included + in the TCP socket when the connection was established. + """ + self._ssl_context = ssl_context + + self._max_connections = ( + sys.maxsize if max_connections is None else max_connections + ) + self._max_keepalive_connections = ( + sys.maxsize + if max_keepalive_connections is None + else max_keepalive_connections + ) + self._max_keepalive_connections = min( + self._max_connections, self._max_keepalive_connections + ) + + self._keepalive_expiry = keepalive_expiry + self._http1 = http1 + self._http2 = http2 + self._retries = retries + self._local_address = local_address + self._uds = uds + + self._pool: List[ConnectionInterface] = [] + self._requests: List[RequestStatus] = [] + self._pool_lock = Lock() + self._network_backend = ( + SyncBackend() if network_backend is None else network_backend + ) + self._socket_options = socket_options + + def create_connection(self, origin: Origin) -> ConnectionInterface: + return HTTPConnection( + origin=origin, + ssl_context=self._ssl_context, + keepalive_expiry=self._keepalive_expiry, + http1=self._http1, + http2=self._http2, + retries=self._retries, + local_address=self._local_address, + uds=self._uds, + network_backend=self._network_backend, + socket_options=self._socket_options, + ) + + @property + def connections(self) -> List[ConnectionInterface]: + """ + Return a list of the connections currently in the pool. + + For example: + + ```python + >>> pool.connections + [ + , + , + , + ] + ``` + """ + return list(self._pool) + + def _attempt_to_acquire_connection(self, status: RequestStatus) -> bool: + """ + Attempt to provide a connection that can handle the given origin. + """ + origin = status.request.url.origin + + # If there are queued requests in front of us, then don't acquire a + # connection. We handle requests strictly in order. + waiting = [s for s in self._requests if s.connection is None] + if waiting and waiting[0] is not status: + return False + + # Reuse an existing connection if one is currently available. + for idx, connection in enumerate(self._pool): + if connection.can_handle_request(origin) and connection.is_available(): + self._pool.pop(idx) + self._pool.insert(0, connection) + status.set_connection(connection) + return True + + # If the pool is currently full, attempt to close one idle connection. + if len(self._pool) >= self._max_connections: + for idx, connection in reversed(list(enumerate(self._pool))): + if connection.is_idle(): + connection.close() + self._pool.pop(idx) + break + + # If the pool is still full, then we cannot acquire a connection. + if len(self._pool) >= self._max_connections: + return False + + # Otherwise create a new connection. + connection = self.create_connection(origin) + self._pool.insert(0, connection) + status.set_connection(connection) + return True + + def _close_expired_connections(self) -> None: + """ + Clean up the connection pool by closing off any connections that have expired. + """ + # Close any connections that have expired their keep-alive time. + for idx, connection in reversed(list(enumerate(self._pool))): + if connection.has_expired(): + connection.close() + self._pool.pop(idx) + + # If the pool size exceeds the maximum number of allowed keep-alive connections, + # then close off idle connections as required. + pool_size = len(self._pool) + for idx, connection in reversed(list(enumerate(self._pool))): + if connection.is_idle() and pool_size > self._max_keepalive_connections: + connection.close() + self._pool.pop(idx) + pool_size -= 1 + + def handle_request(self, request: Request) -> Response: + """ + Send an HTTP request, and return an HTTP response. + + This is the core implementation that is called into by `.request()` or `.stream()`. + """ + scheme = request.url.scheme.decode() + if scheme == "": + raise UnsupportedProtocol( + "Request URL is missing an 'http://' or 'https://' protocol." + ) + if scheme not in ("http", "https", "ws", "wss"): + raise UnsupportedProtocol( + f"Request URL has an unsupported protocol '{scheme}://'." + ) + + status = RequestStatus(request) + + with self._pool_lock: + self._requests.append(status) + self._close_expired_connections() + self._attempt_to_acquire_connection(status) + + while True: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("pool", None) + try: + connection = status.wait_for_connection(timeout=timeout) + except BaseException as exc: + # If we timeout here, or if the task is cancelled, then make + # sure to remove the request from the queue before bubbling + # up the exception. + with self._pool_lock: + # Ensure only remove when task exists. + if status in self._requests: + self._requests.remove(status) + raise exc + + try: + response = connection.handle_request(request) + except ConnectionNotAvailable: + # The ConnectionNotAvailable exception is a special case, that + # indicates we need to retry the request on a new connection. + # + # The most common case where this can occur is when multiple + # requests are queued waiting for a single connection, which + # might end up as an HTTP/2 connection, but which actually ends + # up as HTTP/1.1. + with self._pool_lock: + # Maintain our position in the request queue, but reset the + # status so that the request becomes queued again. + status.unset_connection() + self._attempt_to_acquire_connection(status) + except BaseException as exc: + with ShieldCancellation(): + self.response_closed(status) + raise exc + else: + break + + # When we return the response, we wrap the stream in a special class + # that handles notifying the connection pool once the response + # has been released. + assert isinstance(response.stream, Iterable) + return Response( + status=response.status, + headers=response.headers, + content=ConnectionPoolByteStream(response.stream, self, status), + extensions=response.extensions, + ) + + def response_closed(self, status: RequestStatus) -> None: + """ + This method acts as a callback once the request/response cycle is complete. + + It is called into from the `ConnectionPoolByteStream.close()` method. + """ + assert status.connection is not None + connection = status.connection + + with self._pool_lock: + # Update the state of the connection pool. + if status in self._requests: + self._requests.remove(status) + + if connection.is_closed() and connection in self._pool: + self._pool.remove(connection) + + # Since we've had a response closed, it's possible we'll now be able + # to service one or more requests that are currently pending. + for status in self._requests: + if status.connection is None: + acquired = self._attempt_to_acquire_connection(status) + # If we could not acquire a connection for a queued request + # then we don't need to check anymore requests that are + # queued later behind it. + if not acquired: + break + + # Housekeeping. + self._close_expired_connections() + + def close(self) -> None: + """ + Close any connections in the pool. + """ + with self._pool_lock: + for connection in self._pool: + connection.close() + self._pool = [] + self._requests = [] + + def __enter__(self) -> "ConnectionPool": + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]] = None, + exc_value: Optional[BaseException] = None, + traceback: Optional[TracebackType] = None, + ) -> None: + self.close() + + +class ConnectionPoolByteStream: + """ + A wrapper around the response byte stream, that additionally handles + notifying the connection pool when the response has been closed. + """ + + def __init__( + self, + stream: Iterable[bytes], + pool: ConnectionPool, + status: RequestStatus, + ) -> None: + self._stream = stream + self._pool = pool + self._status = status + + def __iter__(self) -> Iterator[bytes]: + for part in self._stream: + yield part + + def close(self) -> None: + try: + if hasattr(self._stream, "close"): + self._stream.close() + finally: + with ShieldCancellation(): + self._pool.response_closed(self._status) diff --git a/Backend/venv/lib/python3.12/site-packages/httpcore/_sync/http11.py b/Backend/venv/lib/python3.12/site-packages/httpcore/_sync/http11.py new file mode 100644 index 00000000..edcce72a --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/httpcore/_sync/http11.py @@ -0,0 +1,331 @@ +import enum +import logging +import time +from types import TracebackType +from typing import ( + Iterable, + Iterator, + List, + Optional, + Tuple, + Type, + Union, + cast, +) + +import h11 + +from .._backends.base import NetworkStream +from .._exceptions import ( + ConnectionNotAvailable, + LocalProtocolError, + RemoteProtocolError, + map_exceptions, +) +from .._models import Origin, Request, Response +from .._synchronization import Lock, ShieldCancellation +from .._trace import Trace +from .interfaces import ConnectionInterface + +logger = logging.getLogger("httpcore.http11") + + +# A subset of `h11.Event` types supported by `_send_event` +H11SendEvent = Union[ + h11.Request, + h11.Data, + h11.EndOfMessage, +] + + +class HTTPConnectionState(enum.IntEnum): + NEW = 0 + ACTIVE = 1 + IDLE = 2 + CLOSED = 3 + + +class HTTP11Connection(ConnectionInterface): + READ_NUM_BYTES = 64 * 1024 + MAX_INCOMPLETE_EVENT_SIZE = 100 * 1024 + + def __init__( + self, + origin: Origin, + stream: NetworkStream, + keepalive_expiry: Optional[float] = None, + ) -> None: + self._origin = origin + self._network_stream = stream + self._keepalive_expiry: Optional[float] = keepalive_expiry + self._expire_at: Optional[float] = None + self._state = HTTPConnectionState.NEW + self._state_lock = Lock() + self._request_count = 0 + self._h11_state = h11.Connection( + our_role=h11.CLIENT, + max_incomplete_event_size=self.MAX_INCOMPLETE_EVENT_SIZE, + ) + + def handle_request(self, request: Request) -> Response: + if not self.can_handle_request(request.url.origin): + raise RuntimeError( + f"Attempted to send request to {request.url.origin} on connection " + f"to {self._origin}" + ) + + with self._state_lock: + if self._state in (HTTPConnectionState.NEW, HTTPConnectionState.IDLE): + self._request_count += 1 + self._state = HTTPConnectionState.ACTIVE + self._expire_at = None + else: + raise ConnectionNotAvailable() + + try: + kwargs = {"request": request} + with Trace("send_request_headers", logger, request, kwargs) as trace: + self._send_request_headers(**kwargs) + with Trace("send_request_body", logger, request, kwargs) as trace: + self._send_request_body(**kwargs) + with Trace( + "receive_response_headers", logger, request, kwargs + ) as trace: + ( + http_version, + status, + reason_phrase, + headers, + ) = self._receive_response_headers(**kwargs) + trace.return_value = ( + http_version, + status, + reason_phrase, + headers, + ) + + return Response( + status=status, + headers=headers, + content=HTTP11ConnectionByteStream(self, request), + extensions={ + "http_version": http_version, + "reason_phrase": reason_phrase, + "network_stream": self._network_stream, + }, + ) + except BaseException as exc: + with ShieldCancellation(): + with Trace("response_closed", logger, request) as trace: + self._response_closed() + raise exc + + # Sending the request... + + def _send_request_headers(self, request: Request) -> None: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("write", None) + + with map_exceptions({h11.LocalProtocolError: LocalProtocolError}): + event = h11.Request( + method=request.method, + target=request.url.target, + headers=request.headers, + ) + self._send_event(event, timeout=timeout) + + def _send_request_body(self, request: Request) -> None: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("write", None) + + assert isinstance(request.stream, Iterable) + for chunk in request.stream: + event = h11.Data(data=chunk) + self._send_event(event, timeout=timeout) + + self._send_event(h11.EndOfMessage(), timeout=timeout) + + def _send_event( + self, event: h11.Event, timeout: Optional[float] = None + ) -> None: + bytes_to_send = self._h11_state.send(event) + if bytes_to_send is not None: + self._network_stream.write(bytes_to_send, timeout=timeout) + + # Receiving the response... + + def _receive_response_headers( + self, request: Request + ) -> Tuple[bytes, int, bytes, List[Tuple[bytes, bytes]]]: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("read", None) + + while True: + event = self._receive_event(timeout=timeout) + if isinstance(event, h11.Response): + break + if ( + isinstance(event, h11.InformationalResponse) + and event.status_code == 101 + ): + break + + http_version = b"HTTP/" + event.http_version + + # h11 version 0.11+ supports a `raw_items` interface to get the + # raw header casing, rather than the enforced lowercase headers. + headers = event.headers.raw_items() + + return http_version, event.status_code, event.reason, headers + + def _receive_response_body(self, request: Request) -> Iterator[bytes]: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("read", None) + + while True: + event = self._receive_event(timeout=timeout) + if isinstance(event, h11.Data): + yield bytes(event.data) + elif isinstance(event, (h11.EndOfMessage, h11.PAUSED)): + break + + def _receive_event( + self, timeout: Optional[float] = None + ) -> Union[h11.Event, Type[h11.PAUSED]]: + while True: + with map_exceptions({h11.RemoteProtocolError: RemoteProtocolError}): + event = self._h11_state.next_event() + + if event is h11.NEED_DATA: + data = self._network_stream.read( + self.READ_NUM_BYTES, timeout=timeout + ) + + # If we feed this case through h11 we'll raise an exception like: + # + # httpcore.RemoteProtocolError: can't handle event type + # ConnectionClosed when role=SERVER and state=SEND_RESPONSE + # + # Which is accurate, but not very informative from an end-user + # perspective. Instead we handle this case distinctly and treat + # it as a ConnectError. + if data == b"" and self._h11_state.their_state == h11.SEND_RESPONSE: + msg = "Server disconnected without sending a response." + raise RemoteProtocolError(msg) + + self._h11_state.receive_data(data) + else: + # mypy fails to narrow the type in the above if statement above + return cast(Union[h11.Event, Type[h11.PAUSED]], event) + + def _response_closed(self) -> None: + with self._state_lock: + if ( + self._h11_state.our_state is h11.DONE + and self._h11_state.their_state is h11.DONE + ): + self._state = HTTPConnectionState.IDLE + self._h11_state.start_next_cycle() + if self._keepalive_expiry is not None: + now = time.monotonic() + self._expire_at = now + self._keepalive_expiry + else: + self.close() + + # Once the connection is no longer required... + + def close(self) -> None: + # Note that this method unilaterally closes the connection, and does + # not have any kind of locking in place around it. + self._state = HTTPConnectionState.CLOSED + self._network_stream.close() + + # The ConnectionInterface methods provide information about the state of + # the connection, allowing for a connection pooling implementation to + # determine when to reuse and when to close the connection... + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._origin + + def is_available(self) -> bool: + # Note that HTTP/1.1 connections in the "NEW" state are not treated as + # being "available". The control flow which created the connection will + # be able to send an outgoing request, but the connection will not be + # acquired from the connection pool for any other request. + return self._state == HTTPConnectionState.IDLE + + def has_expired(self) -> bool: + now = time.monotonic() + keepalive_expired = self._expire_at is not None and now > self._expire_at + + # If the HTTP connection is idle but the socket is readable, then the + # only valid state is that the socket is about to return b"", indicating + # a server-initiated disconnect. + server_disconnected = ( + self._state == HTTPConnectionState.IDLE + and self._network_stream.get_extra_info("is_readable") + ) + + return keepalive_expired or server_disconnected + + def is_idle(self) -> bool: + return self._state == HTTPConnectionState.IDLE + + def is_closed(self) -> bool: + return self._state == HTTPConnectionState.CLOSED + + def info(self) -> str: + origin = str(self._origin) + return ( + f"{origin!r}, HTTP/1.1, {self._state.name}, " + f"Request Count: {self._request_count}" + ) + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + origin = str(self._origin) + return ( + f"<{class_name} [{origin!r}, {self._state.name}, " + f"Request Count: {self._request_count}]>" + ) + + # These context managers are not used in the standard flow, but are + # useful for testing or working with connection instances directly. + + def __enter__(self) -> "HTTP11Connection": + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]] = None, + exc_value: Optional[BaseException] = None, + traceback: Optional[TracebackType] = None, + ) -> None: + self.close() + + +class HTTP11ConnectionByteStream: + def __init__(self, connection: HTTP11Connection, request: Request) -> None: + self._connection = connection + self._request = request + self._closed = False + + def __iter__(self) -> Iterator[bytes]: + kwargs = {"request": self._request} + try: + with Trace("receive_response_body", logger, self._request, kwargs): + for chunk in self._connection._receive_response_body(**kwargs): + yield chunk + except BaseException as exc: + # If we get an exception while streaming the response, + # we want to close the response (and possibly the connection) + # before raising that exception. + with ShieldCancellation(): + self.close() + raise exc + + def close(self) -> None: + if not self._closed: + self._closed = True + with Trace("response_closed", logger, self._request): + self._connection._response_closed() diff --git a/Backend/venv/lib/python3.12/site-packages/httpcore/_sync/http2.py b/Backend/venv/lib/python3.12/site-packages/httpcore/_sync/http2.py new file mode 100644 index 00000000..d141d459 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/httpcore/_sync/http2.py @@ -0,0 +1,589 @@ +import enum +import logging +import time +import types +import typing + +import h2.config +import h2.connection +import h2.events +import h2.exceptions +import h2.settings + +from .._backends.base import NetworkStream +from .._exceptions import ( + ConnectionNotAvailable, + LocalProtocolError, + RemoteProtocolError, +) +from .._models import Origin, Request, Response +from .._synchronization import Lock, Semaphore, ShieldCancellation +from .._trace import Trace +from .interfaces import ConnectionInterface + +logger = logging.getLogger("httpcore.http2") + + +def has_body_headers(request: Request) -> bool: + return any( + k.lower() == b"content-length" or k.lower() == b"transfer-encoding" + for k, v in request.headers + ) + + +class HTTPConnectionState(enum.IntEnum): + ACTIVE = 1 + IDLE = 2 + CLOSED = 3 + + +class HTTP2Connection(ConnectionInterface): + READ_NUM_BYTES = 64 * 1024 + CONFIG = h2.config.H2Configuration(validate_inbound_headers=False) + + def __init__( + self, + origin: Origin, + stream: NetworkStream, + keepalive_expiry: typing.Optional[float] = None, + ): + self._origin = origin + self._network_stream = stream + self._keepalive_expiry: typing.Optional[float] = keepalive_expiry + self._h2_state = h2.connection.H2Connection(config=self.CONFIG) + self._state = HTTPConnectionState.IDLE + self._expire_at: typing.Optional[float] = None + self._request_count = 0 + self._init_lock = Lock() + self._state_lock = Lock() + self._read_lock = Lock() + self._write_lock = Lock() + self._sent_connection_init = False + self._used_all_stream_ids = False + self._connection_error = False + + # Mapping from stream ID to response stream events. + self._events: typing.Dict[ + int, + typing.Union[ + h2.events.ResponseReceived, + h2.events.DataReceived, + h2.events.StreamEnded, + h2.events.StreamReset, + ], + ] = {} + + # Connection terminated events are stored as state since + # we need to handle them for all streams. + self._connection_terminated: typing.Optional[ + h2.events.ConnectionTerminated + ] = None + + self._read_exception: typing.Optional[Exception] = None + self._write_exception: typing.Optional[Exception] = None + + def handle_request(self, request: Request) -> Response: + if not self.can_handle_request(request.url.origin): + # This cannot occur in normal operation, since the connection pool + # will only send requests on connections that handle them. + # It's in place simply for resilience as a guard against incorrect + # usage, for anyone working directly with httpcore connections. + raise RuntimeError( + f"Attempted to send request to {request.url.origin} on connection " + f"to {self._origin}" + ) + + with self._state_lock: + if self._state in (HTTPConnectionState.ACTIVE, HTTPConnectionState.IDLE): + self._request_count += 1 + self._expire_at = None + self._state = HTTPConnectionState.ACTIVE + else: + raise ConnectionNotAvailable() + + with self._init_lock: + if not self._sent_connection_init: + try: + kwargs = {"request": request} + with Trace("send_connection_init", logger, request, kwargs): + self._send_connection_init(**kwargs) + except BaseException as exc: + with ShieldCancellation(): + self.close() + raise exc + + self._sent_connection_init = True + + # Initially start with just 1 until the remote server provides + # its max_concurrent_streams value + self._max_streams = 1 + + local_settings_max_streams = ( + self._h2_state.local_settings.max_concurrent_streams + ) + self._max_streams_semaphore = Semaphore(local_settings_max_streams) + + for _ in range(local_settings_max_streams - self._max_streams): + self._max_streams_semaphore.acquire() + + self._max_streams_semaphore.acquire() + + try: + stream_id = self._h2_state.get_next_available_stream_id() + self._events[stream_id] = [] + except h2.exceptions.NoAvailableStreamIDError: # pragma: nocover + self._used_all_stream_ids = True + self._request_count -= 1 + raise ConnectionNotAvailable() + + try: + kwargs = {"request": request, "stream_id": stream_id} + with Trace("send_request_headers", logger, request, kwargs): + self._send_request_headers(request=request, stream_id=stream_id) + with Trace("send_request_body", logger, request, kwargs): + self._send_request_body(request=request, stream_id=stream_id) + with Trace( + "receive_response_headers", logger, request, kwargs + ) as trace: + status, headers = self._receive_response( + request=request, stream_id=stream_id + ) + trace.return_value = (status, headers) + + return Response( + status=status, + headers=headers, + content=HTTP2ConnectionByteStream(self, request, stream_id=stream_id), + extensions={ + "http_version": b"HTTP/2", + "network_stream": self._network_stream, + "stream_id": stream_id, + }, + ) + except BaseException as exc: # noqa: PIE786 + with ShieldCancellation(): + kwargs = {"stream_id": stream_id} + with Trace("response_closed", logger, request, kwargs): + self._response_closed(stream_id=stream_id) + + if isinstance(exc, h2.exceptions.ProtocolError): + # One case where h2 can raise a protocol error is when a + # closed frame has been seen by the state machine. + # + # This happens when one stream is reading, and encounters + # a GOAWAY event. Other flows of control may then raise + # a protocol error at any point they interact with the 'h2_state'. + # + # In this case we'll have stored the event, and should raise + # it as a RemoteProtocolError. + if self._connection_terminated: # pragma: nocover + raise RemoteProtocolError(self._connection_terminated) + # If h2 raises a protocol error in some other state then we + # must somehow have made a protocol violation. + raise LocalProtocolError(exc) # pragma: nocover + + raise exc + + def _send_connection_init(self, request: Request) -> None: + """ + The HTTP/2 connection requires some initial setup before we can start + using individual request/response streams on it. + """ + # Need to set these manually here instead of manipulating via + # __setitem__() otherwise the H2Connection will emit SettingsUpdate + # frames in addition to sending the undesired defaults. + self._h2_state.local_settings = h2.settings.Settings( + client=True, + initial_values={ + # Disable PUSH_PROMISE frames from the server since we don't do anything + # with them for now. Maybe when we support caching? + h2.settings.SettingCodes.ENABLE_PUSH: 0, + # These two are taken from h2 for safe defaults + h2.settings.SettingCodes.MAX_CONCURRENT_STREAMS: 100, + h2.settings.SettingCodes.MAX_HEADER_LIST_SIZE: 65536, + }, + ) + + # Some websites (*cough* Yahoo *cough*) balk at this setting being + # present in the initial handshake since it's not defined in the original + # RFC despite the RFC mandating ignoring settings you don't know about. + del self._h2_state.local_settings[ + h2.settings.SettingCodes.ENABLE_CONNECT_PROTOCOL + ] + + self._h2_state.initiate_connection() + self._h2_state.increment_flow_control_window(2**24) + self._write_outgoing_data(request) + + # Sending the request... + + def _send_request_headers(self, request: Request, stream_id: int) -> None: + """ + Send the request headers to a given stream ID. + """ + end_stream = not has_body_headers(request) + + # In HTTP/2 the ':authority' pseudo-header is used instead of 'Host'. + # In order to gracefully handle HTTP/1.1 and HTTP/2 we always require + # HTTP/1.1 style headers, and map them appropriately if we end up on + # an HTTP/2 connection. + authority = [v for k, v in request.headers if k.lower() == b"host"][0] + + headers = [ + (b":method", request.method), + (b":authority", authority), + (b":scheme", request.url.scheme), + (b":path", request.url.target), + ] + [ + (k.lower(), v) + for k, v in request.headers + if k.lower() + not in ( + b"host", + b"transfer-encoding", + ) + ] + + self._h2_state.send_headers(stream_id, headers, end_stream=end_stream) + self._h2_state.increment_flow_control_window(2**24, stream_id=stream_id) + self._write_outgoing_data(request) + + def _send_request_body(self, request: Request, stream_id: int) -> None: + """ + Iterate over the request body sending it to a given stream ID. + """ + if not has_body_headers(request): + return + + assert isinstance(request.stream, typing.Iterable) + for data in request.stream: + self._send_stream_data(request, stream_id, data) + self._send_end_stream(request, stream_id) + + def _send_stream_data( + self, request: Request, stream_id: int, data: bytes + ) -> None: + """ + Send a single chunk of data in one or more data frames. + """ + while data: + max_flow = self._wait_for_outgoing_flow(request, stream_id) + chunk_size = min(len(data), max_flow) + chunk, data = data[:chunk_size], data[chunk_size:] + self._h2_state.send_data(stream_id, chunk) + self._write_outgoing_data(request) + + def _send_end_stream(self, request: Request, stream_id: int) -> None: + """ + Send an empty data frame on on a given stream ID with the END_STREAM flag set. + """ + self._h2_state.end_stream(stream_id) + self._write_outgoing_data(request) + + # Receiving the response... + + def _receive_response( + self, request: Request, stream_id: int + ) -> typing.Tuple[int, typing.List[typing.Tuple[bytes, bytes]]]: + """ + Return the response status code and headers for a given stream ID. + """ + while True: + event = self._receive_stream_event(request, stream_id) + if isinstance(event, h2.events.ResponseReceived): + break + + status_code = 200 + headers = [] + for k, v in event.headers: + if k == b":status": + status_code = int(v.decode("ascii", errors="ignore")) + elif not k.startswith(b":"): + headers.append((k, v)) + + return (status_code, headers) + + def _receive_response_body( + self, request: Request, stream_id: int + ) -> typing.Iterator[bytes]: + """ + Iterator that returns the bytes of the response body for a given stream ID. + """ + while True: + event = self._receive_stream_event(request, stream_id) + if isinstance(event, h2.events.DataReceived): + amount = event.flow_controlled_length + self._h2_state.acknowledge_received_data(amount, stream_id) + self._write_outgoing_data(request) + yield event.data + elif isinstance(event, h2.events.StreamEnded): + break + + def _receive_stream_event( + self, request: Request, stream_id: int + ) -> typing.Union[ + h2.events.ResponseReceived, h2.events.DataReceived, h2.events.StreamEnded + ]: + """ + Return the next available event for a given stream ID. + + Will read more data from the network if required. + """ + while not self._events.get(stream_id): + self._receive_events(request, stream_id) + event = self._events[stream_id].pop(0) + if isinstance(event, h2.events.StreamReset): + raise RemoteProtocolError(event) + return event + + def _receive_events( + self, request: Request, stream_id: typing.Optional[int] = None + ) -> None: + """ + Read some data from the network until we see one or more events + for a given stream ID. + """ + with self._read_lock: + if self._connection_terminated is not None: + last_stream_id = self._connection_terminated.last_stream_id + if stream_id and last_stream_id and stream_id > last_stream_id: + self._request_count -= 1 + raise ConnectionNotAvailable() + raise RemoteProtocolError(self._connection_terminated) + + # This conditional is a bit icky. We don't want to block reading if we've + # actually got an event to return for a given stream. We need to do that + # check *within* the atomic read lock. Though it also need to be optional, + # because when we call it from `_wait_for_outgoing_flow` we *do* want to + # block until we've available flow control, event when we have events + # pending for the stream ID we're attempting to send on. + if stream_id is None or not self._events.get(stream_id): + events = self._read_incoming_data(request) + for event in events: + if isinstance(event, h2.events.RemoteSettingsChanged): + with Trace( + "receive_remote_settings", logger, request + ) as trace: + self._receive_remote_settings_change(event) + trace.return_value = event + + elif isinstance( + event, + ( + h2.events.ResponseReceived, + h2.events.DataReceived, + h2.events.StreamEnded, + h2.events.StreamReset, + ), + ): + if event.stream_id in self._events: + self._events[event.stream_id].append(event) + + elif isinstance(event, h2.events.ConnectionTerminated): + self._connection_terminated = event + + self._write_outgoing_data(request) + + def _receive_remote_settings_change(self, event: h2.events.Event) -> None: + max_concurrent_streams = event.changed_settings.get( + h2.settings.SettingCodes.MAX_CONCURRENT_STREAMS + ) + if max_concurrent_streams: + new_max_streams = min( + max_concurrent_streams.new_value, + self._h2_state.local_settings.max_concurrent_streams, + ) + if new_max_streams and new_max_streams != self._max_streams: + while new_max_streams > self._max_streams: + self._max_streams_semaphore.release() + self._max_streams += 1 + while new_max_streams < self._max_streams: + self._max_streams_semaphore.acquire() + self._max_streams -= 1 + + def _response_closed(self, stream_id: int) -> None: + self._max_streams_semaphore.release() + del self._events[stream_id] + with self._state_lock: + if self._connection_terminated and not self._events: + self.close() + + elif self._state == HTTPConnectionState.ACTIVE and not self._events: + self._state = HTTPConnectionState.IDLE + if self._keepalive_expiry is not None: + now = time.monotonic() + self._expire_at = now + self._keepalive_expiry + if self._used_all_stream_ids: # pragma: nocover + self.close() + + def close(self) -> None: + # Note that this method unilaterally closes the connection, and does + # not have any kind of locking in place around it. + self._h2_state.close_connection() + self._state = HTTPConnectionState.CLOSED + self._network_stream.close() + + # Wrappers around network read/write operations... + + def _read_incoming_data( + self, request: Request + ) -> typing.List[h2.events.Event]: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("read", None) + + if self._read_exception is not None: + raise self._read_exception # pragma: nocover + + try: + data = self._network_stream.read(self.READ_NUM_BYTES, timeout) + if data == b"": + raise RemoteProtocolError("Server disconnected") + except Exception as exc: + # If we get a network error we should: + # + # 1. Save the exception and just raise it immediately on any future reads. + # (For example, this means that a single read timeout or disconnect will + # immediately close all pending streams. Without requiring multiple + # sequential timeouts.) + # 2. Mark the connection as errored, so that we don't accept any other + # incoming requests. + self._read_exception = exc + self._connection_error = True + raise exc + + events: typing.List[h2.events.Event] = self._h2_state.receive_data(data) + + return events + + def _write_outgoing_data(self, request: Request) -> None: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("write", None) + + with self._write_lock: + data_to_send = self._h2_state.data_to_send() + + if self._write_exception is not None: + raise self._write_exception # pragma: nocover + + try: + self._network_stream.write(data_to_send, timeout) + except Exception as exc: # pragma: nocover + # If we get a network error we should: + # + # 1. Save the exception and just raise it immediately on any future write. + # (For example, this means that a single write timeout or disconnect will + # immediately close all pending streams. Without requiring multiple + # sequential timeouts.) + # 2. Mark the connection as errored, so that we don't accept any other + # incoming requests. + self._write_exception = exc + self._connection_error = True + raise exc + + # Flow control... + + def _wait_for_outgoing_flow(self, request: Request, stream_id: int) -> int: + """ + Returns the maximum allowable outgoing flow for a given stream. + + If the allowable flow is zero, then waits on the network until + WindowUpdated frames have increased the flow rate. + https://tools.ietf.org/html/rfc7540#section-6.9 + """ + local_flow: int = self._h2_state.local_flow_control_window(stream_id) + max_frame_size: int = self._h2_state.max_outbound_frame_size + flow = min(local_flow, max_frame_size) + while flow == 0: + self._receive_events(request) + local_flow = self._h2_state.local_flow_control_window(stream_id) + max_frame_size = self._h2_state.max_outbound_frame_size + flow = min(local_flow, max_frame_size) + return flow + + # Interface for connection pooling... + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._origin + + def is_available(self) -> bool: + return ( + self._state != HTTPConnectionState.CLOSED + and not self._connection_error + and not self._used_all_stream_ids + and not ( + self._h2_state.state_machine.state + == h2.connection.ConnectionState.CLOSED + ) + ) + + def has_expired(self) -> bool: + now = time.monotonic() + return self._expire_at is not None and now > self._expire_at + + def is_idle(self) -> bool: + return self._state == HTTPConnectionState.IDLE + + def is_closed(self) -> bool: + return self._state == HTTPConnectionState.CLOSED + + def info(self) -> str: + origin = str(self._origin) + return ( + f"{origin!r}, HTTP/2, {self._state.name}, " + f"Request Count: {self._request_count}" + ) + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + origin = str(self._origin) + return ( + f"<{class_name} [{origin!r}, {self._state.name}, " + f"Request Count: {self._request_count}]>" + ) + + # These context managers are not used in the standard flow, but are + # useful for testing or working with connection instances directly. + + def __enter__(self) -> "HTTP2Connection": + return self + + def __exit__( + self, + exc_type: typing.Optional[typing.Type[BaseException]] = None, + exc_value: typing.Optional[BaseException] = None, + traceback: typing.Optional[types.TracebackType] = None, + ) -> None: + self.close() + + +class HTTP2ConnectionByteStream: + def __init__( + self, connection: HTTP2Connection, request: Request, stream_id: int + ) -> None: + self._connection = connection + self._request = request + self._stream_id = stream_id + self._closed = False + + def __iter__(self) -> typing.Iterator[bytes]: + kwargs = {"request": self._request, "stream_id": self._stream_id} + try: + with Trace("receive_response_body", logger, self._request, kwargs): + for chunk in self._connection._receive_response_body( + request=self._request, stream_id=self._stream_id + ): + yield chunk + except BaseException as exc: + # If we get an exception while streaming the response, + # we want to close the response (and possibly the connection) + # before raising that exception. + with ShieldCancellation(): + self.close() + raise exc + + def close(self) -> None: + if not self._closed: + self._closed = True + kwargs = {"stream_id": self._stream_id} + with Trace("response_closed", logger, self._request, kwargs): + self._connection._response_closed(stream_id=self._stream_id) diff --git a/Backend/venv/lib/python3.12/site-packages/httpcore/_sync/http_proxy.py b/Backend/venv/lib/python3.12/site-packages/httpcore/_sync/http_proxy.py new file mode 100644 index 00000000..bb368dd4 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/httpcore/_sync/http_proxy.py @@ -0,0 +1,350 @@ +import logging +import ssl +from base64 import b64encode +from typing import Iterable, List, Mapping, Optional, Sequence, Tuple, Union + +from .._backends.base import SOCKET_OPTION, NetworkBackend +from .._exceptions import ProxyError +from .._models import ( + URL, + Origin, + Request, + Response, + enforce_bytes, + enforce_headers, + enforce_url, +) +from .._ssl import default_ssl_context +from .._synchronization import Lock +from .._trace import Trace +from .connection import HTTPConnection +from .connection_pool import ConnectionPool +from .http11 import HTTP11Connection +from .interfaces import ConnectionInterface + +HeadersAsSequence = Sequence[Tuple[Union[bytes, str], Union[bytes, str]]] +HeadersAsMapping = Mapping[Union[bytes, str], Union[bytes, str]] + + +logger = logging.getLogger("httpcore.proxy") + + +def merge_headers( + default_headers: Optional[Sequence[Tuple[bytes, bytes]]] = None, + override_headers: Optional[Sequence[Tuple[bytes, bytes]]] = None, +) -> List[Tuple[bytes, bytes]]: + """ + Append default_headers and override_headers, de-duplicating if a key exists + in both cases. + """ + default_headers = [] if default_headers is None else list(default_headers) + override_headers = [] if override_headers is None else list(override_headers) + has_override = set(key.lower() for key, value in override_headers) + default_headers = [ + (key, value) + for key, value in default_headers + if key.lower() not in has_override + ] + return default_headers + override_headers + + +def build_auth_header(username: bytes, password: bytes) -> bytes: + userpass = username + b":" + password + return b"Basic " + b64encode(userpass) + + +class HTTPProxy(ConnectionPool): + """ + A connection pool that sends requests via an HTTP proxy. + """ + + def __init__( + self, + proxy_url: Union[URL, bytes, str], + proxy_auth: Optional[Tuple[Union[bytes, str], Union[bytes, str]]] = None, + proxy_headers: Union[HeadersAsMapping, HeadersAsSequence, None] = None, + ssl_context: Optional[ssl.SSLContext] = None, + max_connections: Optional[int] = 10, + max_keepalive_connections: Optional[int] = None, + keepalive_expiry: Optional[float] = None, + http1: bool = True, + http2: bool = False, + retries: int = 0, + local_address: Optional[str] = None, + uds: Optional[str] = None, + network_backend: Optional[NetworkBackend] = None, + socket_options: Optional[Iterable[SOCKET_OPTION]] = None, + ) -> None: + """ + A connection pool for making HTTP requests. + + Parameters: + proxy_url: The URL to use when connecting to the proxy server. + For example `"http://127.0.0.1:8080/"`. + proxy_auth: Any proxy authentication as a two-tuple of + (username, password). May be either bytes or ascii-only str. + proxy_headers: Any HTTP headers to use for the proxy requests. + For example `{"Proxy-Authorization": "Basic :"}`. + ssl_context: An SSL context to use for verifying connections. + If not specified, the default `httpcore.default_ssl_context()` + will be used. + max_connections: The maximum number of concurrent HTTP connections that + the pool should allow. Any attempt to send a request on a pool that + would exceed this amount will block until a connection is available. + max_keepalive_connections: The maximum number of idle HTTP connections + that will be maintained in the pool. + keepalive_expiry: The duration in seconds that an idle HTTP connection + may be maintained for before being expired from the pool. + http1: A boolean indicating if HTTP/1.1 requests should be supported + by the connection pool. Defaults to True. + http2: A boolean indicating if HTTP/2 requests should be supported by + the connection pool. Defaults to False. + retries: The maximum number of retries when trying to establish + a connection. + local_address: Local address to connect from. Can also be used to + connect using a particular address family. Using + `local_address="0.0.0.0"` will connect using an `AF_INET` address + (IPv4), while using `local_address="::"` will connect using an + `AF_INET6` address (IPv6). + uds: Path to a Unix Domain Socket to use instead of TCP sockets. + network_backend: A backend instance to use for handling network I/O. + """ + super().__init__( + ssl_context=ssl_context, + max_connections=max_connections, + max_keepalive_connections=max_keepalive_connections, + keepalive_expiry=keepalive_expiry, + http1=http1, + http2=http2, + network_backend=network_backend, + retries=retries, + local_address=local_address, + uds=uds, + socket_options=socket_options, + ) + self._ssl_context = ssl_context + self._proxy_url = enforce_url(proxy_url, name="proxy_url") + self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") + if proxy_auth is not None: + username = enforce_bytes(proxy_auth[0], name="proxy_auth") + password = enforce_bytes(proxy_auth[1], name="proxy_auth") + authorization = build_auth_header(username, password) + self._proxy_headers = [ + (b"Proxy-Authorization", authorization) + ] + self._proxy_headers + + def create_connection(self, origin: Origin) -> ConnectionInterface: + if origin.scheme == b"http": + return ForwardHTTPConnection( + proxy_origin=self._proxy_url.origin, + proxy_headers=self._proxy_headers, + remote_origin=origin, + keepalive_expiry=self._keepalive_expiry, + network_backend=self._network_backend, + ) + return TunnelHTTPConnection( + proxy_origin=self._proxy_url.origin, + proxy_headers=self._proxy_headers, + remote_origin=origin, + ssl_context=self._ssl_context, + keepalive_expiry=self._keepalive_expiry, + http1=self._http1, + http2=self._http2, + network_backend=self._network_backend, + ) + + +class ForwardHTTPConnection(ConnectionInterface): + def __init__( + self, + proxy_origin: Origin, + remote_origin: Origin, + proxy_headers: Union[HeadersAsMapping, HeadersAsSequence, None] = None, + keepalive_expiry: Optional[float] = None, + network_backend: Optional[NetworkBackend] = None, + socket_options: Optional[Iterable[SOCKET_OPTION]] = None, + ) -> None: + self._connection = HTTPConnection( + origin=proxy_origin, + keepalive_expiry=keepalive_expiry, + network_backend=network_backend, + socket_options=socket_options, + ) + self._proxy_origin = proxy_origin + self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") + self._remote_origin = remote_origin + + def handle_request(self, request: Request) -> Response: + headers = merge_headers(self._proxy_headers, request.headers) + url = URL( + scheme=self._proxy_origin.scheme, + host=self._proxy_origin.host, + port=self._proxy_origin.port, + target=bytes(request.url), + ) + proxy_request = Request( + method=request.method, + url=url, + headers=headers, + content=request.stream, + extensions=request.extensions, + ) + return self._connection.handle_request(proxy_request) + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._remote_origin + + def close(self) -> None: + self._connection.close() + + def info(self) -> str: + return self._connection.info() + + def is_available(self) -> bool: + return self._connection.is_available() + + def has_expired(self) -> bool: + return self._connection.has_expired() + + def is_idle(self) -> bool: + return self._connection.is_idle() + + def is_closed(self) -> bool: + return self._connection.is_closed() + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.info()}]>" + + +class TunnelHTTPConnection(ConnectionInterface): + def __init__( + self, + proxy_origin: Origin, + remote_origin: Origin, + ssl_context: Optional[ssl.SSLContext] = None, + proxy_headers: Optional[Sequence[Tuple[bytes, bytes]]] = None, + keepalive_expiry: Optional[float] = None, + http1: bool = True, + http2: bool = False, + network_backend: Optional[NetworkBackend] = None, + socket_options: Optional[Iterable[SOCKET_OPTION]] = None, + ) -> None: + self._connection: ConnectionInterface = HTTPConnection( + origin=proxy_origin, + keepalive_expiry=keepalive_expiry, + network_backend=network_backend, + socket_options=socket_options, + ) + self._proxy_origin = proxy_origin + self._remote_origin = remote_origin + self._ssl_context = ssl_context + self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") + self._keepalive_expiry = keepalive_expiry + self._http1 = http1 + self._http2 = http2 + self._connect_lock = Lock() + self._connected = False + + def handle_request(self, request: Request) -> Response: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("connect", None) + + with self._connect_lock: + if not self._connected: + target = b"%b:%d" % (self._remote_origin.host, self._remote_origin.port) + + connect_url = URL( + scheme=self._proxy_origin.scheme, + host=self._proxy_origin.host, + port=self._proxy_origin.port, + target=target, + ) + connect_headers = merge_headers( + [(b"Host", target), (b"Accept", b"*/*")], self._proxy_headers + ) + connect_request = Request( + method=b"CONNECT", + url=connect_url, + headers=connect_headers, + extensions=request.extensions, + ) + connect_response = self._connection.handle_request( + connect_request + ) + + if connect_response.status < 200 or connect_response.status > 299: + reason_bytes = connect_response.extensions.get("reason_phrase", b"") + reason_str = reason_bytes.decode("ascii", errors="ignore") + msg = "%d %s" % (connect_response.status, reason_str) + self._connection.close() + raise ProxyError(msg) + + stream = connect_response.extensions["network_stream"] + + # Upgrade the stream to SSL + ssl_context = ( + default_ssl_context() + if self._ssl_context is None + else self._ssl_context + ) + alpn_protocols = ["http/1.1", "h2"] if self._http2 else ["http/1.1"] + ssl_context.set_alpn_protocols(alpn_protocols) + + kwargs = { + "ssl_context": ssl_context, + "server_hostname": self._remote_origin.host.decode("ascii"), + "timeout": timeout, + } + with Trace("start_tls", logger, request, kwargs) as trace: + stream = stream.start_tls(**kwargs) + trace.return_value = stream + + # Determine if we should be using HTTP/1.1 or HTTP/2 + ssl_object = stream.get_extra_info("ssl_object") + http2_negotiated = ( + ssl_object is not None + and ssl_object.selected_alpn_protocol() == "h2" + ) + + # Create the HTTP/1.1 or HTTP/2 connection + if http2_negotiated or (self._http2 and not self._http1): + from .http2 import HTTP2Connection + + self._connection = HTTP2Connection( + origin=self._remote_origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + else: + self._connection = HTTP11Connection( + origin=self._remote_origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + + self._connected = True + return self._connection.handle_request(request) + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._remote_origin + + def close(self) -> None: + self._connection.close() + + def info(self) -> str: + return self._connection.info() + + def is_available(self) -> bool: + return self._connection.is_available() + + def has_expired(self) -> bool: + return self._connection.has_expired() + + def is_idle(self) -> bool: + return self._connection.is_idle() + + def is_closed(self) -> bool: + return self._connection.is_closed() + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.info()}]>" diff --git a/Backend/venv/lib/python3.12/site-packages/httpcore/_sync/interfaces.py b/Backend/venv/lib/python3.12/site-packages/httpcore/_sync/interfaces.py new file mode 100644 index 00000000..5e95be1e --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/httpcore/_sync/interfaces.py @@ -0,0 +1,135 @@ +from contextlib import contextmanager +from typing import Iterator, Optional, Union + +from .._models import ( + URL, + Extensions, + HeaderTypes, + Origin, + Request, + Response, + enforce_bytes, + enforce_headers, + enforce_url, + include_request_headers, +) + + +class RequestInterface: + def request( + self, + method: Union[bytes, str], + url: Union[URL, bytes, str], + *, + headers: HeaderTypes = None, + content: Union[bytes, Iterator[bytes], None] = None, + extensions: Optional[Extensions] = None, + ) -> Response: + # Strict type checking on our parameters. + method = enforce_bytes(method, name="method") + url = enforce_url(url, name="url") + headers = enforce_headers(headers, name="headers") + + # Include Host header, and optionally Content-Length or Transfer-Encoding. + headers = include_request_headers(headers, url=url, content=content) + + request = Request( + method=method, + url=url, + headers=headers, + content=content, + extensions=extensions, + ) + response = self.handle_request(request) + try: + response.read() + finally: + response.close() + return response + + @contextmanager + def stream( + self, + method: Union[bytes, str], + url: Union[URL, bytes, str], + *, + headers: HeaderTypes = None, + content: Union[bytes, Iterator[bytes], None] = None, + extensions: Optional[Extensions] = None, + ) -> Iterator[Response]: + # Strict type checking on our parameters. + method = enforce_bytes(method, name="method") + url = enforce_url(url, name="url") + headers = enforce_headers(headers, name="headers") + + # Include Host header, and optionally Content-Length or Transfer-Encoding. + headers = include_request_headers(headers, url=url, content=content) + + request = Request( + method=method, + url=url, + headers=headers, + content=content, + extensions=extensions, + ) + response = self.handle_request(request) + try: + yield response + finally: + response.close() + + def handle_request(self, request: Request) -> Response: + raise NotImplementedError() # pragma: nocover + + +class ConnectionInterface(RequestInterface): + def close(self) -> None: + raise NotImplementedError() # pragma: nocover + + def info(self) -> str: + raise NotImplementedError() # pragma: nocover + + def can_handle_request(self, origin: Origin) -> bool: + raise NotImplementedError() # pragma: nocover + + def is_available(self) -> bool: + """ + Return `True` if the connection is currently able to accept an + outgoing request. + + An HTTP/1.1 connection will only be available if it is currently idle. + + An HTTP/2 connection will be available so long as the stream ID space is + not yet exhausted, and the connection is not in an error state. + + While the connection is being established we may not yet know if it is going + to result in an HTTP/1.1 or HTTP/2 connection. The connection should be + treated as being available, but might ultimately raise `NewConnectionRequired` + required exceptions if multiple requests are attempted over a connection + that ends up being established as HTTP/1.1. + """ + raise NotImplementedError() # pragma: nocover + + def has_expired(self) -> bool: + """ + Return `True` if the connection is in a state where it should be closed. + + This either means that the connection is idle and it has passed the + expiry time on its keep-alive, or that server has sent an EOF. + """ + raise NotImplementedError() # pragma: nocover + + def is_idle(self) -> bool: + """ + Return `True` if the connection is currently idle. + """ + raise NotImplementedError() # pragma: nocover + + def is_closed(self) -> bool: + """ + Return `True` if the connection has been closed. + + Used when a response is closed to determine if the connection may be + returned to the connection pool or not. + """ + raise NotImplementedError() # pragma: nocover diff --git a/Backend/venv/lib/python3.12/site-packages/httpcore/_sync/socks_proxy.py b/Backend/venv/lib/python3.12/site-packages/httpcore/_sync/socks_proxy.py new file mode 100644 index 00000000..407351d0 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/httpcore/_sync/socks_proxy.py @@ -0,0 +1,340 @@ +import logging +import ssl +import typing + +from socksio import socks5 + +from .._backends.sync import SyncBackend +from .._backends.base import NetworkBackend, NetworkStream +from .._exceptions import ConnectionNotAvailable, ProxyError +from .._models import URL, Origin, Request, Response, enforce_bytes, enforce_url +from .._ssl import default_ssl_context +from .._synchronization import Lock +from .._trace import Trace +from .connection_pool import ConnectionPool +from .http11 import HTTP11Connection +from .interfaces import ConnectionInterface + +logger = logging.getLogger("httpcore.socks") + + +AUTH_METHODS = { + b"\x00": "NO AUTHENTICATION REQUIRED", + b"\x01": "GSSAPI", + b"\x02": "USERNAME/PASSWORD", + b"\xff": "NO ACCEPTABLE METHODS", +} + +REPLY_CODES = { + b"\x00": "Succeeded", + b"\x01": "General SOCKS server failure", + b"\x02": "Connection not allowed by ruleset", + b"\x03": "Network unreachable", + b"\x04": "Host unreachable", + b"\x05": "Connection refused", + b"\x06": "TTL expired", + b"\x07": "Command not supported", + b"\x08": "Address type not supported", +} + + +def _init_socks5_connection( + stream: NetworkStream, + *, + host: bytes, + port: int, + auth: typing.Optional[typing.Tuple[bytes, bytes]] = None, +) -> None: + conn = socks5.SOCKS5Connection() + + # Auth method request + auth_method = ( + socks5.SOCKS5AuthMethod.NO_AUTH_REQUIRED + if auth is None + else socks5.SOCKS5AuthMethod.USERNAME_PASSWORD + ) + conn.send(socks5.SOCKS5AuthMethodsRequest([auth_method])) + outgoing_bytes = conn.data_to_send() + stream.write(outgoing_bytes) + + # Auth method response + incoming_bytes = stream.read(max_bytes=4096) + response = conn.receive_data(incoming_bytes) + assert isinstance(response, socks5.SOCKS5AuthReply) + if response.method != auth_method: + requested = AUTH_METHODS.get(auth_method, "UNKNOWN") + responded = AUTH_METHODS.get(response.method, "UNKNOWN") + raise ProxyError( + f"Requested {requested} from proxy server, but got {responded}." + ) + + if response.method == socks5.SOCKS5AuthMethod.USERNAME_PASSWORD: + # Username/password request + assert auth is not None + username, password = auth + conn.send(socks5.SOCKS5UsernamePasswordRequest(username, password)) + outgoing_bytes = conn.data_to_send() + stream.write(outgoing_bytes) + + # Username/password response + incoming_bytes = stream.read(max_bytes=4096) + response = conn.receive_data(incoming_bytes) + assert isinstance(response, socks5.SOCKS5UsernamePasswordReply) + if not response.success: + raise ProxyError("Invalid username/password") + + # Connect request + conn.send( + socks5.SOCKS5CommandRequest.from_address( + socks5.SOCKS5Command.CONNECT, (host, port) + ) + ) + outgoing_bytes = conn.data_to_send() + stream.write(outgoing_bytes) + + # Connect response + incoming_bytes = stream.read(max_bytes=4096) + response = conn.receive_data(incoming_bytes) + assert isinstance(response, socks5.SOCKS5Reply) + if response.reply_code != socks5.SOCKS5ReplyCode.SUCCEEDED: + reply_code = REPLY_CODES.get(response.reply_code, "UNKOWN") + raise ProxyError(f"Proxy Server could not connect: {reply_code}.") + + +class SOCKSProxy(ConnectionPool): + """ + A connection pool that sends requests via an HTTP proxy. + """ + + def __init__( + self, + proxy_url: typing.Union[URL, bytes, str], + proxy_auth: typing.Optional[ + typing.Tuple[typing.Union[bytes, str], typing.Union[bytes, str]] + ] = None, + ssl_context: typing.Optional[ssl.SSLContext] = None, + max_connections: typing.Optional[int] = 10, + max_keepalive_connections: typing.Optional[int] = None, + keepalive_expiry: typing.Optional[float] = None, + http1: bool = True, + http2: bool = False, + retries: int = 0, + network_backend: typing.Optional[NetworkBackend] = None, + ) -> None: + """ + A connection pool for making HTTP requests. + + Parameters: + proxy_url: The URL to use when connecting to the proxy server. + For example `"http://127.0.0.1:8080/"`. + ssl_context: An SSL context to use for verifying connections. + If not specified, the default `httpcore.default_ssl_context()` + will be used. + max_connections: The maximum number of concurrent HTTP connections that + the pool should allow. Any attempt to send a request on a pool that + would exceed this amount will block until a connection is available. + max_keepalive_connections: The maximum number of idle HTTP connections + that will be maintained in the pool. + keepalive_expiry: The duration in seconds that an idle HTTP connection + may be maintained for before being expired from the pool. + http1: A boolean indicating if HTTP/1.1 requests should be supported + by the connection pool. Defaults to True. + http2: A boolean indicating if HTTP/2 requests should be supported by + the connection pool. Defaults to False. + retries: The maximum number of retries when trying to establish + a connection. + local_address: Local address to connect from. Can also be used to + connect using a particular address family. Using + `local_address="0.0.0.0"` will connect using an `AF_INET` address + (IPv4), while using `local_address="::"` will connect using an + `AF_INET6` address (IPv6). + uds: Path to a Unix Domain Socket to use instead of TCP sockets. + network_backend: A backend instance to use for handling network I/O. + """ + super().__init__( + ssl_context=ssl_context, + max_connections=max_connections, + max_keepalive_connections=max_keepalive_connections, + keepalive_expiry=keepalive_expiry, + http1=http1, + http2=http2, + network_backend=network_backend, + retries=retries, + ) + self._ssl_context = ssl_context + self._proxy_url = enforce_url(proxy_url, name="proxy_url") + if proxy_auth is not None: + username, password = proxy_auth + username_bytes = enforce_bytes(username, name="proxy_auth") + password_bytes = enforce_bytes(password, name="proxy_auth") + self._proxy_auth: typing.Optional[typing.Tuple[bytes, bytes]] = ( + username_bytes, + password_bytes, + ) + else: + self._proxy_auth = None + + def create_connection(self, origin: Origin) -> ConnectionInterface: + return Socks5Connection( + proxy_origin=self._proxy_url.origin, + remote_origin=origin, + proxy_auth=self._proxy_auth, + ssl_context=self._ssl_context, + keepalive_expiry=self._keepalive_expiry, + http1=self._http1, + http2=self._http2, + network_backend=self._network_backend, + ) + + +class Socks5Connection(ConnectionInterface): + def __init__( + self, + proxy_origin: Origin, + remote_origin: Origin, + proxy_auth: typing.Optional[typing.Tuple[bytes, bytes]] = None, + ssl_context: typing.Optional[ssl.SSLContext] = None, + keepalive_expiry: typing.Optional[float] = None, + http1: bool = True, + http2: bool = False, + network_backend: typing.Optional[NetworkBackend] = None, + ) -> None: + self._proxy_origin = proxy_origin + self._remote_origin = remote_origin + self._proxy_auth = proxy_auth + self._ssl_context = ssl_context + self._keepalive_expiry = keepalive_expiry + self._http1 = http1 + self._http2 = http2 + + self._network_backend: NetworkBackend = ( + SyncBackend() if network_backend is None else network_backend + ) + self._connect_lock = Lock() + self._connection: typing.Optional[ConnectionInterface] = None + self._connect_failed = False + + def handle_request(self, request: Request) -> Response: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("connect", None) + + with self._connect_lock: + if self._connection is None: + try: + # Connect to the proxy + kwargs = { + "host": self._proxy_origin.host.decode("ascii"), + "port": self._proxy_origin.port, + "timeout": timeout, + } + with Trace("connect_tcp", logger, request, kwargs) as trace: + stream = self._network_backend.connect_tcp(**kwargs) + trace.return_value = stream + + # Connect to the remote host using socks5 + kwargs = { + "stream": stream, + "host": self._remote_origin.host.decode("ascii"), + "port": self._remote_origin.port, + "auth": self._proxy_auth, + } + with Trace( + "setup_socks5_connection", logger, request, kwargs + ) as trace: + _init_socks5_connection(**kwargs) + trace.return_value = stream + + # Upgrade the stream to SSL + if self._remote_origin.scheme == b"https": + ssl_context = ( + default_ssl_context() + if self._ssl_context is None + else self._ssl_context + ) + alpn_protocols = ( + ["http/1.1", "h2"] if self._http2 else ["http/1.1"] + ) + ssl_context.set_alpn_protocols(alpn_protocols) + + kwargs = { + "ssl_context": ssl_context, + "server_hostname": self._remote_origin.host.decode("ascii"), + "timeout": timeout, + } + with Trace("start_tls", logger, request, kwargs) as trace: + stream = stream.start_tls(**kwargs) + trace.return_value = stream + + # Determine if we should be using HTTP/1.1 or HTTP/2 + ssl_object = stream.get_extra_info("ssl_object") + http2_negotiated = ( + ssl_object is not None + and ssl_object.selected_alpn_protocol() == "h2" + ) + + # Create the HTTP/1.1 or HTTP/2 connection + if http2_negotiated or ( + self._http2 and not self._http1 + ): # pragma: nocover + from .http2 import HTTP2Connection + + self._connection = HTTP2Connection( + origin=self._remote_origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + else: + self._connection = HTTP11Connection( + origin=self._remote_origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + except Exception as exc: + self._connect_failed = True + raise exc + elif not self._connection.is_available(): # pragma: nocover + raise ConnectionNotAvailable() + + return self._connection.handle_request(request) + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._remote_origin + + def close(self) -> None: + if self._connection is not None: + self._connection.close() + + def is_available(self) -> bool: + if self._connection is None: # pragma: nocover + # If HTTP/2 support is enabled, and the resulting connection could + # end up as HTTP/2 then we should indicate the connection as being + # available to service multiple requests. + return ( + self._http2 + and (self._remote_origin.scheme == b"https" or not self._http1) + and not self._connect_failed + ) + return self._connection.is_available() + + def has_expired(self) -> bool: + if self._connection is None: # pragma: nocover + return self._connect_failed + return self._connection.has_expired() + + def is_idle(self) -> bool: + if self._connection is None: # pragma: nocover + return self._connect_failed + return self._connection.is_idle() + + def is_closed(self) -> bool: + if self._connection is None: # pragma: nocover + return self._connect_failed + return self._connection.is_closed() + + def info(self) -> str: + if self._connection is None: # pragma: nocover + return "CONNECTION FAILED" if self._connect_failed else "CONNECTING" + return self._connection.info() + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.info()}]>" diff --git a/Backend/venv/lib/python3.12/site-packages/httpcore/_synchronization.py b/Backend/venv/lib/python3.12/site-packages/httpcore/_synchronization.py new file mode 100644 index 00000000..bae27c1b --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/httpcore/_synchronization.py @@ -0,0 +1,279 @@ +import threading +from types import TracebackType +from typing import Optional, Type + +import sniffio + +from ._exceptions import ExceptionMapping, PoolTimeout, map_exceptions + +# Our async synchronization primatives use either 'anyio' or 'trio' depending +# on if they're running under asyncio or trio. + +try: + import trio +except ImportError: # pragma: nocover + trio = None # type: ignore + +try: + import anyio +except ImportError: # pragma: nocover + anyio = None # type: ignore + + +class AsyncLock: + def __init__(self) -> None: + self._backend = "" + + def setup(self) -> None: + """ + Detect if we're running under 'asyncio' or 'trio' and create + a lock with the correct implementation. + """ + self._backend = sniffio.current_async_library() + if self._backend == "trio": + if trio is None: # pragma: nocover + raise RuntimeError( + "Running under trio, requires the 'trio' package to be installed." + ) + self._trio_lock = trio.Lock() + else: + if anyio is None: # pragma: nocover + raise RuntimeError( + "Running under asyncio requires the 'anyio' package to be installed." + ) + self._anyio_lock = anyio.Lock() + + async def __aenter__(self) -> "AsyncLock": + if not self._backend: + self.setup() + + if self._backend == "trio": + await self._trio_lock.acquire() + else: + await self._anyio_lock.acquire() + + return self + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]] = None, + exc_value: Optional[BaseException] = None, + traceback: Optional[TracebackType] = None, + ) -> None: + if self._backend == "trio": + self._trio_lock.release() + else: + self._anyio_lock.release() + + +class AsyncEvent: + def __init__(self) -> None: + self._backend = "" + + def setup(self) -> None: + """ + Detect if we're running under 'asyncio' or 'trio' and create + a lock with the correct implementation. + """ + self._backend = sniffio.current_async_library() + if self._backend == "trio": + if trio is None: # pragma: nocover + raise RuntimeError( + "Running under trio requires the 'trio' package to be installed." + ) + self._trio_event = trio.Event() + else: + if anyio is None: # pragma: nocover + raise RuntimeError( + "Running under asyncio requires the 'anyio' package to be installed." + ) + self._anyio_event = anyio.Event() + + def set(self) -> None: + if not self._backend: + self.setup() + + if self._backend == "trio": + self._trio_event.set() + else: + self._anyio_event.set() + + async def wait(self, timeout: Optional[float] = None) -> None: + if not self._backend: + self.setup() + + if self._backend == "trio": + if trio is None: # pragma: nocover + raise RuntimeError( + "Running under trio requires the 'trio' package to be installed." + ) + + trio_exc_map: ExceptionMapping = {trio.TooSlowError: PoolTimeout} + timeout_or_inf = float("inf") if timeout is None else timeout + with map_exceptions(trio_exc_map): + with trio.fail_after(timeout_or_inf): + await self._trio_event.wait() + else: + if anyio is None: # pragma: nocover + raise RuntimeError( + "Running under asyncio requires the 'anyio' package to be installed." + ) + + anyio_exc_map: ExceptionMapping = {TimeoutError: PoolTimeout} + with map_exceptions(anyio_exc_map): + with anyio.fail_after(timeout): + await self._anyio_event.wait() + + +class AsyncSemaphore: + def __init__(self, bound: int) -> None: + self._bound = bound + self._backend = "" + + def setup(self) -> None: + """ + Detect if we're running under 'asyncio' or 'trio' and create + a semaphore with the correct implementation. + """ + self._backend = sniffio.current_async_library() + if self._backend == "trio": + if trio is None: # pragma: nocover + raise RuntimeError( + "Running under trio requires the 'trio' package to be installed." + ) + + self._trio_semaphore = trio.Semaphore( + initial_value=self._bound, max_value=self._bound + ) + else: + if anyio is None: # pragma: nocover + raise RuntimeError( + "Running under asyncio requires the 'anyio' package to be installed." + ) + + self._anyio_semaphore = anyio.Semaphore( + initial_value=self._bound, max_value=self._bound + ) + + async def acquire(self) -> None: + if not self._backend: + self.setup() + + if self._backend == "trio": + await self._trio_semaphore.acquire() + else: + await self._anyio_semaphore.acquire() + + async def release(self) -> None: + if self._backend == "trio": + self._trio_semaphore.release() + else: + self._anyio_semaphore.release() + + +class AsyncShieldCancellation: + # For certain portions of our codebase where we're dealing with + # closing connections during exception handling we want to shield + # the operation from being cancelled. + # + # with AsyncShieldCancellation(): + # ... # clean-up operations, shielded from cancellation. + + def __init__(self) -> None: + """ + Detect if we're running under 'asyncio' or 'trio' and create + a shielded scope with the correct implementation. + """ + self._backend = sniffio.current_async_library() + + if self._backend == "trio": + if trio is None: # pragma: nocover + raise RuntimeError( + "Running under trio requires the 'trio' package to be installed." + ) + + self._trio_shield = trio.CancelScope(shield=True) + else: + if anyio is None: # pragma: nocover + raise RuntimeError( + "Running under asyncio requires the 'anyio' package to be installed." + ) + + self._anyio_shield = anyio.CancelScope(shield=True) + + def __enter__(self) -> "AsyncShieldCancellation": + if self._backend == "trio": + self._trio_shield.__enter__() + else: + self._anyio_shield.__enter__() + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]] = None, + exc_value: Optional[BaseException] = None, + traceback: Optional[TracebackType] = None, + ) -> None: + if self._backend == "trio": + self._trio_shield.__exit__(exc_type, exc_value, traceback) + else: + self._anyio_shield.__exit__(exc_type, exc_value, traceback) + + +# Our thread-based synchronization primitives... + + +class Lock: + def __init__(self) -> None: + self._lock = threading.Lock() + + def __enter__(self) -> "Lock": + self._lock.acquire() + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]] = None, + exc_value: Optional[BaseException] = None, + traceback: Optional[TracebackType] = None, + ) -> None: + self._lock.release() + + +class Event: + def __init__(self) -> None: + self._event = threading.Event() + + def set(self) -> None: + self._event.set() + + def wait(self, timeout: Optional[float] = None) -> None: + if not self._event.wait(timeout=timeout): + raise PoolTimeout() # pragma: nocover + + +class Semaphore: + def __init__(self, bound: int) -> None: + self._semaphore = threading.Semaphore(value=bound) + + def acquire(self) -> None: + self._semaphore.acquire() + + def release(self) -> None: + self._semaphore.release() + + +class ShieldCancellation: + # Thread-synchronous codebases don't support cancellation semantics. + # We have this class because we need to mirror the async and sync + # cases within our package, but it's just a no-op. + def __enter__(self) -> "ShieldCancellation": + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]] = None, + exc_value: Optional[BaseException] = None, + traceback: Optional[TracebackType] = None, + ) -> None: + pass diff --git a/Backend/venv/lib/python3.12/site-packages/httpcore/_trace.py b/Backend/venv/lib/python3.12/site-packages/httpcore/_trace.py new file mode 100644 index 00000000..b122a53e --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/httpcore/_trace.py @@ -0,0 +1,105 @@ +import inspect +import logging +from types import TracebackType +from typing import Any, Dict, Optional, Type + +from ._models import Request + + +class Trace: + def __init__( + self, + name: str, + logger: logging.Logger, + request: Optional[Request] = None, + kwargs: Optional[Dict[str, Any]] = None, + ) -> None: + self.name = name + self.logger = logger + self.trace_extension = ( + None if request is None else request.extensions.get("trace") + ) + self.debug = self.logger.isEnabledFor(logging.DEBUG) + self.kwargs = kwargs or {} + self.return_value: Any = None + self.should_trace = self.debug or self.trace_extension is not None + self.prefix = self.logger.name.split(".")[-1] + + def trace(self, name: str, info: Dict[str, Any]) -> None: + if self.trace_extension is not None: + prefix_and_name = f"{self.prefix}.{name}" + ret = self.trace_extension(prefix_and_name, info) + if inspect.iscoroutine(ret): # pragma: no cover + raise TypeError( + "If you are using a synchronous interface, " + "the callback of the `trace` extension should " + "be a normal function instead of an asynchronous function." + ) + + if self.debug: + if not info or "return_value" in info and info["return_value"] is None: + message = name + else: + args = " ".join([f"{key}={value!r}" for key, value in info.items()]) + message = f"{name} {args}" + self.logger.debug(message) + + def __enter__(self) -> "Trace": + if self.should_trace: + info = self.kwargs + self.trace(f"{self.name}.started", info) + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]] = None, + exc_value: Optional[BaseException] = None, + traceback: Optional[TracebackType] = None, + ) -> None: + if self.should_trace: + if exc_value is None: + info = {"return_value": self.return_value} + self.trace(f"{self.name}.complete", info) + else: + info = {"exception": exc_value} + self.trace(f"{self.name}.failed", info) + + async def atrace(self, name: str, info: Dict[str, Any]) -> None: + if self.trace_extension is not None: + prefix_and_name = f"{self.prefix}.{name}" + coro = self.trace_extension(prefix_and_name, info) + if not inspect.iscoroutine(coro): # pragma: no cover + raise TypeError( + "If you're using an asynchronous interface, " + "the callback of the `trace` extension should " + "be an asynchronous function rather than a normal function." + ) + await coro + + if self.debug: + if not info or "return_value" in info and info["return_value"] is None: + message = name + else: + args = " ".join([f"{key}={value!r}" for key, value in info.items()]) + message = f"{name} {args}" + self.logger.debug(message) + + async def __aenter__(self) -> "Trace": + if self.should_trace: + info = self.kwargs + await self.atrace(f"{self.name}.started", info) + return self + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]] = None, + exc_value: Optional[BaseException] = None, + traceback: Optional[TracebackType] = None, + ) -> None: + if self.should_trace: + if exc_value is None: + info = {"return_value": self.return_value} + await self.atrace(f"{self.name}.complete", info) + else: + info = {"exception": exc_value} + await self.atrace(f"{self.name}.failed", info) diff --git a/Backend/venv/lib/python3.12/site-packages/httpcore/_utils.py b/Backend/venv/lib/python3.12/site-packages/httpcore/_utils.py new file mode 100644 index 00000000..df5dea8f --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/httpcore/_utils.py @@ -0,0 +1,36 @@ +import select +import socket +import sys +import typing + + +def is_socket_readable(sock: typing.Optional[socket.socket]) -> bool: + """ + Return whether a socket, as identifed by its file descriptor, is readable. + "A socket is readable" means that the read buffer isn't empty, i.e. that calling + .recv() on it would immediately return some data. + """ + # NOTE: we want check for readability without actually attempting to read, because + # we don't want to block forever if it's not readable. + + # In the case that the socket no longer exists, or cannot return a file + # descriptor, we treat it as being readable, as if it the next read operation + # on it is ready to return the terminating `b""`. + sock_fd = None if sock is None else sock.fileno() + if sock_fd is None or sock_fd < 0: # pragma: nocover + return True + + # The implementation below was stolen from: + # https://github.com/python-trio/trio/blob/20ee2b1b7376db637435d80e266212a35837ddcc/trio/_socket.py#L471-L478 + # See also: https://github.com/encode/httpcore/pull/193#issuecomment-703129316 + + # Use select.select on Windows, and when poll is unavailable and select.poll + # everywhere else. (E.g. When eventlet is in use. See #327) + if ( + sys.platform == "win32" or getattr(select, "poll", None) is None + ): # pragma: nocover + rready, _, _ = select.select([sock_fd], [], [], 0) + return bool(rready) + p = select.poll() + p.register(sock_fd, select.POLLIN) + return bool(p.poll(0)) diff --git a/Backend/venv/lib/python3.12/site-packages/httpcore/py.typed b/Backend/venv/lib/python3.12/site-packages/httpcore/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/Backend/venv/lib/python3.12/site-packages/httpx-0.24.1.dist-info/INSTALLER b/Backend/venv/lib/python3.12/site-packages/httpx-0.24.1.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/httpx-0.24.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/Backend/venv/lib/python3.12/site-packages/httpx-0.24.1.dist-info/METADATA b/Backend/venv/lib/python3.12/site-packages/httpx-0.24.1.dist-info/METADATA new file mode 100644 index 00000000..84b89fb8 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/httpx-0.24.1.dist-info/METADATA @@ -0,0 +1,212 @@ +Metadata-Version: 2.1 +Name: httpx +Version: 0.24.1 +Summary: The next generation HTTP client. +Project-URL: Changelog, https://github.com/encode/httpx/blob/master/CHANGELOG.md +Project-URL: Documentation, https://www.python-httpx.org +Project-URL: Homepage, https://github.com/encode/httpx +Project-URL: Source, https://github.com/encode/httpx +Author-email: Tom Christie +License-Expression: BSD-3-Clause +License-File: LICENSE.md +Classifier: Development Status :: 4 - Beta +Classifier: Environment :: Web Environment +Classifier: Framework :: AsyncIO +Classifier: Framework :: Trio +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Topic :: Internet :: WWW/HTTP +Requires-Python: >=3.7 +Requires-Dist: certifi +Requires-Dist: httpcore<0.18.0,>=0.15.0 +Requires-Dist: idna +Requires-Dist: sniffio +Provides-Extra: brotli +Requires-Dist: brotli; platform_python_implementation == 'CPython' and extra == 'brotli' +Requires-Dist: brotlicffi; platform_python_implementation != 'CPython' and extra == 'brotli' +Provides-Extra: cli +Requires-Dist: click==8.*; extra == 'cli' +Requires-Dist: pygments==2.*; extra == 'cli' +Requires-Dist: rich<14,>=10; extra == 'cli' +Provides-Extra: http2 +Requires-Dist: h2<5,>=3; extra == 'http2' +Provides-Extra: socks +Requires-Dist: socksio==1.*; extra == 'socks' +Description-Content-Type: text/markdown + +

+ HTTPX +

+ +

HTTPX - A next-generation HTTP client for Python.

+ +

+ + Test Suite + + + Package version + +

+ +HTTPX is a fully featured HTTP client library for Python 3. It includes **an integrated +command line client**, has support for both **HTTP/1.1 and HTTP/2**, and provides both **sync +and async APIs**. + +--- + +Install HTTPX using pip: + +```shell +$ pip install httpx +``` + +Now, let's get started: + +```pycon +>>> import httpx +>>> r = httpx.get('https://www.example.org/') +>>> r + +>>> r.status_code +200 +>>> r.headers['content-type'] +'text/html; charset=UTF-8' +>>> r.text +'\n\n\nExample Domain...' +``` + +Or, using the command-line client. + +```shell +$ pip install 'httpx[cli]' # The command line client is an optional dependency. +``` + +Which now allows us to use HTTPX directly from the command-line... + +

+ httpx --help +

+ +Sending a request... + +

+ httpx http://httpbin.org/json +

+ +## Features + +HTTPX builds on the well-established usability of `requests`, and gives you: + +* A broadly [requests-compatible API](https://www.python-httpx.org/compatibility/). +* An integrated command-line client. +* HTTP/1.1 [and HTTP/2 support](https://www.python-httpx.org/http2/). +* Standard synchronous interface, but with [async support if you need it](https://www.python-httpx.org/async/). +* Ability to make requests directly to [WSGI applications](https://www.python-httpx.org/advanced/#calling-into-python-web-apps) or [ASGI applications](https://www.python-httpx.org/async/#calling-into-python-web-apps). +* Strict timeouts everywhere. +* Fully type annotated. +* 100% test coverage. + +Plus all the standard features of `requests`... + +* International Domains and URLs +* Keep-Alive & Connection Pooling +* Sessions with Cookie Persistence +* Browser-style SSL Verification +* Basic/Digest Authentication +* Elegant Key/Value Cookies +* Automatic Decompression +* Automatic Content Decoding +* Unicode Response Bodies +* Multipart File Uploads +* HTTP(S) Proxy Support +* Connection Timeouts +* Streaming Downloads +* .netrc Support +* Chunked Requests + +## Installation + +Install with pip: + +```shell +$ pip install httpx +``` + +Or, to include the optional HTTP/2 support, use: + +```shell +$ pip install httpx[http2] +``` + +HTTPX requires Python 3.7+. + +## Documentation + +Project documentation is available at [https://www.python-httpx.org/](https://www.python-httpx.org/). + +For a run-through of all the basics, head over to the [QuickStart](https://www.python-httpx.org/quickstart/). + +For more advanced topics, see the [Advanced Usage](https://www.python-httpx.org/advanced/) section, the [async support](https://www.python-httpx.org/async/) section, or the [HTTP/2](https://www.python-httpx.org/http2/) section. + +The [Developer Interface](https://www.python-httpx.org/api/) provides a comprehensive API reference. + +To find out about tools that integrate with HTTPX, see [Third Party Packages](https://www.python-httpx.org/third_party_packages/). + +## Contribute + +If you want to contribute with HTTPX check out the [Contributing Guide](https://www.python-httpx.org/contributing/) to learn how to start. + +## Dependencies + +The HTTPX project relies on these excellent libraries: + +* `httpcore` - The underlying transport implementation for `httpx`. + * `h11` - HTTP/1.1 support. +* `certifi` - SSL certificates. +* `idna` - Internationalized domain name support. +* `sniffio` - Async library autodetection. + +As well as these optional installs: + +* `h2` - HTTP/2 support. *(Optional, with `httpx[http2]`)* +* `socksio` - SOCKS proxy support. *(Optional, with `httpx[socks]`)* +* `rich` - Rich terminal support. *(Optional, with `httpx[cli]`)* +* `click` - Command line client support. *(Optional, with `httpx[cli]`)* +* `brotli` or `brotlicffi` - Decoding for "brotli" compressed responses. *(Optional, with `httpx[brotli]`)* + +A huge amount of credit is due to `requests` for the API layout that +much of this work follows, as well as to `urllib3` for plenty of design +inspiration around the lower-level networking details. + +--- + +

HTTPX is BSD licensed code.
Designed & crafted with care.

— 🦋 —

+ +## Release Information + +### Added + +* Provide additional context in some `InvalidURL` exceptions. (#2675) + +### Fixed + +* Fix optional percent-encoding behaviour. (#2671) +* More robust checking for opening upload files in binary mode. (#2630) +* Properly support IP addresses in `NO_PROXY` environment variable. (#2659) +* Set default file for `NetRCAuth()` to `None` to use the stdlib default. (#2667) +* Set logging request lines to INFO level for async requests, in line with sync requests. (#2656) +* Fix which gen-delims need to be escaped for path/query/fragment components in URL. (#2701) + + +--- + +[Full changelog](https://github.com/encode/httpx/blob/master/CHANGELOG.md) diff --git a/Backend/venv/lib/python3.12/site-packages/httpx-0.24.1.dist-info/RECORD b/Backend/venv/lib/python3.12/site-packages/httpx-0.24.1.dist-info/RECORD new file mode 100644 index 00000000..1cbe3ad6 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/httpx-0.24.1.dist-info/RECORD @@ -0,0 +1,57 @@ +../../../bin/httpx,sha256=YhM3lroNg2xkNdaFKicQjc2KywBLVV9r6yrAbSYZAio,216 +httpx-0.24.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +httpx-0.24.1.dist-info/METADATA,sha256=ZBqGMGxXnjZ-UpNiE4mXBHZzuC4lRx_mTPc_R4YNoiQ,7428 +httpx-0.24.1.dist-info/RECORD,, +httpx-0.24.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +httpx-0.24.1.dist-info/WHEEL,sha256=y1bSCq4r5i4nMmpXeUJMqs3ipKvkZObrIXSvJHm1qCI,87 +httpx-0.24.1.dist-info/entry_points.txt,sha256=2lVkdQmxLA1pNMgSN2eV89o90HCZezhmNwsy6ryKDSA,37 +httpx-0.24.1.dist-info/licenses/LICENSE.md,sha256=TsWdVE8StfU5o6cW_TIaxYzNgDC0ZSIfLIgCAM3yjY0,1508 +httpx/__init__.py,sha256=oCxVAsePEy5DE9eLhGAAq9H3RBGZUDaUROtGEyzbBRo,3210 +httpx/__pycache__/__init__.cpython-312.pyc,, +httpx/__pycache__/__version__.cpython-312.pyc,, +httpx/__pycache__/_api.cpython-312.pyc,, +httpx/__pycache__/_auth.cpython-312.pyc,, +httpx/__pycache__/_client.cpython-312.pyc,, +httpx/__pycache__/_compat.cpython-312.pyc,, +httpx/__pycache__/_config.cpython-312.pyc,, +httpx/__pycache__/_content.cpython-312.pyc,, +httpx/__pycache__/_decoders.cpython-312.pyc,, +httpx/__pycache__/_exceptions.cpython-312.pyc,, +httpx/__pycache__/_main.cpython-312.pyc,, +httpx/__pycache__/_models.cpython-312.pyc,, +httpx/__pycache__/_multipart.cpython-312.pyc,, +httpx/__pycache__/_status_codes.cpython-312.pyc,, +httpx/__pycache__/_types.cpython-312.pyc,, +httpx/__pycache__/_urlparse.cpython-312.pyc,, +httpx/__pycache__/_urls.cpython-312.pyc,, +httpx/__pycache__/_utils.cpython-312.pyc,, +httpx/__version__.py,sha256=bg4cSle4BdKgSjAPJGqR4kGXZ-nTOXf_1g68lFLU8To,108 +httpx/_api.py,sha256=cVU9ErzaXve5rqoPoSHr9yJbovHtICrcxR7yBoNSeOw,13011 +httpx/_auth.py,sha256=58FA-xqqp-XgLZ7Emd4-et-XXuTRaa5buiBYB2MzyvE,11773 +httpx/_client.py,sha256=A9MPP_d1ZlqcO5CeGLgyzVwdHgCpROYSdjoAUA6rpYE,68131 +httpx/_compat.py,sha256=lQa4SnZhS-kNQ8HKpSwKrmJ00nYQKDVaWwwnOYEvjMI,1602 +httpx/_config.py,sha256=9Tg0-pV93Hl5knjyZhCLcoEXymAMn-OLaDsEn2uPK14,12391 +httpx/_content.py,sha256=olbWqawdWWweXeW6gDYHPiEGjip5lqFZKv9OmVd-zIg,8092 +httpx/_decoders.py,sha256=dd8GSkEAe45BzRUF47zH_lg3-BcwXtxzPBSGP5Y4F90,9739 +httpx/_exceptions.py,sha256=xKw-U6vW7zmdReUAGYHMegYWZuDAuE5039L087SHe4Q,7880 +httpx/_main.py,sha256=m9C4RuqjOB6UqL3FFHMjmC45f4SDSO-iOREFLdw4IdM,15784 +httpx/_models.py,sha256=Ho9YjmVMkS-lEMhCGpecfYsenVZy2jsLJmKCexO50tI,42696 +httpx/_multipart.py,sha256=qzt35jAgapaRPwdq-lTKSA5YY6ayrfDIsZLdr3t4NWc,8972 +httpx/_status_codes.py,sha256=XKArMrSoo8oKBQCHdFGA-wsM2PcSTaHE8svDYOUcwWk,5584 +httpx/_transports/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +httpx/_transports/__pycache__/__init__.cpython-312.pyc,, +httpx/_transports/__pycache__/asgi.cpython-312.pyc,, +httpx/_transports/__pycache__/base.cpython-312.pyc,, +httpx/_transports/__pycache__/default.cpython-312.pyc,, +httpx/_transports/__pycache__/mock.cpython-312.pyc,, +httpx/_transports/__pycache__/wsgi.cpython-312.pyc,, +httpx/_transports/asgi.py,sha256=lKAL-6dhxqSnZA2fMWtj-MokSTIzjnwwa3DTkkof5cE,5317 +httpx/_transports/base.py,sha256=0BM8yZZEkdFT4tXXSm0h0dK0cSYA4hLgInj_BljGEGw,2510 +httpx/_transports/default.py,sha256=fla9xvSAM3BuGtaMa4PhbX1gW_9oafl8vzujOhcE-H8,12626 +httpx/_transports/mock.py,sha256=sDt3BDXbz8-W94kC8OXtGzF1PWH0y73h1De7Q-XkVtg,1179 +httpx/_transports/wsgi.py,sha256=72ZMPBLPV-aZB4gfsz_SOrJpgKJb6Z9W5wFxhlMQcqg,4754 +httpx/_types.py,sha256=BnX0adSAxLT9BzkxuX96S4odkC9UdLMgws6waxqEKuI,3333 +httpx/_urlparse.py,sha256=JvFjro7sdHohzXwybwYALTTGy2MakRpfFreBTQu9A4w,16669 +httpx/_urls.py,sha256=JAONd-2reXpB_WuQ7WuvhUcLuebiQeYJQPyszADmCow,21840 +httpx/_utils.py,sha256=jaCEUHN9jpHfoudrtSNxYTmTeRLeOrP-s-MOTvq23rA,15397 +httpx/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/Backend/venv/lib/python3.12/site-packages/httpx-0.24.1.dist-info/REQUESTED b/Backend/venv/lib/python3.12/site-packages/httpx-0.24.1.dist-info/REQUESTED new file mode 100644 index 00000000..e69de29b diff --git a/Backend/venv/lib/python3.12/site-packages/httpx-0.24.1.dist-info/WHEEL b/Backend/venv/lib/python3.12/site-packages/httpx-0.24.1.dist-info/WHEEL new file mode 100644 index 00000000..27627551 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/httpx-0.24.1.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: hatchling 1.17.0 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/Backend/venv/lib/python3.12/site-packages/httpx-0.24.1.dist-info/entry_points.txt b/Backend/venv/lib/python3.12/site-packages/httpx-0.24.1.dist-info/entry_points.txt new file mode 100644 index 00000000..8ae96007 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/httpx-0.24.1.dist-info/entry_points.txt @@ -0,0 +1,2 @@ +[console_scripts] +httpx = httpx:main diff --git a/Backend/venv/lib/python3.12/site-packages/httpx-0.24.1.dist-info/licenses/LICENSE.md b/Backend/venv/lib/python3.12/site-packages/httpx-0.24.1.dist-info/licenses/LICENSE.md new file mode 100644 index 00000000..ab79d16a --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/httpx-0.24.1.dist-info/licenses/LICENSE.md @@ -0,0 +1,12 @@ +Copyright © 2019, [Encode OSS Ltd](https://www.encode.io/). +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. + +* Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/Backend/venv/lib/python3.12/site-packages/httpx/__init__.py b/Backend/venv/lib/python3.12/site-packages/httpx/__init__.py new file mode 100644 index 00000000..f61112f8 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/httpx/__init__.py @@ -0,0 +1,138 @@ +from .__version__ import __description__, __title__, __version__ +from ._api import delete, get, head, options, patch, post, put, request, stream +from ._auth import Auth, BasicAuth, DigestAuth, NetRCAuth +from ._client import USE_CLIENT_DEFAULT, AsyncClient, Client +from ._config import Limits, Proxy, Timeout, create_ssl_context +from ._content import ByteStream +from ._exceptions import ( + CloseError, + ConnectError, + ConnectTimeout, + CookieConflict, + DecodingError, + HTTPError, + HTTPStatusError, + InvalidURL, + LocalProtocolError, + NetworkError, + PoolTimeout, + ProtocolError, + ProxyError, + ReadError, + ReadTimeout, + RemoteProtocolError, + RequestError, + RequestNotRead, + ResponseNotRead, + StreamClosed, + StreamConsumed, + StreamError, + TimeoutException, + TooManyRedirects, + TransportError, + UnsupportedProtocol, + WriteError, + WriteTimeout, +) +from ._models import Cookies, Headers, Request, Response +from ._status_codes import codes +from ._transports.asgi import ASGITransport +from ._transports.base import AsyncBaseTransport, BaseTransport +from ._transports.default import AsyncHTTPTransport, HTTPTransport +from ._transports.mock import MockTransport +from ._transports.wsgi import WSGITransport +from ._types import AsyncByteStream, SyncByteStream +from ._urls import URL, QueryParams + +try: + from ._main import main +except ImportError: # pragma: no cover + + def main() -> None: # type: ignore + import sys + + print( + "The httpx command line client could not run because the required " + "dependencies were not installed.\nMake sure you've installed " + "everything with: pip install 'httpx[cli]'" + ) + sys.exit(1) + + +__all__ = [ + "__description__", + "__title__", + "__version__", + "ASGITransport", + "AsyncBaseTransport", + "AsyncByteStream", + "AsyncClient", + "AsyncHTTPTransport", + "Auth", + "BaseTransport", + "BasicAuth", + "ByteStream", + "Client", + "CloseError", + "codes", + "ConnectError", + "ConnectTimeout", + "CookieConflict", + "Cookies", + "create_ssl_context", + "DecodingError", + "delete", + "DigestAuth", + "get", + "head", + "Headers", + "HTTPError", + "HTTPStatusError", + "HTTPTransport", + "InvalidURL", + "Limits", + "LocalProtocolError", + "main", + "MockTransport", + "NetRCAuth", + "NetworkError", + "options", + "patch", + "PoolTimeout", + "post", + "ProtocolError", + "Proxy", + "ProxyError", + "put", + "QueryParams", + "ReadError", + "ReadTimeout", + "RemoteProtocolError", + "request", + "Request", + "RequestError", + "RequestNotRead", + "Response", + "ResponseNotRead", + "stream", + "StreamClosed", + "StreamConsumed", + "StreamError", + "SyncByteStream", + "Timeout", + "TimeoutException", + "TooManyRedirects", + "TransportError", + "UnsupportedProtocol", + "URL", + "USE_CLIENT_DEFAULT", + "WriteError", + "WriteTimeout", + "WSGITransport", +] + + +__locals = locals() +for __name in __all__: + if not __name.startswith("__"): + setattr(__locals[__name], "__module__", "httpx") # noqa diff --git a/Backend/venv/lib/python3.12/site-packages/httpx/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/httpx/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..454f0557 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/httpx/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/httpx/__pycache__/__version__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/httpx/__pycache__/__version__.cpython-312.pyc new file mode 100644 index 00000000..b717cd0b Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/httpx/__pycache__/__version__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/httpx/__pycache__/_api.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/httpx/__pycache__/_api.cpython-312.pyc new file mode 100644 index 00000000..91bae824 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/httpx/__pycache__/_api.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/httpx/__pycache__/_auth.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/httpx/__pycache__/_auth.cpython-312.pyc new file mode 100644 index 00000000..5db3cd87 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/httpx/__pycache__/_auth.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/httpx/__pycache__/_client.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/httpx/__pycache__/_client.cpython-312.pyc new file mode 100644 index 00000000..400ed724 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/httpx/__pycache__/_client.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/httpx/__pycache__/_compat.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/httpx/__pycache__/_compat.cpython-312.pyc new file mode 100644 index 00000000..966e3642 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/httpx/__pycache__/_compat.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/httpx/__pycache__/_config.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/httpx/__pycache__/_config.cpython-312.pyc new file mode 100644 index 00000000..e06c2acc Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/httpx/__pycache__/_config.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/httpx/__pycache__/_content.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/httpx/__pycache__/_content.cpython-312.pyc new file mode 100644 index 00000000..ecab8a8e Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/httpx/__pycache__/_content.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/httpx/__pycache__/_decoders.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/httpx/__pycache__/_decoders.cpython-312.pyc new file mode 100644 index 00000000..ddaf8525 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/httpx/__pycache__/_decoders.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/httpx/__pycache__/_exceptions.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/httpx/__pycache__/_exceptions.cpython-312.pyc new file mode 100644 index 00000000..dfb3bba5 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/httpx/__pycache__/_exceptions.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/httpx/__pycache__/_main.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/httpx/__pycache__/_main.cpython-312.pyc new file mode 100644 index 00000000..88e6ba56 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/httpx/__pycache__/_main.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/httpx/__pycache__/_models.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/httpx/__pycache__/_models.cpython-312.pyc new file mode 100644 index 00000000..f1dba74f Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/httpx/__pycache__/_models.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/httpx/__pycache__/_multipart.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/httpx/__pycache__/_multipart.cpython-312.pyc new file mode 100644 index 00000000..041ef623 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/httpx/__pycache__/_multipart.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/httpx/__pycache__/_status_codes.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/httpx/__pycache__/_status_codes.cpython-312.pyc new file mode 100644 index 00000000..c44b8e1c Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/httpx/__pycache__/_status_codes.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/httpx/__pycache__/_types.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/httpx/__pycache__/_types.cpython-312.pyc new file mode 100644 index 00000000..daf260cf Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/httpx/__pycache__/_types.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/httpx/__pycache__/_urlparse.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/httpx/__pycache__/_urlparse.cpython-312.pyc new file mode 100644 index 00000000..09f3aa57 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/httpx/__pycache__/_urlparse.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/httpx/__pycache__/_urls.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/httpx/__pycache__/_urls.cpython-312.pyc new file mode 100644 index 00000000..a4469325 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/httpx/__pycache__/_urls.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/httpx/__pycache__/_utils.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/httpx/__pycache__/_utils.cpython-312.pyc new file mode 100644 index 00000000..cb978079 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/httpx/__pycache__/_utils.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/httpx/__version__.py b/Backend/venv/lib/python3.12/site-packages/httpx/__version__.py new file mode 100644 index 00000000..6a8e63c6 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/httpx/__version__.py @@ -0,0 +1,3 @@ +__title__ = "httpx" +__description__ = "A next generation HTTP client, for Python 3." +__version__ = "0.24.1" diff --git a/Backend/venv/lib/python3.12/site-packages/httpx/_api.py b/Backend/venv/lib/python3.12/site-packages/httpx/_api.py new file mode 100644 index 00000000..571289cf --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/httpx/_api.py @@ -0,0 +1,445 @@ +import typing +from contextlib import contextmanager + +from ._client import Client +from ._config import DEFAULT_TIMEOUT_CONFIG +from ._models import Response +from ._types import ( + AuthTypes, + CertTypes, + CookieTypes, + HeaderTypes, + ProxiesTypes, + QueryParamTypes, + RequestContent, + RequestData, + RequestFiles, + TimeoutTypes, + URLTypes, + VerifyTypes, +) + + +def request( + method: str, + url: URLTypes, + *, + params: typing.Optional[QueryParamTypes] = None, + content: typing.Optional[RequestContent] = None, + data: typing.Optional[RequestData] = None, + files: typing.Optional[RequestFiles] = None, + json: typing.Optional[typing.Any] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + auth: typing.Optional[AuthTypes] = None, + proxies: typing.Optional[ProxiesTypes] = None, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + follow_redirects: bool = False, + verify: VerifyTypes = True, + cert: typing.Optional[CertTypes] = None, + trust_env: bool = True, +) -> Response: + """ + Sends an HTTP request. + + **Parameters:** + + * **method** - HTTP method for the new `Request` object: `GET`, `OPTIONS`, + `HEAD`, `POST`, `PUT`, `PATCH`, or `DELETE`. + * **url** - URL for the new `Request` object. + * **params** - *(optional)* Query parameters to include in the URL, as a + string, dictionary, or sequence of two-tuples. + * **content** - *(optional)* Binary content to include in the body of the + request, as bytes or a byte iterator. + * **data** - *(optional)* Form data to include in the body of the request, + as a dictionary. + * **files** - *(optional)* A dictionary of upload files to include in the + body of the request. + * **json** - *(optional)* A JSON serializable object to include in the body + of the request. + * **headers** - *(optional)* Dictionary of HTTP headers to include in the + request. + * **cookies** - *(optional)* Dictionary of Cookie items to include in the + request. + * **auth** - *(optional)* An authentication class to use when sending the + request. + * **proxies** - *(optional)* A dictionary mapping proxy keys to proxy URLs. + * **timeout** - *(optional)* The timeout configuration to use when sending + the request. + * **follow_redirects** - *(optional)* Enables or disables HTTP redirects. + * **verify** - *(optional)* SSL certificates (a.k.a CA bundle) used to + verify the identity of requested hosts. Either `True` (default CA bundle), + a path to an SSL certificate file, an `ssl.SSLContext`, or `False` + (which will disable verification). + * **cert** - *(optional)* An SSL certificate used by the requested host + to authenticate the client. Either a path to an SSL certificate file, or + two-tuple of (certificate file, key file), or a three-tuple of (certificate + file, key file, password). + * **trust_env** - *(optional)* Enables or disables usage of environment + variables for configuration. + + **Returns:** `Response` + + Usage: + + ``` + >>> import httpx + >>> response = httpx.request('GET', 'https://httpbin.org/get') + >>> response + + ``` + """ + with Client( + cookies=cookies, + proxies=proxies, + cert=cert, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) as client: + return client.request( + method=method, + url=url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + auth=auth, + follow_redirects=follow_redirects, + ) + + +@contextmanager +def stream( + method: str, + url: URLTypes, + *, + params: typing.Optional[QueryParamTypes] = None, + content: typing.Optional[RequestContent] = None, + data: typing.Optional[RequestData] = None, + files: typing.Optional[RequestFiles] = None, + json: typing.Optional[typing.Any] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + auth: typing.Optional[AuthTypes] = None, + proxies: typing.Optional[ProxiesTypes] = None, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + follow_redirects: bool = False, + verify: VerifyTypes = True, + cert: typing.Optional[CertTypes] = None, + trust_env: bool = True, +) -> typing.Iterator[Response]: + """ + Alternative to `httpx.request()` that streams the response body + instead of loading it into memory at once. + + **Parameters**: See `httpx.request`. + + See also: [Streaming Responses][0] + + [0]: /quickstart#streaming-responses + """ + with Client( + cookies=cookies, + proxies=proxies, + cert=cert, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) as client: + with client.stream( + method=method, + url=url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + auth=auth, + follow_redirects=follow_redirects, + ) as response: + yield response + + +def get( + url: URLTypes, + *, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + auth: typing.Optional[AuthTypes] = None, + proxies: typing.Optional[ProxiesTypes] = None, + follow_redirects: bool = False, + cert: typing.Optional[CertTypes] = None, + verify: VerifyTypes = True, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + trust_env: bool = True, +) -> Response: + """ + Sends a `GET` request. + + **Parameters**: See `httpx.request`. + + Note that the `data`, `files`, `json` and `content` parameters are not available + on this function, as `GET` requests should not include a request body. + """ + return request( + "GET", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + proxies=proxies, + follow_redirects=follow_redirects, + cert=cert, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) + + +def options( + url: URLTypes, + *, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + auth: typing.Optional[AuthTypes] = None, + proxies: typing.Optional[ProxiesTypes] = None, + follow_redirects: bool = False, + cert: typing.Optional[CertTypes] = None, + verify: VerifyTypes = True, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + trust_env: bool = True, +) -> Response: + """ + Sends an `OPTIONS` request. + + **Parameters**: See `httpx.request`. + + Note that the `data`, `files`, `json` and `content` parameters are not available + on this function, as `OPTIONS` requests should not include a request body. + """ + return request( + "OPTIONS", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + proxies=proxies, + follow_redirects=follow_redirects, + cert=cert, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) + + +def head( + url: URLTypes, + *, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + auth: typing.Optional[AuthTypes] = None, + proxies: typing.Optional[ProxiesTypes] = None, + follow_redirects: bool = False, + cert: typing.Optional[CertTypes] = None, + verify: VerifyTypes = True, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + trust_env: bool = True, +) -> Response: + """ + Sends a `HEAD` request. + + **Parameters**: See `httpx.request`. + + Note that the `data`, `files`, `json` and `content` parameters are not available + on this function, as `HEAD` requests should not include a request body. + """ + return request( + "HEAD", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + proxies=proxies, + follow_redirects=follow_redirects, + cert=cert, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) + + +def post( + url: URLTypes, + *, + content: typing.Optional[RequestContent] = None, + data: typing.Optional[RequestData] = None, + files: typing.Optional[RequestFiles] = None, + json: typing.Optional[typing.Any] = None, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + auth: typing.Optional[AuthTypes] = None, + proxies: typing.Optional[ProxiesTypes] = None, + follow_redirects: bool = False, + cert: typing.Optional[CertTypes] = None, + verify: VerifyTypes = True, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + trust_env: bool = True, +) -> Response: + """ + Sends a `POST` request. + + **Parameters**: See `httpx.request`. + """ + return request( + "POST", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + proxies=proxies, + follow_redirects=follow_redirects, + cert=cert, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) + + +def put( + url: URLTypes, + *, + content: typing.Optional[RequestContent] = None, + data: typing.Optional[RequestData] = None, + files: typing.Optional[RequestFiles] = None, + json: typing.Optional[typing.Any] = None, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + auth: typing.Optional[AuthTypes] = None, + proxies: typing.Optional[ProxiesTypes] = None, + follow_redirects: bool = False, + cert: typing.Optional[CertTypes] = None, + verify: VerifyTypes = True, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + trust_env: bool = True, +) -> Response: + """ + Sends a `PUT` request. + + **Parameters**: See `httpx.request`. + """ + return request( + "PUT", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + proxies=proxies, + follow_redirects=follow_redirects, + cert=cert, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) + + +def patch( + url: URLTypes, + *, + content: typing.Optional[RequestContent] = None, + data: typing.Optional[RequestData] = None, + files: typing.Optional[RequestFiles] = None, + json: typing.Optional[typing.Any] = None, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + auth: typing.Optional[AuthTypes] = None, + proxies: typing.Optional[ProxiesTypes] = None, + follow_redirects: bool = False, + cert: typing.Optional[CertTypes] = None, + verify: VerifyTypes = True, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + trust_env: bool = True, +) -> Response: + """ + Sends a `PATCH` request. + + **Parameters**: See `httpx.request`. + """ + return request( + "PATCH", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + proxies=proxies, + follow_redirects=follow_redirects, + cert=cert, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) + + +def delete( + url: URLTypes, + *, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + auth: typing.Optional[AuthTypes] = None, + proxies: typing.Optional[ProxiesTypes] = None, + follow_redirects: bool = False, + cert: typing.Optional[CertTypes] = None, + verify: VerifyTypes = True, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + trust_env: bool = True, +) -> Response: + """ + Sends a `DELETE` request. + + **Parameters**: See `httpx.request`. + + Note that the `data`, `files`, `json` and `content` parameters are not available + on this function, as `DELETE` requests should not include a request body. + """ + return request( + "DELETE", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + proxies=proxies, + follow_redirects=follow_redirects, + cert=cert, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) diff --git a/Backend/venv/lib/python3.12/site-packages/httpx/_auth.py b/Backend/venv/lib/python3.12/site-packages/httpx/_auth.py new file mode 100644 index 00000000..1d7385d5 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/httpx/_auth.py @@ -0,0 +1,347 @@ +import hashlib +import netrc +import os +import re +import time +import typing +from base64 import b64encode +from urllib.request import parse_http_list + +from ._exceptions import ProtocolError +from ._models import Request, Response +from ._utils import to_bytes, to_str, unquote + +if typing.TYPE_CHECKING: # pragma: no cover + from hashlib import _Hash + + +class Auth: + """ + Base class for all authentication schemes. + + To implement a custom authentication scheme, subclass `Auth` and override + the `.auth_flow()` method. + + If the authentication scheme does I/O such as disk access or network calls, or uses + synchronization primitives such as locks, you should override `.sync_auth_flow()` + and/or `.async_auth_flow()` instead of `.auth_flow()` to provide specialized + implementations that will be used by `Client` and `AsyncClient` respectively. + """ + + requires_request_body = False + requires_response_body = False + + def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]: + """ + Execute the authentication flow. + + To dispatch a request, `yield` it: + + ``` + yield request + ``` + + The client will `.send()` the response back into the flow generator. You can + access it like so: + + ``` + response = yield request + ``` + + A `return` (or reaching the end of the generator) will result in the + client returning the last response obtained from the server. + + You can dispatch as many requests as is necessary. + """ + yield request + + def sync_auth_flow( + self, request: Request + ) -> typing.Generator[Request, Response, None]: + """ + Execute the authentication flow synchronously. + + By default, this defers to `.auth_flow()`. You should override this method + when the authentication scheme does I/O and/or uses concurrency primitives. + """ + if self.requires_request_body: + request.read() + + flow = self.auth_flow(request) + request = next(flow) + + while True: + response = yield request + if self.requires_response_body: + response.read() + + try: + request = flow.send(response) + except StopIteration: + break + + async def async_auth_flow( + self, request: Request + ) -> typing.AsyncGenerator[Request, Response]: + """ + Execute the authentication flow asynchronously. + + By default, this defers to `.auth_flow()`. You should override this method + when the authentication scheme does I/O and/or uses concurrency primitives. + """ + if self.requires_request_body: + await request.aread() + + flow = self.auth_flow(request) + request = next(flow) + + while True: + response = yield request + if self.requires_response_body: + await response.aread() + + try: + request = flow.send(response) + except StopIteration: + break + + +class FunctionAuth(Auth): + """ + Allows the 'auth' argument to be passed as a simple callable function, + that takes the request, and returns a new, modified request. + """ + + def __init__(self, func: typing.Callable[[Request], Request]) -> None: + self._func = func + + def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]: + yield self._func(request) + + +class BasicAuth(Auth): + """ + Allows the 'auth' argument to be passed as a (username, password) pair, + and uses HTTP Basic authentication. + """ + + def __init__( + self, username: typing.Union[str, bytes], password: typing.Union[str, bytes] + ): + self._auth_header = self._build_auth_header(username, password) + + def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]: + request.headers["Authorization"] = self._auth_header + yield request + + def _build_auth_header( + self, username: typing.Union[str, bytes], password: typing.Union[str, bytes] + ) -> str: + userpass = b":".join((to_bytes(username), to_bytes(password))) + token = b64encode(userpass).decode() + return f"Basic {token}" + + +class NetRCAuth(Auth): + """ + Use a 'netrc' file to lookup basic auth credentials based on the url host. + """ + + def __init__(self, file: typing.Optional[str] = None): + self._netrc_info = netrc.netrc(file) + + def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]: + auth_info = self._netrc_info.authenticators(request.url.host) + if auth_info is None or not auth_info[2]: + # The netrc file did not have authentication credentials for this host. + yield request + else: + # Build a basic auth header with credentials from the netrc file. + request.headers["Authorization"] = self._build_auth_header( + username=auth_info[0], password=auth_info[2] + ) + yield request + + def _build_auth_header( + self, username: typing.Union[str, bytes], password: typing.Union[str, bytes] + ) -> str: + userpass = b":".join((to_bytes(username), to_bytes(password))) + token = b64encode(userpass).decode() + return f"Basic {token}" + + +class DigestAuth(Auth): + _ALGORITHM_TO_HASH_FUNCTION: typing.Dict[str, typing.Callable[[bytes], "_Hash"]] = { + "MD5": hashlib.md5, + "MD5-SESS": hashlib.md5, + "SHA": hashlib.sha1, + "SHA-SESS": hashlib.sha1, + "SHA-256": hashlib.sha256, + "SHA-256-SESS": hashlib.sha256, + "SHA-512": hashlib.sha512, + "SHA-512-SESS": hashlib.sha512, + } + + def __init__( + self, username: typing.Union[str, bytes], password: typing.Union[str, bytes] + ) -> None: + self._username = to_bytes(username) + self._password = to_bytes(password) + self._last_challenge: typing.Optional[_DigestAuthChallenge] = None + self._nonce_count = 1 + + def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]: + if self._last_challenge: + request.headers["Authorization"] = self._build_auth_header( + request, self._last_challenge + ) + + response = yield request + + if response.status_code != 401 or "www-authenticate" not in response.headers: + # If the response is not a 401 then we don't + # need to build an authenticated request. + return + + for auth_header in response.headers.get_list("www-authenticate"): + if auth_header.lower().startswith("digest "): + break + else: + # If the response does not include a 'WWW-Authenticate: Digest ...' + # header, then we don't need to build an authenticated request. + return + + self._last_challenge = self._parse_challenge(request, response, auth_header) + self._nonce_count = 1 + + request.headers["Authorization"] = self._build_auth_header( + request, self._last_challenge + ) + yield request + + def _parse_challenge( + self, request: Request, response: Response, auth_header: str + ) -> "_DigestAuthChallenge": + """ + Returns a challenge from a Digest WWW-Authenticate header. + These take the form of: + `Digest realm="realm@host.com",qop="auth,auth-int",nonce="abc",opaque="xyz"` + """ + scheme, _, fields = auth_header.partition(" ") + + # This method should only ever have been called with a Digest auth header. + assert scheme.lower() == "digest" + + header_dict: typing.Dict[str, str] = {} + for field in parse_http_list(fields): + key, value = field.strip().split("=", 1) + header_dict[key] = unquote(value) + + try: + realm = header_dict["realm"].encode() + nonce = header_dict["nonce"].encode() + algorithm = header_dict.get("algorithm", "MD5") + opaque = header_dict["opaque"].encode() if "opaque" in header_dict else None + qop = header_dict["qop"].encode() if "qop" in header_dict else None + return _DigestAuthChallenge( + realm=realm, nonce=nonce, algorithm=algorithm, opaque=opaque, qop=qop + ) + except KeyError as exc: + message = "Malformed Digest WWW-Authenticate header" + raise ProtocolError(message, request=request) from exc + + def _build_auth_header( + self, request: Request, challenge: "_DigestAuthChallenge" + ) -> str: + hash_func = self._ALGORITHM_TO_HASH_FUNCTION[challenge.algorithm.upper()] + + def digest(data: bytes) -> bytes: + return hash_func(data).hexdigest().encode() + + A1 = b":".join((self._username, challenge.realm, self._password)) + + path = request.url.raw_path + A2 = b":".join((request.method.encode(), path)) + # TODO: implement auth-int + HA2 = digest(A2) + + nc_value = b"%08x" % self._nonce_count + cnonce = self._get_client_nonce(self._nonce_count, challenge.nonce) + self._nonce_count += 1 + + HA1 = digest(A1) + if challenge.algorithm.lower().endswith("-sess"): + HA1 = digest(b":".join((HA1, challenge.nonce, cnonce))) + + qop = self._resolve_qop(challenge.qop, request=request) + if qop is None: + digest_data = [HA1, challenge.nonce, HA2] + else: + digest_data = [challenge.nonce, nc_value, cnonce, qop, HA2] + key_digest = b":".join(digest_data) + + format_args = { + "username": self._username, + "realm": challenge.realm, + "nonce": challenge.nonce, + "uri": path, + "response": digest(b":".join((HA1, key_digest))), + "algorithm": challenge.algorithm.encode(), + } + if challenge.opaque: + format_args["opaque"] = challenge.opaque + if qop: + format_args["qop"] = b"auth" + format_args["nc"] = nc_value + format_args["cnonce"] = cnonce + + return "Digest " + self._get_header_value(format_args) + + def _get_client_nonce(self, nonce_count: int, nonce: bytes) -> bytes: + s = str(nonce_count).encode() + s += nonce + s += time.ctime().encode() + s += os.urandom(8) + + return hashlib.sha1(s).hexdigest()[:16].encode() + + def _get_header_value(self, header_fields: typing.Dict[str, bytes]) -> str: + NON_QUOTED_FIELDS = ("algorithm", "qop", "nc") + QUOTED_TEMPLATE = '{}="{}"' + NON_QUOTED_TEMPLATE = "{}={}" + + header_value = "" + for i, (field, value) in enumerate(header_fields.items()): + if i > 0: + header_value += ", " + template = ( + QUOTED_TEMPLATE + if field not in NON_QUOTED_FIELDS + else NON_QUOTED_TEMPLATE + ) + header_value += template.format(field, to_str(value)) + + return header_value + + def _resolve_qop( + self, qop: typing.Optional[bytes], request: Request + ) -> typing.Optional[bytes]: + if qop is None: + return None + qops = re.split(b", ?", qop) + if b"auth" in qops: + return b"auth" + + if qops == [b"auth-int"]: + raise NotImplementedError("Digest auth-int support is not yet implemented") + + message = f'Unexpected qop value "{qop!r}" in digest auth' + raise ProtocolError(message, request=request) + + +class _DigestAuthChallenge(typing.NamedTuple): + realm: bytes + nonce: bytes + algorithm: str + opaque: typing.Optional[bytes] + qop: typing.Optional[bytes] diff --git a/Backend/venv/lib/python3.12/site-packages/httpx/_client.py b/Backend/venv/lib/python3.12/site-packages/httpx/_client.py new file mode 100644 index 00000000..cb475e02 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/httpx/_client.py @@ -0,0 +1,2006 @@ +import datetime +import enum +import logging +import typing +import warnings +from contextlib import asynccontextmanager, contextmanager +from types import TracebackType + +from .__version__ import __version__ +from ._auth import Auth, BasicAuth, FunctionAuth +from ._config import ( + DEFAULT_LIMITS, + DEFAULT_MAX_REDIRECTS, + DEFAULT_TIMEOUT_CONFIG, + Limits, + Proxy, + Timeout, +) +from ._decoders import SUPPORTED_DECODERS +from ._exceptions import ( + InvalidURL, + RemoteProtocolError, + TooManyRedirects, + request_context, +) +from ._models import Cookies, Headers, Request, Response +from ._status_codes import codes +from ._transports.asgi import ASGITransport +from ._transports.base import AsyncBaseTransport, BaseTransport +from ._transports.default import AsyncHTTPTransport, HTTPTransport +from ._transports.wsgi import WSGITransport +from ._types import ( + AsyncByteStream, + AuthTypes, + CertTypes, + CookieTypes, + HeaderTypes, + ProxiesTypes, + QueryParamTypes, + RequestContent, + RequestData, + RequestExtensions, + RequestFiles, + SyncByteStream, + TimeoutTypes, + URLTypes, + VerifyTypes, +) +from ._urls import URL, QueryParams +from ._utils import ( + Timer, + URLPattern, + get_environment_proxies, + is_https_redirect, + same_origin, +) + +# The type annotation for @classmethod and context managers here follows PEP 484 +# https://www.python.org/dev/peps/pep-0484/#annotating-instance-and-class-methods +T = typing.TypeVar("T", bound="Client") +U = typing.TypeVar("U", bound="AsyncClient") + + +class UseClientDefault: + """ + For some parameters such as `auth=...` and `timeout=...` we need to be able + to indicate the default "unset" state, in a way that is distinctly different + to using `None`. + + The default "unset" state indicates that whatever default is set on the + client should be used. This is different to setting `None`, which + explicitly disables the parameter, possibly overriding a client default. + + For example we use `timeout=USE_CLIENT_DEFAULT` in the `request()` signature. + Omitting the `timeout` parameter will send a request using whatever default + timeout has been configured on the client. Including `timeout=None` will + ensure no timeout is used. + + Note that user code shouldn't need to use the `USE_CLIENT_DEFAULT` constant, + but it is used internally when a parameter is not included. + """ + + +USE_CLIENT_DEFAULT = UseClientDefault() + + +logger = logging.getLogger("httpx") + +USER_AGENT = f"python-httpx/{__version__}" +ACCEPT_ENCODING = ", ".join( + [key for key in SUPPORTED_DECODERS.keys() if key != "identity"] +) + + +class ClientState(enum.Enum): + # UNOPENED: + # The client has been instantiated, but has not been used to send a request, + # or been opened by entering the context of a `with` block. + UNOPENED = 1 + # OPENED: + # The client has either sent a request, or is within a `with` block. + OPENED = 2 + # CLOSED: + # The client has either exited the `with` block, or `close()` has + # been called explicitly. + CLOSED = 3 + + +class BoundSyncStream(SyncByteStream): + """ + A byte stream that is bound to a given response instance, and that + ensures the `response.elapsed` is set once the response is closed. + """ + + def __init__( + self, stream: SyncByteStream, response: Response, timer: Timer + ) -> None: + self._stream = stream + self._response = response + self._timer = timer + + def __iter__(self) -> typing.Iterator[bytes]: + for chunk in self._stream: + yield chunk + + def close(self) -> None: + seconds = self._timer.sync_elapsed() + self._response.elapsed = datetime.timedelta(seconds=seconds) + self._stream.close() + + +class BoundAsyncStream(AsyncByteStream): + """ + An async byte stream that is bound to a given response instance, and that + ensures the `response.elapsed` is set once the response is closed. + """ + + def __init__( + self, stream: AsyncByteStream, response: Response, timer: Timer + ) -> None: + self._stream = stream + self._response = response + self._timer = timer + + async def __aiter__(self) -> typing.AsyncIterator[bytes]: + async for chunk in self._stream: + yield chunk + + async def aclose(self) -> None: + seconds = await self._timer.async_elapsed() + self._response.elapsed = datetime.timedelta(seconds=seconds) + await self._stream.aclose() + + +EventHook = typing.Callable[..., typing.Any] + + +class BaseClient: + def __init__( + self, + *, + auth: typing.Optional[AuthTypes] = None, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + follow_redirects: bool = False, + max_redirects: int = DEFAULT_MAX_REDIRECTS, + event_hooks: typing.Optional[ + typing.Mapping[str, typing.List[EventHook]] + ] = None, + base_url: URLTypes = "", + trust_env: bool = True, + default_encoding: typing.Union[str, typing.Callable[[bytes], str]] = "utf-8", + ): + event_hooks = {} if event_hooks is None else event_hooks + + self._base_url = self._enforce_trailing_slash(URL(base_url)) + + self._auth = self._build_auth(auth) + self._params = QueryParams(params) + self.headers = Headers(headers) + self._cookies = Cookies(cookies) + self._timeout = Timeout(timeout) + self.follow_redirects = follow_redirects + self.max_redirects = max_redirects + self._event_hooks = { + "request": list(event_hooks.get("request", [])), + "response": list(event_hooks.get("response", [])), + } + self._trust_env = trust_env + self._default_encoding = default_encoding + self._state = ClientState.UNOPENED + + @property + def is_closed(self) -> bool: + """ + Check if the client being closed + """ + return self._state == ClientState.CLOSED + + @property + def trust_env(self) -> bool: + return self._trust_env + + def _enforce_trailing_slash(self, url: URL) -> URL: + if url.raw_path.endswith(b"/"): + return url + return url.copy_with(raw_path=url.raw_path + b"/") + + def _get_proxy_map( + self, proxies: typing.Optional[ProxiesTypes], allow_env_proxies: bool + ) -> typing.Dict[str, typing.Optional[Proxy]]: + if proxies is None: + if allow_env_proxies: + return { + key: None if url is None else Proxy(url=url) + for key, url in get_environment_proxies().items() + } + return {} + if isinstance(proxies, dict): + new_proxies = {} + for key, value in proxies.items(): + proxy = Proxy(url=value) if isinstance(value, (str, URL)) else value + new_proxies[str(key)] = proxy + return new_proxies + else: + proxy = Proxy(url=proxies) if isinstance(proxies, (str, URL)) else proxies + return {"all://": proxy} + + @property + def timeout(self) -> Timeout: + return self._timeout + + @timeout.setter + def timeout(self, timeout: TimeoutTypes) -> None: + self._timeout = Timeout(timeout) + + @property + def event_hooks(self) -> typing.Dict[str, typing.List[EventHook]]: + return self._event_hooks + + @event_hooks.setter + def event_hooks( + self, event_hooks: typing.Dict[str, typing.List[EventHook]] + ) -> None: + self._event_hooks = { + "request": list(event_hooks.get("request", [])), + "response": list(event_hooks.get("response", [])), + } + + @property + def auth(self) -> typing.Optional[Auth]: + """ + Authentication class used when none is passed at the request-level. + + See also [Authentication][0]. + + [0]: /quickstart/#authentication + """ + return self._auth + + @auth.setter + def auth(self, auth: AuthTypes) -> None: + self._auth = self._build_auth(auth) + + @property + def base_url(self) -> URL: + """ + Base URL to use when sending requests with relative URLs. + """ + return self._base_url + + @base_url.setter + def base_url(self, url: URLTypes) -> None: + self._base_url = self._enforce_trailing_slash(URL(url)) + + @property + def headers(self) -> Headers: + """ + HTTP headers to include when sending requests. + """ + return self._headers + + @headers.setter + def headers(self, headers: HeaderTypes) -> None: + client_headers = Headers( + { + b"Accept": b"*/*", + b"Accept-Encoding": ACCEPT_ENCODING.encode("ascii"), + b"Connection": b"keep-alive", + b"User-Agent": USER_AGENT.encode("ascii"), + } + ) + client_headers.update(headers) + self._headers = client_headers + + @property + def cookies(self) -> Cookies: + """ + Cookie values to include when sending requests. + """ + return self._cookies + + @cookies.setter + def cookies(self, cookies: CookieTypes) -> None: + self._cookies = Cookies(cookies) + + @property + def params(self) -> QueryParams: + """ + Query parameters to include in the URL when sending requests. + """ + return self._params + + @params.setter + def params(self, params: QueryParamTypes) -> None: + self._params = QueryParams(params) + + def build_request( + self, + method: str, + url: URLTypes, + *, + content: typing.Optional[RequestContent] = None, + data: typing.Optional[RequestData] = None, + files: typing.Optional[RequestFiles] = None, + json: typing.Optional[typing.Any] = None, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + extensions: typing.Optional[RequestExtensions] = None, + ) -> Request: + """ + Build and return a request instance. + + * The `params`, `headers` and `cookies` arguments + are merged with any values set on the client. + * The `url` argument is merged with any `base_url` set on the client. + + See also: [Request instances][0] + + [0]: /advanced/#request-instances + """ + url = self._merge_url(url) + headers = self._merge_headers(headers) + cookies = self._merge_cookies(cookies) + params = self._merge_queryparams(params) + extensions = {} if extensions is None else extensions + if "timeout" not in extensions: + timeout = ( + self.timeout + if isinstance(timeout, UseClientDefault) + else Timeout(timeout) + ) + extensions = dict(**extensions, timeout=timeout.as_dict()) + return Request( + method, + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + extensions=extensions, + ) + + def _merge_url(self, url: URLTypes) -> URL: + """ + Merge a URL argument together with any 'base_url' on the client, + to create the URL used for the outgoing request. + """ + merge_url = URL(url) + if merge_url.is_relative_url: + # To merge URLs we always append to the base URL. To get this + # behaviour correct we always ensure the base URL ends in a '/' + # separator, and strip any leading '/' from the merge URL. + # + # So, eg... + # + # >>> client = Client(base_url="https://www.example.com/subpath") + # >>> client.base_url + # URL('https://www.example.com/subpath/') + # >>> client.build_request("GET", "/path").url + # URL('https://www.example.com/subpath/path') + merge_raw_path = self.base_url.raw_path + merge_url.raw_path.lstrip(b"/") + return self.base_url.copy_with(raw_path=merge_raw_path) + return merge_url + + def _merge_cookies( + self, cookies: typing.Optional[CookieTypes] = None + ) -> typing.Optional[CookieTypes]: + """ + Merge a cookies argument together with any cookies on the client, + to create the cookies used for the outgoing request. + """ + if cookies or self.cookies: + merged_cookies = Cookies(self.cookies) + merged_cookies.update(cookies) + return merged_cookies + return cookies + + def _merge_headers( + self, headers: typing.Optional[HeaderTypes] = None + ) -> typing.Optional[HeaderTypes]: + """ + Merge a headers argument together with any headers on the client, + to create the headers used for the outgoing request. + """ + merged_headers = Headers(self.headers) + merged_headers.update(headers) + return merged_headers + + def _merge_queryparams( + self, params: typing.Optional[QueryParamTypes] = None + ) -> typing.Optional[QueryParamTypes]: + """ + Merge a queryparams argument together with any queryparams on the client, + to create the queryparams used for the outgoing request. + """ + if params or self.params: + merged_queryparams = QueryParams(self.params) + return merged_queryparams.merge(params) + return params + + def _build_auth(self, auth: typing.Optional[AuthTypes]) -> typing.Optional[Auth]: + if auth is None: + return None + elif isinstance(auth, tuple): + return BasicAuth(username=auth[0], password=auth[1]) + elif isinstance(auth, Auth): + return auth + elif callable(auth): + return FunctionAuth(func=auth) + else: + raise TypeError(f'Invalid "auth" argument: {auth!r}') + + def _build_request_auth( + self, + request: Request, + auth: typing.Union[AuthTypes, UseClientDefault, None] = USE_CLIENT_DEFAULT, + ) -> Auth: + auth = ( + self._auth if isinstance(auth, UseClientDefault) else self._build_auth(auth) + ) + + if auth is not None: + return auth + + username, password = request.url.username, request.url.password + if username or password: + return BasicAuth(username=username, password=password) + + return Auth() + + def _build_redirect_request(self, request: Request, response: Response) -> Request: + """ + Given a request and a redirect response, return a new request that + should be used to effect the redirect. + """ + method = self._redirect_method(request, response) + url = self._redirect_url(request, response) + headers = self._redirect_headers(request, url, method) + stream = self._redirect_stream(request, method) + cookies = Cookies(self.cookies) + return Request( + method=method, + url=url, + headers=headers, + cookies=cookies, + stream=stream, + extensions=request.extensions, + ) + + def _redirect_method(self, request: Request, response: Response) -> str: + """ + When being redirected we may want to change the method of the request + based on certain specs or browser behavior. + """ + method = request.method + + # https://tools.ietf.org/html/rfc7231#section-6.4.4 + if response.status_code == codes.SEE_OTHER and method != "HEAD": + method = "GET" + + # Do what the browsers do, despite standards... + # Turn 302s into GETs. + if response.status_code == codes.FOUND and method != "HEAD": + method = "GET" + + # If a POST is responded to with a 301, turn it into a GET. + # This bizarre behaviour is explained in 'requests' issue 1704. + if response.status_code == codes.MOVED_PERMANENTLY and method == "POST": + method = "GET" + + return method + + def _redirect_url(self, request: Request, response: Response) -> URL: + """ + Return the URL for the redirect to follow. + """ + location = response.headers["Location"] + + try: + url = URL(location) + except InvalidURL as exc: + raise RemoteProtocolError( + f"Invalid URL in location header: {exc}.", request=request + ) from None + + # Handle malformed 'Location' headers that are "absolute" form, have no host. + # See: https://github.com/encode/httpx/issues/771 + if url.scheme and not url.host: + url = url.copy_with(host=request.url.host) + + # Facilitate relative 'Location' headers, as allowed by RFC 7231. + # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource') + if url.is_relative_url: + url = request.url.join(url) + + # Attach previous fragment if needed (RFC 7231 7.1.2) + if request.url.fragment and not url.fragment: + url = url.copy_with(fragment=request.url.fragment) + + return url + + def _redirect_headers(self, request: Request, url: URL, method: str) -> Headers: + """ + Return the headers that should be used for the redirect request. + """ + headers = Headers(request.headers) + + if not same_origin(url, request.url): + if not is_https_redirect(request.url, url): + # Strip Authorization headers when responses are redirected + # away from the origin. (Except for direct HTTP to HTTPS redirects.) + headers.pop("Authorization", None) + + # Update the Host header. + headers["Host"] = url.netloc.decode("ascii") + + if method != request.method and method == "GET": + # If we've switch to a 'GET' request, then strip any headers which + # are only relevant to the request body. + headers.pop("Content-Length", None) + headers.pop("Transfer-Encoding", None) + + # We should use the client cookie store to determine any cookie header, + # rather than whatever was on the original outgoing request. + headers.pop("Cookie", None) + + return headers + + def _redirect_stream( + self, request: Request, method: str + ) -> typing.Optional[typing.Union[SyncByteStream, AsyncByteStream]]: + """ + Return the body that should be used for the redirect request. + """ + if method != request.method and method == "GET": + return None + + return request.stream + + +class Client(BaseClient): + """ + An HTTP client, with connection pooling, HTTP/2, redirects, cookie persistence, etc. + + It can be shared between threads. + + Usage: + + ```python + >>> client = httpx.Client() + >>> response = client.get('https://example.org') + ``` + + **Parameters:** + + * **auth** - *(optional)* An authentication class to use when sending + requests. + * **params** - *(optional)* Query parameters to include in request URLs, as + a string, dictionary, or sequence of two-tuples. + * **headers** - *(optional)* Dictionary of HTTP headers to include when + sending requests. + * **cookies** - *(optional)* Dictionary of Cookie items to include when + sending requests. + * **verify** - *(optional)* SSL certificates (a.k.a CA bundle) used to + verify the identity of requested hosts. Either `True` (default CA bundle), + a path to an SSL certificate file, an `ssl.SSLContext`, or `False` + (which will disable verification). + * **cert** - *(optional)* An SSL certificate used by the requested host + to authenticate the client. Either a path to an SSL certificate file, or + two-tuple of (certificate file, key file), or a three-tuple of (certificate + file, key file, password). + * **proxies** - *(optional)* A dictionary mapping proxy keys to proxy + URLs. + * **timeout** - *(optional)* The timeout configuration to use when sending + requests. + * **limits** - *(optional)* The limits configuration to use. + * **max_redirects** - *(optional)* The maximum number of redirect responses + that should be followed. + * **base_url** - *(optional)* A URL to use as the base when building + request URLs. + * **transport** - *(optional)* A transport class to use for sending requests + over the network. + * **app** - *(optional)* An WSGI application to send requests to, + rather than sending actual network requests. + * **trust_env** - *(optional)* Enables or disables usage of environment + variables for configuration. + * **default_encoding** - *(optional)* The default encoding to use for decoding + response text, if no charset information is included in a response Content-Type + header. Set to a callable for automatic character set detection. Default: "utf-8". + """ + + def __init__( + self, + *, + auth: typing.Optional[AuthTypes] = None, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + verify: VerifyTypes = True, + cert: typing.Optional[CertTypes] = None, + http1: bool = True, + http2: bool = False, + proxies: typing.Optional[ProxiesTypes] = None, + mounts: typing.Optional[typing.Mapping[str, BaseTransport]] = None, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + follow_redirects: bool = False, + limits: Limits = DEFAULT_LIMITS, + max_redirects: int = DEFAULT_MAX_REDIRECTS, + event_hooks: typing.Optional[ + typing.Mapping[str, typing.List[EventHook]] + ] = None, + base_url: URLTypes = "", + transport: typing.Optional[BaseTransport] = None, + app: typing.Optional[typing.Callable[..., typing.Any]] = None, + trust_env: bool = True, + default_encoding: typing.Union[str, typing.Callable[[bytes], str]] = "utf-8", + ): + super().__init__( + auth=auth, + params=params, + headers=headers, + cookies=cookies, + timeout=timeout, + follow_redirects=follow_redirects, + max_redirects=max_redirects, + event_hooks=event_hooks, + base_url=base_url, + trust_env=trust_env, + default_encoding=default_encoding, + ) + + if http2: + try: + import h2 # noqa + except ImportError: # pragma: no cover + raise ImportError( + "Using http2=True, but the 'h2' package is not installed. " + "Make sure to install httpx using `pip install httpx[http2]`." + ) from None + + allow_env_proxies = trust_env and app is None and transport is None + proxy_map = self._get_proxy_map(proxies, allow_env_proxies) + + self._transport = self._init_transport( + verify=verify, + cert=cert, + http1=http1, + http2=http2, + limits=limits, + transport=transport, + app=app, + trust_env=trust_env, + ) + self._mounts: typing.Dict[URLPattern, typing.Optional[BaseTransport]] = { + URLPattern(key): None + if proxy is None + else self._init_proxy_transport( + proxy, + verify=verify, + cert=cert, + http1=http1, + http2=http2, + limits=limits, + trust_env=trust_env, + ) + for key, proxy in proxy_map.items() + } + if mounts is not None: + self._mounts.update( + {URLPattern(key): transport for key, transport in mounts.items()} + ) + + self._mounts = dict(sorted(self._mounts.items())) + + def _init_transport( + self, + verify: VerifyTypes = True, + cert: typing.Optional[CertTypes] = None, + http1: bool = True, + http2: bool = False, + limits: Limits = DEFAULT_LIMITS, + transport: typing.Optional[BaseTransport] = None, + app: typing.Optional[typing.Callable[..., typing.Any]] = None, + trust_env: bool = True, + ) -> BaseTransport: + if transport is not None: + return transport + + if app is not None: + return WSGITransport(app=app) + + return HTTPTransport( + verify=verify, + cert=cert, + http1=http1, + http2=http2, + limits=limits, + trust_env=trust_env, + ) + + def _init_proxy_transport( + self, + proxy: Proxy, + verify: VerifyTypes = True, + cert: typing.Optional[CertTypes] = None, + http1: bool = True, + http2: bool = False, + limits: Limits = DEFAULT_LIMITS, + trust_env: bool = True, + ) -> BaseTransport: + return HTTPTransport( + verify=verify, + cert=cert, + http1=http1, + http2=http2, + limits=limits, + trust_env=trust_env, + proxy=proxy, + ) + + def _transport_for_url(self, url: URL) -> BaseTransport: + """ + Returns the transport instance that should be used for a given URL. + This will either be the standard connection pool, or a proxy. + """ + for pattern, transport in self._mounts.items(): + if pattern.matches(url): + return self._transport if transport is None else transport + + return self._transport + + def request( + self, + method: str, + url: URLTypes, + *, + content: typing.Optional[RequestContent] = None, + data: typing.Optional[RequestData] = None, + files: typing.Optional[RequestFiles] = None, + json: typing.Optional[typing.Any] = None, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + auth: typing.Union[AuthTypes, UseClientDefault, None] = USE_CLIENT_DEFAULT, + follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + extensions: typing.Optional[RequestExtensions] = None, + ) -> Response: + """ + Build and send a request. + + Equivalent to: + + ```python + request = client.build_request(...) + response = client.send(request, ...) + ``` + + See `Client.build_request()`, `Client.send()` and + [Merging of configuration][0] for how the various parameters + are merged with client-level configuration. + + [0]: /advanced/#merging-of-configuration + """ + if cookies is not None: + message = ( + "Setting per-request cookies=<...> is being deprecated, because " + "the expected behaviour on cookie persistence is ambiguous. Set " + "cookies directly on the client instance instead." + ) + warnings.warn(message, DeprecationWarning) + + request = self.build_request( + method=method, + url=url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + timeout=timeout, + extensions=extensions, + ) + return self.send(request, auth=auth, follow_redirects=follow_redirects) + + @contextmanager + def stream( + self, + method: str, + url: URLTypes, + *, + content: typing.Optional[RequestContent] = None, + data: typing.Optional[RequestData] = None, + files: typing.Optional[RequestFiles] = None, + json: typing.Optional[typing.Any] = None, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + auth: typing.Union[AuthTypes, UseClientDefault, None] = USE_CLIENT_DEFAULT, + follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + extensions: typing.Optional[RequestExtensions] = None, + ) -> typing.Iterator[Response]: + """ + Alternative to `httpx.request()` that streams the response body + instead of loading it into memory at once. + + **Parameters**: See `httpx.request`. + + See also: [Streaming Responses][0] + + [0]: /quickstart#streaming-responses + """ + request = self.build_request( + method=method, + url=url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + timeout=timeout, + extensions=extensions, + ) + response = self.send( + request=request, + auth=auth, + follow_redirects=follow_redirects, + stream=True, + ) + try: + yield response + finally: + response.close() + + def send( + self, + request: Request, + *, + stream: bool = False, + auth: typing.Union[AuthTypes, UseClientDefault, None] = USE_CLIENT_DEFAULT, + follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, + ) -> Response: + """ + Send a request. + + The request is sent as-is, unmodified. + + Typically you'll want to build one with `Client.build_request()` + so that any client-level configuration is merged into the request, + but passing an explicit `httpx.Request()` is supported as well. + + See also: [Request instances][0] + + [0]: /advanced/#request-instances + """ + if self._state == ClientState.CLOSED: + raise RuntimeError("Cannot send a request, as the client has been closed.") + + self._state = ClientState.OPENED + follow_redirects = ( + self.follow_redirects + if isinstance(follow_redirects, UseClientDefault) + else follow_redirects + ) + + auth = self._build_request_auth(request, auth) + + response = self._send_handling_auth( + request, + auth=auth, + follow_redirects=follow_redirects, + history=[], + ) + try: + if not stream: + response.read() + + return response + + except BaseException as exc: + response.close() + raise exc + + def _send_handling_auth( + self, + request: Request, + auth: Auth, + follow_redirects: bool, + history: typing.List[Response], + ) -> Response: + auth_flow = auth.sync_auth_flow(request) + try: + request = next(auth_flow) + + while True: + response = self._send_handling_redirects( + request, + follow_redirects=follow_redirects, + history=history, + ) + try: + try: + next_request = auth_flow.send(response) + except StopIteration: + return response + + response.history = list(history) + response.read() + request = next_request + history.append(response) + + except BaseException as exc: + response.close() + raise exc + finally: + auth_flow.close() + + def _send_handling_redirects( + self, + request: Request, + follow_redirects: bool, + history: typing.List[Response], + ) -> Response: + while True: + if len(history) > self.max_redirects: + raise TooManyRedirects( + "Exceeded maximum allowed redirects.", request=request + ) + + for hook in self._event_hooks["request"]: + hook(request) + + response = self._send_single_request(request) + try: + for hook in self._event_hooks["response"]: + hook(response) + response.history = list(history) + + if not response.has_redirect_location: + return response + + request = self._build_redirect_request(request, response) + history = history + [response] + + if follow_redirects: + response.read() + else: + response.next_request = request + return response + + except BaseException as exc: + response.close() + raise exc + + def _send_single_request(self, request: Request) -> Response: + """ + Sends a single request, without handling any redirections. + """ + transport = self._transport_for_url(request.url) + timer = Timer() + timer.sync_start() + + if not isinstance(request.stream, SyncByteStream): + raise RuntimeError( + "Attempted to send an async request with a sync Client instance." + ) + + with request_context(request=request): + response = transport.handle_request(request) + + assert isinstance(response.stream, SyncByteStream) + + response.request = request + response.stream = BoundSyncStream( + response.stream, response=response, timer=timer + ) + self.cookies.extract_cookies(response) + response.default_encoding = self._default_encoding + + logger.info( + 'HTTP Request: %s %s "%s %d %s"', + request.method, + request.url, + response.http_version, + response.status_code, + response.reason_phrase, + ) + + return response + + def get( + self, + url: URLTypes, + *, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + extensions: typing.Optional[RequestExtensions] = None, + ) -> Response: + """ + Send a `GET` request. + + **Parameters**: See `httpx.request`. + """ + return self.request( + "GET", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + def options( + self, + url: URLTypes, + *, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + extensions: typing.Optional[RequestExtensions] = None, + ) -> Response: + """ + Send an `OPTIONS` request. + + **Parameters**: See `httpx.request`. + """ + return self.request( + "OPTIONS", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + def head( + self, + url: URLTypes, + *, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + extensions: typing.Optional[RequestExtensions] = None, + ) -> Response: + """ + Send a `HEAD` request. + + **Parameters**: See `httpx.request`. + """ + return self.request( + "HEAD", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + def post( + self, + url: URLTypes, + *, + content: typing.Optional[RequestContent] = None, + data: typing.Optional[RequestData] = None, + files: typing.Optional[RequestFiles] = None, + json: typing.Optional[typing.Any] = None, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + extensions: typing.Optional[RequestExtensions] = None, + ) -> Response: + """ + Send a `POST` request. + + **Parameters**: See `httpx.request`. + """ + return self.request( + "POST", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + def put( + self, + url: URLTypes, + *, + content: typing.Optional[RequestContent] = None, + data: typing.Optional[RequestData] = None, + files: typing.Optional[RequestFiles] = None, + json: typing.Optional[typing.Any] = None, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + extensions: typing.Optional[RequestExtensions] = None, + ) -> Response: + """ + Send a `PUT` request. + + **Parameters**: See `httpx.request`. + """ + return self.request( + "PUT", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + def patch( + self, + url: URLTypes, + *, + content: typing.Optional[RequestContent] = None, + data: typing.Optional[RequestData] = None, + files: typing.Optional[RequestFiles] = None, + json: typing.Optional[typing.Any] = None, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + extensions: typing.Optional[RequestExtensions] = None, + ) -> Response: + """ + Send a `PATCH` request. + + **Parameters**: See `httpx.request`. + """ + return self.request( + "PATCH", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + def delete( + self, + url: URLTypes, + *, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + extensions: typing.Optional[RequestExtensions] = None, + ) -> Response: + """ + Send a `DELETE` request. + + **Parameters**: See `httpx.request`. + """ + return self.request( + "DELETE", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + def close(self) -> None: + """ + Close transport and proxies. + """ + if self._state != ClientState.CLOSED: + self._state = ClientState.CLOSED + + self._transport.close() + for transport in self._mounts.values(): + if transport is not None: + transport.close() + + def __enter__(self: T) -> T: + if self._state != ClientState.UNOPENED: + msg = { + ClientState.OPENED: "Cannot open a client instance more than once.", + ClientState.CLOSED: "Cannot reopen a client instance, once it has been closed.", + }[self._state] + raise RuntimeError(msg) + + self._state = ClientState.OPENED + + self._transport.__enter__() + for transport in self._mounts.values(): + if transport is not None: + transport.__enter__() + return self + + def __exit__( + self, + exc_type: typing.Optional[typing.Type[BaseException]] = None, + exc_value: typing.Optional[BaseException] = None, + traceback: typing.Optional[TracebackType] = None, + ) -> None: + self._state = ClientState.CLOSED + + self._transport.__exit__(exc_type, exc_value, traceback) + for transport in self._mounts.values(): + if transport is not None: + transport.__exit__(exc_type, exc_value, traceback) + + +class AsyncClient(BaseClient): + """ + An asynchronous HTTP client, with connection pooling, HTTP/2, redirects, + cookie persistence, etc. + + Usage: + + ```python + >>> async with httpx.AsyncClient() as client: + >>> response = await client.get('https://example.org') + ``` + + **Parameters:** + + * **auth** - *(optional)* An authentication class to use when sending + requests. + * **params** - *(optional)* Query parameters to include in request URLs, as + a string, dictionary, or sequence of two-tuples. + * **headers** - *(optional)* Dictionary of HTTP headers to include when + sending requests. + * **cookies** - *(optional)* Dictionary of Cookie items to include when + sending requests. + * **verify** - *(optional)* SSL certificates (a.k.a CA bundle) used to + verify the identity of requested hosts. Either `True` (default CA bundle), + a path to an SSL certificate file, an `ssl.SSLContext`, or `False` + (which will disable verification). + * **cert** - *(optional)* An SSL certificate used by the requested host + to authenticate the client. Either a path to an SSL certificate file, or + two-tuple of (certificate file, key file), or a three-tuple of (certificate + file, key file, password). + * **http2** - *(optional)* A boolean indicating if HTTP/2 support should be + enabled. Defaults to `False`. + * **proxies** - *(optional)* A dictionary mapping HTTP protocols to proxy + URLs. + * **timeout** - *(optional)* The timeout configuration to use when sending + requests. + * **limits** - *(optional)* The limits configuration to use. + * **max_redirects** - *(optional)* The maximum number of redirect responses + that should be followed. + * **base_url** - *(optional)* A URL to use as the base when building + request URLs. + * **transport** - *(optional)* A transport class to use for sending requests + over the network. + * **app** - *(optional)* An ASGI application to send requests to, + rather than sending actual network requests. + * **trust_env** - *(optional)* Enables or disables usage of environment + variables for configuration. + * **default_encoding** - *(optional)* The default encoding to use for decoding + response text, if no charset information is included in a response Content-Type + header. Set to a callable for automatic character set detection. Default: "utf-8". + """ + + def __init__( + self, + *, + auth: typing.Optional[AuthTypes] = None, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + verify: VerifyTypes = True, + cert: typing.Optional[CertTypes] = None, + http1: bool = True, + http2: bool = False, + proxies: typing.Optional[ProxiesTypes] = None, + mounts: typing.Optional[typing.Mapping[str, AsyncBaseTransport]] = None, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + follow_redirects: bool = False, + limits: Limits = DEFAULT_LIMITS, + max_redirects: int = DEFAULT_MAX_REDIRECTS, + event_hooks: typing.Optional[ + typing.Mapping[str, typing.List[typing.Callable[..., typing.Any]]] + ] = None, + base_url: URLTypes = "", + transport: typing.Optional[AsyncBaseTransport] = None, + app: typing.Optional[typing.Callable[..., typing.Any]] = None, + trust_env: bool = True, + default_encoding: typing.Union[str, typing.Callable[[bytes], str]] = "utf-8", + ): + super().__init__( + auth=auth, + params=params, + headers=headers, + cookies=cookies, + timeout=timeout, + follow_redirects=follow_redirects, + max_redirects=max_redirects, + event_hooks=event_hooks, + base_url=base_url, + trust_env=trust_env, + default_encoding=default_encoding, + ) + + if http2: + try: + import h2 # noqa + except ImportError: # pragma: no cover + raise ImportError( + "Using http2=True, but the 'h2' package is not installed. " + "Make sure to install httpx using `pip install httpx[http2]`." + ) from None + + allow_env_proxies = trust_env and app is None and transport is None + proxy_map = self._get_proxy_map(proxies, allow_env_proxies) + + self._transport = self._init_transport( + verify=verify, + cert=cert, + http1=http1, + http2=http2, + limits=limits, + transport=transport, + app=app, + trust_env=trust_env, + ) + + self._mounts: typing.Dict[URLPattern, typing.Optional[AsyncBaseTransport]] = { + URLPattern(key): None + if proxy is None + else self._init_proxy_transport( + proxy, + verify=verify, + cert=cert, + http1=http1, + http2=http2, + limits=limits, + trust_env=trust_env, + ) + for key, proxy in proxy_map.items() + } + if mounts is not None: + self._mounts.update( + {URLPattern(key): transport for key, transport in mounts.items()} + ) + self._mounts = dict(sorted(self._mounts.items())) + + def _init_transport( + self, + verify: VerifyTypes = True, + cert: typing.Optional[CertTypes] = None, + http1: bool = True, + http2: bool = False, + limits: Limits = DEFAULT_LIMITS, + transport: typing.Optional[AsyncBaseTransport] = None, + app: typing.Optional[typing.Callable[..., typing.Any]] = None, + trust_env: bool = True, + ) -> AsyncBaseTransport: + if transport is not None: + return transport + + if app is not None: + return ASGITransport(app=app) + + return AsyncHTTPTransport( + verify=verify, + cert=cert, + http1=http1, + http2=http2, + limits=limits, + trust_env=trust_env, + ) + + def _init_proxy_transport( + self, + proxy: Proxy, + verify: VerifyTypes = True, + cert: typing.Optional[CertTypes] = None, + http1: bool = True, + http2: bool = False, + limits: Limits = DEFAULT_LIMITS, + trust_env: bool = True, + ) -> AsyncBaseTransport: + return AsyncHTTPTransport( + verify=verify, + cert=cert, + http2=http2, + limits=limits, + trust_env=trust_env, + proxy=proxy, + ) + + def _transport_for_url(self, url: URL) -> AsyncBaseTransport: + """ + Returns the transport instance that should be used for a given URL. + This will either be the standard connection pool, or a proxy. + """ + for pattern, transport in self._mounts.items(): + if pattern.matches(url): + return self._transport if transport is None else transport + + return self._transport + + async def request( + self, + method: str, + url: URLTypes, + *, + content: typing.Optional[RequestContent] = None, + data: typing.Optional[RequestData] = None, + files: typing.Optional[RequestFiles] = None, + json: typing.Optional[typing.Any] = None, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + auth: typing.Union[AuthTypes, UseClientDefault, None] = USE_CLIENT_DEFAULT, + follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + extensions: typing.Optional[RequestExtensions] = None, + ) -> Response: + """ + Build and send a request. + + Equivalent to: + + ```python + request = client.build_request(...) + response = await client.send(request, ...) + ``` + + See `AsyncClient.build_request()`, `AsyncClient.send()` + and [Merging of configuration][0] for how the various parameters + are merged with client-level configuration. + + [0]: /advanced/#merging-of-configuration + """ + request = self.build_request( + method=method, + url=url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + timeout=timeout, + extensions=extensions, + ) + return await self.send(request, auth=auth, follow_redirects=follow_redirects) + + @asynccontextmanager + async def stream( + self, + method: str, + url: URLTypes, + *, + content: typing.Optional[RequestContent] = None, + data: typing.Optional[RequestData] = None, + files: typing.Optional[RequestFiles] = None, + json: typing.Optional[typing.Any] = None, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + extensions: typing.Optional[RequestExtensions] = None, + ) -> typing.AsyncIterator[Response]: + """ + Alternative to `httpx.request()` that streams the response body + instead of loading it into memory at once. + + **Parameters**: See `httpx.request`. + + See also: [Streaming Responses][0] + + [0]: /quickstart#streaming-responses + """ + request = self.build_request( + method=method, + url=url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + timeout=timeout, + extensions=extensions, + ) + response = await self.send( + request=request, + auth=auth, + follow_redirects=follow_redirects, + stream=True, + ) + try: + yield response + finally: + await response.aclose() + + async def send( + self, + request: Request, + *, + stream: bool = False, + auth: typing.Union[AuthTypes, UseClientDefault, None] = USE_CLIENT_DEFAULT, + follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, + ) -> Response: + """ + Send a request. + + The request is sent as-is, unmodified. + + Typically you'll want to build one with `AsyncClient.build_request()` + so that any client-level configuration is merged into the request, + but passing an explicit `httpx.Request()` is supported as well. + + See also: [Request instances][0] + + [0]: /advanced/#request-instances + """ + if self._state == ClientState.CLOSED: + raise RuntimeError("Cannot send a request, as the client has been closed.") + + self._state = ClientState.OPENED + follow_redirects = ( + self.follow_redirects + if isinstance(follow_redirects, UseClientDefault) + else follow_redirects + ) + + auth = self._build_request_auth(request, auth) + + response = await self._send_handling_auth( + request, + auth=auth, + follow_redirects=follow_redirects, + history=[], + ) + try: + if not stream: + await response.aread() + + return response + + except BaseException as exc: # pragma: no cover + await response.aclose() + raise exc + + async def _send_handling_auth( + self, + request: Request, + auth: Auth, + follow_redirects: bool, + history: typing.List[Response], + ) -> Response: + auth_flow = auth.async_auth_flow(request) + try: + request = await auth_flow.__anext__() + + while True: + response = await self._send_handling_redirects( + request, + follow_redirects=follow_redirects, + history=history, + ) + try: + try: + next_request = await auth_flow.asend(response) + except StopAsyncIteration: + return response + + response.history = list(history) + await response.aread() + request = next_request + history.append(response) + + except BaseException as exc: + await response.aclose() + raise exc + finally: + await auth_flow.aclose() + + async def _send_handling_redirects( + self, + request: Request, + follow_redirects: bool, + history: typing.List[Response], + ) -> Response: + while True: + if len(history) > self.max_redirects: + raise TooManyRedirects( + "Exceeded maximum allowed redirects.", request=request + ) + + for hook in self._event_hooks["request"]: + await hook(request) + + response = await self._send_single_request(request) + try: + for hook in self._event_hooks["response"]: + await hook(response) + + response.history = list(history) + + if not response.has_redirect_location: + return response + + request = self._build_redirect_request(request, response) + history = history + [response] + + if follow_redirects: + await response.aread() + else: + response.next_request = request + return response + + except BaseException as exc: + await response.aclose() + raise exc + + async def _send_single_request(self, request: Request) -> Response: + """ + Sends a single request, without handling any redirections. + """ + transport = self._transport_for_url(request.url) + timer = Timer() + await timer.async_start() + + if not isinstance(request.stream, AsyncByteStream): + raise RuntimeError( + "Attempted to send an sync request with an AsyncClient instance." + ) + + with request_context(request=request): + response = await transport.handle_async_request(request) + + assert isinstance(response.stream, AsyncByteStream) + response.request = request + response.stream = BoundAsyncStream( + response.stream, response=response, timer=timer + ) + self.cookies.extract_cookies(response) + response.default_encoding = self._default_encoding + + logger.info( + 'HTTP Request: %s %s "%s %d %s"', + request.method, + request.url, + response.http_version, + response.status_code, + response.reason_phrase, + ) + + return response + + async def get( + self, + url: URLTypes, + *, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + auth: typing.Union[AuthTypes, UseClientDefault, None] = USE_CLIENT_DEFAULT, + follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + extensions: typing.Optional[RequestExtensions] = None, + ) -> Response: + """ + Send a `GET` request. + + **Parameters**: See `httpx.request`. + """ + return await self.request( + "GET", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + async def options( + self, + url: URLTypes, + *, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + extensions: typing.Optional[RequestExtensions] = None, + ) -> Response: + """ + Send an `OPTIONS` request. + + **Parameters**: See `httpx.request`. + """ + return await self.request( + "OPTIONS", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + async def head( + self, + url: URLTypes, + *, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + extensions: typing.Optional[RequestExtensions] = None, + ) -> Response: + """ + Send a `HEAD` request. + + **Parameters**: See `httpx.request`. + """ + return await self.request( + "HEAD", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + async def post( + self, + url: URLTypes, + *, + content: typing.Optional[RequestContent] = None, + data: typing.Optional[RequestData] = None, + files: typing.Optional[RequestFiles] = None, + json: typing.Optional[typing.Any] = None, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + extensions: typing.Optional[RequestExtensions] = None, + ) -> Response: + """ + Send a `POST` request. + + **Parameters**: See `httpx.request`. + """ + return await self.request( + "POST", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + async def put( + self, + url: URLTypes, + *, + content: typing.Optional[RequestContent] = None, + data: typing.Optional[RequestData] = None, + files: typing.Optional[RequestFiles] = None, + json: typing.Optional[typing.Any] = None, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + extensions: typing.Optional[RequestExtensions] = None, + ) -> Response: + """ + Send a `PUT` request. + + **Parameters**: See `httpx.request`. + """ + return await self.request( + "PUT", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + async def patch( + self, + url: URLTypes, + *, + content: typing.Optional[RequestContent] = None, + data: typing.Optional[RequestData] = None, + files: typing.Optional[RequestFiles] = None, + json: typing.Optional[typing.Any] = None, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + extensions: typing.Optional[RequestExtensions] = None, + ) -> Response: + """ + Send a `PATCH` request. + + **Parameters**: See `httpx.request`. + """ + return await self.request( + "PATCH", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + async def delete( + self, + url: URLTypes, + *, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + extensions: typing.Optional[RequestExtensions] = None, + ) -> Response: + """ + Send a `DELETE` request. + + **Parameters**: See `httpx.request`. + """ + return await self.request( + "DELETE", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + async def aclose(self) -> None: + """ + Close transport and proxies. + """ + if self._state != ClientState.CLOSED: + self._state = ClientState.CLOSED + + await self._transport.aclose() + for proxy in self._mounts.values(): + if proxy is not None: + await proxy.aclose() + + async def __aenter__(self: U) -> U: + if self._state != ClientState.UNOPENED: + msg = { + ClientState.OPENED: "Cannot open a client instance more than once.", + ClientState.CLOSED: "Cannot reopen a client instance, once it has been closed.", + }[self._state] + raise RuntimeError(msg) + + self._state = ClientState.OPENED + + await self._transport.__aenter__() + for proxy in self._mounts.values(): + if proxy is not None: + await proxy.__aenter__() + return self + + async def __aexit__( + self, + exc_type: typing.Optional[typing.Type[BaseException]] = None, + exc_value: typing.Optional[BaseException] = None, + traceback: typing.Optional[TracebackType] = None, + ) -> None: + self._state = ClientState.CLOSED + + await self._transport.__aexit__(exc_type, exc_value, traceback) + for proxy in self._mounts.values(): + if proxy is not None: + await proxy.__aexit__(exc_type, exc_value, traceback) diff --git a/Backend/venv/lib/python3.12/site-packages/httpx/_compat.py b/Backend/venv/lib/python3.12/site-packages/httpx/_compat.py new file mode 100644 index 00000000..a9b9c630 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/httpx/_compat.py @@ -0,0 +1,43 @@ +""" +The _compat module is used for code which requires branching between different +Python environments. It is excluded from the code coverage checks. +""" +import ssl +import sys + +# Brotli support is optional +# The C bindings in `brotli` are recommended for CPython. +# The CFFI bindings in `brotlicffi` are recommended for PyPy and everything else. +try: + import brotlicffi as brotli +except ImportError: # pragma: no cover + try: + import brotli + except ImportError: + brotli = None + +if sys.version_info >= (3, 10) or ( + sys.version_info >= (3, 7) and ssl.OPENSSL_VERSION_INFO >= (1, 1, 0, 7) +): + + def set_minimum_tls_version_1_2(context: ssl.SSLContext) -> None: + # The OP_NO_SSL* and OP_NO_TLS* become deprecated in favor of + # 'SSLContext.minimum_version' from Python 3.7 onwards, however + # this attribute is not available unless the ssl module is compiled + # with OpenSSL 1.1.0g or newer. + # https://docs.python.org/3.10/library/ssl.html#ssl.SSLContext.minimum_version + # https://docs.python.org/3.7/library/ssl.html#ssl.SSLContext.minimum_version + context.minimum_version = ssl.TLSVersion.TLSv1_2 + +else: + + def set_minimum_tls_version_1_2(context: ssl.SSLContext) -> None: + # If 'minimum_version' isn't available, we configure these options with + # the older deprecated variants. + context.options |= ssl.OP_NO_SSLv2 + context.options |= ssl.OP_NO_SSLv3 + context.options |= ssl.OP_NO_TLSv1 + context.options |= ssl.OP_NO_TLSv1_1 + + +__all__ = ["brotli", "set_minimum_tls_version_1_2"] diff --git a/Backend/venv/lib/python3.12/site-packages/httpx/_config.py b/Backend/venv/lib/python3.12/site-packages/httpx/_config.py new file mode 100644 index 00000000..f46a5bfe --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/httpx/_config.py @@ -0,0 +1,369 @@ +import logging +import os +import ssl +import sys +import typing +from pathlib import Path + +import certifi + +from ._compat import set_minimum_tls_version_1_2 +from ._models import Headers +from ._types import CertTypes, HeaderTypes, TimeoutTypes, URLTypes, VerifyTypes +from ._urls import URL +from ._utils import get_ca_bundle_from_env + +DEFAULT_CIPHERS = ":".join( + [ + "ECDHE+AESGCM", + "ECDHE+CHACHA20", + "DHE+AESGCM", + "DHE+CHACHA20", + "ECDH+AESGCM", + "DH+AESGCM", + "ECDH+AES", + "DH+AES", + "RSA+AESGCM", + "RSA+AES", + "!aNULL", + "!eNULL", + "!MD5", + "!DSS", + ] +) + + +logger = logging.getLogger("httpx") + + +class UnsetType: + pass # pragma: no cover + + +UNSET = UnsetType() + + +def create_ssl_context( + cert: typing.Optional[CertTypes] = None, + verify: VerifyTypes = True, + trust_env: bool = True, + http2: bool = False, +) -> ssl.SSLContext: + return SSLConfig( + cert=cert, verify=verify, trust_env=trust_env, http2=http2 + ).ssl_context + + +class SSLConfig: + """ + SSL Configuration. + """ + + DEFAULT_CA_BUNDLE_PATH = Path(certifi.where()) + + def __init__( + self, + *, + cert: typing.Optional[CertTypes] = None, + verify: VerifyTypes = True, + trust_env: bool = True, + http2: bool = False, + ): + self.cert = cert + self.verify = verify + self.trust_env = trust_env + self.http2 = http2 + self.ssl_context = self.load_ssl_context() + + def load_ssl_context(self) -> ssl.SSLContext: + logger.debug( + "load_ssl_context verify=%r cert=%r trust_env=%r http2=%r", + self.verify, + self.cert, + self.trust_env, + self.http2, + ) + + if self.verify: + return self.load_ssl_context_verify() + return self.load_ssl_context_no_verify() + + def load_ssl_context_no_verify(self) -> ssl.SSLContext: + """ + Return an SSL context for unverified connections. + """ + context = self._create_default_ssl_context() + context.check_hostname = False + context.verify_mode = ssl.CERT_NONE + self._load_client_certs(context) + return context + + def load_ssl_context_verify(self) -> ssl.SSLContext: + """ + Return an SSL context for verified connections. + """ + if self.trust_env and self.verify is True: + ca_bundle = get_ca_bundle_from_env() + if ca_bundle is not None: + self.verify = ca_bundle + + if isinstance(self.verify, ssl.SSLContext): + # Allow passing in our own SSLContext object that's pre-configured. + context = self.verify + self._load_client_certs(context) + return context + elif isinstance(self.verify, bool): + ca_bundle_path = self.DEFAULT_CA_BUNDLE_PATH + elif Path(self.verify).exists(): + ca_bundle_path = Path(self.verify) + else: + raise IOError( + "Could not find a suitable TLS CA certificate bundle, " + "invalid path: {}".format(self.verify) + ) + + context = self._create_default_ssl_context() + context.verify_mode = ssl.CERT_REQUIRED + context.check_hostname = True + + # Signal to server support for PHA in TLS 1.3. Raises an + # AttributeError if only read-only access is implemented. + if sys.version_info >= (3, 8): # pragma: no cover + try: + context.post_handshake_auth = True + except AttributeError: # pragma: no cover + pass + + # Disable using 'commonName' for SSLContext.check_hostname + # when the 'subjectAltName' extension isn't available. + try: + context.hostname_checks_common_name = False + except AttributeError: # pragma: no cover + pass + + if ca_bundle_path.is_file(): + cafile = str(ca_bundle_path) + logger.debug("load_verify_locations cafile=%r", cafile) + context.load_verify_locations(cafile=cafile) + elif ca_bundle_path.is_dir(): + capath = str(ca_bundle_path) + logger.debug("load_verify_locations capath=%r", capath) + context.load_verify_locations(capath=capath) + + self._load_client_certs(context) + + return context + + def _create_default_ssl_context(self) -> ssl.SSLContext: + """ + Creates the default SSLContext object that's used for both verified + and unverified connections. + """ + context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) + set_minimum_tls_version_1_2(context) + context.options |= ssl.OP_NO_COMPRESSION + context.set_ciphers(DEFAULT_CIPHERS) + + if ssl.HAS_ALPN: + alpn_idents = ["http/1.1", "h2"] if self.http2 else ["http/1.1"] + context.set_alpn_protocols(alpn_idents) + + if sys.version_info >= (3, 8): # pragma: no cover + keylogfile = os.environ.get("SSLKEYLOGFILE") + if keylogfile and self.trust_env: + context.keylog_filename = keylogfile + + return context + + def _load_client_certs(self, ssl_context: ssl.SSLContext) -> None: + """ + Loads client certificates into our SSLContext object + """ + if self.cert is not None: + if isinstance(self.cert, str): + ssl_context.load_cert_chain(certfile=self.cert) + elif isinstance(self.cert, tuple) and len(self.cert) == 2: + ssl_context.load_cert_chain(certfile=self.cert[0], keyfile=self.cert[1]) + elif isinstance(self.cert, tuple) and len(self.cert) == 3: + ssl_context.load_cert_chain( + certfile=self.cert[0], + keyfile=self.cert[1], + password=self.cert[2], # type: ignore + ) + + +class Timeout: + """ + Timeout configuration. + + **Usage**: + + Timeout(None) # No timeouts. + Timeout(5.0) # 5s timeout on all operations. + Timeout(None, connect=5.0) # 5s timeout on connect, no other timeouts. + Timeout(5.0, connect=10.0) # 10s timeout on connect. 5s timeout elsewhere. + Timeout(5.0, pool=None) # No timeout on acquiring connection from pool. + # 5s timeout elsewhere. + """ + + def __init__( + self, + timeout: typing.Union[TimeoutTypes, UnsetType] = UNSET, + *, + connect: typing.Union[None, float, UnsetType] = UNSET, + read: typing.Union[None, float, UnsetType] = UNSET, + write: typing.Union[None, float, UnsetType] = UNSET, + pool: typing.Union[None, float, UnsetType] = UNSET, + ): + if isinstance(timeout, Timeout): + # Passed as a single explicit Timeout. + assert connect is UNSET + assert read is UNSET + assert write is UNSET + assert pool is UNSET + self.connect = timeout.connect # type: typing.Optional[float] + self.read = timeout.read # type: typing.Optional[float] + self.write = timeout.write # type: typing.Optional[float] + self.pool = timeout.pool # type: typing.Optional[float] + elif isinstance(timeout, tuple): + # Passed as a tuple. + self.connect = timeout[0] + self.read = timeout[1] + self.write = None if len(timeout) < 3 else timeout[2] + self.pool = None if len(timeout) < 4 else timeout[3] + elif not ( + isinstance(connect, UnsetType) + or isinstance(read, UnsetType) + or isinstance(write, UnsetType) + or isinstance(pool, UnsetType) + ): + self.connect = connect + self.read = read + self.write = write + self.pool = pool + else: + if isinstance(timeout, UnsetType): + raise ValueError( + "httpx.Timeout must either include a default, or set all " + "four parameters explicitly." + ) + self.connect = timeout if isinstance(connect, UnsetType) else connect + self.read = timeout if isinstance(read, UnsetType) else read + self.write = timeout if isinstance(write, UnsetType) else write + self.pool = timeout if isinstance(pool, UnsetType) else pool + + def as_dict(self) -> typing.Dict[str, typing.Optional[float]]: + return { + "connect": self.connect, + "read": self.read, + "write": self.write, + "pool": self.pool, + } + + def __eq__(self, other: typing.Any) -> bool: + return ( + isinstance(other, self.__class__) + and self.connect == other.connect + and self.read == other.read + and self.write == other.write + and self.pool == other.pool + ) + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + if len({self.connect, self.read, self.write, self.pool}) == 1: + return f"{class_name}(timeout={self.connect})" + return ( + f"{class_name}(connect={self.connect}, " + f"read={self.read}, write={self.write}, pool={self.pool})" + ) + + +class Limits: + """ + Configuration for limits to various client behaviors. + + **Parameters:** + + * **max_connections** - The maximum number of concurrent connections that may be + established. + * **max_keepalive_connections** - Allow the connection pool to maintain + keep-alive connections below this point. Should be less than or equal + to `max_connections`. + * **keepalive_expiry** - Time limit on idle keep-alive connections in seconds. + """ + + def __init__( + self, + *, + max_connections: typing.Optional[int] = None, + max_keepalive_connections: typing.Optional[int] = None, + keepalive_expiry: typing.Optional[float] = 5.0, + ): + self.max_connections = max_connections + self.max_keepalive_connections = max_keepalive_connections + self.keepalive_expiry = keepalive_expiry + + def __eq__(self, other: typing.Any) -> bool: + return ( + isinstance(other, self.__class__) + and self.max_connections == other.max_connections + and self.max_keepalive_connections == other.max_keepalive_connections + and self.keepalive_expiry == other.keepalive_expiry + ) + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + return ( + f"{class_name}(max_connections={self.max_connections}, " + f"max_keepalive_connections={self.max_keepalive_connections}, " + f"keepalive_expiry={self.keepalive_expiry})" + ) + + +class Proxy: + def __init__( + self, + url: URLTypes, + *, + auth: typing.Optional[typing.Tuple[str, str]] = None, + headers: typing.Optional[HeaderTypes] = None, + ): + url = URL(url) + headers = Headers(headers) + + if url.scheme not in ("http", "https", "socks5"): + raise ValueError(f"Unknown scheme for proxy URL {url!r}") + + if url.username or url.password: + # Remove any auth credentials from the URL. + auth = (url.username, url.password) + url = url.copy_with(username=None, password=None) + + self.url = url + self.auth = auth + self.headers = headers + + @property + def raw_auth(self) -> typing.Optional[typing.Tuple[bytes, bytes]]: + # The proxy authentication as raw bytes. + return ( + None + if self.auth is None + else (self.auth[0].encode("utf-8"), self.auth[1].encode("utf-8")) + ) + + def __repr__(self) -> str: + # The authentication is represented with the password component masked. + auth = (self.auth[0], "********") if self.auth else None + + # Build a nice concise representation. + url_str = f"{str(self.url)!r}" + auth_str = f", auth={auth!r}" if auth else "" + headers_str = f", headers={dict(self.headers)!r}" if self.headers else "" + return f"Proxy({url_str}{auth_str}{headers_str})" + + +DEFAULT_TIMEOUT_CONFIG = Timeout(timeout=5.0) +DEFAULT_LIMITS = Limits(max_connections=100, max_keepalive_connections=20) +DEFAULT_MAX_REDIRECTS = 20 diff --git a/Backend/venv/lib/python3.12/site-packages/httpx/_content.py b/Backend/venv/lib/python3.12/site-packages/httpx/_content.py new file mode 100644 index 00000000..b16e12d9 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/httpx/_content.py @@ -0,0 +1,238 @@ +import inspect +import warnings +from json import dumps as json_dumps +from typing import ( + Any, + AsyncIterable, + AsyncIterator, + Dict, + Iterable, + Iterator, + Mapping, + Optional, + Tuple, + Union, +) +from urllib.parse import urlencode + +from ._exceptions import StreamClosed, StreamConsumed +from ._multipart import MultipartStream +from ._types import ( + AsyncByteStream, + RequestContent, + RequestData, + RequestFiles, + ResponseContent, + SyncByteStream, +) +from ._utils import peek_filelike_length, primitive_value_to_str + + +class ByteStream(AsyncByteStream, SyncByteStream): + def __init__(self, stream: bytes) -> None: + self._stream = stream + + def __iter__(self) -> Iterator[bytes]: + yield self._stream + + async def __aiter__(self) -> AsyncIterator[bytes]: + yield self._stream + + +class IteratorByteStream(SyncByteStream): + CHUNK_SIZE = 65_536 + + def __init__(self, stream: Iterable[bytes]): + self._stream = stream + self._is_stream_consumed = False + self._is_generator = inspect.isgenerator(stream) + + def __iter__(self) -> Iterator[bytes]: + if self._is_stream_consumed and self._is_generator: + raise StreamConsumed() + + self._is_stream_consumed = True + if hasattr(self._stream, "read"): + # File-like interfaces should use 'read' directly. + chunk = self._stream.read(self.CHUNK_SIZE) + while chunk: + yield chunk + chunk = self._stream.read(self.CHUNK_SIZE) + else: + # Otherwise iterate. + for part in self._stream: + yield part + + +class AsyncIteratorByteStream(AsyncByteStream): + CHUNK_SIZE = 65_536 + + def __init__(self, stream: AsyncIterable[bytes]): + self._stream = stream + self._is_stream_consumed = False + self._is_generator = inspect.isasyncgen(stream) + + async def __aiter__(self) -> AsyncIterator[bytes]: + if self._is_stream_consumed and self._is_generator: + raise StreamConsumed() + + self._is_stream_consumed = True + if hasattr(self._stream, "aread"): + # File-like interfaces should use 'aread' directly. + chunk = await self._stream.aread(self.CHUNK_SIZE) + while chunk: + yield chunk + chunk = await self._stream.aread(self.CHUNK_SIZE) + else: + # Otherwise iterate. + async for part in self._stream: + yield part + + +class UnattachedStream(AsyncByteStream, SyncByteStream): + """ + If a request or response is serialized using pickle, then it is no longer + attached to a stream for I/O purposes. Any stream operations should result + in `httpx.StreamClosed`. + """ + + def __iter__(self) -> Iterator[bytes]: + raise StreamClosed() + + async def __aiter__(self) -> AsyncIterator[bytes]: + raise StreamClosed() + yield b"" # pragma: no cover + + +def encode_content( + content: Union[str, bytes, Iterable[bytes], AsyncIterable[bytes]] +) -> Tuple[Dict[str, str], Union[SyncByteStream, AsyncByteStream]]: + if isinstance(content, (bytes, str)): + body = content.encode("utf-8") if isinstance(content, str) else content + content_length = len(body) + headers = {"Content-Length": str(content_length)} if body else {} + return headers, ByteStream(body) + + elif isinstance(content, Iterable) and not isinstance(content, dict): + # `not isinstance(content, dict)` is a bit oddly specific, but it + # catches a case that's easy for users to make in error, and would + # otherwise pass through here, like any other bytes-iterable, + # because `dict` happens to be iterable. See issue #2491. + content_length_or_none = peek_filelike_length(content) + + if content_length_or_none is None: + headers = {"Transfer-Encoding": "chunked"} + else: + headers = {"Content-Length": str(content_length_or_none)} + return headers, IteratorByteStream(content) # type: ignore + + elif isinstance(content, AsyncIterable): + headers = {"Transfer-Encoding": "chunked"} + return headers, AsyncIteratorByteStream(content) + + raise TypeError(f"Unexpected type for 'content', {type(content)!r}") + + +def encode_urlencoded_data( + data: RequestData, +) -> Tuple[Dict[str, str], ByteStream]: + plain_data = [] + for key, value in data.items(): + if isinstance(value, (list, tuple)): + plain_data.extend([(key, primitive_value_to_str(item)) for item in value]) + else: + plain_data.append((key, primitive_value_to_str(value))) + body = urlencode(plain_data, doseq=True).encode("utf-8") + content_length = str(len(body)) + content_type = "application/x-www-form-urlencoded" + headers = {"Content-Length": content_length, "Content-Type": content_type} + return headers, ByteStream(body) + + +def encode_multipart_data( + data: RequestData, files: RequestFiles, boundary: Optional[bytes] +) -> Tuple[Dict[str, str], MultipartStream]: + multipart = MultipartStream(data=data, files=files, boundary=boundary) + headers = multipart.get_headers() + return headers, multipart + + +def encode_text(text: str) -> Tuple[Dict[str, str], ByteStream]: + body = text.encode("utf-8") + content_length = str(len(body)) + content_type = "text/plain; charset=utf-8" + headers = {"Content-Length": content_length, "Content-Type": content_type} + return headers, ByteStream(body) + + +def encode_html(html: str) -> Tuple[Dict[str, str], ByteStream]: + body = html.encode("utf-8") + content_length = str(len(body)) + content_type = "text/html; charset=utf-8" + headers = {"Content-Length": content_length, "Content-Type": content_type} + return headers, ByteStream(body) + + +def encode_json(json: Any) -> Tuple[Dict[str, str], ByteStream]: + body = json_dumps(json).encode("utf-8") + content_length = str(len(body)) + content_type = "application/json" + headers = {"Content-Length": content_length, "Content-Type": content_type} + return headers, ByteStream(body) + + +def encode_request( + content: Optional[RequestContent] = None, + data: Optional[RequestData] = None, + files: Optional[RequestFiles] = None, + json: Optional[Any] = None, + boundary: Optional[bytes] = None, +) -> Tuple[Dict[str, str], Union[SyncByteStream, AsyncByteStream]]: + """ + Handles encoding the given `content`, `data`, `files`, and `json`, + returning a two-tuple of (, ). + """ + if data is not None and not isinstance(data, Mapping): + # We prefer to separate `content=` + # for raw request content, and `data=
` for url encoded or + # multipart form content. + # + # However for compat with requests, we *do* still support + # `data=` usages. We deal with that case here, treating it + # as if `content=<...>` had been supplied instead. + message = "Use 'content=<...>' to upload raw bytes/text content." + warnings.warn(message, DeprecationWarning) + return encode_content(data) + + if content is not None: + return encode_content(content) + elif files: + return encode_multipart_data(data or {}, files, boundary) + elif data: + return encode_urlencoded_data(data) + elif json is not None: + return encode_json(json) + + return {}, ByteStream(b"") + + +def encode_response( + content: Optional[ResponseContent] = None, + text: Optional[str] = None, + html: Optional[str] = None, + json: Optional[Any] = None, +) -> Tuple[Dict[str, str], Union[SyncByteStream, AsyncByteStream]]: + """ + Handles encoding the given `content`, returning a two-tuple of + (, ). + """ + if content is not None: + return encode_content(content) + elif text is not None: + return encode_text(text) + elif html is not None: + return encode_html(html) + elif json is not None: + return encode_json(json) + + return {}, ByteStream(b"") diff --git a/Backend/venv/lib/python3.12/site-packages/httpx/_decoders.py b/Backend/venv/lib/python3.12/site-packages/httpx/_decoders.py new file mode 100644 index 00000000..500ce7ff --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/httpx/_decoders.py @@ -0,0 +1,324 @@ +""" +Handlers for Content-Encoding. + +See: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Encoding +""" +import codecs +import io +import typing +import zlib + +from ._compat import brotli +from ._exceptions import DecodingError + + +class ContentDecoder: + def decode(self, data: bytes) -> bytes: + raise NotImplementedError() # pragma: no cover + + def flush(self) -> bytes: + raise NotImplementedError() # pragma: no cover + + +class IdentityDecoder(ContentDecoder): + """ + Handle unencoded data. + """ + + def decode(self, data: bytes) -> bytes: + return data + + def flush(self) -> bytes: + return b"" + + +class DeflateDecoder(ContentDecoder): + """ + Handle 'deflate' decoding. + + See: https://stackoverflow.com/questions/1838699 + """ + + def __init__(self) -> None: + self.first_attempt = True + self.decompressor = zlib.decompressobj() + + def decode(self, data: bytes) -> bytes: + was_first_attempt = self.first_attempt + self.first_attempt = False + try: + return self.decompressor.decompress(data) + except zlib.error as exc: + if was_first_attempt: + self.decompressor = zlib.decompressobj(-zlib.MAX_WBITS) + return self.decode(data) + raise DecodingError(str(exc)) from exc + + def flush(self) -> bytes: + try: + return self.decompressor.flush() + except zlib.error as exc: # pragma: no cover + raise DecodingError(str(exc)) from exc + + +class GZipDecoder(ContentDecoder): + """ + Handle 'gzip' decoding. + + See: https://stackoverflow.com/questions/1838699 + """ + + def __init__(self) -> None: + self.decompressor = zlib.decompressobj(zlib.MAX_WBITS | 16) + + def decode(self, data: bytes) -> bytes: + try: + return self.decompressor.decompress(data) + except zlib.error as exc: + raise DecodingError(str(exc)) from exc + + def flush(self) -> bytes: + try: + return self.decompressor.flush() + except zlib.error as exc: # pragma: no cover + raise DecodingError(str(exc)) from exc + + +class BrotliDecoder(ContentDecoder): + """ + Handle 'brotli' decoding. + + Requires `pip install brotlipy`. See: https://brotlipy.readthedocs.io/ + or `pip install brotli`. See https://github.com/google/brotli + Supports both 'brotlipy' and 'Brotli' packages since they share an import + name. The top branches are for 'brotlipy' and bottom branches for 'Brotli' + """ + + def __init__(self) -> None: + if brotli is None: # pragma: no cover + raise ImportError( + "Using 'BrotliDecoder', but neither of the 'brotlicffi' or 'brotli' " + "packages have been installed. " + "Make sure to install httpx using `pip install httpx[brotli]`." + ) from None + + self.decompressor = brotli.Decompressor() + self.seen_data = False + self._decompress: typing.Callable[[bytes], bytes] + if hasattr(self.decompressor, "decompress"): + # The 'brotlicffi' package. + self._decompress = self.decompressor.decompress # pragma: no cover + else: + # The 'brotli' package. + self._decompress = self.decompressor.process # pragma: no cover + + def decode(self, data: bytes) -> bytes: + if not data: + return b"" + self.seen_data = True + try: + return self._decompress(data) + except brotli.error as exc: + raise DecodingError(str(exc)) from exc + + def flush(self) -> bytes: + if not self.seen_data: + return b"" + try: + if hasattr(self.decompressor, "finish"): + # Only available in the 'brotlicffi' package. + + # As the decompressor decompresses eagerly, this + # will never actually emit any data. However, it will potentially throw + # errors if a truncated or damaged data stream has been used. + self.decompressor.finish() # pragma: no cover + return b"" + except brotli.error as exc: # pragma: no cover + raise DecodingError(str(exc)) from exc + + +class MultiDecoder(ContentDecoder): + """ + Handle the case where multiple encodings have been applied. + """ + + def __init__(self, children: typing.Sequence[ContentDecoder]) -> None: + """ + 'children' should be a sequence of decoders in the order in which + each was applied. + """ + # Note that we reverse the order for decoding. + self.children = list(reversed(children)) + + def decode(self, data: bytes) -> bytes: + for child in self.children: + data = child.decode(data) + return data + + def flush(self) -> bytes: + data = b"" + for child in self.children: + data = child.decode(data) + child.flush() + return data + + +class ByteChunker: + """ + Handles returning byte content in fixed-size chunks. + """ + + def __init__(self, chunk_size: typing.Optional[int] = None) -> None: + self._buffer = io.BytesIO() + self._chunk_size = chunk_size + + def decode(self, content: bytes) -> typing.List[bytes]: + if self._chunk_size is None: + return [content] if content else [] + + self._buffer.write(content) + if self._buffer.tell() >= self._chunk_size: + value = self._buffer.getvalue() + chunks = [ + value[i : i + self._chunk_size] + for i in range(0, len(value), self._chunk_size) + ] + if len(chunks[-1]) == self._chunk_size: + self._buffer.seek(0) + self._buffer.truncate() + return chunks + else: + self._buffer.seek(0) + self._buffer.write(chunks[-1]) + self._buffer.truncate() + return chunks[:-1] + else: + return [] + + def flush(self) -> typing.List[bytes]: + value = self._buffer.getvalue() + self._buffer.seek(0) + self._buffer.truncate() + return [value] if value else [] + + +class TextChunker: + """ + Handles returning text content in fixed-size chunks. + """ + + def __init__(self, chunk_size: typing.Optional[int] = None) -> None: + self._buffer = io.StringIO() + self._chunk_size = chunk_size + + def decode(self, content: str) -> typing.List[str]: + if self._chunk_size is None: + return [content] + + self._buffer.write(content) + if self._buffer.tell() >= self._chunk_size: + value = self._buffer.getvalue() + chunks = [ + value[i : i + self._chunk_size] + for i in range(0, len(value), self._chunk_size) + ] + if len(chunks[-1]) == self._chunk_size: + self._buffer.seek(0) + self._buffer.truncate() + return chunks + else: + self._buffer.seek(0) + self._buffer.write(chunks[-1]) + self._buffer.truncate() + return chunks[:-1] + else: + return [] + + def flush(self) -> typing.List[str]: + value = self._buffer.getvalue() + self._buffer.seek(0) + self._buffer.truncate() + return [value] if value else [] + + +class TextDecoder: + """ + Handles incrementally decoding bytes into text + """ + + def __init__(self, encoding: str = "utf-8"): + self.decoder = codecs.getincrementaldecoder(encoding)(errors="replace") + + def decode(self, data: bytes) -> str: + return self.decoder.decode(data) + + def flush(self) -> str: + return self.decoder.decode(b"", True) + + +class LineDecoder: + """ + Handles incrementally reading lines from text. + + Has the same behaviour as the stdllib splitlines, but handling the input iteratively. + """ + + def __init__(self) -> None: + self.buffer: typing.List[str] = [] + self.trailing_cr: bool = False + + def decode(self, text: str) -> typing.List[str]: + # See https://docs.python.org/3/library/stdtypes.html#str.splitlines + NEWLINE_CHARS = "\n\r\x0b\x0c\x1c\x1d\x1e\x85\u2028\u2029" + + # We always push a trailing `\r` into the next decode iteration. + if self.trailing_cr: + text = "\r" + text + self.trailing_cr = False + if text.endswith("\r"): + self.trailing_cr = True + text = text[:-1] + + if not text: + return [] + + trailing_newline = text[-1] in NEWLINE_CHARS + lines = text.splitlines() + + if len(lines) == 1 and not trailing_newline: + # No new lines, buffer the input and continue. + self.buffer.append(lines[0]) + return [] + + if self.buffer: + # Include any existing buffer in the first portion of the + # splitlines result. + lines = ["".join(self.buffer) + lines[0]] + lines[1:] + self.buffer = [] + + if not trailing_newline: + # If the last segment of splitlines is not newline terminated, + # then drop it from our output and start a new buffer. + self.buffer = [lines.pop()] + + return lines + + def flush(self) -> typing.List[str]: + if not self.buffer and not self.trailing_cr: + return [] + + lines = ["".join(self.buffer)] + self.buffer = [] + self.trailing_cr = False + return lines + + +SUPPORTED_DECODERS = { + "identity": IdentityDecoder, + "gzip": GZipDecoder, + "deflate": DeflateDecoder, + "br": BrotliDecoder, +} + + +if brotli is None: + SUPPORTED_DECODERS.pop("br") # pragma: no cover diff --git a/Backend/venv/lib/python3.12/site-packages/httpx/_exceptions.py b/Backend/venv/lib/python3.12/site-packages/httpx/_exceptions.py new file mode 100644 index 00000000..24a4f8ab --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/httpx/_exceptions.py @@ -0,0 +1,343 @@ +""" +Our exception hierarchy: + +* HTTPError + x RequestError + + TransportError + - TimeoutException + · ConnectTimeout + · ReadTimeout + · WriteTimeout + · PoolTimeout + - NetworkError + · ConnectError + · ReadError + · WriteError + · CloseError + - ProtocolError + · LocalProtocolError + · RemoteProtocolError + - ProxyError + - UnsupportedProtocol + + DecodingError + + TooManyRedirects + x HTTPStatusError +* InvalidURL +* CookieConflict +* StreamError + x StreamConsumed + x StreamClosed + x ResponseNotRead + x RequestNotRead +""" +import contextlib +import typing + +if typing.TYPE_CHECKING: + from ._models import Request, Response # pragma: no cover + + +class HTTPError(Exception): + """ + Base class for `RequestError` and `HTTPStatusError`. + + Useful for `try...except` blocks when issuing a request, + and then calling `.raise_for_status()`. + + For example: + + ``` + try: + response = httpx.get("https://www.example.com") + response.raise_for_status() + except httpx.HTTPError as exc: + print(f"HTTP Exception for {exc.request.url} - {exc}") + ``` + """ + + def __init__(self, message: str) -> None: + super().__init__(message) + self._request: typing.Optional["Request"] = None + + @property + def request(self) -> "Request": + if self._request is None: + raise RuntimeError("The .request property has not been set.") + return self._request + + @request.setter + def request(self, request: "Request") -> None: + self._request = request + + +class RequestError(HTTPError): + """ + Base class for all exceptions that may occur when issuing a `.request()`. + """ + + def __init__( + self, message: str, *, request: typing.Optional["Request"] = None + ) -> None: + super().__init__(message) + # At the point an exception is raised we won't typically have a request + # instance to associate it with. + # + # The 'request_context' context manager is used within the Client and + # Response methods in order to ensure that any raised exceptions + # have a `.request` property set on them. + self._request = request + + +class TransportError(RequestError): + """ + Base class for all exceptions that occur at the level of the Transport API. + """ + + +# Timeout exceptions... + + +class TimeoutException(TransportError): + """ + The base class for timeout errors. + + An operation has timed out. + """ + + +class ConnectTimeout(TimeoutException): + """ + Timed out while connecting to the host. + """ + + +class ReadTimeout(TimeoutException): + """ + Timed out while receiving data from the host. + """ + + +class WriteTimeout(TimeoutException): + """ + Timed out while sending data to the host. + """ + + +class PoolTimeout(TimeoutException): + """ + Timed out waiting to acquire a connection from the pool. + """ + + +# Core networking exceptions... + + +class NetworkError(TransportError): + """ + The base class for network-related errors. + + An error occurred while interacting with the network. + """ + + +class ReadError(NetworkError): + """ + Failed to receive data from the network. + """ + + +class WriteError(NetworkError): + """ + Failed to send data through the network. + """ + + +class ConnectError(NetworkError): + """ + Failed to establish a connection. + """ + + +class CloseError(NetworkError): + """ + Failed to close a connection. + """ + + +# Other transport exceptions... + + +class ProxyError(TransportError): + """ + An error occurred while establishing a proxy connection. + """ + + +class UnsupportedProtocol(TransportError): + """ + Attempted to make a request to an unsupported protocol. + + For example issuing a request to `ftp://www.example.com`. + """ + + +class ProtocolError(TransportError): + """ + The protocol was violated. + """ + + +class LocalProtocolError(ProtocolError): + """ + A protocol was violated by the client. + + For example if the user instantiated a `Request` instance explicitly, + failed to include the mandatory `Host:` header, and then issued it directly + using `client.send()`. + """ + + +class RemoteProtocolError(ProtocolError): + """ + The protocol was violated by the server. + + For example, returning malformed HTTP. + """ + + +# Other request exceptions... + + +class DecodingError(RequestError): + """ + Decoding of the response failed, due to a malformed encoding. + """ + + +class TooManyRedirects(RequestError): + """ + Too many redirects. + """ + + +# Client errors + + +class HTTPStatusError(HTTPError): + """ + The response had an error HTTP status of 4xx or 5xx. + + May be raised when calling `response.raise_for_status()` + """ + + def __init__( + self, message: str, *, request: "Request", response: "Response" + ) -> None: + super().__init__(message) + self.request = request + self.response = response + + +class InvalidURL(Exception): + """ + URL is improperly formed or cannot be parsed. + """ + + def __init__(self, message: str) -> None: + super().__init__(message) + + +class CookieConflict(Exception): + """ + Attempted to lookup a cookie by name, but multiple cookies existed. + + Can occur when calling `response.cookies.get(...)`. + """ + + def __init__(self, message: str) -> None: + super().__init__(message) + + +# Stream exceptions... + +# These may occur as the result of a programming error, by accessing +# the request/response stream in an invalid manner. + + +class StreamError(RuntimeError): + """ + The base class for stream exceptions. + + The developer made an error in accessing the request stream in + an invalid way. + """ + + def __init__(self, message: str) -> None: + super().__init__(message) + + +class StreamConsumed(StreamError): + """ + Attempted to read or stream content, but the content has already + been streamed. + """ + + def __init__(self) -> None: + message = ( + "Attempted to read or stream some content, but the content has " + "already been streamed. For requests, this could be due to passing " + "a generator as request content, and then receiving a redirect " + "response or a secondary request as part of an authentication flow." + "For responses, this could be due to attempting to stream the response " + "content more than once." + ) + super().__init__(message) + + +class StreamClosed(StreamError): + """ + Attempted to read or stream response content, but the request has been + closed. + """ + + def __init__(self) -> None: + message = ( + "Attempted to read or stream content, but the stream has " "been closed." + ) + super().__init__(message) + + +class ResponseNotRead(StreamError): + """ + Attempted to access streaming response content, without having called `read()`. + """ + + def __init__(self) -> None: + message = "Attempted to access streaming response content, without having called `read()`." + super().__init__(message) + + +class RequestNotRead(StreamError): + """ + Attempted to access streaming request content, without having called `read()`. + """ + + def __init__(self) -> None: + message = "Attempted to access streaming request content, without having called `read()`." + super().__init__(message) + + +@contextlib.contextmanager +def request_context( + request: typing.Optional["Request"] = None, +) -> typing.Iterator[None]: + """ + A context manager that can be used to attach the given request context + to any `RequestError` exceptions that are raised within the block. + """ + try: + yield + except RequestError as exc: + if request is not None: + exc.request = request + raise exc diff --git a/Backend/venv/lib/python3.12/site-packages/httpx/_main.py b/Backend/venv/lib/python3.12/site-packages/httpx/_main.py new file mode 100644 index 00000000..7c12ce84 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/httpx/_main.py @@ -0,0 +1,506 @@ +import functools +import json +import sys +import typing + +import click +import httpcore +import pygments.lexers +import pygments.util +import rich.console +import rich.markup +import rich.progress +import rich.syntax +import rich.table + +from ._client import Client +from ._exceptions import RequestError +from ._models import Response +from ._status_codes import codes + + +def print_help() -> None: + console = rich.console.Console() + + console.print("[bold]HTTPX :butterfly:", justify="center") + console.print() + console.print("A next generation HTTP client.", justify="center") + console.print() + console.print( + "Usage: [bold]httpx[/bold] [cyan] [OPTIONS][/cyan] ", justify="left" + ) + console.print() + + table = rich.table.Table.grid(padding=1, pad_edge=True) + table.add_column("Parameter", no_wrap=True, justify="left", style="bold") + table.add_column("Description") + table.add_row( + "-m, --method [cyan]METHOD", + "Request method, such as GET, POST, PUT, PATCH, DELETE, OPTIONS, HEAD.\n" + "[Default: GET, or POST if a request body is included]", + ) + table.add_row( + "-p, --params [cyan] ...", + "Query parameters to include in the request URL.", + ) + table.add_row( + "-c, --content [cyan]TEXT", "Byte content to include in the request body." + ) + table.add_row( + "-d, --data [cyan] ...", "Form data to include in the request body." + ) + table.add_row( + "-f, --files [cyan] ...", + "Form files to include in the request body.", + ) + table.add_row("-j, --json [cyan]TEXT", "JSON data to include in the request body.") + table.add_row( + "-h, --headers [cyan] ...", + "Include additional HTTP headers in the request.", + ) + table.add_row( + "--cookies [cyan] ...", "Cookies to include in the request." + ) + table.add_row( + "--auth [cyan]", + "Username and password to include in the request. Specify '-' for the password to use " + "a password prompt. Note that using --verbose/-v will expose the Authorization " + "header, including the password encoding in a trivially reversible format.", + ) + + table.add_row( + "--proxies [cyan]URL", + "Send the request via a proxy. Should be the URL giving the proxy address.", + ) + + table.add_row( + "--timeout [cyan]FLOAT", + "Timeout value to use for network operations, such as establishing the connection, " + "reading some data, etc... [Default: 5.0]", + ) + + table.add_row("--follow-redirects", "Automatically follow redirects.") + table.add_row("--no-verify", "Disable SSL verification.") + table.add_row( + "--http2", "Send the request using HTTP/2, if the remote server supports it." + ) + + table.add_row( + "--download [cyan]FILE", + "Save the response content as a file, rather than displaying it.", + ) + + table.add_row("-v, --verbose", "Verbose output. Show request as well as response.") + table.add_row("--help", "Show this message and exit.") + console.print(table) + + +def get_lexer_for_response(response: Response) -> str: + content_type = response.headers.get("Content-Type") + if content_type is not None: + mime_type, _, _ = content_type.partition(";") + try: + return typing.cast( + str, pygments.lexers.get_lexer_for_mimetype(mime_type.strip()).name + ) + except pygments.util.ClassNotFound: # pragma: no cover + pass + return "" # pragma: no cover + + +def format_request_headers(request: httpcore.Request, http2: bool = False) -> str: + version = "HTTP/2" if http2 else "HTTP/1.1" + headers = [ + (name.lower() if http2 else name, value) for name, value in request.headers + ] + method = request.method.decode("ascii") + target = request.url.target.decode("ascii") + lines = [f"{method} {target} {version}"] + [ + f"{name.decode('ascii')}: {value.decode('ascii')}" for name, value in headers + ] + return "\n".join(lines) + + +def format_response_headers( + http_version: bytes, + status: int, + reason_phrase: typing.Optional[bytes], + headers: typing.List[typing.Tuple[bytes, bytes]], +) -> str: + version = http_version.decode("ascii") + reason = ( + codes.get_reason_phrase(status) + if reason_phrase is None + else reason_phrase.decode("ascii") + ) + lines = [f"{version} {status} {reason}"] + [ + f"{name.decode('ascii')}: {value.decode('ascii')}" for name, value in headers + ] + return "\n".join(lines) + + +def print_request_headers(request: httpcore.Request, http2: bool = False) -> None: + console = rich.console.Console() + http_text = format_request_headers(request, http2=http2) + syntax = rich.syntax.Syntax(http_text, "http", theme="ansi_dark", word_wrap=True) + console.print(syntax) + syntax = rich.syntax.Syntax("", "http", theme="ansi_dark", word_wrap=True) + console.print(syntax) + + +def print_response_headers( + http_version: bytes, + status: int, + reason_phrase: typing.Optional[bytes], + headers: typing.List[typing.Tuple[bytes, bytes]], +) -> None: + console = rich.console.Console() + http_text = format_response_headers(http_version, status, reason_phrase, headers) + syntax = rich.syntax.Syntax(http_text, "http", theme="ansi_dark", word_wrap=True) + console.print(syntax) + syntax = rich.syntax.Syntax("", "http", theme="ansi_dark", word_wrap=True) + console.print(syntax) + + +def print_response(response: Response) -> None: + console = rich.console.Console() + lexer_name = get_lexer_for_response(response) + if lexer_name: + if lexer_name.lower() == "json": + try: + data = response.json() + text = json.dumps(data, indent=4) + except ValueError: # pragma: no cover + text = response.text + else: + text = response.text + + syntax = rich.syntax.Syntax(text, lexer_name, theme="ansi_dark", word_wrap=True) + console.print(syntax) + else: + console.print(f"<{len(response.content)} bytes of binary data>") + + +_PCTRTT = typing.Tuple[typing.Tuple[str, str], ...] +_PCTRTTT = typing.Tuple[_PCTRTT, ...] +_PeerCertRetDictType = typing.Dict[str, typing.Union[str, _PCTRTTT, _PCTRTT]] + + +def format_certificate(cert: _PeerCertRetDictType) -> str: # pragma: no cover + lines = [] + for key, value in cert.items(): + if isinstance(value, (list, tuple)): + lines.append(f"* {key}:") + for item in value: + if key in ("subject", "issuer"): + for sub_item in item: + lines.append(f"* {sub_item[0]}: {sub_item[1]!r}") + elif isinstance(item, tuple) and len(item) == 2: + lines.append(f"* {item[0]}: {item[1]!r}") + else: + lines.append(f"* {item!r}") + else: + lines.append(f"* {key}: {value!r}") + return "\n".join(lines) + + +def trace( + name: str, info: typing.Mapping[str, typing.Any], verbose: bool = False +) -> None: + console = rich.console.Console() + if name == "connection.connect_tcp.started" and verbose: + host = info["host"] + console.print(f"* Connecting to {host!r}") + elif name == "connection.connect_tcp.complete" and verbose: + stream = info["return_value"] + server_addr = stream.get_extra_info("server_addr") + console.print(f"* Connected to {server_addr[0]!r} on port {server_addr[1]}") + elif name == "connection.start_tls.complete" and verbose: # pragma: no cover + stream = info["return_value"] + ssl_object = stream.get_extra_info("ssl_object") + version = ssl_object.version() + cipher = ssl_object.cipher() + server_cert = ssl_object.getpeercert() + alpn = ssl_object.selected_alpn_protocol() + console.print(f"* SSL established using {version!r} / {cipher[0]!r}") + console.print(f"* Selected ALPN protocol: {alpn!r}") + if server_cert: + console.print("* Server certificate:") + console.print(format_certificate(server_cert)) + elif name == "http11.send_request_headers.started" and verbose: + request = info["request"] + print_request_headers(request, http2=False) + elif name == "http2.send_request_headers.started" and verbose: # pragma: no cover + request = info["request"] + print_request_headers(request, http2=True) + elif name == "http11.receive_response_headers.complete": + http_version, status, reason_phrase, headers = info["return_value"] + print_response_headers(http_version, status, reason_phrase, headers) + elif name == "http2.receive_response_headers.complete": # pragma: no cover + status, headers = info["return_value"] + http_version = b"HTTP/2" + reason_phrase = None + print_response_headers(http_version, status, reason_phrase, headers) + + +def download_response(response: Response, download: typing.BinaryIO) -> None: + console = rich.console.Console() + console.print() + content_length = response.headers.get("Content-Length") + with rich.progress.Progress( + "[progress.description]{task.description}", + "[progress.percentage]{task.percentage:>3.0f}%", + rich.progress.BarColumn(bar_width=None), + rich.progress.DownloadColumn(), + rich.progress.TransferSpeedColumn(), + ) as progress: + description = f"Downloading [bold]{rich.markup.escape(download.name)}" + download_task = progress.add_task( + description, + total=int(content_length or 0), + start=content_length is not None, + ) + for chunk in response.iter_bytes(): + download.write(chunk) + progress.update(download_task, completed=response.num_bytes_downloaded) + + +def validate_json( + ctx: click.Context, + param: typing.Union[click.Option, click.Parameter], + value: typing.Any, +) -> typing.Any: + if value is None: + return None + + try: + return json.loads(value) + except json.JSONDecodeError: # pragma: no cover + raise click.BadParameter("Not valid JSON") + + +def validate_auth( + ctx: click.Context, + param: typing.Union[click.Option, click.Parameter], + value: typing.Any, +) -> typing.Any: + if value == (None, None): + return None + + username, password = value + if password == "-": # pragma: no cover + password = click.prompt("Password", hide_input=True) + return (username, password) + + +def handle_help( + ctx: click.Context, + param: typing.Union[click.Option, click.Parameter], + value: typing.Any, +) -> None: + if not value or ctx.resilient_parsing: + return + + print_help() + ctx.exit() + + +@click.command(add_help_option=False) +@click.argument("url", type=str) +@click.option( + "--method", + "-m", + "method", + type=str, + help=( + "Request method, such as GET, POST, PUT, PATCH, DELETE, OPTIONS, HEAD. " + "[Default: GET, or POST if a request body is included]" + ), +) +@click.option( + "--params", + "-p", + "params", + type=(str, str), + multiple=True, + help="Query parameters to include in the request URL.", +) +@click.option( + "--content", + "-c", + "content", + type=str, + help="Byte content to include in the request body.", +) +@click.option( + "--data", + "-d", + "data", + type=(str, str), + multiple=True, + help="Form data to include in the request body.", +) +@click.option( + "--files", + "-f", + "files", + type=(str, click.File(mode="rb")), + multiple=True, + help="Form files to include in the request body.", +) +@click.option( + "--json", + "-j", + "json", + type=str, + callback=validate_json, + help="JSON data to include in the request body.", +) +@click.option( + "--headers", + "-h", + "headers", + type=(str, str), + multiple=True, + help="Include additional HTTP headers in the request.", +) +@click.option( + "--cookies", + "cookies", + type=(str, str), + multiple=True, + help="Cookies to include in the request.", +) +@click.option( + "--auth", + "auth", + type=(str, str), + default=(None, None), + callback=validate_auth, + help=( + "Username and password to include in the request. " + "Specify '-' for the password to use a password prompt. " + "Note that using --verbose/-v will expose the Authorization header, " + "including the password encoding in a trivially reversible format." + ), +) +@click.option( + "--proxies", + "proxies", + type=str, + default=None, + help="Send the request via a proxy. Should be the URL giving the proxy address.", +) +@click.option( + "--timeout", + "timeout", + type=float, + default=5.0, + help=( + "Timeout value to use for network operations, such as establishing the " + "connection, reading some data, etc... [Default: 5.0]" + ), +) +@click.option( + "--follow-redirects", + "follow_redirects", + is_flag=True, + default=False, + help="Automatically follow redirects.", +) +@click.option( + "--no-verify", + "verify", + is_flag=True, + default=True, + help="Disable SSL verification.", +) +@click.option( + "--http2", + "http2", + type=bool, + is_flag=True, + default=False, + help="Send the request using HTTP/2, if the remote server supports it.", +) +@click.option( + "--download", + type=click.File("wb"), + help="Save the response content as a file, rather than displaying it.", +) +@click.option( + "--verbose", + "-v", + type=bool, + is_flag=True, + default=False, + help="Verbose. Show request as well as response.", +) +@click.option( + "--help", + is_flag=True, + is_eager=True, + expose_value=False, + callback=handle_help, + help="Show this message and exit.", +) +def main( + url: str, + method: str, + params: typing.List[typing.Tuple[str, str]], + content: str, + data: typing.List[typing.Tuple[str, str]], + files: typing.List[typing.Tuple[str, click.File]], + json: str, + headers: typing.List[typing.Tuple[str, str]], + cookies: typing.List[typing.Tuple[str, str]], + auth: typing.Optional[typing.Tuple[str, str]], + proxies: str, + timeout: float, + follow_redirects: bool, + verify: bool, + http2: bool, + download: typing.Optional[typing.BinaryIO], + verbose: bool, +) -> None: + """ + An HTTP command line client. + Sends a request and displays the response. + """ + if not method: + method = "POST" if content or data or files or json else "GET" + + try: + with Client( + proxies=proxies, + timeout=timeout, + verify=verify, + http2=http2, + ) as client: + with client.stream( + method, + url, + params=list(params), + content=content, + data=dict(data), + files=files, # type: ignore + json=json, + headers=headers, + cookies=dict(cookies), + auth=auth, + follow_redirects=follow_redirects, + extensions={"trace": functools.partial(trace, verbose=verbose)}, + ) as response: + if download is not None: + download_response(response, download) + else: + response.read() + if response.content: + print_response(response) + + except RequestError as exc: + console = rich.console.Console() + console.print(f"[red]{type(exc).__name__}[/red]: {exc}") + sys.exit(1) + + sys.exit(0 if response.is_success else 1) diff --git a/Backend/venv/lib/python3.12/site-packages/httpx/_models.py b/Backend/venv/lib/python3.12/site-packages/httpx/_models.py new file mode 100644 index 00000000..e0e5278c --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/httpx/_models.py @@ -0,0 +1,1209 @@ +import datetime +import email.message +import json as jsonlib +import typing +import urllib.request +from collections.abc import Mapping +from http.cookiejar import Cookie, CookieJar + +from ._content import ByteStream, UnattachedStream, encode_request, encode_response +from ._decoders import ( + SUPPORTED_DECODERS, + ByteChunker, + ContentDecoder, + IdentityDecoder, + LineDecoder, + MultiDecoder, + TextChunker, + TextDecoder, +) +from ._exceptions import ( + CookieConflict, + HTTPStatusError, + RequestNotRead, + ResponseNotRead, + StreamClosed, + StreamConsumed, + request_context, +) +from ._multipart import get_multipart_boundary_from_content_type +from ._status_codes import codes +from ._types import ( + AsyncByteStream, + CookieTypes, + HeaderTypes, + QueryParamTypes, + RequestContent, + RequestData, + RequestExtensions, + RequestFiles, + ResponseContent, + ResponseExtensions, + SyncByteStream, +) +from ._urls import URL +from ._utils import ( + guess_json_utf, + is_known_encoding, + normalize_header_key, + normalize_header_value, + obfuscate_sensitive_headers, + parse_content_type_charset, + parse_header_links, +) + + +class Headers(typing.MutableMapping[str, str]): + """ + HTTP headers, as a case-insensitive multi-dict. + """ + + def __init__( + self, + headers: typing.Optional[HeaderTypes] = None, + encoding: typing.Optional[str] = None, + ) -> None: + if headers is None: + self._list = [] # type: typing.List[typing.Tuple[bytes, bytes, bytes]] + elif isinstance(headers, Headers): + self._list = list(headers._list) + elif isinstance(headers, Mapping): + self._list = [ + ( + normalize_header_key(k, lower=False, encoding=encoding), + normalize_header_key(k, lower=True, encoding=encoding), + normalize_header_value(v, encoding), + ) + for k, v in headers.items() + ] + else: + self._list = [ + ( + normalize_header_key(k, lower=False, encoding=encoding), + normalize_header_key(k, lower=True, encoding=encoding), + normalize_header_value(v, encoding), + ) + for k, v in headers + ] + + self._encoding = encoding + + @property + def encoding(self) -> str: + """ + Header encoding is mandated as ascii, but we allow fallbacks to utf-8 + or iso-8859-1. + """ + if self._encoding is None: + for encoding in ["ascii", "utf-8"]: + for key, value in self.raw: + try: + key.decode(encoding) + value.decode(encoding) + except UnicodeDecodeError: + break + else: + # The else block runs if 'break' did not occur, meaning + # all values fitted the encoding. + self._encoding = encoding + break + else: + # The ISO-8859-1 encoding covers all 256 code points in a byte, + # so will never raise decode errors. + self._encoding = "iso-8859-1" + return self._encoding + + @encoding.setter + def encoding(self, value: str) -> None: + self._encoding = value + + @property + def raw(self) -> typing.List[typing.Tuple[bytes, bytes]]: + """ + Returns a list of the raw header items, as byte pairs. + """ + return [(raw_key, value) for raw_key, _, value in self._list] + + def keys(self) -> typing.KeysView[str]: + return {key.decode(self.encoding): None for _, key, value in self._list}.keys() + + def values(self) -> typing.ValuesView[str]: + values_dict: typing.Dict[str, str] = {} + for _, key, value in self._list: + str_key = key.decode(self.encoding) + str_value = value.decode(self.encoding) + if str_key in values_dict: + values_dict[str_key] += f", {str_value}" + else: + values_dict[str_key] = str_value + return values_dict.values() + + def items(self) -> typing.ItemsView[str, str]: + """ + Return `(key, value)` items of headers. Concatenate headers + into a single comma separated value when a key occurs multiple times. + """ + values_dict: typing.Dict[str, str] = {} + for _, key, value in self._list: + str_key = key.decode(self.encoding) + str_value = value.decode(self.encoding) + if str_key in values_dict: + values_dict[str_key] += f", {str_value}" + else: + values_dict[str_key] = str_value + return values_dict.items() + + def multi_items(self) -> typing.List[typing.Tuple[str, str]]: + """ + Return a list of `(key, value)` pairs of headers. Allow multiple + occurrences of the same key without concatenating into a single + comma separated value. + """ + return [ + (key.decode(self.encoding), value.decode(self.encoding)) + for _, key, value in self._list + ] + + def get(self, key: str, default: typing.Any = None) -> typing.Any: + """ + Return a header value. If multiple occurrences of the header occur + then concatenate them together with commas. + """ + try: + return self[key] + except KeyError: + return default + + def get_list(self, key: str, split_commas: bool = False) -> typing.List[str]: + """ + Return a list of all header values for a given key. + If `split_commas=True` is passed, then any comma separated header + values are split into multiple return strings. + """ + get_header_key = key.lower().encode(self.encoding) + + values = [ + item_value.decode(self.encoding) + for _, item_key, item_value in self._list + if item_key.lower() == get_header_key + ] + + if not split_commas: + return values + + split_values = [] + for value in values: + split_values.extend([item.strip() for item in value.split(",")]) + return split_values + + def update(self, headers: typing.Optional[HeaderTypes] = None) -> None: # type: ignore + headers = Headers(headers) + for key in headers.keys(): + if key in self: + self.pop(key) + self._list.extend(headers._list) + + def copy(self) -> "Headers": + return Headers(self, encoding=self.encoding) + + def __getitem__(self, key: str) -> str: + """ + Return a single header value. + + If there are multiple headers with the same key, then we concatenate + them with commas. See: https://tools.ietf.org/html/rfc7230#section-3.2.2 + """ + normalized_key = key.lower().encode(self.encoding) + + items = [ + header_value.decode(self.encoding) + for _, header_key, header_value in self._list + if header_key == normalized_key + ] + + if items: + return ", ".join(items) + + raise KeyError(key) + + def __setitem__(self, key: str, value: str) -> None: + """ + Set the header `key` to `value`, removing any duplicate entries. + Retains insertion order. + """ + set_key = key.encode(self._encoding or "utf-8") + set_value = value.encode(self._encoding or "utf-8") + lookup_key = set_key.lower() + + found_indexes = [ + idx + for idx, (_, item_key, _) in enumerate(self._list) + if item_key == lookup_key + ] + + for idx in reversed(found_indexes[1:]): + del self._list[idx] + + if found_indexes: + idx = found_indexes[0] + self._list[idx] = (set_key, lookup_key, set_value) + else: + self._list.append((set_key, lookup_key, set_value)) + + def __delitem__(self, key: str) -> None: + """ + Remove the header `key`. + """ + del_key = key.lower().encode(self.encoding) + + pop_indexes = [ + idx + for idx, (_, item_key, _) in enumerate(self._list) + if item_key.lower() == del_key + ] + + if not pop_indexes: + raise KeyError(key) + + for idx in reversed(pop_indexes): + del self._list[idx] + + def __contains__(self, key: typing.Any) -> bool: + header_key = key.lower().encode(self.encoding) + return header_key in [key for _, key, _ in self._list] + + def __iter__(self) -> typing.Iterator[typing.Any]: + return iter(self.keys()) + + def __len__(self) -> int: + return len(self._list) + + def __eq__(self, other: typing.Any) -> bool: + try: + other_headers = Headers(other) + except ValueError: + return False + + self_list = [(key, value) for _, key, value in self._list] + other_list = [(key, value) for _, key, value in other_headers._list] + return sorted(self_list) == sorted(other_list) + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + + encoding_str = "" + if self.encoding != "ascii": + encoding_str = f", encoding={self.encoding!r}" + + as_list = list(obfuscate_sensitive_headers(self.multi_items())) + as_dict = dict(as_list) + + no_duplicate_keys = len(as_dict) == len(as_list) + if no_duplicate_keys: + return f"{class_name}({as_dict!r}{encoding_str})" + return f"{class_name}({as_list!r}{encoding_str})" + + +class Request: + def __init__( + self, + method: typing.Union[str, bytes], + url: typing.Union["URL", str], + *, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + content: typing.Optional[RequestContent] = None, + data: typing.Optional[RequestData] = None, + files: typing.Optional[RequestFiles] = None, + json: typing.Optional[typing.Any] = None, + stream: typing.Union[SyncByteStream, AsyncByteStream, None] = None, + extensions: typing.Optional[RequestExtensions] = None, + ): + self.method = ( + method.decode("ascii").upper() + if isinstance(method, bytes) + else method.upper() + ) + self.url = URL(url) + if params is not None: + self.url = self.url.copy_merge_params(params=params) + self.headers = Headers(headers) + self.extensions = {} if extensions is None else extensions + + if cookies: + Cookies(cookies).set_cookie_header(self) + + if stream is None: + content_type: typing.Optional[str] = self.headers.get("content-type") + headers, stream = encode_request( + content=content, + data=data, + files=files, + json=json, + boundary=get_multipart_boundary_from_content_type( + content_type=content_type.encode(self.headers.encoding) + if content_type + else None + ), + ) + self._prepare(headers) + self.stream = stream + # Load the request body, except for streaming content. + if isinstance(stream, ByteStream): + self.read() + else: + # There's an important distinction between `Request(content=...)`, + # and `Request(stream=...)`. + # + # Using `content=...` implies automatically populated `Host` and content + # headers, of either `Content-Length: ...` or `Transfer-Encoding: chunked`. + # + # Using `stream=...` will not automatically include *any* auto-populated headers. + # + # As an end-user you don't really need `stream=...`. It's only + # useful when: + # + # * Preserving the request stream when copying requests, eg for redirects. + # * Creating request instances on the *server-side* of the transport API. + self.stream = stream + + def _prepare(self, default_headers: typing.Dict[str, str]) -> None: + for key, value in default_headers.items(): + # Ignore Transfer-Encoding if the Content-Length has been set explicitly. + if key.lower() == "transfer-encoding" and "Content-Length" in self.headers: + continue + self.headers.setdefault(key, value) + + auto_headers: typing.List[typing.Tuple[bytes, bytes]] = [] + + has_host = "Host" in self.headers + has_content_length = ( + "Content-Length" in self.headers or "Transfer-Encoding" in self.headers + ) + + if not has_host and self.url.host: + auto_headers.append((b"Host", self.url.netloc)) + if not has_content_length and self.method in ("POST", "PUT", "PATCH"): + auto_headers.append((b"Content-Length", b"0")) + + self.headers = Headers(auto_headers + self.headers.raw) + + @property + def content(self) -> bytes: + if not hasattr(self, "_content"): + raise RequestNotRead() + return self._content + + def read(self) -> bytes: + """ + Read and return the request content. + """ + if not hasattr(self, "_content"): + assert isinstance(self.stream, typing.Iterable) + self._content = b"".join(self.stream) + if not isinstance(self.stream, ByteStream): + # If a streaming request has been read entirely into memory, then + # we can replace the stream with a raw bytes implementation, + # to ensure that any non-replayable streams can still be used. + self.stream = ByteStream(self._content) + return self._content + + async def aread(self) -> bytes: + """ + Read and return the request content. + """ + if not hasattr(self, "_content"): + assert isinstance(self.stream, typing.AsyncIterable) + self._content = b"".join([part async for part in self.stream]) + if not isinstance(self.stream, ByteStream): + # If a streaming request has been read entirely into memory, then + # we can replace the stream with a raw bytes implementation, + # to ensure that any non-replayable streams can still be used. + self.stream = ByteStream(self._content) + return self._content + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + url = str(self.url) + return f"<{class_name}({self.method!r}, {url!r})>" + + def __getstate__(self) -> typing.Dict[str, typing.Any]: + return { + name: value + for name, value in self.__dict__.items() + if name not in ["extensions", "stream"] + } + + def __setstate__(self, state: typing.Dict[str, typing.Any]) -> None: + for name, value in state.items(): + setattr(self, name, value) + self.extensions = {} + self.stream = UnattachedStream() + + +class Response: + def __init__( + self, + status_code: int, + *, + headers: typing.Optional[HeaderTypes] = None, + content: typing.Optional[ResponseContent] = None, + text: typing.Optional[str] = None, + html: typing.Optional[str] = None, + json: typing.Any = None, + stream: typing.Union[SyncByteStream, AsyncByteStream, None] = None, + request: typing.Optional[Request] = None, + extensions: typing.Optional[ResponseExtensions] = None, + history: typing.Optional[typing.List["Response"]] = None, + default_encoding: typing.Union[str, typing.Callable[[bytes], str]] = "utf-8", + ): + self.status_code = status_code + self.headers = Headers(headers) + + self._request: typing.Optional[Request] = request + + # When follow_redirects=False and a redirect is received, + # the client will set `response.next_request`. + self.next_request: typing.Optional[Request] = None + + self.extensions = {} if extensions is None else extensions + self.history = [] if history is None else list(history) + + self.is_closed = False + self.is_stream_consumed = False + + self.default_encoding = default_encoding + + if stream is None: + headers, stream = encode_response(content, text, html, json) + self._prepare(headers) + self.stream = stream + if isinstance(stream, ByteStream): + # Load the response body, except for streaming content. + self.read() + else: + # There's an important distinction between `Response(content=...)`, + # and `Response(stream=...)`. + # + # Using `content=...` implies automatically populated content headers, + # of either `Content-Length: ...` or `Transfer-Encoding: chunked`. + # + # Using `stream=...` will not automatically include any content headers. + # + # As an end-user you don't really need `stream=...`. It's only + # useful when creating response instances having received a stream + # from the transport API. + self.stream = stream + + self._num_bytes_downloaded = 0 + + def _prepare(self, default_headers: typing.Dict[str, str]) -> None: + for key, value in default_headers.items(): + # Ignore Transfer-Encoding if the Content-Length has been set explicitly. + if key.lower() == "transfer-encoding" and "content-length" in self.headers: + continue + self.headers.setdefault(key, value) + + @property + def elapsed(self) -> datetime.timedelta: + """ + Returns the time taken for the complete request/response + cycle to complete. + """ + if not hasattr(self, "_elapsed"): + raise RuntimeError( + "'.elapsed' may only be accessed after the response " + "has been read or closed." + ) + return self._elapsed + + @elapsed.setter + def elapsed(self, elapsed: datetime.timedelta) -> None: + self._elapsed = elapsed + + @property + def request(self) -> Request: + """ + Returns the request instance associated to the current response. + """ + if self._request is None: + raise RuntimeError( + "The request instance has not been set on this response." + ) + return self._request + + @request.setter + def request(self, value: Request) -> None: + self._request = value + + @property + def http_version(self) -> str: + try: + http_version: bytes = self.extensions["http_version"] + except KeyError: + return "HTTP/1.1" + else: + return http_version.decode("ascii", errors="ignore") + + @property + def reason_phrase(self) -> str: + try: + reason_phrase: bytes = self.extensions["reason_phrase"] + except KeyError: + return codes.get_reason_phrase(self.status_code) + else: + return reason_phrase.decode("ascii", errors="ignore") + + @property + def url(self) -> URL: + """ + Returns the URL for which the request was made. + """ + return self.request.url + + @property + def content(self) -> bytes: + if not hasattr(self, "_content"): + raise ResponseNotRead() + return self._content + + @property + def text(self) -> str: + if not hasattr(self, "_text"): + content = self.content + if not content: + self._text = "" + else: + decoder = TextDecoder(encoding=self.encoding or "utf-8") + self._text = "".join([decoder.decode(self.content), decoder.flush()]) + return self._text + + @property + def encoding(self) -> typing.Optional[str]: + """ + Return an encoding to use for decoding the byte content into text. + The priority for determining this is given by... + + * `.encoding = <>` has been set explicitly. + * The encoding as specified by the charset parameter in the Content-Type header. + * The encoding as determined by `default_encoding`, which may either be + a string like "utf-8" indicating the encoding to use, or may be a callable + which enables charset autodetection. + """ + if not hasattr(self, "_encoding"): + encoding = self.charset_encoding + if encoding is None or not is_known_encoding(encoding): + if isinstance(self.default_encoding, str): + encoding = self.default_encoding + elif hasattr(self, "_content"): + encoding = self.default_encoding(self._content) + self._encoding = encoding or "utf-8" + return self._encoding + + @encoding.setter + def encoding(self, value: str) -> None: + self._encoding = value + + @property + def charset_encoding(self) -> typing.Optional[str]: + """ + Return the encoding, as specified by the Content-Type header. + """ + content_type = self.headers.get("Content-Type") + if content_type is None: + return None + + return parse_content_type_charset(content_type) + + def _get_content_decoder(self) -> ContentDecoder: + """ + Returns a decoder instance which can be used to decode the raw byte + content, depending on the Content-Encoding used in the response. + """ + if not hasattr(self, "_decoder"): + decoders: typing.List[ContentDecoder] = [] + values = self.headers.get_list("content-encoding", split_commas=True) + for value in values: + value = value.strip().lower() + try: + decoder_cls = SUPPORTED_DECODERS[value] + decoders.append(decoder_cls()) + except KeyError: + continue + + if len(decoders) == 1: + self._decoder = decoders[0] + elif len(decoders) > 1: + self._decoder = MultiDecoder(children=decoders) + else: + self._decoder = IdentityDecoder() + + return self._decoder + + @property + def is_informational(self) -> bool: + """ + A property which is `True` for 1xx status codes, `False` otherwise. + """ + return codes.is_informational(self.status_code) + + @property + def is_success(self) -> bool: + """ + A property which is `True` for 2xx status codes, `False` otherwise. + """ + return codes.is_success(self.status_code) + + @property + def is_redirect(self) -> bool: + """ + A property which is `True` for 3xx status codes, `False` otherwise. + + Note that not all responses with a 3xx status code indicate a URL redirect. + + Use `response.has_redirect_location` to determine responses with a properly + formed URL redirection. + """ + return codes.is_redirect(self.status_code) + + @property + def is_client_error(self) -> bool: + """ + A property which is `True` for 4xx status codes, `False` otherwise. + """ + return codes.is_client_error(self.status_code) + + @property + def is_server_error(self) -> bool: + """ + A property which is `True` for 5xx status codes, `False` otherwise. + """ + return codes.is_server_error(self.status_code) + + @property + def is_error(self) -> bool: + """ + A property which is `True` for 4xx and 5xx status codes, `False` otherwise. + """ + return codes.is_error(self.status_code) + + @property + def has_redirect_location(self) -> bool: + """ + Returns True for 3xx responses with a properly formed URL redirection, + `False` otherwise. + """ + return ( + self.status_code + in ( + # 301 (Cacheable redirect. Method may change to GET.) + codes.MOVED_PERMANENTLY, + # 302 (Uncacheable redirect. Method may change to GET.) + codes.FOUND, + # 303 (Client should make a GET or HEAD request.) + codes.SEE_OTHER, + # 307 (Equiv. 302, but retain method) + codes.TEMPORARY_REDIRECT, + # 308 (Equiv. 301, but retain method) + codes.PERMANENT_REDIRECT, + ) + and "Location" in self.headers + ) + + def raise_for_status(self) -> None: + """ + Raise the `HTTPStatusError` if one occurred. + """ + request = self._request + if request is None: + raise RuntimeError( + "Cannot call `raise_for_status` as the request " + "instance has not been set on this response." + ) + + if self.is_success: + return + + if self.has_redirect_location: + message = ( + "{error_type} '{0.status_code} {0.reason_phrase}' for url '{0.url}'\n" + "Redirect location: '{0.headers[location]}'\n" + "For more information check: https://httpstatuses.com/{0.status_code}" + ) + else: + message = ( + "{error_type} '{0.status_code} {0.reason_phrase}' for url '{0.url}'\n" + "For more information check: https://httpstatuses.com/{0.status_code}" + ) + + status_class = self.status_code // 100 + error_types = { + 1: "Informational response", + 3: "Redirect response", + 4: "Client error", + 5: "Server error", + } + error_type = error_types.get(status_class, "Invalid status code") + message = message.format(self, error_type=error_type) + raise HTTPStatusError(message, request=request, response=self) + + def json(self, **kwargs: typing.Any) -> typing.Any: + if self.charset_encoding is None and self.content and len(self.content) > 3: + encoding = guess_json_utf(self.content) + if encoding is not None: + return jsonlib.loads(self.content.decode(encoding), **kwargs) + return jsonlib.loads(self.text, **kwargs) + + @property + def cookies(self) -> "Cookies": + if not hasattr(self, "_cookies"): + self._cookies = Cookies() + self._cookies.extract_cookies(self) + return self._cookies + + @property + def links(self) -> typing.Dict[typing.Optional[str], typing.Dict[str, str]]: + """ + Returns the parsed header links of the response, if any + """ + header = self.headers.get("link") + ldict = {} + if header: + links = parse_header_links(header) + for link in links: + key = link.get("rel") or link.get("url") + ldict[key] = link + return ldict + + @property + def num_bytes_downloaded(self) -> int: + return self._num_bytes_downloaded + + def __repr__(self) -> str: + return f"" + + def __getstate__(self) -> typing.Dict[str, typing.Any]: + return { + name: value + for name, value in self.__dict__.items() + if name not in ["extensions", "stream", "is_closed", "_decoder"] + } + + def __setstate__(self, state: typing.Dict[str, typing.Any]) -> None: + for name, value in state.items(): + setattr(self, name, value) + self.is_closed = True + self.extensions = {} + self.stream = UnattachedStream() + + def read(self) -> bytes: + """ + Read and return the response content. + """ + if not hasattr(self, "_content"): + self._content = b"".join(self.iter_bytes()) + return self._content + + def iter_bytes( + self, chunk_size: typing.Optional[int] = None + ) -> typing.Iterator[bytes]: + """ + A byte-iterator over the decoded response content. + This allows us to handle gzip, deflate, and brotli encoded responses. + """ + if hasattr(self, "_content"): + chunk_size = len(self._content) if chunk_size is None else chunk_size + for i in range(0, len(self._content), max(chunk_size, 1)): + yield self._content[i : i + chunk_size] + else: + decoder = self._get_content_decoder() + chunker = ByteChunker(chunk_size=chunk_size) + with request_context(request=self._request): + for raw_bytes in self.iter_raw(): + decoded = decoder.decode(raw_bytes) + for chunk in chunker.decode(decoded): + yield chunk + decoded = decoder.flush() + for chunk in chunker.decode(decoded): + yield chunk # pragma: no cover + for chunk in chunker.flush(): + yield chunk + + def iter_text( + self, chunk_size: typing.Optional[int] = None + ) -> typing.Iterator[str]: + """ + A str-iterator over the decoded response content + that handles both gzip, deflate, etc but also detects the content's + string encoding. + """ + decoder = TextDecoder(encoding=self.encoding or "utf-8") + chunker = TextChunker(chunk_size=chunk_size) + with request_context(request=self._request): + for byte_content in self.iter_bytes(): + text_content = decoder.decode(byte_content) + for chunk in chunker.decode(text_content): + yield chunk + text_content = decoder.flush() + for chunk in chunker.decode(text_content): + yield chunk + for chunk in chunker.flush(): + yield chunk + + def iter_lines(self) -> typing.Iterator[str]: + decoder = LineDecoder() + with request_context(request=self._request): + for text in self.iter_text(): + for line in decoder.decode(text): + yield line + for line in decoder.flush(): + yield line + + def iter_raw( + self, chunk_size: typing.Optional[int] = None + ) -> typing.Iterator[bytes]: + """ + A byte-iterator over the raw response content. + """ + if self.is_stream_consumed: + raise StreamConsumed() + if self.is_closed: + raise StreamClosed() + if not isinstance(self.stream, SyncByteStream): + raise RuntimeError("Attempted to call a sync iterator on an async stream.") + + self.is_stream_consumed = True + self._num_bytes_downloaded = 0 + chunker = ByteChunker(chunk_size=chunk_size) + + with request_context(request=self._request): + for raw_stream_bytes in self.stream: + self._num_bytes_downloaded += len(raw_stream_bytes) + for chunk in chunker.decode(raw_stream_bytes): + yield chunk + + for chunk in chunker.flush(): + yield chunk + + self.close() + + def close(self) -> None: + """ + Close the response and release the connection. + Automatically called if the response body is read to completion. + """ + if not isinstance(self.stream, SyncByteStream): + raise RuntimeError("Attempted to call an sync close on an async stream.") + + if not self.is_closed: + self.is_closed = True + with request_context(request=self._request): + self.stream.close() + + async def aread(self) -> bytes: + """ + Read and return the response content. + """ + if not hasattr(self, "_content"): + self._content = b"".join([part async for part in self.aiter_bytes()]) + return self._content + + async def aiter_bytes( + self, chunk_size: typing.Optional[int] = None + ) -> typing.AsyncIterator[bytes]: + """ + A byte-iterator over the decoded response content. + This allows us to handle gzip, deflate, and brotli encoded responses. + """ + if hasattr(self, "_content"): + chunk_size = len(self._content) if chunk_size is None else chunk_size + for i in range(0, len(self._content), max(chunk_size, 1)): + yield self._content[i : i + chunk_size] + else: + decoder = self._get_content_decoder() + chunker = ByteChunker(chunk_size=chunk_size) + with request_context(request=self._request): + async for raw_bytes in self.aiter_raw(): + decoded = decoder.decode(raw_bytes) + for chunk in chunker.decode(decoded): + yield chunk + decoded = decoder.flush() + for chunk in chunker.decode(decoded): + yield chunk # pragma: no cover + for chunk in chunker.flush(): + yield chunk + + async def aiter_text( + self, chunk_size: typing.Optional[int] = None + ) -> typing.AsyncIterator[str]: + """ + A str-iterator over the decoded response content + that handles both gzip, deflate, etc but also detects the content's + string encoding. + """ + decoder = TextDecoder(encoding=self.encoding or "utf-8") + chunker = TextChunker(chunk_size=chunk_size) + with request_context(request=self._request): + async for byte_content in self.aiter_bytes(): + text_content = decoder.decode(byte_content) + for chunk in chunker.decode(text_content): + yield chunk + text_content = decoder.flush() + for chunk in chunker.decode(text_content): + yield chunk + for chunk in chunker.flush(): + yield chunk + + async def aiter_lines(self) -> typing.AsyncIterator[str]: + decoder = LineDecoder() + with request_context(request=self._request): + async for text in self.aiter_text(): + for line in decoder.decode(text): + yield line + for line in decoder.flush(): + yield line + + async def aiter_raw( + self, chunk_size: typing.Optional[int] = None + ) -> typing.AsyncIterator[bytes]: + """ + A byte-iterator over the raw response content. + """ + if self.is_stream_consumed: + raise StreamConsumed() + if self.is_closed: + raise StreamClosed() + if not isinstance(self.stream, AsyncByteStream): + raise RuntimeError("Attempted to call an async iterator on an sync stream.") + + self.is_stream_consumed = True + self._num_bytes_downloaded = 0 + chunker = ByteChunker(chunk_size=chunk_size) + + with request_context(request=self._request): + async for raw_stream_bytes in self.stream: + self._num_bytes_downloaded += len(raw_stream_bytes) + for chunk in chunker.decode(raw_stream_bytes): + yield chunk + + for chunk in chunker.flush(): + yield chunk + + await self.aclose() + + async def aclose(self) -> None: + """ + Close the response and release the connection. + Automatically called if the response body is read to completion. + """ + if not isinstance(self.stream, AsyncByteStream): + raise RuntimeError("Attempted to call an async close on an sync stream.") + + if not self.is_closed: + self.is_closed = True + with request_context(request=self._request): + await self.stream.aclose() + + +class Cookies(typing.MutableMapping[str, str]): + """ + HTTP Cookies, as a mutable mapping. + """ + + def __init__(self, cookies: typing.Optional[CookieTypes] = None) -> None: + if cookies is None or isinstance(cookies, dict): + self.jar = CookieJar() + if isinstance(cookies, dict): + for key, value in cookies.items(): + self.set(key, value) + elif isinstance(cookies, list): + self.jar = CookieJar() + for key, value in cookies: + self.set(key, value) + elif isinstance(cookies, Cookies): + self.jar = CookieJar() + for cookie in cookies.jar: + self.jar.set_cookie(cookie) + else: + self.jar = cookies + + def extract_cookies(self, response: Response) -> None: + """ + Loads any cookies based on the response `Set-Cookie` headers. + """ + urllib_response = self._CookieCompatResponse(response) + urllib_request = self._CookieCompatRequest(response.request) + + self.jar.extract_cookies(urllib_response, urllib_request) # type: ignore + + def set_cookie_header(self, request: Request) -> None: + """ + Sets an appropriate 'Cookie:' HTTP header on the `Request`. + """ + urllib_request = self._CookieCompatRequest(request) + self.jar.add_cookie_header(urllib_request) + + def set(self, name: str, value: str, domain: str = "", path: str = "/") -> None: + """ + Set a cookie value by name. May optionally include domain and path. + """ + kwargs = { + "version": 0, + "name": name, + "value": value, + "port": None, + "port_specified": False, + "domain": domain, + "domain_specified": bool(domain), + "domain_initial_dot": domain.startswith("."), + "path": path, + "path_specified": bool(path), + "secure": False, + "expires": None, + "discard": True, + "comment": None, + "comment_url": None, + "rest": {"HttpOnly": None}, + "rfc2109": False, + } + cookie = Cookie(**kwargs) # type: ignore + self.jar.set_cookie(cookie) + + def get( # type: ignore + self, + name: str, + default: typing.Optional[str] = None, + domain: typing.Optional[str] = None, + path: typing.Optional[str] = None, + ) -> typing.Optional[str]: + """ + Get a cookie by name. May optionally include domain and path + in order to specify exactly which cookie to retrieve. + """ + value = None + for cookie in self.jar: + if cookie.name == name: + if domain is None or cookie.domain == domain: + if path is None or cookie.path == path: + if value is not None: + message = f"Multiple cookies exist with name={name}" + raise CookieConflict(message) + value = cookie.value + + if value is None: + return default + return value + + def delete( + self, + name: str, + domain: typing.Optional[str] = None, + path: typing.Optional[str] = None, + ) -> None: + """ + Delete a cookie by name. May optionally include domain and path + in order to specify exactly which cookie to delete. + """ + if domain is not None and path is not None: + return self.jar.clear(domain, path, name) + + remove = [ + cookie + for cookie in self.jar + if cookie.name == name + and (domain is None or cookie.domain == domain) + and (path is None or cookie.path == path) + ] + + for cookie in remove: + self.jar.clear(cookie.domain, cookie.path, cookie.name) + + def clear( + self, domain: typing.Optional[str] = None, path: typing.Optional[str] = None + ) -> None: + """ + Delete all cookies. Optionally include a domain and path in + order to only delete a subset of all the cookies. + """ + args = [] + if domain is not None: + args.append(domain) + if path is not None: + assert domain is not None + args.append(path) + self.jar.clear(*args) + + def update(self, cookies: typing.Optional[CookieTypes] = None) -> None: # type: ignore + cookies = Cookies(cookies) + for cookie in cookies.jar: + self.jar.set_cookie(cookie) + + def __setitem__(self, name: str, value: str) -> None: + return self.set(name, value) + + def __getitem__(self, name: str) -> str: + value = self.get(name) + if value is None: + raise KeyError(name) + return value + + def __delitem__(self, name: str) -> None: + return self.delete(name) + + def __len__(self) -> int: + return len(self.jar) + + def __iter__(self) -> typing.Iterator[str]: + return (cookie.name for cookie in self.jar) + + def __bool__(self) -> bool: + for _ in self.jar: + return True + return False + + def __repr__(self) -> str: + cookies_repr = ", ".join( + [ + f"" + for cookie in self.jar + ] + ) + + return f"" + + class _CookieCompatRequest(urllib.request.Request): + """ + Wraps a `Request` instance up in a compatibility interface suitable + for use with `CookieJar` operations. + """ + + def __init__(self, request: Request) -> None: + super().__init__( + url=str(request.url), + headers=dict(request.headers), + method=request.method, + ) + self.request = request + + def add_unredirected_header(self, key: str, value: str) -> None: + super().add_unredirected_header(key, value) + self.request.headers[key] = value + + class _CookieCompatResponse: + """ + Wraps a `Request` instance up in a compatibility interface suitable + for use with `CookieJar` operations. + """ + + def __init__(self, response: Response): + self.response = response + + def info(self) -> email.message.Message: + info = email.message.Message() + for key, value in self.response.headers.multi_items(): + # Note that setting `info[key]` here is an "append" operation, + # not a "replace" operation. + # https://docs.python.org/3/library/email.compat32-message.html#email.message.Message.__setitem__ + info[key] = value + return info diff --git a/Backend/venv/lib/python3.12/site-packages/httpx/_multipart.py b/Backend/venv/lib/python3.12/site-packages/httpx/_multipart.py new file mode 100644 index 00000000..446f4ad2 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/httpx/_multipart.py @@ -0,0 +1,267 @@ +import binascii +import io +import os +import typing +from pathlib import Path + +from ._types import ( + AsyncByteStream, + FileContent, + FileTypes, + RequestData, + RequestFiles, + SyncByteStream, +) +from ._utils import ( + format_form_param, + guess_content_type, + peek_filelike_length, + primitive_value_to_str, + to_bytes, +) + + +def get_multipart_boundary_from_content_type( + content_type: typing.Optional[bytes], +) -> typing.Optional[bytes]: + if not content_type or not content_type.startswith(b"multipart/form-data"): + return None + # parse boundary according to + # https://www.rfc-editor.org/rfc/rfc2046#section-5.1.1 + if b";" in content_type: + for section in content_type.split(b";"): + if section.strip().lower().startswith(b"boundary="): + return section.strip()[len(b"boundary=") :].strip(b'"') + return None + + +class DataField: + """ + A single form field item, within a multipart form field. + """ + + def __init__( + self, name: str, value: typing.Union[str, bytes, int, float, None] + ) -> None: + if not isinstance(name, str): + raise TypeError( + f"Invalid type for name. Expected str, got {type(name)}: {name!r}" + ) + if value is not None and not isinstance(value, (str, bytes, int, float)): + raise TypeError( + f"Invalid type for value. Expected primitive type, got {type(value)}: {value!r}" + ) + self.name = name + self.value: typing.Union[str, bytes] = ( + value if isinstance(value, bytes) else primitive_value_to_str(value) + ) + + def render_headers(self) -> bytes: + if not hasattr(self, "_headers"): + name = format_form_param("name", self.name) + self._headers = b"".join( + [b"Content-Disposition: form-data; ", name, b"\r\n\r\n"] + ) + + return self._headers + + def render_data(self) -> bytes: + if not hasattr(self, "_data"): + self._data = to_bytes(self.value) + + return self._data + + def get_length(self) -> int: + headers = self.render_headers() + data = self.render_data() + return len(headers) + len(data) + + def render(self) -> typing.Iterator[bytes]: + yield self.render_headers() + yield self.render_data() + + +class FileField: + """ + A single file field item, within a multipart form field. + """ + + CHUNK_SIZE = 64 * 1024 + + def __init__(self, name: str, value: FileTypes) -> None: + self.name = name + + fileobj: FileContent + + headers: typing.Dict[str, str] = {} + content_type: typing.Optional[str] = None + + # This large tuple based API largely mirror's requests' API + # It would be good to think of better APIs for this that we could include in httpx 2.0 + # since variable length tuples (especially of 4 elements) are quite unwieldly + if isinstance(value, tuple): + if len(value) == 2: + # neither the 3rd parameter (content_type) nor the 4th (headers) was included + filename, fileobj = value # type: ignore + elif len(value) == 3: + filename, fileobj, content_type = value # type: ignore + else: + # all 4 parameters included + filename, fileobj, content_type, headers = value # type: ignore + else: + filename = Path(str(getattr(value, "name", "upload"))).name + fileobj = value + + if content_type is None: + content_type = guess_content_type(filename) + + has_content_type_header = any("content-type" in key.lower() for key in headers) + if content_type is not None and not has_content_type_header: + # note that unlike requests, we ignore the content_type + # provided in the 3rd tuple element if it is also included in the headers + # requests does the opposite (it overwrites the header with the 3rd tuple element) + headers["Content-Type"] = content_type + + if isinstance(fileobj, io.StringIO): + raise TypeError( + "Multipart file uploads require 'io.BytesIO', not 'io.StringIO'." + ) + if isinstance(fileobj, io.TextIOBase): + raise TypeError( + "Multipart file uploads must be opened in binary mode, not text mode." + ) + + self.filename = filename + self.file = fileobj + self.headers = headers + + def get_length(self) -> typing.Optional[int]: + headers = self.render_headers() + + if isinstance(self.file, (str, bytes)): + return len(headers) + len(to_bytes(self.file)) + + file_length = peek_filelike_length(self.file) + + # If we can't determine the filesize without reading it into memory, + # then return `None` here, to indicate an unknown file length. + if file_length is None: + return None + + return len(headers) + file_length + + def render_headers(self) -> bytes: + if not hasattr(self, "_headers"): + parts = [ + b"Content-Disposition: form-data; ", + format_form_param("name", self.name), + ] + if self.filename: + filename = format_form_param("filename", self.filename) + parts.extend([b"; ", filename]) + for header_name, header_value in self.headers.items(): + key, val = f"\r\n{header_name}: ".encode(), header_value.encode() + parts.extend([key, val]) + parts.append(b"\r\n\r\n") + self._headers = b"".join(parts) + + return self._headers + + def render_data(self) -> typing.Iterator[bytes]: + if isinstance(self.file, (str, bytes)): + yield to_bytes(self.file) + return + + if hasattr(self.file, "seek"): + try: + self.file.seek(0) + except io.UnsupportedOperation: + pass + + chunk = self.file.read(self.CHUNK_SIZE) + while chunk: + yield to_bytes(chunk) + chunk = self.file.read(self.CHUNK_SIZE) + + def render(self) -> typing.Iterator[bytes]: + yield self.render_headers() + yield from self.render_data() + + +class MultipartStream(SyncByteStream, AsyncByteStream): + """ + Request content as streaming multipart encoded form data. + """ + + def __init__( + self, + data: RequestData, + files: RequestFiles, + boundary: typing.Optional[bytes] = None, + ) -> None: + if boundary is None: + boundary = binascii.hexlify(os.urandom(16)) + + self.boundary = boundary + self.content_type = "multipart/form-data; boundary=%s" % boundary.decode( + "ascii" + ) + self.fields = list(self._iter_fields(data, files)) + + def _iter_fields( + self, data: RequestData, files: RequestFiles + ) -> typing.Iterator[typing.Union[FileField, DataField]]: + for name, value in data.items(): + if isinstance(value, (tuple, list)): + for item in value: + yield DataField(name=name, value=item) + else: + yield DataField(name=name, value=value) + + file_items = files.items() if isinstance(files, typing.Mapping) else files + for name, value in file_items: + yield FileField(name=name, value=value) + + def iter_chunks(self) -> typing.Iterator[bytes]: + for field in self.fields: + yield b"--%s\r\n" % self.boundary + yield from field.render() + yield b"\r\n" + yield b"--%s--\r\n" % self.boundary + + def get_content_length(self) -> typing.Optional[int]: + """ + Return the length of the multipart encoded content, or `None` if + any of the files have a length that cannot be determined upfront. + """ + boundary_length = len(self.boundary) + length = 0 + + for field in self.fields: + field_length = field.get_length() + if field_length is None: + return None + + length += 2 + boundary_length + 2 # b"--{boundary}\r\n" + length += field_length + length += 2 # b"\r\n" + + length += 2 + boundary_length + 4 # b"--{boundary}--\r\n" + return length + + # Content stream interface. + + def get_headers(self) -> typing.Dict[str, str]: + content_length = self.get_content_length() + content_type = self.content_type + if content_length is None: + return {"Transfer-Encoding": "chunked", "Content-Type": content_type} + return {"Content-Length": str(content_length), "Content-Type": content_type} + + def __iter__(self) -> typing.Iterator[bytes]: + for chunk in self.iter_chunks(): + yield chunk + + async def __aiter__(self) -> typing.AsyncIterator[bytes]: + for chunk in self.iter_chunks(): + yield chunk diff --git a/Backend/venv/lib/python3.12/site-packages/httpx/_status_codes.py b/Backend/venv/lib/python3.12/site-packages/httpx/_status_codes.py new file mode 100644 index 00000000..671c30e1 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/httpx/_status_codes.py @@ -0,0 +1,158 @@ +from enum import IntEnum + + +class codes(IntEnum): + """HTTP status codes and reason phrases + + Status codes from the following RFCs are all observed: + + * RFC 7231: Hypertext Transfer Protocol (HTTP/1.1), obsoletes 2616 + * RFC 6585: Additional HTTP Status Codes + * RFC 3229: Delta encoding in HTTP + * RFC 4918: HTTP Extensions for WebDAV, obsoletes 2518 + * RFC 5842: Binding Extensions to WebDAV + * RFC 7238: Permanent Redirect + * RFC 2295: Transparent Content Negotiation in HTTP + * RFC 2774: An HTTP Extension Framework + * RFC 7540: Hypertext Transfer Protocol Version 2 (HTTP/2) + * RFC 2324: Hyper Text Coffee Pot Control Protocol (HTCPCP/1.0) + * RFC 7725: An HTTP Status Code to Report Legal Obstacles + * RFC 8297: An HTTP Status Code for Indicating Hints + * RFC 8470: Using Early Data in HTTP + """ + + def __new__(cls, value: int, phrase: str = "") -> "codes": + obj = int.__new__(cls, value) + obj._value_ = value + + obj.phrase = phrase # type: ignore[attr-defined] + return obj + + def __str__(self) -> str: + return str(self.value) + + @classmethod + def get_reason_phrase(cls, value: int) -> str: + try: + return codes(value).phrase # type: ignore + except ValueError: + return "" + + @classmethod + def is_informational(cls, value: int) -> bool: + """ + Returns `True` for 1xx status codes, `False` otherwise. + """ + return 100 <= value <= 199 + + @classmethod + def is_success(cls, value: int) -> bool: + """ + Returns `True` for 2xx status codes, `False` otherwise. + """ + return 200 <= value <= 299 + + @classmethod + def is_redirect(cls, value: int) -> bool: + """ + Returns `True` for 3xx status codes, `False` otherwise. + """ + return 300 <= value <= 399 + + @classmethod + def is_client_error(cls, value: int) -> bool: + """ + Returns `True` for 4xx status codes, `False` otherwise. + """ + return 400 <= value <= 499 + + @classmethod + def is_server_error(cls, value: int) -> bool: + """ + Returns `True` for 5xx status codes, `False` otherwise. + """ + return 500 <= value <= 599 + + @classmethod + def is_error(cls, value: int) -> bool: + """ + Returns `True` for 4xx or 5xx status codes, `False` otherwise. + """ + return 400 <= value <= 599 + + # informational + CONTINUE = 100, "Continue" + SWITCHING_PROTOCOLS = 101, "Switching Protocols" + PROCESSING = 102, "Processing" + EARLY_HINTS = 103, "Early Hints" + + # success + OK = 200, "OK" + CREATED = 201, "Created" + ACCEPTED = 202, "Accepted" + NON_AUTHORITATIVE_INFORMATION = 203, "Non-Authoritative Information" + NO_CONTENT = 204, "No Content" + RESET_CONTENT = 205, "Reset Content" + PARTIAL_CONTENT = 206, "Partial Content" + MULTI_STATUS = 207, "Multi-Status" + ALREADY_REPORTED = 208, "Already Reported" + IM_USED = 226, "IM Used" + + # redirection + MULTIPLE_CHOICES = 300, "Multiple Choices" + MOVED_PERMANENTLY = 301, "Moved Permanently" + FOUND = 302, "Found" + SEE_OTHER = 303, "See Other" + NOT_MODIFIED = 304, "Not Modified" + USE_PROXY = 305, "Use Proxy" + TEMPORARY_REDIRECT = 307, "Temporary Redirect" + PERMANENT_REDIRECT = 308, "Permanent Redirect" + + # client error + BAD_REQUEST = 400, "Bad Request" + UNAUTHORIZED = 401, "Unauthorized" + PAYMENT_REQUIRED = 402, "Payment Required" + FORBIDDEN = 403, "Forbidden" + NOT_FOUND = 404, "Not Found" + METHOD_NOT_ALLOWED = 405, "Method Not Allowed" + NOT_ACCEPTABLE = 406, "Not Acceptable" + PROXY_AUTHENTICATION_REQUIRED = 407, "Proxy Authentication Required" + REQUEST_TIMEOUT = 408, "Request Timeout" + CONFLICT = 409, "Conflict" + GONE = 410, "Gone" + LENGTH_REQUIRED = 411, "Length Required" + PRECONDITION_FAILED = 412, "Precondition Failed" + REQUEST_ENTITY_TOO_LARGE = 413, "Request Entity Too Large" + REQUEST_URI_TOO_LONG = 414, "Request-URI Too Long" + UNSUPPORTED_MEDIA_TYPE = 415, "Unsupported Media Type" + REQUESTED_RANGE_NOT_SATISFIABLE = 416, "Requested Range Not Satisfiable" + EXPECTATION_FAILED = 417, "Expectation Failed" + IM_A_TEAPOT = 418, "I'm a teapot" + MISDIRECTED_REQUEST = 421, "Misdirected Request" + UNPROCESSABLE_ENTITY = 422, "Unprocessable Entity" + LOCKED = 423, "Locked" + FAILED_DEPENDENCY = 424, "Failed Dependency" + TOO_EARLY = 425, "Too Early" + UPGRADE_REQUIRED = 426, "Upgrade Required" + PRECONDITION_REQUIRED = 428, "Precondition Required" + TOO_MANY_REQUESTS = 429, "Too Many Requests" + REQUEST_HEADER_FIELDS_TOO_LARGE = 431, "Request Header Fields Too Large" + UNAVAILABLE_FOR_LEGAL_REASONS = 451, "Unavailable For Legal Reasons" + + # server errors + INTERNAL_SERVER_ERROR = 500, "Internal Server Error" + NOT_IMPLEMENTED = 501, "Not Implemented" + BAD_GATEWAY = 502, "Bad Gateway" + SERVICE_UNAVAILABLE = 503, "Service Unavailable" + GATEWAY_TIMEOUT = 504, "Gateway Timeout" + HTTP_VERSION_NOT_SUPPORTED = 505, "HTTP Version Not Supported" + VARIANT_ALSO_NEGOTIATES = 506, "Variant Also Negotiates" + INSUFFICIENT_STORAGE = 507, "Insufficient Storage" + LOOP_DETECTED = 508, "Loop Detected" + NOT_EXTENDED = 510, "Not Extended" + NETWORK_AUTHENTICATION_REQUIRED = 511, "Network Authentication Required" + + +# Include lower-case styles for `requests` compatibility. +for code in codes: + setattr(codes, code._name_.lower(), int(code)) diff --git a/Backend/venv/lib/python3.12/site-packages/httpx/_transports/__init__.py b/Backend/venv/lib/python3.12/site-packages/httpx/_transports/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/Backend/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..96bc2572 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/asgi.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/asgi.cpython-312.pyc new file mode 100644 index 00000000..238c5210 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/asgi.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/base.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/base.cpython-312.pyc new file mode 100644 index 00000000..b75c754f Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/base.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/default.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/default.cpython-312.pyc new file mode 100644 index 00000000..8adc1681 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/default.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/mock.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/mock.cpython-312.pyc new file mode 100644 index 00000000..0955bd5b Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/mock.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/wsgi.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/wsgi.cpython-312.pyc new file mode 100644 index 00000000..b41b6bfd Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/wsgi.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/httpx/_transports/asgi.py b/Backend/venv/lib/python3.12/site-packages/httpx/_transports/asgi.py new file mode 100644 index 00000000..bdf7f7a1 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/httpx/_transports/asgi.py @@ -0,0 +1,173 @@ +import typing + +import sniffio + +from .._models import Request, Response +from .._types import AsyncByteStream +from .base import AsyncBaseTransport + +if typing.TYPE_CHECKING: # pragma: no cover + import asyncio + + import trio + + Event = typing.Union[asyncio.Event, trio.Event] + + +_Message = typing.Dict[str, typing.Any] +_Receive = typing.Callable[[], typing.Awaitable[_Message]] +_Send = typing.Callable[ + [typing.Dict[str, typing.Any]], typing.Coroutine[None, None, None] +] +_ASGIApp = typing.Callable[ + [typing.Dict[str, typing.Any], _Receive, _Send], typing.Coroutine[None, None, None] +] + + +def create_event() -> "Event": + if sniffio.current_async_library() == "trio": + import trio + + return trio.Event() + else: + import asyncio + + return asyncio.Event() + + +class ASGIResponseStream(AsyncByteStream): + def __init__(self, body: typing.List[bytes]) -> None: + self._body = body + + async def __aiter__(self) -> typing.AsyncIterator[bytes]: + yield b"".join(self._body) + + +class ASGITransport(AsyncBaseTransport): + """ + A custom AsyncTransport that handles sending requests directly to an ASGI app. + The simplest way to use this functionality is to use the `app` argument. + + ``` + client = httpx.AsyncClient(app=app) + ``` + + Alternatively, you can setup the transport instance explicitly. + This allows you to include any additional configuration arguments specific + to the ASGITransport class: + + ``` + transport = httpx.ASGITransport( + app=app, + root_path="/submount", + client=("1.2.3.4", 123) + ) + client = httpx.AsyncClient(transport=transport) + ``` + + Arguments: + + * `app` - The ASGI application. + * `raise_app_exceptions` - Boolean indicating if exceptions in the application + should be raised. Default to `True`. Can be set to `False` for use cases + such as testing the content of a client 500 response. + * `root_path` - The root path on which the ASGI application should be mounted. + * `client` - A two-tuple indicating the client IP and port of incoming requests. + ``` + """ + + def __init__( + self, + app: _ASGIApp, + raise_app_exceptions: bool = True, + root_path: str = "", + client: typing.Tuple[str, int] = ("127.0.0.1", 123), + ) -> None: + self.app = app + self.raise_app_exceptions = raise_app_exceptions + self.root_path = root_path + self.client = client + + async def handle_async_request( + self, + request: Request, + ) -> Response: + assert isinstance(request.stream, AsyncByteStream) + + # ASGI scope. + scope = { + "type": "http", + "asgi": {"version": "3.0"}, + "http_version": "1.1", + "method": request.method, + "headers": [(k.lower(), v) for (k, v) in request.headers.raw], + "scheme": request.url.scheme, + "path": request.url.path, + "raw_path": request.url.raw_path, + "query_string": request.url.query, + "server": (request.url.host, request.url.port), + "client": self.client, + "root_path": self.root_path, + } + + # Request. + request_body_chunks = request.stream.__aiter__() + request_complete = False + + # Response. + status_code = None + response_headers = None + body_parts = [] + response_started = False + response_complete = create_event() + + # ASGI callables. + + async def receive() -> typing.Dict[str, typing.Any]: + nonlocal request_complete + + if request_complete: + await response_complete.wait() + return {"type": "http.disconnect"} + + try: + body = await request_body_chunks.__anext__() + except StopAsyncIteration: + request_complete = True + return {"type": "http.request", "body": b"", "more_body": False} + return {"type": "http.request", "body": body, "more_body": True} + + async def send(message: typing.Dict[str, typing.Any]) -> None: + nonlocal status_code, response_headers, response_started + + if message["type"] == "http.response.start": + assert not response_started + + status_code = message["status"] + response_headers = message.get("headers", []) + response_started = True + + elif message["type"] == "http.response.body": + assert not response_complete.is_set() + body = message.get("body", b"") + more_body = message.get("more_body", False) + + if body and request.method != "HEAD": + body_parts.append(body) + + if not more_body: + response_complete.set() + + try: + await self.app(scope, receive, send) + except Exception: # noqa: PIE-786 + if self.raise_app_exceptions or not response_complete.is_set(): + raise + + assert response_complete.is_set() + assert status_code is not None + assert response_headers is not None + + stream = ASGIResponseStream(body_parts) + + return Response(status_code, headers=response_headers, stream=stream) diff --git a/Backend/venv/lib/python3.12/site-packages/httpx/_transports/base.py b/Backend/venv/lib/python3.12/site-packages/httpx/_transports/base.py new file mode 100644 index 00000000..f6fdfe69 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/httpx/_transports/base.py @@ -0,0 +1,82 @@ +import typing +from types import TracebackType + +from .._models import Request, Response + +T = typing.TypeVar("T", bound="BaseTransport") +A = typing.TypeVar("A", bound="AsyncBaseTransport") + + +class BaseTransport: + def __enter__(self: T) -> T: + return self + + def __exit__( + self, + exc_type: typing.Optional[typing.Type[BaseException]] = None, + exc_value: typing.Optional[BaseException] = None, + traceback: typing.Optional[TracebackType] = None, + ) -> None: + self.close() + + def handle_request(self, request: Request) -> Response: + """ + Send a single HTTP request and return a response. + + Developers shouldn't typically ever need to call into this API directly, + since the Client class provides all the higher level user-facing API + niceties. + + In order to properly release any network resources, the response + stream should *either* be consumed immediately, with a call to + `response.stream.read()`, or else the `handle_request` call should + be followed with a try/finally block to ensuring the stream is + always closed. + + Example usage: + + with httpx.HTTPTransport() as transport: + req = httpx.Request( + method=b"GET", + url=(b"https", b"www.example.com", 443, b"/"), + headers=[(b"Host", b"www.example.com")], + ) + resp = transport.handle_request(req) + body = resp.stream.read() + print(resp.status_code, resp.headers, body) + + + Takes a `Request` instance as the only argument. + + Returns a `Response` instance. + """ + raise NotImplementedError( + "The 'handle_request' method must be implemented." + ) # pragma: no cover + + def close(self) -> None: + pass + + +class AsyncBaseTransport: + async def __aenter__(self: A) -> A: + return self + + async def __aexit__( + self, + exc_type: typing.Optional[typing.Type[BaseException]] = None, + exc_value: typing.Optional[BaseException] = None, + traceback: typing.Optional[TracebackType] = None, + ) -> None: + await self.aclose() + + async def handle_async_request( + self, + request: Request, + ) -> Response: + raise NotImplementedError( + "The 'handle_async_request' method must be implemented." + ) # pragma: no cover + + async def aclose(self) -> None: + pass diff --git a/Backend/venv/lib/python3.12/site-packages/httpx/_transports/default.py b/Backend/venv/lib/python3.12/site-packages/httpx/_transports/default.py new file mode 100644 index 00000000..fca7de98 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/httpx/_transports/default.py @@ -0,0 +1,365 @@ +""" +Custom transports, with nicely configured defaults. + +The following additional keyword arguments are currently supported by httpcore... + +* uds: str +* local_address: str +* retries: int + +Example usages... + +# Disable HTTP/2 on a single specific domain. +mounts = { + "all://": httpx.HTTPTransport(http2=True), + "all://*example.org": httpx.HTTPTransport() +} + +# Using advanced httpcore configuration, with connection retries. +transport = httpx.HTTPTransport(retries=1) +client = httpx.Client(transport=transport) + +# Using advanced httpcore configuration, with unix domain sockets. +transport = httpx.HTTPTransport(uds="socket.uds") +client = httpx.Client(transport=transport) +""" +import contextlib +import typing +from types import TracebackType + +import httpcore + +from .._config import DEFAULT_LIMITS, Limits, Proxy, create_ssl_context +from .._exceptions import ( + ConnectError, + ConnectTimeout, + LocalProtocolError, + NetworkError, + PoolTimeout, + ProtocolError, + ProxyError, + ReadError, + ReadTimeout, + RemoteProtocolError, + TimeoutException, + UnsupportedProtocol, + WriteError, + WriteTimeout, +) +from .._models import Request, Response +from .._types import AsyncByteStream, CertTypes, SyncByteStream, VerifyTypes +from .base import AsyncBaseTransport, BaseTransport + +T = typing.TypeVar("T", bound="HTTPTransport") +A = typing.TypeVar("A", bound="AsyncHTTPTransport") + + +@contextlib.contextmanager +def map_httpcore_exceptions() -> typing.Iterator[None]: + try: + yield + except Exception as exc: # noqa: PIE-786 + mapped_exc = None + + for from_exc, to_exc in HTTPCORE_EXC_MAP.items(): + if not isinstance(exc, from_exc): + continue + # We want to map to the most specific exception we can find. + # Eg if `exc` is an `httpcore.ReadTimeout`, we want to map to + # `httpx.ReadTimeout`, not just `httpx.TimeoutException`. + if mapped_exc is None or issubclass(to_exc, mapped_exc): + mapped_exc = to_exc + + if mapped_exc is None: # pragma: no cover + raise + + message = str(exc) + raise mapped_exc(message) from exc + + +HTTPCORE_EXC_MAP = { + httpcore.TimeoutException: TimeoutException, + httpcore.ConnectTimeout: ConnectTimeout, + httpcore.ReadTimeout: ReadTimeout, + httpcore.WriteTimeout: WriteTimeout, + httpcore.PoolTimeout: PoolTimeout, + httpcore.NetworkError: NetworkError, + httpcore.ConnectError: ConnectError, + httpcore.ReadError: ReadError, + httpcore.WriteError: WriteError, + httpcore.ProxyError: ProxyError, + httpcore.UnsupportedProtocol: UnsupportedProtocol, + httpcore.ProtocolError: ProtocolError, + httpcore.LocalProtocolError: LocalProtocolError, + httpcore.RemoteProtocolError: RemoteProtocolError, +} + + +class ResponseStream(SyncByteStream): + def __init__(self, httpcore_stream: typing.Iterable[bytes]): + self._httpcore_stream = httpcore_stream + + def __iter__(self) -> typing.Iterator[bytes]: + with map_httpcore_exceptions(): + for part in self._httpcore_stream: + yield part + + def close(self) -> None: + if hasattr(self._httpcore_stream, "close"): + self._httpcore_stream.close() + + +class HTTPTransport(BaseTransport): + def __init__( + self, + verify: VerifyTypes = True, + cert: typing.Optional[CertTypes] = None, + http1: bool = True, + http2: bool = False, + limits: Limits = DEFAULT_LIMITS, + trust_env: bool = True, + proxy: typing.Optional[Proxy] = None, + uds: typing.Optional[str] = None, + local_address: typing.Optional[str] = None, + retries: int = 0, + ) -> None: + ssl_context = create_ssl_context(verify=verify, cert=cert, trust_env=trust_env) + + if proxy is None: + self._pool = httpcore.ConnectionPool( + ssl_context=ssl_context, + max_connections=limits.max_connections, + max_keepalive_connections=limits.max_keepalive_connections, + keepalive_expiry=limits.keepalive_expiry, + http1=http1, + http2=http2, + uds=uds, + local_address=local_address, + retries=retries, + ) + elif proxy.url.scheme in ("http", "https"): + self._pool = httpcore.HTTPProxy( + proxy_url=httpcore.URL( + scheme=proxy.url.raw_scheme, + host=proxy.url.raw_host, + port=proxy.url.port, + target=proxy.url.raw_path, + ), + proxy_auth=proxy.raw_auth, + proxy_headers=proxy.headers.raw, + ssl_context=ssl_context, + max_connections=limits.max_connections, + max_keepalive_connections=limits.max_keepalive_connections, + keepalive_expiry=limits.keepalive_expiry, + http1=http1, + http2=http2, + ) + elif proxy.url.scheme == "socks5": + try: + import socksio # noqa + except ImportError: # pragma: no cover + raise ImportError( + "Using SOCKS proxy, but the 'socksio' package is not installed. " + "Make sure to install httpx using `pip install httpx[socks]`." + ) from None + + self._pool = httpcore.SOCKSProxy( + proxy_url=httpcore.URL( + scheme=proxy.url.raw_scheme, + host=proxy.url.raw_host, + port=proxy.url.port, + target=proxy.url.raw_path, + ), + proxy_auth=proxy.raw_auth, + ssl_context=ssl_context, + max_connections=limits.max_connections, + max_keepalive_connections=limits.max_keepalive_connections, + keepalive_expiry=limits.keepalive_expiry, + http1=http1, + http2=http2, + ) + else: # pragma: no cover + raise ValueError( + f"Proxy protocol must be either 'http', 'https', or 'socks5', but got {proxy.url.scheme!r}." + ) + + def __enter__(self: T) -> T: # Use generics for subclass support. + self._pool.__enter__() + return self + + def __exit__( + self, + exc_type: typing.Optional[typing.Type[BaseException]] = None, + exc_value: typing.Optional[BaseException] = None, + traceback: typing.Optional[TracebackType] = None, + ) -> None: + with map_httpcore_exceptions(): + self._pool.__exit__(exc_type, exc_value, traceback) + + def handle_request( + self, + request: Request, + ) -> Response: + assert isinstance(request.stream, SyncByteStream) + + req = httpcore.Request( + method=request.method, + url=httpcore.URL( + scheme=request.url.raw_scheme, + host=request.url.raw_host, + port=request.url.port, + target=request.url.raw_path, + ), + headers=request.headers.raw, + content=request.stream, + extensions=request.extensions, + ) + with map_httpcore_exceptions(): + resp = self._pool.handle_request(req) + + assert isinstance(resp.stream, typing.Iterable) + + return Response( + status_code=resp.status, + headers=resp.headers, + stream=ResponseStream(resp.stream), + extensions=resp.extensions, + ) + + def close(self) -> None: + self._pool.close() + + +class AsyncResponseStream(AsyncByteStream): + def __init__(self, httpcore_stream: typing.AsyncIterable[bytes]): + self._httpcore_stream = httpcore_stream + + async def __aiter__(self) -> typing.AsyncIterator[bytes]: + with map_httpcore_exceptions(): + async for part in self._httpcore_stream: + yield part + + async def aclose(self) -> None: + if hasattr(self._httpcore_stream, "aclose"): + await self._httpcore_stream.aclose() + + +class AsyncHTTPTransport(AsyncBaseTransport): + def __init__( + self, + verify: VerifyTypes = True, + cert: typing.Optional[CertTypes] = None, + http1: bool = True, + http2: bool = False, + limits: Limits = DEFAULT_LIMITS, + trust_env: bool = True, + proxy: typing.Optional[Proxy] = None, + uds: typing.Optional[str] = None, + local_address: typing.Optional[str] = None, + retries: int = 0, + ) -> None: + ssl_context = create_ssl_context(verify=verify, cert=cert, trust_env=trust_env) + + if proxy is None: + self._pool = httpcore.AsyncConnectionPool( + ssl_context=ssl_context, + max_connections=limits.max_connections, + max_keepalive_connections=limits.max_keepalive_connections, + keepalive_expiry=limits.keepalive_expiry, + http1=http1, + http2=http2, + uds=uds, + local_address=local_address, + retries=retries, + ) + elif proxy.url.scheme in ("http", "https"): + self._pool = httpcore.AsyncHTTPProxy( + proxy_url=httpcore.URL( + scheme=proxy.url.raw_scheme, + host=proxy.url.raw_host, + port=proxy.url.port, + target=proxy.url.raw_path, + ), + proxy_auth=proxy.raw_auth, + proxy_headers=proxy.headers.raw, + ssl_context=ssl_context, + max_connections=limits.max_connections, + max_keepalive_connections=limits.max_keepalive_connections, + keepalive_expiry=limits.keepalive_expiry, + http1=http1, + http2=http2, + ) + elif proxy.url.scheme == "socks5": + try: + import socksio # noqa + except ImportError: # pragma: no cover + raise ImportError( + "Using SOCKS proxy, but the 'socksio' package is not installed. " + "Make sure to install httpx using `pip install httpx[socks]`." + ) from None + + self._pool = httpcore.AsyncSOCKSProxy( + proxy_url=httpcore.URL( + scheme=proxy.url.raw_scheme, + host=proxy.url.raw_host, + port=proxy.url.port, + target=proxy.url.raw_path, + ), + proxy_auth=proxy.raw_auth, + ssl_context=ssl_context, + max_connections=limits.max_connections, + max_keepalive_connections=limits.max_keepalive_connections, + keepalive_expiry=limits.keepalive_expiry, + http1=http1, + http2=http2, + ) + else: # pragma: no cover + raise ValueError( + f"Proxy protocol must be either 'http', 'https', or 'socks5', but got {proxy.url.scheme!r}." + ) + + async def __aenter__(self: A) -> A: # Use generics for subclass support. + await self._pool.__aenter__() + return self + + async def __aexit__( + self, + exc_type: typing.Optional[typing.Type[BaseException]] = None, + exc_value: typing.Optional[BaseException] = None, + traceback: typing.Optional[TracebackType] = None, + ) -> None: + with map_httpcore_exceptions(): + await self._pool.__aexit__(exc_type, exc_value, traceback) + + async def handle_async_request( + self, + request: Request, + ) -> Response: + assert isinstance(request.stream, AsyncByteStream) + + req = httpcore.Request( + method=request.method, + url=httpcore.URL( + scheme=request.url.raw_scheme, + host=request.url.raw_host, + port=request.url.port, + target=request.url.raw_path, + ), + headers=request.headers.raw, + content=request.stream, + extensions=request.extensions, + ) + with map_httpcore_exceptions(): + resp = await self._pool.handle_async_request(req) + + assert isinstance(resp.stream, typing.AsyncIterable) + + return Response( + status_code=resp.status, + headers=resp.headers, + stream=AsyncResponseStream(resp.stream), + extensions=resp.extensions, + ) + + async def aclose(self) -> None: + await self._pool.aclose() diff --git a/Backend/venv/lib/python3.12/site-packages/httpx/_transports/mock.py b/Backend/venv/lib/python3.12/site-packages/httpx/_transports/mock.py new file mode 100644 index 00000000..82043da2 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/httpx/_transports/mock.py @@ -0,0 +1,38 @@ +import typing + +from .._models import Request, Response +from .base import AsyncBaseTransport, BaseTransport + +SyncHandler = typing.Callable[[Request], Response] +AsyncHandler = typing.Callable[[Request], typing.Coroutine[None, None, Response]] + + +class MockTransport(AsyncBaseTransport, BaseTransport): + def __init__(self, handler: typing.Union[SyncHandler, AsyncHandler]) -> None: + self.handler = handler + + def handle_request( + self, + request: Request, + ) -> Response: + request.read() + response = self.handler(request) + if not isinstance(response, Response): # pragma: no cover + raise TypeError("Cannot use an async handler in a sync Client") + return response + + async def handle_async_request( + self, + request: Request, + ) -> Response: + await request.aread() + response = self.handler(request) + + # Allow handler to *optionally* be an `async` function. + # If it is, then the `response` variable need to be awaited to actually + # return the result. + + if not isinstance(response, Response): + response = await response + + return response diff --git a/Backend/venv/lib/python3.12/site-packages/httpx/_transports/wsgi.py b/Backend/venv/lib/python3.12/site-packages/httpx/_transports/wsgi.py new file mode 100644 index 00000000..33035ce5 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/httpx/_transports/wsgi.py @@ -0,0 +1,143 @@ +import io +import itertools +import sys +import typing + +from .._models import Request, Response +from .._types import SyncByteStream +from .base import BaseTransport + +if typing.TYPE_CHECKING: + from _typeshed import OptExcInfo # pragma: no cover + from _typeshed.wsgi import WSGIApplication # pragma: no cover + +_T = typing.TypeVar("_T") + + +def _skip_leading_empty_chunks(body: typing.Iterable[_T]) -> typing.Iterable[_T]: + body = iter(body) + for chunk in body: + if chunk: + return itertools.chain([chunk], body) + return [] + + +class WSGIByteStream(SyncByteStream): + def __init__(self, result: typing.Iterable[bytes]) -> None: + self._close = getattr(result, "close", None) + self._result = _skip_leading_empty_chunks(result) + + def __iter__(self) -> typing.Iterator[bytes]: + for part in self._result: + yield part + + def close(self) -> None: + if self._close is not None: + self._close() + + +class WSGITransport(BaseTransport): + """ + A custom transport that handles sending requests directly to an WSGI app. + The simplest way to use this functionality is to use the `app` argument. + + ``` + client = httpx.Client(app=app) + ``` + + Alternatively, you can setup the transport instance explicitly. + This allows you to include any additional configuration arguments specific + to the WSGITransport class: + + ``` + transport = httpx.WSGITransport( + app=app, + script_name="/submount", + remote_addr="1.2.3.4" + ) + client = httpx.Client(transport=transport) + ``` + + Arguments: + + * `app` - The WSGI application. + * `raise_app_exceptions` - Boolean indicating if exceptions in the application + should be raised. Default to `True`. Can be set to `False` for use cases + such as testing the content of a client 500 response. + * `script_name` - The root path on which the WSGI application should be mounted. + * `remote_addr` - A string indicating the client IP of incoming requests. + ``` + """ + + def __init__( + self, + app: "WSGIApplication", + raise_app_exceptions: bool = True, + script_name: str = "", + remote_addr: str = "127.0.0.1", + wsgi_errors: typing.Optional[typing.TextIO] = None, + ) -> None: + self.app = app + self.raise_app_exceptions = raise_app_exceptions + self.script_name = script_name + self.remote_addr = remote_addr + self.wsgi_errors = wsgi_errors + + def handle_request(self, request: Request) -> Response: + request.read() + wsgi_input = io.BytesIO(request.content) + + port = request.url.port or {"http": 80, "https": 443}[request.url.scheme] + environ = { + "wsgi.version": (1, 0), + "wsgi.url_scheme": request.url.scheme, + "wsgi.input": wsgi_input, + "wsgi.errors": self.wsgi_errors or sys.stderr, + "wsgi.multithread": True, + "wsgi.multiprocess": False, + "wsgi.run_once": False, + "REQUEST_METHOD": request.method, + "SCRIPT_NAME": self.script_name, + "PATH_INFO": request.url.path, + "QUERY_STRING": request.url.query.decode("ascii"), + "SERVER_NAME": request.url.host, + "SERVER_PORT": str(port), + "REMOTE_ADDR": self.remote_addr, + } + for header_key, header_value in request.headers.raw: + key = header_key.decode("ascii").upper().replace("-", "_") + if key not in ("CONTENT_TYPE", "CONTENT_LENGTH"): + key = "HTTP_" + key + environ[key] = header_value.decode("ascii") + + seen_status = None + seen_response_headers = None + seen_exc_info = None + + def start_response( + status: str, + response_headers: typing.List[typing.Tuple[str, str]], + exc_info: typing.Optional["OptExcInfo"] = None, + ) -> typing.Callable[[bytes], typing.Any]: + nonlocal seen_status, seen_response_headers, seen_exc_info + seen_status = status + seen_response_headers = response_headers + seen_exc_info = exc_info + return lambda _: None + + result = self.app(environ, start_response) + + stream = WSGIByteStream(result) + + assert seen_status is not None + assert seen_response_headers is not None + if seen_exc_info and seen_exc_info[0] and self.raise_app_exceptions: + raise seen_exc_info[1] + + status_code = int(seen_status.split()[0]) + headers = [ + (key.encode("ascii"), value.encode("ascii")) + for key, value in seen_response_headers + ] + + return Response(status_code, headers=headers, stream=stream) diff --git a/Backend/venv/lib/python3.12/site-packages/httpx/_types.py b/Backend/venv/lib/python3.12/site-packages/httpx/_types.py new file mode 100644 index 00000000..6b610e14 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/httpx/_types.py @@ -0,0 +1,132 @@ +""" +Type definitions for type checking purposes. +""" + +import ssl +from http.cookiejar import CookieJar +from typing import ( + IO, + TYPE_CHECKING, + Any, + AsyncIterable, + AsyncIterator, + Callable, + Dict, + Iterable, + Iterator, + List, + Mapping, + NamedTuple, + Optional, + Sequence, + Tuple, + Union, +) + +if TYPE_CHECKING: # pragma: no cover + from ._auth import Auth # noqa: F401 + from ._config import Proxy, Timeout # noqa: F401 + from ._models import Cookies, Headers, Request # noqa: F401 + from ._urls import URL, QueryParams # noqa: F401 + + +PrimitiveData = Optional[Union[str, int, float, bool]] + +RawURL = NamedTuple( + "RawURL", + [ + ("raw_scheme", bytes), + ("raw_host", bytes), + ("port", Optional[int]), + ("raw_path", bytes), + ], +) + +URLTypes = Union["URL", str] + +QueryParamTypes = Union[ + "QueryParams", + Mapping[str, Union[PrimitiveData, Sequence[PrimitiveData]]], + List[Tuple[str, PrimitiveData]], + Tuple[Tuple[str, PrimitiveData], ...], + str, + bytes, +] + +HeaderTypes = Union[ + "Headers", + Mapping[str, str], + Mapping[bytes, bytes], + Sequence[Tuple[str, str]], + Sequence[Tuple[bytes, bytes]], +] + +CookieTypes = Union["Cookies", CookieJar, Dict[str, str], List[Tuple[str, str]]] + +CertTypes = Union[ + # certfile + str, + # (certfile, keyfile) + Tuple[str, Optional[str]], + # (certfile, keyfile, password) + Tuple[str, Optional[str], Optional[str]], +] +VerifyTypes = Union[str, bool, ssl.SSLContext] +TimeoutTypes = Union[ + Optional[float], + Tuple[Optional[float], Optional[float], Optional[float], Optional[float]], + "Timeout", +] +ProxiesTypes = Union[URLTypes, "Proxy", Dict[URLTypes, Union[None, URLTypes, "Proxy"]]] + +AuthTypes = Union[ + Tuple[Union[str, bytes], Union[str, bytes]], + Callable[["Request"], "Request"], + "Auth", +] + +RequestContent = Union[str, bytes, Iterable[bytes], AsyncIterable[bytes]] +ResponseContent = Union[str, bytes, Iterable[bytes], AsyncIterable[bytes]] +ResponseExtensions = Mapping[str, Any] + +RequestData = Mapping[str, Any] + +FileContent = Union[IO[bytes], bytes, str] +FileTypes = Union[ + # file (or bytes) + FileContent, + # (filename, file (or bytes)) + Tuple[Optional[str], FileContent], + # (filename, file (or bytes), content_type) + Tuple[Optional[str], FileContent, Optional[str]], + # (filename, file (or bytes), content_type, headers) + Tuple[Optional[str], FileContent, Optional[str], Mapping[str, str]], +] +RequestFiles = Union[Mapping[str, FileTypes], Sequence[Tuple[str, FileTypes]]] + +RequestExtensions = Mapping[str, Any] + + +class SyncByteStream: + def __iter__(self) -> Iterator[bytes]: + raise NotImplementedError( + "The '__iter__' method must be implemented." + ) # pragma: no cover + yield b"" # pragma: no cover + + def close(self) -> None: + """ + Subclasses can override this method to release any network resources + after a request/response cycle is complete. + """ + + +class AsyncByteStream: + async def __aiter__(self) -> AsyncIterator[bytes]: + raise NotImplementedError( + "The '__aiter__' method must be implemented." + ) # pragma: no cover + yield b"" # pragma: no cover + + async def aclose(self) -> None: + pass diff --git a/Backend/venv/lib/python3.12/site-packages/httpx/_urlparse.py b/Backend/venv/lib/python3.12/site-packages/httpx/_urlparse.py new file mode 100644 index 00000000..69ff0b4b --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/httpx/_urlparse.py @@ -0,0 +1,462 @@ +""" +An implementation of `urlparse` that provides URL validation and normalization +as described by RFC3986. + +We rely on this implementation rather than the one in Python's stdlib, because: + +* It provides more complete URL validation. +* It properly differentiates between an empty querystring and an absent querystring, + to distinguish URLs with a trailing '?'. +* It handles scheme, hostname, port, and path normalization. +* It supports IDNA hostnames, normalizing them to their encoded form. +* The API supports passing individual components, as well as the complete URL string. + +Previously we relied on the excellent `rfc3986` package to handle URL parsing and +validation, but this module provides a simpler alternative, with less indirection +required. +""" +import ipaddress +import re +import typing + +import idna + +from ._exceptions import InvalidURL + +MAX_URL_LENGTH = 65536 + +# https://datatracker.ietf.org/doc/html/rfc3986.html#section-2.3 +UNRESERVED_CHARACTERS = ( + "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-._~" +) +SUB_DELIMS = "!$&'()*+,;=" + +PERCENT_ENCODED_REGEX = re.compile("%[A-Fa-f0-9]{2}") + + +# {scheme}: (optional) +# //{authority} (optional) +# {path} +# ?{query} (optional) +# #{fragment} (optional) +URL_REGEX = re.compile( + ( + r"(?:(?P{scheme}):)?" + r"(?://(?P{authority}))?" + r"(?P{path})" + r"(?:\?(?P{query}))?" + r"(?:#(?P{fragment}))?" + ).format( + scheme="([a-zA-Z][a-zA-Z0-9+.-]*)?", + authority="[^/?#]*", + path="[^?#]*", + query="[^#]*", + fragment=".*", + ) +) + +# {userinfo}@ (optional) +# {host} +# :{port} (optional) +AUTHORITY_REGEX = re.compile( + ( + r"(?:(?P{userinfo})@)?" r"(?P{host})" r":?(?P{port})?" + ).format( + userinfo="[^@]*", # Any character sequence not including '@'. + host="(\\[.*\\]|[^:]*)", # Either any character sequence not including ':', + # or an IPv6 address enclosed within square brackets. + port=".*", # Any character sequence. + ) +) + + +# If we call urlparse with an individual component, then we need to regex +# validate that component individually. +# Note that we're duplicating the same strings as above. Shock! Horror!! +COMPONENT_REGEX = { + "scheme": re.compile("([a-zA-Z][a-zA-Z0-9+.-]*)?"), + "authority": re.compile("[^/?#]*"), + "path": re.compile("[^?#]*"), + "query": re.compile("[^#]*"), + "fragment": re.compile(".*"), + "userinfo": re.compile("[^@]*"), + "host": re.compile("(\\[.*\\]|[^:]*)"), + "port": re.compile(".*"), +} + + +# We use these simple regexs as a first pass before handing off to +# the stdlib 'ipaddress' module for IP address validation. +IPv4_STYLE_HOSTNAME = re.compile(r"^[0-9]+.[0-9]+.[0-9]+.[0-9]+$") +IPv6_STYLE_HOSTNAME = re.compile(r"^\[.*\]$") + + +class ParseResult(typing.NamedTuple): + scheme: str + userinfo: str + host: str + port: typing.Optional[int] + path: str + query: typing.Optional[str] + fragment: typing.Optional[str] + + @property + def authority(self) -> str: + return "".join( + [ + f"{self.userinfo}@" if self.userinfo else "", + f"[{self.host}]" if ":" in self.host else self.host, + f":{self.port}" if self.port is not None else "", + ] + ) + + @property + def netloc(self) -> str: + return "".join( + [ + f"[{self.host}]" if ":" in self.host else self.host, + f":{self.port}" if self.port is not None else "", + ] + ) + + def copy_with(self, **kwargs: typing.Optional[str]) -> "ParseResult": + if not kwargs: + return self + + defaults = { + "scheme": self.scheme, + "authority": self.authority, + "path": self.path, + "query": self.query, + "fragment": self.fragment, + } + defaults.update(kwargs) + return urlparse("", **defaults) + + def __str__(self) -> str: + authority = self.authority + return "".join( + [ + f"{self.scheme}:" if self.scheme else "", + f"//{authority}" if authority else "", + self.path, + f"?{self.query}" if self.query is not None else "", + f"#{self.fragment}" if self.fragment is not None else "", + ] + ) + + +def urlparse(url: str = "", **kwargs: typing.Optional[str]) -> ParseResult: + # Initial basic checks on allowable URLs. + # --------------------------------------- + + # Hard limit the maximum allowable URL length. + if len(url) > MAX_URL_LENGTH: + raise InvalidURL("URL too long") + + # If a URL includes any ASCII control characters including \t, \r, \n, + # then treat it as invalid. + if any(char.isascii() and not char.isprintable() for char in url): + raise InvalidURL("Invalid non-printable ASCII character in URL") + + # Some keyword arguments require special handling. + # ------------------------------------------------ + + # Coerce "port" to a string, if it is provided as an integer. + if "port" in kwargs: + port = kwargs["port"] + kwargs["port"] = str(port) if isinstance(port, int) else port + + # Replace "netloc" with "host and "port". + if "netloc" in kwargs: + netloc = kwargs.pop("netloc") or "" + kwargs["host"], _, kwargs["port"] = netloc.partition(":") + + # Replace "username" and/or "password" with "userinfo". + if "username" in kwargs or "password" in kwargs: + username = quote(kwargs.pop("username", "") or "") + password = quote(kwargs.pop("password", "") or "") + kwargs["userinfo"] = f"{username}:{password}" if password else username + + # Replace "raw_path" with "path" and "query". + if "raw_path" in kwargs: + raw_path = kwargs.pop("raw_path") or "" + kwargs["path"], seperator, kwargs["query"] = raw_path.partition("?") + if not seperator: + kwargs["query"] = None + + # Ensure that IPv6 "host" addresses are always escaped with "[...]". + if "host" in kwargs: + host = kwargs.get("host") or "" + if ":" in host and not (host.startswith("[") and host.endswith("]")): + kwargs["host"] = f"[{host}]" + + # If any keyword arguments are provided, ensure they are valid. + # ------------------------------------------------------------- + + for key, value in kwargs.items(): + if value is not None: + if len(value) > MAX_URL_LENGTH: + raise InvalidURL(f"URL component '{key}' too long") + + # If a component includes any ASCII control characters including \t, \r, \n, + # then treat it as invalid. + if any(char.isascii() and not char.isprintable() for char in value): + raise InvalidURL( + f"Invalid non-printable ASCII character in URL component '{key}'" + ) + + # Ensure that keyword arguments match as a valid regex. + if not COMPONENT_REGEX[key].fullmatch(value): + raise InvalidURL(f"Invalid URL component '{key}'") + + # The URL_REGEX will always match, but may have empty components. + url_match = URL_REGEX.match(url) + assert url_match is not None + url_dict = url_match.groupdict() + + # * 'scheme', 'authority', and 'path' may be empty strings. + # * 'query' may be 'None', indicating no trailing "?" portion. + # Any string including the empty string, indicates a trailing "?". + # * 'fragment' may be 'None', indicating no trailing "#" portion. + # Any string including the empty string, indicates a trailing "#". + scheme = kwargs.get("scheme", url_dict["scheme"]) or "" + authority = kwargs.get("authority", url_dict["authority"]) or "" + path = kwargs.get("path", url_dict["path"]) or "" + query = kwargs.get("query", url_dict["query"]) + fragment = kwargs.get("fragment", url_dict["fragment"]) + + # The AUTHORITY_REGEX will always match, but may have empty components. + authority_match = AUTHORITY_REGEX.match(authority) + assert authority_match is not None + authority_dict = authority_match.groupdict() + + # * 'userinfo' and 'host' may be empty strings. + # * 'port' may be 'None'. + userinfo = kwargs.get("userinfo", authority_dict["userinfo"]) or "" + host = kwargs.get("host", authority_dict["host"]) or "" + port = kwargs.get("port", authority_dict["port"]) + + # Normalize and validate each component. + # We end up with a parsed representation of the URL, + # with components that are plain ASCII bytestrings. + parsed_scheme: str = scheme.lower() + parsed_userinfo: str = quote(userinfo, safe=SUB_DELIMS + ":") + parsed_host: str = encode_host(host) + parsed_port: typing.Optional[int] = normalize_port(port, scheme) + + has_scheme = parsed_scheme != "" + has_authority = ( + parsed_userinfo != "" or parsed_host != "" or parsed_port is not None + ) + validate_path(path, has_scheme=has_scheme, has_authority=has_authority) + if has_authority: + path = normalize_path(path) + + # The GEN_DELIMS set is... : / ? # [ ] @ + # These do not need to be percent-quoted unless they serve as delimiters for the + # specific component. + + # For 'path' we need to drop ? and # from the GEN_DELIMS set. + parsed_path: str = quote(path, safe=SUB_DELIMS + ":/[]@") + # For 'query' we need to drop '#' from the GEN_DELIMS set. + parsed_query: typing.Optional[str] = ( + None if query is None else quote(query, safe=SUB_DELIMS + ":/?[]@") + ) + # For 'fragment' we can include all of the GEN_DELIMS set. + parsed_fragment: typing.Optional[str] = ( + None if fragment is None else quote(fragment, safe=SUB_DELIMS + ":/?#[]@") + ) + + # The parsed ASCII bytestrings are our canonical form. + # All properties of the URL are derived from these. + return ParseResult( + parsed_scheme, + parsed_userinfo, + parsed_host, + parsed_port, + parsed_path, + parsed_query, + parsed_fragment, + ) + + +def encode_host(host: str) -> str: + if not host: + return "" + + elif IPv4_STYLE_HOSTNAME.match(host): + # Validate IPv4 hostnames like #.#.#.# + # + # From https://datatracker.ietf.org/doc/html/rfc3986/#section-3.2.2 + # + # IPv4address = dec-octet "." dec-octet "." dec-octet "." dec-octet + try: + ipaddress.IPv4Address(host) + except ipaddress.AddressValueError: + raise InvalidURL(f"Invalid IPv4 address: {host!r}") + return host + + elif IPv6_STYLE_HOSTNAME.match(host): + # Validate IPv6 hostnames like [...] + # + # From https://datatracker.ietf.org/doc/html/rfc3986/#section-3.2.2 + # + # "A host identified by an Internet Protocol literal address, version 6 + # [RFC3513] or later, is distinguished by enclosing the IP literal + # within square brackets ("[" and "]"). This is the only place where + # square bracket characters are allowed in the URI syntax." + try: + ipaddress.IPv6Address(host[1:-1]) + except ipaddress.AddressValueError: + raise InvalidURL(f"Invalid IPv6 address: {host!r}") + return host[1:-1] + + elif host.isascii(): + # Regular ASCII hostnames + # + # From https://datatracker.ietf.org/doc/html/rfc3986/#section-3.2.2 + # + # reg-name = *( unreserved / pct-encoded / sub-delims ) + return quote(host.lower(), safe=SUB_DELIMS) + + # IDNA hostnames + try: + return idna.encode(host.lower()).decode("ascii") + except idna.IDNAError: + raise InvalidURL(f"Invalid IDNA hostname: {host!r}") + + +def normalize_port( + port: typing.Optional[typing.Union[str, int]], scheme: str +) -> typing.Optional[int]: + # From https://tools.ietf.org/html/rfc3986#section-3.2.3 + # + # "A scheme may define a default port. For example, the "http" scheme + # defines a default port of "80", corresponding to its reserved TCP + # port number. The type of port designated by the port number (e.g., + # TCP, UDP, SCTP) is defined by the URI scheme. URI producers and + # normalizers should omit the port component and its ":" delimiter if + # port is empty or if its value would be the same as that of the + # scheme's default." + if port is None or port == "": + return None + + try: + port_as_int = int(port) + except ValueError: + raise InvalidURL(f"Invalid port: {port!r}") + + # See https://url.spec.whatwg.org/#url-miscellaneous + default_port = {"ftp": 21, "http": 80, "https": 443, "ws": 80, "wss": 443}.get( + scheme + ) + if port_as_int == default_port: + return None + return port_as_int + + +def validate_path(path: str, has_scheme: bool, has_authority: bool) -> None: + """ + Path validation rules that depend on if the URL contains a scheme or authority component. + + See https://datatracker.ietf.org/doc/html/rfc3986.html#section-3.3 + """ + if has_authority: + # > If a URI contains an authority component, then the path component + # > must either be empty or begin with a slash ("/") character." + if path and not path.startswith("/"): + raise InvalidURL("For absolute URLs, path must be empty or begin with '/'") + else: + # > If a URI does not contain an authority component, then the path cannot begin + # > with two slash characters ("//"). + if path.startswith("//"): + raise InvalidURL( + "URLs with no authority component cannot have a path starting with '//'" + ) + # > In addition, a URI reference (Section 4.1) may be a relative-path reference, in which + # > case the first path segment cannot contain a colon (":") character. + if path.startswith(":") and not has_scheme: + raise InvalidURL( + "URLs with no scheme component cannot have a path starting with ':'" + ) + + +def normalize_path(path: str) -> str: + """ + Drop "." and ".." segments from a URL path. + + For example: + + normalize_path("/path/./to/somewhere/..") == "/path/to" + """ + # https://datatracker.ietf.org/doc/html/rfc3986#section-5.2.4 + components = path.split("/") + output: typing.List[str] = [] + for component in components: + if component == ".": + pass + elif component == "..": + if output and output != [""]: + output.pop() + else: + output.append(component) + return "/".join(output) + + +def percent_encode(char: str) -> str: + """ + Replace a single character with the percent-encoded representation. + + Characters outside the ASCII range are represented with their a percent-encoded + representation of their UTF-8 byte sequence. + + For example: + + percent_encode(" ") == "%20" + """ + return "".join([f"%{byte:02x}" for byte in char.encode("utf-8")]).upper() + + +def is_safe(string: str, safe: str = "/") -> bool: + """ + Determine if a given string is already quote-safe. + """ + NON_ESCAPED_CHARS = UNRESERVED_CHARACTERS + safe + "%" + + # All characters must already be non-escaping or '%' + for char in string: + if char not in NON_ESCAPED_CHARS: + return False + + # Any '%' characters must be valid '%xx' escape sequences. + return string.count("%") == len(PERCENT_ENCODED_REGEX.findall(string)) + + +def quote(string: str, safe: str = "/") -> str: + """ + Use percent-encoding to quote a string if required. + """ + if is_safe(string, safe=safe): + return string + + NON_ESCAPED_CHARS = UNRESERVED_CHARACTERS + safe + return "".join( + [char if char in NON_ESCAPED_CHARS else percent_encode(char) for char in string] + ) + + +def urlencode(items: typing.List[typing.Tuple[str, str]]) -> str: + # We can use a much simpler version of the stdlib urlencode here because + # we don't need to handle a bunch of different typing cases, such as bytes vs str. + # + # https://github.com/python/cpython/blob/b2f7b2ef0b5421e01efb8c7bee2ef95d3bab77eb/Lib/urllib/parse.py#L926 + # + # Note that we use '%20' encoding for spaces, and treat '/' as a safe + # character. This means our query params have the same escaping as other + # characters in the URL path. This is slightly different to `requests`, + # but is the behaviour that browsers use. + # + # See https://github.com/encode/httpx/issues/2536 and + # https://docs.python.org/3/library/urllib.parse.html#urllib.parse.urlencode + return "&".join([quote(k) + "=" + quote(v) for k, v in items]) diff --git a/Backend/venv/lib/python3.12/site-packages/httpx/_urls.py b/Backend/venv/lib/python3.12/site-packages/httpx/_urls.py new file mode 100644 index 00000000..b023941b --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/httpx/_urls.py @@ -0,0 +1,642 @@ +import typing +from urllib.parse import parse_qs, unquote + +import idna + +from ._types import QueryParamTypes, RawURL, URLTypes +from ._urlparse import urlencode, urlparse +from ._utils import primitive_value_to_str + + +class URL: + """ + url = httpx.URL("HTTPS://jo%40email.com:a%20secret@müller.de:1234/pa%20th?search=ab#anchorlink") + + assert url.scheme == "https" + assert url.username == "jo@email.com" + assert url.password == "a secret" + assert url.userinfo == b"jo%40email.com:a%20secret" + assert url.host == "müller.de" + assert url.raw_host == b"xn--mller-kva.de" + assert url.port == 1234 + assert url.netloc == b"xn--mller-kva.de:1234" + assert url.path == "/pa th" + assert url.query == b"?search=ab" + assert url.raw_path == b"/pa%20th?search=ab" + assert url.fragment == "anchorlink" + + The components of a URL are broken down like this: + + https://jo%40email.com:a%20secret@müller.de:1234/pa%20th?search=ab#anchorlink + [scheme] [ username ] [password] [ host ][port][ path ] [ query ] [fragment] + [ userinfo ] [ netloc ][ raw_path ] + + Note that: + + * `url.scheme` is normalized to always be lowercased. + + * `url.host` is normalized to always be lowercased. Internationalized domain + names are represented in unicode, without IDNA encoding applied. For instance: + + url = httpx.URL("http://中国.icom.museum") + assert url.host == "中国.icom.museum" + url = httpx.URL("http://xn--fiqs8s.icom.museum") + assert url.host == "中国.icom.museum" + + * `url.raw_host` is normalized to always be lowercased, and is IDNA encoded. + + url = httpx.URL("http://中国.icom.museum") + assert url.raw_host == b"xn--fiqs8s.icom.museum" + url = httpx.URL("http://xn--fiqs8s.icom.museum") + assert url.raw_host == b"xn--fiqs8s.icom.museum" + + * `url.port` is either None or an integer. URLs that include the default port for + "http", "https", "ws", "wss", and "ftp" schemes have their port normalized to `None`. + + assert httpx.URL("http://example.com") == httpx.URL("http://example.com:80") + assert httpx.URL("http://example.com").port is None + assert httpx.URL("http://example.com:80").port is None + + * `url.userinfo` is raw bytes, without URL escaping. Usually you'll want to work with + `url.username` and `url.password` instead, which handle the URL escaping. + + * `url.raw_path` is raw bytes of both the path and query, without URL escaping. + This portion is used as the target when constructing HTTP requests. Usually you'll + want to work with `url.path` instead. + + * `url.query` is raw bytes, without URL escaping. A URL query string portion can only + be properly URL escaped when decoding the parameter names and values themselves. + """ + + def __init__( + self, url: typing.Union["URL", str] = "", **kwargs: typing.Any + ) -> None: + if kwargs: + allowed = { + "scheme": str, + "username": str, + "password": str, + "userinfo": bytes, + "host": str, + "port": int, + "netloc": bytes, + "path": str, + "query": bytes, + "raw_path": bytes, + "fragment": str, + "params": object, + } + + # Perform type checking for all supported keyword arguments. + for key, value in kwargs.items(): + if key not in allowed: + message = f"{key!r} is an invalid keyword argument for URL()" + raise TypeError(message) + if value is not None and not isinstance(value, allowed[key]): + expected = allowed[key].__name__ + seen = type(value).__name__ + message = f"Argument {key!r} must be {expected} but got {seen}" + raise TypeError(message) + if isinstance(value, bytes): + kwargs[key] = value.decode("ascii") + + if "params" in kwargs: + # Replace any "params" keyword with the raw "query" instead. + # + # Ensure that empty params use `kwargs["query"] = None` rather + # than `kwargs["query"] = ""`, so that generated URLs do not + # include an empty trailing "?". + params = kwargs.pop("params") + kwargs["query"] = None if not params else str(QueryParams(params)) + + if isinstance(url, str): + self._uri_reference = urlparse(url, **kwargs) + elif isinstance(url, URL): + self._uri_reference = url._uri_reference.copy_with(**kwargs) + else: + raise TypeError( + f"Invalid type for url. Expected str or httpx.URL, got {type(url)}: {url!r}" + ) + + @property + def scheme(self) -> str: + """ + The URL scheme, such as "http", "https". + Always normalised to lowercase. + """ + return self._uri_reference.scheme + + @property + def raw_scheme(self) -> bytes: + """ + The raw bytes representation of the URL scheme, such as b"http", b"https". + Always normalised to lowercase. + """ + return self._uri_reference.scheme.encode("ascii") + + @property + def userinfo(self) -> bytes: + """ + The URL userinfo as a raw bytestring. + For example: b"jo%40email.com:a%20secret". + """ + return self._uri_reference.userinfo.encode("ascii") + + @property + def username(self) -> str: + """ + The URL username as a string, with URL decoding applied. + For example: "jo@email.com" + """ + userinfo = self._uri_reference.userinfo + return unquote(userinfo.partition(":")[0]) + + @property + def password(self) -> str: + """ + The URL password as a string, with URL decoding applied. + For example: "a secret" + """ + userinfo = self._uri_reference.userinfo + return unquote(userinfo.partition(":")[2]) + + @property + def host(self) -> str: + """ + The URL host as a string. + Always normalized to lowercase, with IDNA hosts decoded into unicode. + + Examples: + + url = httpx.URL("http://www.EXAMPLE.org") + assert url.host == "www.example.org" + + url = httpx.URL("http://中国.icom.museum") + assert url.host == "中国.icom.museum" + + url = httpx.URL("http://xn--fiqs8s.icom.museum") + assert url.host == "中国.icom.museum" + + url = httpx.URL("https://[::ffff:192.168.0.1]") + assert url.host == "::ffff:192.168.0.1" + """ + host: str = self._uri_reference.host + + if host.startswith("xn--"): + host = idna.decode(host) + + return host + + @property + def raw_host(self) -> bytes: + """ + The raw bytes representation of the URL host. + Always normalized to lowercase, and IDNA encoded. + + Examples: + + url = httpx.URL("http://www.EXAMPLE.org") + assert url.raw_host == b"www.example.org" + + url = httpx.URL("http://中国.icom.museum") + assert url.raw_host == b"xn--fiqs8s.icom.museum" + + url = httpx.URL("http://xn--fiqs8s.icom.museum") + assert url.raw_host == b"xn--fiqs8s.icom.museum" + + url = httpx.URL("https://[::ffff:192.168.0.1]") + assert url.raw_host == b"::ffff:192.168.0.1" + """ + return self._uri_reference.host.encode("ascii") + + @property + def port(self) -> typing.Optional[int]: + """ + The URL port as an integer. + + Note that the URL class performs port normalization as per the WHATWG spec. + Default ports for "http", "https", "ws", "wss", and "ftp" schemes are always + treated as `None`. + + For example: + + assert httpx.URL("http://www.example.com") == httpx.URL("http://www.example.com:80") + assert httpx.URL("http://www.example.com:80").port is None + """ + return self._uri_reference.port + + @property + def netloc(self) -> bytes: + """ + Either `` or `:` as bytes. + Always normalized to lowercase, and IDNA encoded. + + This property may be used for generating the value of a request + "Host" header. + """ + return self._uri_reference.netloc.encode("ascii") + + @property + def path(self) -> str: + """ + The URL path as a string. Excluding the query string, and URL decoded. + + For example: + + url = httpx.URL("https://example.com/pa%20th") + assert url.path == "/pa th" + """ + path = self._uri_reference.path or "/" + return unquote(path) + + @property + def query(self) -> bytes: + """ + The URL query string, as raw bytes, excluding the leading b"?". + + This is necessarily a bytewise interface, because we cannot + perform URL decoding of this representation until we've parsed + the keys and values into a QueryParams instance. + + For example: + + url = httpx.URL("https://example.com/?filter=some%20search%20terms") + assert url.query == b"filter=some%20search%20terms" + """ + query = self._uri_reference.query or "" + return query.encode("ascii") + + @property + def params(self) -> "QueryParams": + """ + The URL query parameters, neatly parsed and packaged into an immutable + multidict representation. + """ + return QueryParams(self._uri_reference.query) + + @property + def raw_path(self) -> bytes: + """ + The complete URL path and query string as raw bytes. + Used as the target when constructing HTTP requests. + + For example: + + GET /users?search=some%20text HTTP/1.1 + Host: www.example.org + Connection: close + """ + path = self._uri_reference.path or "/" + if self._uri_reference.query is not None: + path += "?" + self._uri_reference.query + return path.encode("ascii") + + @property + def fragment(self) -> str: + """ + The URL fragments, as used in HTML anchors. + As a string, without the leading '#'. + """ + return unquote(self._uri_reference.fragment or "") + + @property + def raw(self) -> RawURL: + """ + Provides the (scheme, host, port, target) for the outgoing request. + + In older versions of `httpx` this was used in the low-level transport API. + We no longer use `RawURL`, and this property will be deprecated in a future release. + """ + return RawURL( + self.raw_scheme, + self.raw_host, + self.port, + self.raw_path, + ) + + @property + def is_absolute_url(self) -> bool: + """ + Return `True` for absolute URLs such as 'http://example.com/path', + and `False` for relative URLs such as '/path'. + """ + # We don't use `.is_absolute` from `rfc3986` because it treats + # URLs with a fragment portion as not absolute. + # What we actually care about is if the URL provides + # a scheme and hostname to which connections should be made. + return bool(self._uri_reference.scheme and self._uri_reference.host) + + @property + def is_relative_url(self) -> bool: + """ + Return `False` for absolute URLs such as 'http://example.com/path', + and `True` for relative URLs such as '/path'. + """ + return not self.is_absolute_url + + def copy_with(self, **kwargs: typing.Any) -> "URL": + """ + Copy this URL, returning a new URL with some components altered. + Accepts the same set of parameters as the components that are made + available via properties on the `URL` class. + + For example: + + url = httpx.URL("https://www.example.com").copy_with(username="jo@gmail.com", password="a secret") + assert url == "https://jo%40email.com:a%20secret@www.example.com" + """ + return URL(self, **kwargs) + + def copy_set_param(self, key: str, value: typing.Any = None) -> "URL": + return self.copy_with(params=self.params.set(key, value)) + + def copy_add_param(self, key: str, value: typing.Any = None) -> "URL": + return self.copy_with(params=self.params.add(key, value)) + + def copy_remove_param(self, key: str) -> "URL": + return self.copy_with(params=self.params.remove(key)) + + def copy_merge_params(self, params: QueryParamTypes) -> "URL": + return self.copy_with(params=self.params.merge(params)) + + def join(self, url: URLTypes) -> "URL": + """ + Return an absolute URL, using this URL as the base. + + Eg. + + url = httpx.URL("https://www.example.com/test") + url = url.join("/new/path") + assert url == "https://www.example.com/new/path" + """ + from urllib.parse import urljoin + + return URL(urljoin(str(self), str(URL(url)))) + + def __hash__(self) -> int: + return hash(str(self)) + + def __eq__(self, other: typing.Any) -> bool: + return isinstance(other, (URL, str)) and str(self) == str(URL(other)) + + def __str__(self) -> str: + return str(self._uri_reference) + + def __repr__(self) -> str: + scheme, userinfo, host, port, path, query, fragment = self._uri_reference + + if ":" in userinfo: + # Mask any password component. + userinfo = f'{userinfo.split(":")[0]}:[secure]' + + authority = "".join( + [ + f"{userinfo}@" if userinfo else "", + f"[{host}]" if ":" in host else host, + f":{port}" if port is not None else "", + ] + ) + url = "".join( + [ + f"{self.scheme}:" if scheme else "", + f"//{authority}" if authority else "", + path, + f"?{query}" if query is not None else "", + f"#{fragment}" if fragment is not None else "", + ] + ) + + return f"{self.__class__.__name__}({url!r})" + + +class QueryParams(typing.Mapping[str, str]): + """ + URL query parameters, as a multi-dict. + """ + + def __init__( + self, *args: typing.Optional[QueryParamTypes], **kwargs: typing.Any + ) -> None: + assert len(args) < 2, "Too many arguments." + assert not (args and kwargs), "Cannot mix named and unnamed arguments." + + value = args[0] if args else kwargs + + if value is None or isinstance(value, (str, bytes)): + value = value.decode("ascii") if isinstance(value, bytes) else value + self._dict = parse_qs(value, keep_blank_values=True) + elif isinstance(value, QueryParams): + self._dict = {k: list(v) for k, v in value._dict.items()} + else: + dict_value: typing.Dict[typing.Any, typing.List[typing.Any]] = {} + if isinstance(value, (list, tuple)): + # Convert list inputs like: + # [("a", "123"), ("a", "456"), ("b", "789")] + # To a dict representation, like: + # {"a": ["123", "456"], "b": ["789"]} + for item in value: + dict_value.setdefault(item[0], []).append(item[1]) + else: + # Convert dict inputs like: + # {"a": "123", "b": ["456", "789"]} + # To dict inputs where values are always lists, like: + # {"a": ["123"], "b": ["456", "789"]} + dict_value = { + k: list(v) if isinstance(v, (list, tuple)) else [v] + for k, v in value.items() + } + + # Ensure that keys and values are neatly coerced to strings. + # We coerce values `True` and `False` to JSON-like "true" and "false" + # representations, and coerce `None` values to the empty string. + self._dict = { + str(k): [primitive_value_to_str(item) for item in v] + for k, v in dict_value.items() + } + + def keys(self) -> typing.KeysView[str]: + """ + Return all the keys in the query params. + + Usage: + + q = httpx.QueryParams("a=123&a=456&b=789") + assert list(q.keys()) == ["a", "b"] + """ + return self._dict.keys() + + def values(self) -> typing.ValuesView[str]: + """ + Return all the values in the query params. If a key occurs more than once + only the first item for that key is returned. + + Usage: + + q = httpx.QueryParams("a=123&a=456&b=789") + assert list(q.values()) == ["123", "789"] + """ + return {k: v[0] for k, v in self._dict.items()}.values() + + def items(self) -> typing.ItemsView[str, str]: + """ + Return all items in the query params. If a key occurs more than once + only the first item for that key is returned. + + Usage: + + q = httpx.QueryParams("a=123&a=456&b=789") + assert list(q.items()) == [("a", "123"), ("b", "789")] + """ + return {k: v[0] for k, v in self._dict.items()}.items() + + def multi_items(self) -> typing.List[typing.Tuple[str, str]]: + """ + Return all items in the query params. Allow duplicate keys to occur. + + Usage: + + q = httpx.QueryParams("a=123&a=456&b=789") + assert list(q.multi_items()) == [("a", "123"), ("a", "456"), ("b", "789")] + """ + multi_items: typing.List[typing.Tuple[str, str]] = [] + for k, v in self._dict.items(): + multi_items.extend([(k, i) for i in v]) + return multi_items + + def get(self, key: typing.Any, default: typing.Any = None) -> typing.Any: + """ + Get a value from the query param for a given key. If the key occurs + more than once, then only the first value is returned. + + Usage: + + q = httpx.QueryParams("a=123&a=456&b=789") + assert q.get("a") == "123" + """ + if key in self._dict: + return self._dict[str(key)][0] + return default + + def get_list(self, key: str) -> typing.List[str]: + """ + Get all values from the query param for a given key. + + Usage: + + q = httpx.QueryParams("a=123&a=456&b=789") + assert q.get_list("a") == ["123", "456"] + """ + return list(self._dict.get(str(key), [])) + + def set(self, key: str, value: typing.Any = None) -> "QueryParams": + """ + Return a new QueryParams instance, setting the value of a key. + + Usage: + + q = httpx.QueryParams("a=123") + q = q.set("a", "456") + assert q == httpx.QueryParams("a=456") + """ + q = QueryParams() + q._dict = dict(self._dict) + q._dict[str(key)] = [primitive_value_to_str(value)] + return q + + def add(self, key: str, value: typing.Any = None) -> "QueryParams": + """ + Return a new QueryParams instance, setting or appending the value of a key. + + Usage: + + q = httpx.QueryParams("a=123") + q = q.add("a", "456") + assert q == httpx.QueryParams("a=123&a=456") + """ + q = QueryParams() + q._dict = dict(self._dict) + q._dict[str(key)] = q.get_list(key) + [primitive_value_to_str(value)] + return q + + def remove(self, key: str) -> "QueryParams": + """ + Return a new QueryParams instance, removing the value of a key. + + Usage: + + q = httpx.QueryParams("a=123") + q = q.remove("a") + assert q == httpx.QueryParams("") + """ + q = QueryParams() + q._dict = dict(self._dict) + q._dict.pop(str(key), None) + return q + + def merge(self, params: typing.Optional[QueryParamTypes] = None) -> "QueryParams": + """ + Return a new QueryParams instance, updated with. + + Usage: + + q = httpx.QueryParams("a=123") + q = q.merge({"b": "456"}) + assert q == httpx.QueryParams("a=123&b=456") + + q = httpx.QueryParams("a=123") + q = q.merge({"a": "456", "b": "789"}) + assert q == httpx.QueryParams("a=456&b=789") + """ + q = QueryParams(params) + q._dict = {**self._dict, **q._dict} + return q + + def __getitem__(self, key: typing.Any) -> str: + return self._dict[key][0] + + def __contains__(self, key: typing.Any) -> bool: + return key in self._dict + + def __iter__(self) -> typing.Iterator[typing.Any]: + return iter(self.keys()) + + def __len__(self) -> int: + return len(self._dict) + + def __bool__(self) -> bool: + return bool(self._dict) + + def __hash__(self) -> int: + return hash(str(self)) + + def __eq__(self, other: typing.Any) -> bool: + if not isinstance(other, self.__class__): + return False + return sorted(self.multi_items()) == sorted(other.multi_items()) + + def __str__(self) -> str: + """ + Note that we use '%20' encoding for spaces, and treat '/' as a safe + character. + + See https://github.com/encode/httpx/issues/2536 and + https://docs.python.org/3/library/urllib.parse.html#urllib.parse.urlencode + """ + return urlencode(self.multi_items()) + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + query_string = str(self) + return f"{class_name}({query_string!r})" + + def update(self, params: typing.Optional[QueryParamTypes] = None) -> None: + raise RuntimeError( + "QueryParams are immutable since 0.18.0. " + "Use `q = q.merge(...)` to create an updated copy." + ) + + def __setitem__(self, key: str, value: str) -> None: + raise RuntimeError( + "QueryParams are immutable since 0.18.0. " + "Use `q = q.set(key, value)` to create an updated copy." + ) diff --git a/Backend/venv/lib/python3.12/site-packages/httpx/_utils.py b/Backend/venv/lib/python3.12/site-packages/httpx/_utils.py new file mode 100644 index 00000000..a3a045da --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/httpx/_utils.py @@ -0,0 +1,477 @@ +import codecs +import email.message +import ipaddress +import mimetypes +import os +import re +import time +import typing +from pathlib import Path +from urllib.request import getproxies + +import sniffio + +from ._types import PrimitiveData + +if typing.TYPE_CHECKING: # pragma: no cover + from ._urls import URL + + +_HTML5_FORM_ENCODING_REPLACEMENTS = {'"': "%22", "\\": "\\\\"} +_HTML5_FORM_ENCODING_REPLACEMENTS.update( + {chr(c): "%{:02X}".format(c) for c in range(0x1F + 1) if c != 0x1B} +) +_HTML5_FORM_ENCODING_RE = re.compile( + r"|".join([re.escape(c) for c in _HTML5_FORM_ENCODING_REPLACEMENTS.keys()]) +) + + +def normalize_header_key( + value: typing.Union[str, bytes], + lower: bool, + encoding: typing.Optional[str] = None, +) -> bytes: + """ + Coerce str/bytes into a strictly byte-wise HTTP header key. + """ + if isinstance(value, bytes): + bytes_value = value + else: + bytes_value = value.encode(encoding or "ascii") + + return bytes_value.lower() if lower else bytes_value + + +def normalize_header_value( + value: typing.Union[str, bytes], encoding: typing.Optional[str] = None +) -> bytes: + """ + Coerce str/bytes into a strictly byte-wise HTTP header value. + """ + if isinstance(value, bytes): + return value + return value.encode(encoding or "ascii") + + +def primitive_value_to_str(value: "PrimitiveData") -> str: + """ + Coerce a primitive data type into a string value. + + Note that we prefer JSON-style 'true'/'false' for boolean values here. + """ + if value is True: + return "true" + elif value is False: + return "false" + elif value is None: + return "" + return str(value) + + +def is_known_encoding(encoding: str) -> bool: + """ + Return `True` if `encoding` is a known codec. + """ + try: + codecs.lookup(encoding) + except LookupError: + return False + return True + + +def format_form_param(name: str, value: str) -> bytes: + """ + Encode a name/value pair within a multipart form. + """ + + def replacer(match: typing.Match[str]) -> str: + return _HTML5_FORM_ENCODING_REPLACEMENTS[match.group(0)] + + value = _HTML5_FORM_ENCODING_RE.sub(replacer, value) + return f'{name}="{value}"'.encode() + + +# Null bytes; no need to recreate these on each call to guess_json_utf +_null = b"\x00" +_null2 = _null * 2 +_null3 = _null * 3 + + +def guess_json_utf(data: bytes) -> typing.Optional[str]: + # JSON always starts with two ASCII characters, so detection is as + # easy as counting the nulls and from their location and count + # determine the encoding. Also detect a BOM, if present. + sample = data[:4] + if sample in (codecs.BOM_UTF32_LE, codecs.BOM_UTF32_BE): + return "utf-32" # BOM included + if sample[:3] == codecs.BOM_UTF8: + return "utf-8-sig" # BOM included, MS style (discouraged) + if sample[:2] in (codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE): + return "utf-16" # BOM included + nullcount = sample.count(_null) + if nullcount == 0: + return "utf-8" + if nullcount == 2: + if sample[::2] == _null2: # 1st and 3rd are null + return "utf-16-be" + if sample[1::2] == _null2: # 2nd and 4th are null + return "utf-16-le" + # Did not detect 2 valid UTF-16 ascii-range characters + if nullcount == 3: + if sample[:3] == _null3: + return "utf-32-be" + if sample[1:] == _null3: + return "utf-32-le" + # Did not detect a valid UTF-32 ascii-range character + return None + + +def get_ca_bundle_from_env() -> typing.Optional[str]: + if "SSL_CERT_FILE" in os.environ: + ssl_file = Path(os.environ["SSL_CERT_FILE"]) + if ssl_file.is_file(): + return str(ssl_file) + if "SSL_CERT_DIR" in os.environ: + ssl_path = Path(os.environ["SSL_CERT_DIR"]) + if ssl_path.is_dir(): + return str(ssl_path) + return None + + +def parse_header_links(value: str) -> typing.List[typing.Dict[str, str]]: + """ + Returns a list of parsed link headers, for more info see: + https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Link + The generic syntax of those is: + Link: < uri-reference >; param1=value1; param2="value2" + So for instance: + Link; '; type="image/jpeg",;' + would return + [ + {"url": "http:/.../front.jpeg", "type": "image/jpeg"}, + {"url": "http://.../back.jpeg"}, + ] + :param value: HTTP Link entity-header field + :return: list of parsed link headers + """ + links: typing.List[typing.Dict[str, str]] = [] + replace_chars = " '\"" + value = value.strip(replace_chars) + if not value: + return links + for val in re.split(", *<", value): + try: + url, params = val.split(";", 1) + except ValueError: + url, params = val, "" + link = {"url": url.strip("<> '\"")} + for param in params.split(";"): + try: + key, value = param.split("=") + except ValueError: + break + link[key.strip(replace_chars)] = value.strip(replace_chars) + links.append(link) + return links + + +def parse_content_type_charset(content_type: str) -> typing.Optional[str]: + # We used to use `cgi.parse_header()` here, but `cgi` became a dead battery. + # See: https://peps.python.org/pep-0594/#cgi + msg = email.message.Message() + msg["content-type"] = content_type + return msg.get_content_charset(failobj=None) + + +SENSITIVE_HEADERS = {"authorization", "proxy-authorization"} + + +def obfuscate_sensitive_headers( + items: typing.Iterable[typing.Tuple[typing.AnyStr, typing.AnyStr]] +) -> typing.Iterator[typing.Tuple[typing.AnyStr, typing.AnyStr]]: + for k, v in items: + if to_str(k.lower()) in SENSITIVE_HEADERS: + v = to_bytes_or_str("[secure]", match_type_of=v) + yield k, v + + +def port_or_default(url: "URL") -> typing.Optional[int]: + if url.port is not None: + return url.port + return {"http": 80, "https": 443}.get(url.scheme) + + +def same_origin(url: "URL", other: "URL") -> bool: + """ + Return 'True' if the given URLs share the same origin. + """ + return ( + url.scheme == other.scheme + and url.host == other.host + and port_or_default(url) == port_or_default(other) + ) + + +def is_https_redirect(url: "URL", location: "URL") -> bool: + """ + Return 'True' if 'location' is a HTTPS upgrade of 'url' + """ + if url.host != location.host: + return False + + return ( + url.scheme == "http" + and port_or_default(url) == 80 + and location.scheme == "https" + and port_or_default(location) == 443 + ) + + +def get_environment_proxies() -> typing.Dict[str, typing.Optional[str]]: + """Gets proxy information from the environment""" + + # urllib.request.getproxies() falls back on System + # Registry and Config for proxies on Windows and macOS. + # We don't want to propagate non-HTTP proxies into + # our configuration such as 'TRAVIS_APT_PROXY'. + proxy_info = getproxies() + mounts: typing.Dict[str, typing.Optional[str]] = {} + + for scheme in ("http", "https", "all"): + if proxy_info.get(scheme): + hostname = proxy_info[scheme] + mounts[f"{scheme}://"] = ( + hostname if "://" in hostname else f"http://{hostname}" + ) + + no_proxy_hosts = [host.strip() for host in proxy_info.get("no", "").split(",")] + for hostname in no_proxy_hosts: + # See https://curl.haxx.se/libcurl/c/CURLOPT_NOPROXY.html for details + # on how names in `NO_PROXY` are handled. + if hostname == "*": + # If NO_PROXY=* is used or if "*" occurs as any one of the comma + # separated hostnames, then we should just bypass any information + # from HTTP_PROXY, HTTPS_PROXY, ALL_PROXY, and always ignore + # proxies. + return {} + elif hostname: + # NO_PROXY=.google.com is marked as "all://*.google.com, + # which disables "www.google.com" but not "google.com" + # NO_PROXY=google.com is marked as "all://*google.com, + # which disables "www.google.com" and "google.com". + # (But not "wwwgoogle.com") + # NO_PROXY can include domains, IPv6, IPv4 addresses and "localhost" + # NO_PROXY=example.com,::1,localhost,192.168.0.0/16 + if is_ipv4_hostname(hostname): + mounts[f"all://{hostname}"] = None + elif is_ipv6_hostname(hostname): + mounts[f"all://[{hostname}]"] = None + elif hostname.lower() == "localhost": + mounts[f"all://{hostname}"] = None + else: + mounts[f"all://*{hostname}"] = None + + return mounts + + +def to_bytes(value: typing.Union[str, bytes], encoding: str = "utf-8") -> bytes: + return value.encode(encoding) if isinstance(value, str) else value + + +def to_str(value: typing.Union[str, bytes], encoding: str = "utf-8") -> str: + return value if isinstance(value, str) else value.decode(encoding) + + +def to_bytes_or_str(value: str, match_type_of: typing.AnyStr) -> typing.AnyStr: + return value if isinstance(match_type_of, str) else value.encode() + + +def unquote(value: str) -> str: + return value[1:-1] if value[0] == value[-1] == '"' else value + + +def guess_content_type(filename: typing.Optional[str]) -> typing.Optional[str]: + if filename: + return mimetypes.guess_type(filename)[0] or "application/octet-stream" + return None + + +def peek_filelike_length(stream: typing.Any) -> typing.Optional[int]: + """ + Given a file-like stream object, return its length in number of bytes + without reading it into memory. + """ + try: + # Is it an actual file? + fd = stream.fileno() + # Yup, seems to be an actual file. + length = os.fstat(fd).st_size + except (AttributeError, OSError): + # No... Maybe it's something that supports random access, like `io.BytesIO`? + try: + # Assuming so, go to end of stream to figure out its length, + # then put it back in place. + offset = stream.tell() + length = stream.seek(0, os.SEEK_END) + stream.seek(offset) + except (AttributeError, OSError): + # Not even that? Sorry, we're doomed... + return None + + return length + + +class Timer: + async def _get_time(self) -> float: + library = sniffio.current_async_library() + if library == "trio": + import trio + + return trio.current_time() + elif library == "curio": # pragma: no cover + import curio + + return typing.cast(float, await curio.clock()) + + import asyncio + + return asyncio.get_event_loop().time() + + def sync_start(self) -> None: + self.started = time.perf_counter() + + async def async_start(self) -> None: + self.started = await self._get_time() + + def sync_elapsed(self) -> float: + now = time.perf_counter() + return now - self.started + + async def async_elapsed(self) -> float: + now = await self._get_time() + return now - self.started + + +class URLPattern: + """ + A utility class currently used for making lookups against proxy keys... + + # Wildcard matching... + >>> pattern = URLPattern("all") + >>> pattern.matches(httpx.URL("http://example.com")) + True + + # Witch scheme matching... + >>> pattern = URLPattern("https") + >>> pattern.matches(httpx.URL("https://example.com")) + True + >>> pattern.matches(httpx.URL("http://example.com")) + False + + # With domain matching... + >>> pattern = URLPattern("https://example.com") + >>> pattern.matches(httpx.URL("https://example.com")) + True + >>> pattern.matches(httpx.URL("http://example.com")) + False + >>> pattern.matches(httpx.URL("https://other.com")) + False + + # Wildcard scheme, with domain matching... + >>> pattern = URLPattern("all://example.com") + >>> pattern.matches(httpx.URL("https://example.com")) + True + >>> pattern.matches(httpx.URL("http://example.com")) + True + >>> pattern.matches(httpx.URL("https://other.com")) + False + + # With port matching... + >>> pattern = URLPattern("https://example.com:1234") + >>> pattern.matches(httpx.URL("https://example.com:1234")) + True + >>> pattern.matches(httpx.URL("https://example.com")) + False + """ + + def __init__(self, pattern: str) -> None: + from ._urls import URL + + if pattern and ":" not in pattern: + raise ValueError( + f"Proxy keys should use proper URL forms rather " + f"than plain scheme strings. " + f'Instead of "{pattern}", use "{pattern}://"' + ) + + url = URL(pattern) + self.pattern = pattern + self.scheme = "" if url.scheme == "all" else url.scheme + self.host = "" if url.host == "*" else url.host + self.port = url.port + if not url.host or url.host == "*": + self.host_regex: typing.Optional[typing.Pattern[str]] = None + elif url.host.startswith("*."): + # *.example.com should match "www.example.com", but not "example.com" + domain = re.escape(url.host[2:]) + self.host_regex = re.compile(f"^.+\\.{domain}$") + elif url.host.startswith("*"): + # *example.com should match "www.example.com" and "example.com" + domain = re.escape(url.host[1:]) + self.host_regex = re.compile(f"^(.+\\.)?{domain}$") + else: + # example.com should match "example.com" but not "www.example.com" + domain = re.escape(url.host) + self.host_regex = re.compile(f"^{domain}$") + + def matches(self, other: "URL") -> bool: + if self.scheme and self.scheme != other.scheme: + return False + if ( + self.host + and self.host_regex is not None + and not self.host_regex.match(other.host) + ): + return False + if self.port is not None and self.port != other.port: + return False + return True + + @property + def priority(self) -> typing.Tuple[int, int, int]: + """ + The priority allows URLPattern instances to be sortable, so that + we can match from most specific to least specific. + """ + # URLs with a port should take priority over URLs without a port. + port_priority = 0 if self.port is not None else 1 + # Longer hostnames should match first. + host_priority = -len(self.host) + # Longer schemes should match first. + scheme_priority = -len(self.scheme) + return (port_priority, host_priority, scheme_priority) + + def __hash__(self) -> int: + return hash(self.pattern) + + def __lt__(self, other: "URLPattern") -> bool: + return self.priority < other.priority + + def __eq__(self, other: typing.Any) -> bool: + return isinstance(other, URLPattern) and self.pattern == other.pattern + + +def is_ipv4_hostname(hostname: str) -> bool: + try: + ipaddress.IPv4Address(hostname.split("/")[0]) + except Exception: + return False + return True + + +def is_ipv6_hostname(hostname: str) -> bool: + try: + ipaddress.IPv6Address(hostname.split("/")[0]) + except Exception: + return False + return True diff --git a/Backend/venv/lib/python3.12/site-packages/httpx/py.typed b/Backend/venv/lib/python3.12/site-packages/httpx/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/Backend/venv/lib/python3.12/site-packages/idna/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/idna/__pycache__/__init__.cpython-312.pyc index d97f9f47..f523ba29 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/idna/__pycache__/__init__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/idna/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/idna/__pycache__/codec.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/idna/__pycache__/codec.cpython-312.pyc index a4d082a4..a0683adc 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/idna/__pycache__/codec.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/idna/__pycache__/codec.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/idna/__pycache__/compat.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/idna/__pycache__/compat.cpython-312.pyc index b24016d5..48e76c54 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/idna/__pycache__/compat.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/idna/__pycache__/compat.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/idna/__pycache__/core.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/idna/__pycache__/core.cpython-312.pyc index 8e435daf..f4e16082 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/idna/__pycache__/core.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/idna/__pycache__/core.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/idna/__pycache__/idnadata.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/idna/__pycache__/idnadata.cpython-312.pyc index f949ace1..c8117e60 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/idna/__pycache__/idnadata.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/idna/__pycache__/idnadata.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/idna/__pycache__/intranges.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/idna/__pycache__/intranges.cpython-312.pyc index 095fb11a..478f0049 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/idna/__pycache__/intranges.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/idna/__pycache__/intranges.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/idna/__pycache__/package_data.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/idna/__pycache__/package_data.cpython-312.pyc index dcb1d04d..6feaa5dc 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/idna/__pycache__/package_data.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/idna/__pycache__/package_data.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/idna/__pycache__/uts46data.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/idna/__pycache__/uts46data.cpython-312.pyc index b245bfd6..5841e1bb 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/idna/__pycache__/uts46data.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/idna/__pycache__/uts46data.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/iniconfig-2.3.0.dist-info/INSTALLER b/Backend/venv/lib/python3.12/site-packages/iniconfig-2.3.0.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/iniconfig-2.3.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/Backend/venv/lib/python3.12/site-packages/iniconfig-2.3.0.dist-info/METADATA b/Backend/venv/lib/python3.12/site-packages/iniconfig-2.3.0.dist-info/METADATA new file mode 100644 index 00000000..fc3c00df --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/iniconfig-2.3.0.dist-info/METADATA @@ -0,0 +1,79 @@ +Metadata-Version: 2.4 +Name: iniconfig +Version: 2.3.0 +Summary: brain-dead simple config-ini parsing +Author-email: Ronny Pfannschmidt , Holger Krekel +License-Expression: MIT +Project-URL: Homepage, https://github.com/pytest-dev/iniconfig +Classifier: Development Status :: 4 - Beta +Classifier: Intended Audience :: Developers +Classifier: Operating System :: MacOS :: MacOS X +Classifier: Operating System :: Microsoft :: Windows +Classifier: Operating System :: POSIX +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Programming Language :: Python :: 3.14 +Classifier: Topic :: Software Development :: Libraries +Classifier: Topic :: Utilities +Requires-Python: >=3.10 +Description-Content-Type: text/x-rst +License-File: LICENSE +Dynamic: license-file + +iniconfig: brain-dead simple parsing of ini files +======================================================= + +iniconfig is a small and simple INI-file parser module +having a unique set of features: + +* maintains order of sections and entries +* supports multi-line values with or without line-continuations +* supports "#" comments everywhere +* raises errors with proper line-numbers +* no bells and whistles like automatic substitutions +* iniconfig raises an Error if two sections have the same name. + +If you encounter issues or have feature wishes please report them to: + + https://github.com/RonnyPfannschmidt/iniconfig/issues + +Basic Example +=================================== + +If you have an ini file like this: + +.. code-block:: ini + + # content of example.ini + [section1] # comment + name1=value1 # comment + name1b=value1,value2 # comment + + [section2] + name2= + line1 + line2 + +then you can do: + +.. code-block:: pycon + + >>> import iniconfig + >>> ini = iniconfig.IniConfig("example.ini") + >>> ini['section1']['name1'] # raises KeyError if not exists + 'value1' + >>> ini.get('section1', 'name1b', [], lambda x: x.split(",")) + ['value1', 'value2'] + >>> ini.get('section1', 'notexist', [], lambda x: x.split(",")) + [] + >>> [x.name for x in list(ini)] + ['section1', 'section2'] + >>> list(list(ini)[0].items()) + [('name1', 'value1'), ('name1b', 'value1,value2')] + >>> 'section1' in ini + True + >>> 'inexistendsection' in ini + False diff --git a/Backend/venv/lib/python3.12/site-packages/iniconfig-2.3.0.dist-info/RECORD b/Backend/venv/lib/python3.12/site-packages/iniconfig-2.3.0.dist-info/RECORD new file mode 100644 index 00000000..c9899e46 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/iniconfig-2.3.0.dist-info/RECORD @@ -0,0 +1,15 @@ +iniconfig-2.3.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +iniconfig-2.3.0.dist-info/METADATA,sha256=QNdz-E5OES9JW79PG-nL0tRWwK6271MR910b8yLyFls,2526 +iniconfig-2.3.0.dist-info/RECORD,, +iniconfig-2.3.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91 +iniconfig-2.3.0.dist-info/licenses/LICENSE,sha256=NAn6kfes5VeJRjJnZlbjImT-XvdYFTVyXcmiN3RVG9Q,1098 +iniconfig-2.3.0.dist-info/top_level.txt,sha256=7KfM0fugdlToj9UW7enKXk2HYALQD8qHiyKtjhSzgN8,10 +iniconfig/__init__.py,sha256=XL5eqUYj4mskAOorZ5jfRAinJvJzTI-fJxpP4xfXtaw,7497 +iniconfig/__pycache__/__init__.cpython-312.pyc,, +iniconfig/__pycache__/_parse.cpython-312.pyc,, +iniconfig/__pycache__/_version.cpython-312.pyc,, +iniconfig/__pycache__/exceptions.cpython-312.pyc,, +iniconfig/_parse.py,sha256=5ncBl7MAQiaPNnpRrs9FR4t6G6DkgOUs458OY_1CR28,5223 +iniconfig/_version.py,sha256=KNFYe-Vtdt7Z-oHyl8jmDAQ9qXoCNMAEXigj6BR1QUI,704 +iniconfig/exceptions.py,sha256=mipQ_aMxD9CvSvFWN1oTXY4QuRnKAMZ1f3sCdmjDTU0,399 +iniconfig/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/Backend/venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/WHEEL b/Backend/venv/lib/python3.12/site-packages/iniconfig-2.3.0.dist-info/WHEEL similarity index 65% rename from Backend/venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/WHEEL rename to Backend/venv/lib/python3.12/site-packages/iniconfig-2.3.0.dist-info/WHEEL index 1eb3c49d..e7fa31b6 100644 --- a/Backend/venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/WHEEL +++ b/Backend/venv/lib/python3.12/site-packages/iniconfig-2.3.0.dist-info/WHEEL @@ -1,5 +1,5 @@ Wheel-Version: 1.0 -Generator: setuptools (78.1.0) +Generator: setuptools (80.9.0) Root-Is-Purelib: true Tag: py3-none-any diff --git a/Backend/venv/lib/python3.12/site-packages/iniconfig-2.3.0.dist-info/licenses/LICENSE b/Backend/venv/lib/python3.12/site-packages/iniconfig-2.3.0.dist-info/licenses/LICENSE new file mode 100644 index 00000000..46f4b284 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/iniconfig-2.3.0.dist-info/licenses/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2010 - 2023 Holger Krekel and others + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/Backend/venv/lib/python3.12/site-packages/iniconfig-2.3.0.dist-info/top_level.txt b/Backend/venv/lib/python3.12/site-packages/iniconfig-2.3.0.dist-info/top_level.txt new file mode 100644 index 00000000..9dda5369 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/iniconfig-2.3.0.dist-info/top_level.txt @@ -0,0 +1 @@ +iniconfig diff --git a/Backend/venv/lib/python3.12/site-packages/iniconfig/__init__.py b/Backend/venv/lib/python3.12/site-packages/iniconfig/__init__.py new file mode 100644 index 00000000..b84809f8 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/iniconfig/__init__.py @@ -0,0 +1,249 @@ +"""brain-dead simple parser for ini-style files. +(C) Ronny Pfannschmidt, Holger Krekel -- MIT licensed +""" + +import os +from collections.abc import Callable +from collections.abc import Iterator +from collections.abc import Mapping +from typing import Final +from typing import TypeVar +from typing import overload + +__all__ = ["IniConfig", "ParseError", "COMMENTCHARS", "iscommentline"] + +from . import _parse +from ._parse import COMMENTCHARS +from ._parse import iscommentline +from .exceptions import ParseError + +_D = TypeVar("_D") +_T = TypeVar("_T") + + +class SectionWrapper: + config: Final["IniConfig"] + name: Final[str] + + def __init__(self, config: "IniConfig", name: str) -> None: + self.config = config + self.name = name + + def lineof(self, name: str) -> int | None: + return self.config.lineof(self.name, name) + + @overload + def get(self, key: str) -> str | None: ... + + @overload + def get( + self, + key: str, + convert: Callable[[str], _T], + ) -> _T | None: ... + + @overload + def get( + self, + key: str, + default: None, + convert: Callable[[str], _T], + ) -> _T | None: ... + + @overload + def get(self, key: str, default: _D, convert: None = None) -> str | _D: ... + + @overload + def get( + self, + key: str, + default: _D, + convert: Callable[[str], _T], + ) -> _T | _D: ... + + # TODO: investigate possible mypy bug wrt matching the passed over data + def get( # type: ignore [misc] + self, + key: str, + default: _D | None = None, + convert: Callable[[str], _T] | None = None, + ) -> _D | _T | str | None: + return self.config.get(self.name, key, convert=convert, default=default) + + def __getitem__(self, key: str) -> str: + return self.config.sections[self.name][key] + + def __iter__(self) -> Iterator[str]: + section: Mapping[str, str] = self.config.sections.get(self.name, {}) + + def lineof(key: str) -> int: + return self.config.lineof(self.name, key) # type: ignore[return-value] + + yield from sorted(section, key=lineof) + + def items(self) -> Iterator[tuple[str, str]]: + for name in self: + yield name, self[name] + + +class IniConfig: + path: Final[str] + sections: Final[Mapping[str, Mapping[str, str]]] + _sources: Final[Mapping[tuple[str, str | None], int]] + + def __init__( + self, + path: str | os.PathLike[str], + data: str | None = None, + encoding: str = "utf-8", + *, + _sections: Mapping[str, Mapping[str, str]] | None = None, + _sources: Mapping[tuple[str, str | None], int] | None = None, + ) -> None: + self.path = os.fspath(path) + + # Determine sections and sources + if _sections is not None and _sources is not None: + # Use provided pre-parsed data (called from parse()) + sections_data = _sections + sources = _sources + else: + # Parse the data (backward compatible path) + if data is None: + with open(self.path, encoding=encoding) as fp: + data = fp.read() + + # Use old behavior (no stripping) for backward compatibility + sections_data, sources = _parse.parse_ini_data( + self.path, data, strip_inline_comments=False + ) + + # Assign once to Final attributes + self._sources = sources + self.sections = sections_data + + @classmethod + def parse( + cls, + path: str | os.PathLike[str], + data: str | None = None, + encoding: str = "utf-8", + *, + strip_inline_comments: bool = True, + strip_section_whitespace: bool = False, + ) -> "IniConfig": + """Parse an INI file. + + Args: + path: Path to the INI file (used for error messages) + data: Optional INI content as string. If None, reads from path. + encoding: Encoding to use when reading the file (default: utf-8) + strip_inline_comments: Whether to strip inline comments from values + (default: True). When True, comments starting with # or ; are + removed from values, matching the behavior for section comments. + strip_section_whitespace: Whether to strip whitespace from section and key names + (default: False). When True, strips Unicode whitespace from section and key names, + addressing issue #4. When False, preserves existing behavior for backward compatibility. + + Returns: + IniConfig instance with parsed configuration + + Example: + # With comment stripping (default): + config = IniConfig.parse("setup.cfg") + # value = "foo" instead of "foo # comment" + + # Without comment stripping (old behavior): + config = IniConfig.parse("setup.cfg", strip_inline_comments=False) + # value = "foo # comment" + + # With section name stripping (opt-in for issue #4): + config = IniConfig.parse("setup.cfg", strip_section_whitespace=True) + # section names and keys have Unicode whitespace stripped + """ + fspath = os.fspath(path) + + if data is None: + with open(fspath, encoding=encoding) as fp: + data = fp.read() + + sections_data, sources = _parse.parse_ini_data( + fspath, + data, + strip_inline_comments=strip_inline_comments, + strip_section_whitespace=strip_section_whitespace, + ) + + # Call constructor with pre-parsed sections and sources + return cls(path=fspath, _sections=sections_data, _sources=sources) + + def lineof(self, section: str, name: str | None = None) -> int | None: + lineno = self._sources.get((section, name)) + return None if lineno is None else lineno + 1 + + @overload + def get( + self, + section: str, + name: str, + ) -> str | None: ... + + @overload + def get( + self, + section: str, + name: str, + convert: Callable[[str], _T], + ) -> _T | None: ... + + @overload + def get( + self, + section: str, + name: str, + default: None, + convert: Callable[[str], _T], + ) -> _T | None: ... + + @overload + def get( + self, section: str, name: str, default: _D, convert: None = None + ) -> str | _D: ... + + @overload + def get( + self, + section: str, + name: str, + default: _D, + convert: Callable[[str], _T], + ) -> _T | _D: ... + + def get( # type: ignore + self, + section: str, + name: str, + default: _D | None = None, + convert: Callable[[str], _T] | None = None, + ) -> _D | _T | str | None: + try: + value: str = self.sections[section][name] + except KeyError: + return default + else: + if convert is not None: + return convert(value) + else: + return value + + def __getitem__(self, name: str) -> SectionWrapper: + if name not in self.sections: + raise KeyError(name) + return SectionWrapper(self, name) + + def __iter__(self) -> Iterator[SectionWrapper]: + for name in sorted(self.sections, key=self.lineof): # type: ignore + yield SectionWrapper(self, name) + + def __contains__(self, arg: str) -> bool: + return arg in self.sections diff --git a/Backend/venv/lib/python3.12/site-packages/iniconfig/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/iniconfig/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..d0927627 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/iniconfig/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/iniconfig/__pycache__/_parse.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/iniconfig/__pycache__/_parse.cpython-312.pyc new file mode 100644 index 00000000..57048630 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/iniconfig/__pycache__/_parse.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/iniconfig/__pycache__/_version.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/iniconfig/__pycache__/_version.cpython-312.pyc new file mode 100644 index 00000000..ba4bbb76 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/iniconfig/__pycache__/_version.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/iniconfig/__pycache__/exceptions.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/iniconfig/__pycache__/exceptions.cpython-312.pyc new file mode 100644 index 00000000..9634bbd8 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/iniconfig/__pycache__/exceptions.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/iniconfig/_parse.py b/Backend/venv/lib/python3.12/site-packages/iniconfig/_parse.py new file mode 100644 index 00000000..57b9b44e --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/iniconfig/_parse.py @@ -0,0 +1,163 @@ +from collections.abc import Mapping +from typing import NamedTuple + +from .exceptions import ParseError + +COMMENTCHARS = "#;" + + +class ParsedLine(NamedTuple): + lineno: int + section: str | None + name: str | None + value: str | None + + +def parse_ini_data( + path: str, + data: str, + *, + strip_inline_comments: bool, + strip_section_whitespace: bool = False, +) -> tuple[Mapping[str, Mapping[str, str]], Mapping[tuple[str, str | None], int]]: + """Parse INI data and return sections and sources mappings. + + Args: + path: Path for error messages + data: INI content as string + strip_inline_comments: Whether to strip inline comments from values + strip_section_whitespace: Whether to strip whitespace from section and key names + (default: False). When True, addresses issue #4 by stripping Unicode whitespace. + + Returns: + Tuple of (sections_data, sources) where: + - sections_data: mapping of section -> {name -> value} + - sources: mapping of (section, name) -> line number + """ + tokens = parse_lines( + path, + data.splitlines(True), + strip_inline_comments=strip_inline_comments, + strip_section_whitespace=strip_section_whitespace, + ) + + sources: dict[tuple[str, str | None], int] = {} + sections_data: dict[str, dict[str, str]] = {} + + for lineno, section, name, value in tokens: + if section is None: + raise ParseError(path, lineno, "no section header defined") + sources[section, name] = lineno + if name is None: + if section in sections_data: + raise ParseError(path, lineno, f"duplicate section {section!r}") + sections_data[section] = {} + else: + if name in sections_data[section]: + raise ParseError(path, lineno, f"duplicate name {name!r}") + assert value is not None + sections_data[section][name] = value + + return sections_data, sources + + +def parse_lines( + path: str, + line_iter: list[str], + *, + strip_inline_comments: bool = False, + strip_section_whitespace: bool = False, +) -> list[ParsedLine]: + result: list[ParsedLine] = [] + section = None + for lineno, line in enumerate(line_iter): + name, data = _parseline( + path, line, lineno, strip_inline_comments, strip_section_whitespace + ) + # new value + if name is not None and data is not None: + result.append(ParsedLine(lineno, section, name, data)) + # new section + elif name is not None and data is None: + if not name: + raise ParseError(path, lineno, "empty section name") + section = name + result.append(ParsedLine(lineno, section, None, None)) + # continuation + elif name is None and data is not None: + if not result: + raise ParseError(path, lineno, "unexpected value continuation") + last = result.pop() + if last.name is None: + raise ParseError(path, lineno, "unexpected value continuation") + + if last.value: + last = last._replace(value=f"{last.value}\n{data}") + else: + last = last._replace(value=data) + result.append(last) + return result + + +def _parseline( + path: str, + line: str, + lineno: int, + strip_inline_comments: bool, + strip_section_whitespace: bool, +) -> tuple[str | None, str | None]: + # blank lines + if iscommentline(line): + line = "" + else: + line = line.rstrip() + if not line: + return None, None + # section + if line[0] == "[": + realline = line + for c in COMMENTCHARS: + line = line.split(c)[0].rstrip() + if line[-1] == "]": + section_name = line[1:-1] + # Optionally strip whitespace from section name (issue #4) + if strip_section_whitespace: + section_name = section_name.strip() + return section_name, None + return None, realline.strip() + # value + elif not line[0].isspace(): + try: + name, value = line.split("=", 1) + if ":" in name: + raise ValueError() + except ValueError: + try: + name, value = line.split(":", 1) + except ValueError: + raise ParseError(path, lineno, f"unexpected line: {line!r}") from None + + # Strip key name (always for backward compatibility, optionally with unicode awareness) + key_name = name.strip() + + # Strip value + value = value.strip() + # Strip inline comments from values if requested (issue #55) + if strip_inline_comments: + for c in COMMENTCHARS: + value = value.split(c)[0].rstrip() + + return key_name, value + # continuation + else: + line = line.strip() + # Strip inline comments from continuations if requested (issue #55) + if strip_inline_comments: + for c in COMMENTCHARS: + line = line.split(c)[0].rstrip() + return None, line + + +def iscommentline(line: str) -> bool: + c = line.lstrip()[:1] + return c in COMMENTCHARS diff --git a/Backend/venv/lib/python3.12/site-packages/iniconfig/_version.py b/Backend/venv/lib/python3.12/site-packages/iniconfig/_version.py new file mode 100644 index 00000000..b982b024 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/iniconfig/_version.py @@ -0,0 +1,34 @@ +# file generated by setuptools-scm +# don't change, don't track in version control + +__all__ = [ + "__version__", + "__version_tuple__", + "version", + "version_tuple", + "__commit_id__", + "commit_id", +] + +TYPE_CHECKING = False +if TYPE_CHECKING: + from typing import Tuple + from typing import Union + + VERSION_TUPLE = Tuple[Union[int, str], ...] + COMMIT_ID = Union[str, None] +else: + VERSION_TUPLE = object + COMMIT_ID = object + +version: str +__version__: str +__version_tuple__: VERSION_TUPLE +version_tuple: VERSION_TUPLE +commit_id: COMMIT_ID +__commit_id__: COMMIT_ID + +__version__ = version = '2.3.0' +__version_tuple__ = version_tuple = (2, 3, 0) + +__commit_id__ = commit_id = None diff --git a/Backend/venv/lib/python3.12/site-packages/iniconfig/exceptions.py b/Backend/venv/lib/python3.12/site-packages/iniconfig/exceptions.py new file mode 100644 index 00000000..d078bc65 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/iniconfig/exceptions.py @@ -0,0 +1,16 @@ +from typing import Final + + +class ParseError(Exception): + path: Final[str] + lineno: Final[int] + msg: Final[str] + + def __init__(self, path: str, lineno: int, msg: str) -> None: + super().__init__(path, lineno, msg) + self.path = path + self.lineno = lineno + self.msg = msg + + def __str__(self) -> str: + return f"{self.path}:{self.lineno + 1}: {self.msg}" diff --git a/Backend/venv/lib/python3.12/site-packages/iniconfig/py.typed b/Backend/venv/lib/python3.12/site-packages/iniconfig/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/Backend/venv/lib/python3.12/site-packages/pip-25.3.dist-info/RECORD b/Backend/venv/lib/python3.12/site-packages/pip-25.3.dist-info/RECORD index e15bca5c..ab4c123e 100644 --- a/Backend/venv/lib/python3.12/site-packages/pip-25.3.dist-info/RECORD +++ b/Backend/venv/lib/python3.12/site-packages/pip-25.3.dist-info/RECORD @@ -1,6 +1,6 @@ -../../../bin/pip,sha256=sEfserb233VAwZLq2MJoS4ouLsCNFNElVZqmo5vu7Mc,261 -../../../bin/pip3,sha256=sEfserb233VAwZLq2MJoS4ouLsCNFNElVZqmo5vu7Mc,261 -../../../bin/pip3.12,sha256=sEfserb233VAwZLq2MJoS4ouLsCNFNElVZqmo5vu7Mc,261 +../../../bin/pip,sha256=xnV6ugaf9cQyx47UVXbXc1bgi7SW7k_fteJz1seYa60,233 +../../../bin/pip3,sha256=xnV6ugaf9cQyx47UVXbXc1bgi7SW7k_fteJz1seYa60,233 +../../../bin/pip3.12,sha256=xnV6ugaf9cQyx47UVXbXc1bgi7SW7k_fteJz1seYa60,233 pip-25.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 pip-25.3.dist-info/METADATA,sha256=Khugcl59I2--LVxQpP_5yeP-NMpJTyzr3lxFw3kTedM,4672 pip-25.3.dist-info/RECORD,, diff --git a/Backend/venv/lib/python3.12/site-packages/pip/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/__pycache__/__init__.cpython-312.pyc index eb6365aa..133cde35 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/__pycache__/__init__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/__pycache__/__main__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/__pycache__/__main__.cpython-312.pyc index f4776326..dfccec43 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/__pycache__/__main__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/__pycache__/__main__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/__pycache__/__pip-runner__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/__pycache__/__pip-runner__.cpython-312.pyc index fdb9a965..9bac8f40 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/__pycache__/__pip-runner__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/__pycache__/__pip-runner__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/__pycache__/__init__.cpython-312.pyc index f2bccc95..5c438fa7 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/__pycache__/__init__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/__pycache__/build_env.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/__pycache__/build_env.cpython-312.pyc index 75508433..8475356a 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/__pycache__/build_env.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/__pycache__/build_env.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/__pycache__/cache.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/__pycache__/cache.cpython-312.pyc index 524f8273..bda1830c 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/__pycache__/cache.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/__pycache__/cache.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/__pycache__/configuration.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/__pycache__/configuration.cpython-312.pyc index 8fae329f..50e17387 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/__pycache__/configuration.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/__pycache__/configuration.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/__pycache__/exceptions.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/__pycache__/exceptions.cpython-312.pyc index 845f76b9..5275e972 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/__pycache__/exceptions.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/__pycache__/exceptions.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/__pycache__/main.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/__pycache__/main.cpython-312.pyc index 97c1bb6c..27ef350d 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/__pycache__/main.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/__pycache__/main.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/__pycache__/pyproject.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/__pycache__/pyproject.cpython-312.pyc index 03abe16b..d1f1fd06 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/__pycache__/pyproject.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/__pycache__/pyproject.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/__pycache__/self_outdated_check.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/__pycache__/self_outdated_check.cpython-312.pyc index 4ac5118b..8017583c 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/__pycache__/self_outdated_check.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/__pycache__/self_outdated_check.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/__pycache__/wheel_builder.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/__pycache__/wheel_builder.cpython-312.pyc index a4f387cf..b3f6ff91 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/__pycache__/wheel_builder.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/__pycache__/wheel_builder.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/__init__.cpython-312.pyc index fab76519..f6962d6e 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/__init__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/autocompletion.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/autocompletion.cpython-312.pyc index ba932156..eb971aed 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/autocompletion.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/autocompletion.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/base_command.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/base_command.cpython-312.pyc index 8d512ebd..79ce44c0 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/base_command.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/base_command.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/cmdoptions.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/cmdoptions.cpython-312.pyc index 7a910fed..9bf3d2ea 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/cmdoptions.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/cmdoptions.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/command_context.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/command_context.cpython-312.pyc index 4cfce28d..bff8db8e 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/command_context.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/command_context.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/index_command.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/index_command.cpython-312.pyc index 7212d4c7..b04d882f 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/index_command.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/index_command.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/main.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/main.cpython-312.pyc index 9851bda6..2d3e1a58 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/main.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/main.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/main_parser.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/main_parser.cpython-312.pyc index 751d4618..052063e6 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/main_parser.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/main_parser.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/parser.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/parser.cpython-312.pyc index 1e4d6cf8..8b55a966 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/parser.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/parser.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/progress_bars.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/progress_bars.cpython-312.pyc index 0acb20aa..88760d39 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/progress_bars.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/progress_bars.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/req_command.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/req_command.cpython-312.pyc index 55cba8ac..df76262b 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/req_command.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/req_command.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/spinners.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/spinners.cpython-312.pyc index 2aca524d..735b1621 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/spinners.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/spinners.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/status_codes.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/status_codes.cpython-312.pyc index c44cf3fc..846bdbc8 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/status_codes.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/status_codes.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/__init__.cpython-312.pyc index 3508d125..22242d8d 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/__init__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/cache.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/cache.cpython-312.pyc index 8f1bd439..8a0e6621 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/cache.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/cache.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/check.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/check.cpython-312.pyc index e5927159..346c61fb 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/check.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/check.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/completion.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/completion.cpython-312.pyc index 379a5c94..4d506edb 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/completion.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/completion.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/configuration.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/configuration.cpython-312.pyc index 4ed9649b..2e60d59e 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/configuration.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/configuration.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/debug.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/debug.cpython-312.pyc index 8e4ff053..3b950b8c 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/debug.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/debug.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/download.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/download.cpython-312.pyc index 995a05e4..b4642008 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/download.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/download.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/freeze.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/freeze.cpython-312.pyc index f3b7458b..9dfda734 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/freeze.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/freeze.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/hash.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/hash.cpython-312.pyc index 9b80039d..ad2e6471 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/hash.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/hash.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/help.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/help.cpython-312.pyc index 31d5e7cc..1fd285de 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/help.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/help.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/index.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/index.cpython-312.pyc index ffab62b7..46104db9 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/index.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/index.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/inspect.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/inspect.cpython-312.pyc index f4476d0e..2ce29836 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/inspect.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/inspect.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/install.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/install.cpython-312.pyc index d782f075..ebc45ab5 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/install.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/install.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/list.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/list.cpython-312.pyc index ced13fd9..13580e00 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/list.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/list.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/lock.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/lock.cpython-312.pyc index b343a8b4..dca7656f 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/lock.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/lock.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/search.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/search.cpython-312.pyc index a16b8f39..81d32ea5 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/search.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/search.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/show.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/show.cpython-312.pyc index d2eeab11..42cc941d 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/show.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/show.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/uninstall.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/uninstall.cpython-312.pyc index df86c481..0a644edb 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/uninstall.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/uninstall.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/wheel.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/wheel.cpython-312.pyc index db82819b..40d17a5a 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/wheel.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/wheel.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/distributions/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/distributions/__pycache__/__init__.cpython-312.pyc index 40dd56b6..c0700ef2 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/distributions/__pycache__/__init__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/distributions/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/distributions/__pycache__/base.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/distributions/__pycache__/base.cpython-312.pyc index cf0ed986..b0b6c357 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/distributions/__pycache__/base.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/distributions/__pycache__/base.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/distributions/__pycache__/installed.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/distributions/__pycache__/installed.cpython-312.pyc index 0a2f672b..5356946d 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/distributions/__pycache__/installed.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/distributions/__pycache__/installed.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/distributions/__pycache__/sdist.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/distributions/__pycache__/sdist.cpython-312.pyc index 932d5fa0..73854602 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/distributions/__pycache__/sdist.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/distributions/__pycache__/sdist.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/distributions/__pycache__/wheel.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/distributions/__pycache__/wheel.cpython-312.pyc index 7b5000b5..6fac619b 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/distributions/__pycache__/wheel.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/distributions/__pycache__/wheel.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/index/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/index/__pycache__/__init__.cpython-312.pyc index 332ce5b4..ea3d2fd8 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/index/__pycache__/__init__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/index/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/index/__pycache__/collector.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/index/__pycache__/collector.cpython-312.pyc index 95972bf1..d3907c1a 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/index/__pycache__/collector.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/index/__pycache__/collector.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/index/__pycache__/package_finder.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/index/__pycache__/package_finder.cpython-312.pyc index 55f796a4..777e719e 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/index/__pycache__/package_finder.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/index/__pycache__/package_finder.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/index/__pycache__/sources.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/index/__pycache__/sources.cpython-312.pyc index 8c50ead6..3d6a7fe4 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/index/__pycache__/sources.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/index/__pycache__/sources.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/locations/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/locations/__pycache__/__init__.cpython-312.pyc index 18961d6a..098d9463 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/locations/__pycache__/__init__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/locations/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/locations/__pycache__/_distutils.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/locations/__pycache__/_distutils.cpython-312.pyc index 084300fd..c4f8996b 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/locations/__pycache__/_distutils.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/locations/__pycache__/_distutils.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/locations/__pycache__/_sysconfig.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/locations/__pycache__/_sysconfig.cpython-312.pyc index 296f3d48..724d169a 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/locations/__pycache__/_sysconfig.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/locations/__pycache__/_sysconfig.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/locations/__pycache__/base.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/locations/__pycache__/base.cpython-312.pyc index cc159556..87d44bae 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/locations/__pycache__/base.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/locations/__pycache__/base.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/metadata/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/metadata/__pycache__/__init__.cpython-312.pyc index bae40aba..42cc0ba3 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/metadata/__pycache__/__init__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/metadata/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/metadata/__pycache__/_json.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/metadata/__pycache__/_json.cpython-312.pyc index 0239300f..dc837b2b 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/metadata/__pycache__/_json.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/metadata/__pycache__/_json.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/metadata/__pycache__/base.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/metadata/__pycache__/base.cpython-312.pyc index 93609286..b91e6339 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/metadata/__pycache__/base.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/metadata/__pycache__/base.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/metadata/__pycache__/pkg_resources.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/metadata/__pycache__/pkg_resources.cpython-312.pyc index 62985e12..5e703967 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/metadata/__pycache__/pkg_resources.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/metadata/__pycache__/pkg_resources.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/__pycache__/__init__.cpython-312.pyc index fe44dffb..651a5452 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/__pycache__/__init__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/__pycache__/_compat.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/__pycache__/_compat.cpython-312.pyc index dec9034b..00f5dba0 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/__pycache__/_compat.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/__pycache__/_compat.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/__pycache__/_dists.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/__pycache__/_dists.cpython-312.pyc index 49d67401..45d55ee3 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/__pycache__/_dists.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/__pycache__/_dists.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/__pycache__/_envs.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/__pycache__/_envs.cpython-312.pyc index 8e5ded48..a9734970 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/__pycache__/_envs.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/__pycache__/_envs.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/__init__.cpython-312.pyc index fa08d693..ee871b5c 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/__init__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/candidate.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/candidate.cpython-312.pyc index ffeef5ba..5c9da878 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/candidate.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/candidate.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/direct_url.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/direct_url.cpython-312.pyc index 960fa94b..6602847a 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/direct_url.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/direct_url.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/format_control.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/format_control.cpython-312.pyc index 8bf49db6..dc498d15 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/format_control.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/format_control.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/index.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/index.cpython-312.pyc index f396668c..c1ff97ab 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/index.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/index.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/installation_report.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/installation_report.cpython-312.pyc index 16b10270..cd343a34 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/installation_report.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/installation_report.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/link.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/link.cpython-312.pyc index 6e60cbcf..d4e522b5 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/link.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/link.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/pylock.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/pylock.cpython-312.pyc index d3ac234b..22410d84 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/pylock.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/pylock.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/scheme.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/scheme.cpython-312.pyc index 1628ffe2..75f8c82c 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/scheme.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/scheme.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/search_scope.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/search_scope.cpython-312.pyc index af9aa8d3..86441fd5 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/search_scope.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/search_scope.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/selection_prefs.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/selection_prefs.cpython-312.pyc index 821afd14..e0d2fef2 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/selection_prefs.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/selection_prefs.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/target_python.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/target_python.cpython-312.pyc index dfe9f155..53b65d4e 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/target_python.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/target_python.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/wheel.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/wheel.cpython-312.pyc index f53cde18..965e7628 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/wheel.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/wheel.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/__init__.cpython-312.pyc index e98619ea..67b2270a 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/__init__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/auth.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/auth.cpython-312.pyc index 4631a959..7524d5f3 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/auth.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/auth.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/cache.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/cache.cpython-312.pyc index 1ad360cb..590e0a73 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/cache.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/cache.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/download.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/download.cpython-312.pyc index 28c4940a..8b5bf344 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/download.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/download.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/lazy_wheel.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/lazy_wheel.cpython-312.pyc index 3ec3150a..5d0af4ad 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/lazy_wheel.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/lazy_wheel.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/session.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/session.cpython-312.pyc index 3cd3427e..d1a5da7d 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/session.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/session.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/utils.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/utils.cpython-312.pyc index e85d8368..70c8285d 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/utils.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/utils.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/xmlrpc.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/xmlrpc.cpython-312.pyc index 193f4103..3c0fc6aa 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/xmlrpc.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/xmlrpc.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/operations/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/operations/__pycache__/__init__.cpython-312.pyc index 8aca3380..8f26e353 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/operations/__pycache__/__init__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/operations/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/operations/__pycache__/check.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/operations/__pycache__/check.cpython-312.pyc index 449cb9e4..04aeb823 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/operations/__pycache__/check.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/operations/__pycache__/check.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/operations/__pycache__/freeze.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/operations/__pycache__/freeze.cpython-312.pyc index 0dc6c7f6..5c2b3330 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/operations/__pycache__/freeze.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/operations/__pycache__/freeze.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/operations/__pycache__/prepare.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/operations/__pycache__/prepare.cpython-312.pyc index dabc11e3..31c23e13 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/operations/__pycache__/prepare.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/operations/__pycache__/prepare.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/operations/install/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/operations/install/__pycache__/__init__.cpython-312.pyc index 47ee022a..6000dbe7 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/operations/install/__pycache__/__init__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/operations/install/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/operations/install/__pycache__/wheel.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/operations/install/__pycache__/wheel.cpython-312.pyc index 45e1340a..ceccc6de 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/operations/install/__pycache__/wheel.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/operations/install/__pycache__/wheel.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/req/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/req/__pycache__/__init__.cpython-312.pyc index 0a153aa1..b4bca657 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/req/__pycache__/__init__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/req/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/req/__pycache__/constructors.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/req/__pycache__/constructors.cpython-312.pyc index 1169cd0b..0fac9c00 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/req/__pycache__/constructors.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/req/__pycache__/constructors.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/req/__pycache__/req_dependency_group.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/req/__pycache__/req_dependency_group.cpython-312.pyc index a0dacc7c..b598c1dd 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/req/__pycache__/req_dependency_group.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/req/__pycache__/req_dependency_group.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/req/__pycache__/req_file.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/req/__pycache__/req_file.cpython-312.pyc index 00db5453..afd3e60c 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/req/__pycache__/req_file.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/req/__pycache__/req_file.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/req/__pycache__/req_install.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/req/__pycache__/req_install.cpython-312.pyc index e4f97084..3c8b189f 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/req/__pycache__/req_install.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/req/__pycache__/req_install.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/req/__pycache__/req_set.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/req/__pycache__/req_set.cpython-312.pyc index ee93df03..05cf30e3 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/req/__pycache__/req_set.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/req/__pycache__/req_set.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/req/__pycache__/req_uninstall.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/req/__pycache__/req_uninstall.cpython-312.pyc index 20460d28..22f8c489 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/req/__pycache__/req_uninstall.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/req/__pycache__/req_uninstall.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/resolution/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/resolution/__pycache__/__init__.cpython-312.pyc index b91a0c41..2ca3a0ce 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/resolution/__pycache__/__init__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/resolution/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/resolution/__pycache__/base.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/resolution/__pycache__/base.cpython-312.pyc index d13a5794..2bf70f8d 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/resolution/__pycache__/base.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/resolution/__pycache__/base.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/resolution/legacy/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/resolution/legacy/__pycache__/__init__.cpython-312.pyc index aa823a9f..644b3f2b 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/resolution/legacy/__pycache__/__init__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/resolution/legacy/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/resolution/legacy/__pycache__/resolver.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/resolution/legacy/__pycache__/resolver.cpython-312.pyc index fd241f2d..22184306 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/resolution/legacy/__pycache__/resolver.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/resolution/legacy/__pycache__/resolver.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/__pycache__/__init__.cpython-312.pyc index f92c1177..d10aaa89 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/__pycache__/__init__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/__pycache__/base.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/__pycache__/base.cpython-312.pyc index 30bd66ab..cfdb3c01 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/__pycache__/base.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/__pycache__/base.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/__pycache__/candidates.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/__pycache__/candidates.cpython-312.pyc index 3e4e7595..7c8620f3 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/__pycache__/candidates.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/__pycache__/candidates.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/__pycache__/factory.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/__pycache__/factory.cpython-312.pyc index cf01af79..a0f193fa 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/__pycache__/factory.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/__pycache__/factory.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/__pycache__/found_candidates.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/__pycache__/found_candidates.cpython-312.pyc index 20e417ce..b867d03c 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/__pycache__/found_candidates.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/__pycache__/found_candidates.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/__pycache__/provider.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/__pycache__/provider.cpython-312.pyc index 45f49ecc..6de48e6b 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/__pycache__/provider.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/__pycache__/provider.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/__pycache__/reporter.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/__pycache__/reporter.cpython-312.pyc index 5bf22dca..54dcb9e1 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/__pycache__/reporter.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/__pycache__/reporter.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/__pycache__/requirements.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/__pycache__/requirements.cpython-312.pyc index 68c1d939..5d1535a1 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/__pycache__/requirements.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/__pycache__/requirements.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/__pycache__/resolver.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/__pycache__/resolver.cpython-312.pyc index 7caa3721..6f321e2a 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/__pycache__/resolver.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/__pycache__/resolver.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/__init__.cpython-312.pyc index 7caabe29..0af4121c 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/__init__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/_jaraco_text.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/_jaraco_text.cpython-312.pyc index fac6b566..a575fc30 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/_jaraco_text.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/_jaraco_text.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/_log.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/_log.cpython-312.pyc index 28564720..6f7ea42b 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/_log.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/_log.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/appdirs.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/appdirs.cpython-312.pyc index c419e302..c2c26518 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/appdirs.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/appdirs.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/compat.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/compat.cpython-312.pyc index 6d636c4a..531b73ff 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/compat.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/compat.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/compatibility_tags.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/compatibility_tags.cpython-312.pyc index c9ea6dba..bef63b7f 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/compatibility_tags.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/compatibility_tags.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/datetime.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/datetime.cpython-312.pyc index cca3dcb9..082a9609 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/datetime.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/datetime.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/deprecation.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/deprecation.cpython-312.pyc index 93fcfc2b..5da0ab84 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/deprecation.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/deprecation.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/direct_url_helpers.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/direct_url_helpers.cpython-312.pyc index 268e949d..3f861407 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/direct_url_helpers.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/direct_url_helpers.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/egg_link.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/egg_link.cpython-312.pyc index e65e93ea..4a900ad4 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/egg_link.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/egg_link.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/entrypoints.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/entrypoints.cpython-312.pyc index 7565c8eb..605efc65 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/entrypoints.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/entrypoints.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/filesystem.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/filesystem.cpython-312.pyc index 6802e27d..11adda48 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/filesystem.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/filesystem.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/filetypes.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/filetypes.cpython-312.pyc index 2e4be79e..1b260fa3 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/filetypes.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/filetypes.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/glibc.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/glibc.cpython-312.pyc index 8f72537c..1f03c930 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/glibc.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/glibc.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/hashes.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/hashes.cpython-312.pyc index 655e568d..412d6234 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/hashes.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/hashes.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/logging.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/logging.cpython-312.pyc index 1be2d7cc..5d2bbc3b 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/logging.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/logging.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/misc.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/misc.cpython-312.pyc index 02730b8f..551ed61b 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/misc.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/misc.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/packaging.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/packaging.cpython-312.pyc index 32540bb5..a4d5cd54 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/packaging.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/packaging.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/retry.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/retry.cpython-312.pyc index ece68d01..9acbdedb 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/retry.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/retry.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/subprocess.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/subprocess.cpython-312.pyc index 14bd3262..7c9f51b0 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/subprocess.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/subprocess.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/temp_dir.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/temp_dir.cpython-312.pyc index 962f0442..74690613 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/temp_dir.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/temp_dir.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/unpacking.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/unpacking.cpython-312.pyc index 0cd6ae34..04849962 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/unpacking.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/unpacking.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/urls.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/urls.cpython-312.pyc index 9d183af0..4f0fccdc 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/urls.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/urls.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/virtualenv.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/virtualenv.cpython-312.pyc index 47dfe26a..71331461 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/virtualenv.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/virtualenv.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/wheel.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/wheel.cpython-312.pyc index 6c817076..08aa47a1 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/wheel.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/wheel.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/vcs/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/vcs/__pycache__/__init__.cpython-312.pyc index 7cbc9068..8a473abd 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/vcs/__pycache__/__init__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/vcs/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/vcs/__pycache__/bazaar.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/vcs/__pycache__/bazaar.cpython-312.pyc index 32a0c28c..a438bf5a 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/vcs/__pycache__/bazaar.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/vcs/__pycache__/bazaar.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/vcs/__pycache__/git.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/vcs/__pycache__/git.cpython-312.pyc index 394d7d12..3c9abe27 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/vcs/__pycache__/git.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/vcs/__pycache__/git.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/vcs/__pycache__/mercurial.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/vcs/__pycache__/mercurial.cpython-312.pyc index 7f0ad29f..78b6b7f6 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/vcs/__pycache__/mercurial.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/vcs/__pycache__/mercurial.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/vcs/__pycache__/subversion.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/vcs/__pycache__/subversion.cpython-312.pyc index 7bb41bba..95444ba0 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/vcs/__pycache__/subversion.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/vcs/__pycache__/subversion.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_internal/vcs/__pycache__/versioncontrol.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_internal/vcs/__pycache__/versioncontrol.cpython-312.pyc index ac9e879d..ef4851f8 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_internal/vcs/__pycache__/versioncontrol.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_internal/vcs/__pycache__/versioncontrol.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/__pycache__/__init__.cpython-312.pyc index 7c6a51e8..6656d655 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/__pycache__/__init__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/__init__.cpython-312.pyc index 12f546f6..85f54949 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/__init__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/_cmd.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/_cmd.cpython-312.pyc index c0a87bd4..9a32c408 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/_cmd.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/_cmd.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/adapter.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/adapter.cpython-312.pyc index 8a79a16f..4ae19b16 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/adapter.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/adapter.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/cache.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/cache.cpython-312.pyc index 2f9627f6..fcf2d987 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/cache.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/cache.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/controller.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/controller.cpython-312.pyc index 8841cc42..891b4d2c 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/controller.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/controller.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/filewrapper.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/filewrapper.cpython-312.pyc index b54f094b..1ce95d6b 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/filewrapper.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/filewrapper.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/heuristics.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/heuristics.cpython-312.pyc index f42b1e27..83c1809e 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/heuristics.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/heuristics.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/serialize.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/serialize.cpython-312.pyc index 23a63c99..fb42afd4 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/serialize.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/serialize.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/wrapper.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/wrapper.cpython-312.pyc index e2f851b0..c489a202 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/wrapper.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/wrapper.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/__init__.cpython-312.pyc index 88dcbced..cfcbfaf6 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/__init__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/file_cache.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/file_cache.cpython-312.pyc index d0929b3f..a5bc3a39 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/file_cache.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/file_cache.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/redis_cache.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/redis_cache.cpython-312.pyc index 34c9034f..3b3054ea 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/redis_cache.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/redis_cache.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/certifi/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/certifi/__pycache__/__init__.cpython-312.pyc index e3f2ae44..0f503b82 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/certifi/__pycache__/__init__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/certifi/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/certifi/__pycache__/__main__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/certifi/__pycache__/__main__.cpython-312.pyc index d73c0c18..be3bb994 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/certifi/__pycache__/__main__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/certifi/__pycache__/__main__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/certifi/__pycache__/core.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/certifi/__pycache__/core.cpython-312.pyc index b72eaaef..3824c58e 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/certifi/__pycache__/core.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/certifi/__pycache__/core.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/dependency_groups/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/dependency_groups/__pycache__/__init__.cpython-312.pyc index b325d27e..8b489485 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/dependency_groups/__pycache__/__init__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/dependency_groups/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/dependency_groups/__pycache__/__main__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/dependency_groups/__pycache__/__main__.cpython-312.pyc index f72102aa..957c0ac7 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/dependency_groups/__pycache__/__main__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/dependency_groups/__pycache__/__main__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/dependency_groups/__pycache__/_implementation.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/dependency_groups/__pycache__/_implementation.cpython-312.pyc index 67e78172..f4cacefa 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/dependency_groups/__pycache__/_implementation.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/dependency_groups/__pycache__/_implementation.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/dependency_groups/__pycache__/_lint_dependency_groups.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/dependency_groups/__pycache__/_lint_dependency_groups.cpython-312.pyc index ce9aaeed..5f491cb6 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/dependency_groups/__pycache__/_lint_dependency_groups.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/dependency_groups/__pycache__/_lint_dependency_groups.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/dependency_groups/__pycache__/_pip_wrapper.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/dependency_groups/__pycache__/_pip_wrapper.cpython-312.pyc index 10440d85..63180e82 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/dependency_groups/__pycache__/_pip_wrapper.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/dependency_groups/__pycache__/_pip_wrapper.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/dependency_groups/__pycache__/_toml_compat.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/dependency_groups/__pycache__/_toml_compat.cpython-312.pyc index 4d618091..a59eab97 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/dependency_groups/__pycache__/_toml_compat.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/dependency_groups/__pycache__/_toml_compat.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/__init__.cpython-312.pyc index e791b20d..59f774e1 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/__init__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/compat.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/compat.cpython-312.pyc index 5280443a..fa0cd966 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/compat.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/compat.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/resources.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/resources.cpython-312.pyc index e80e1ee5..a0184701 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/resources.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/resources.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/scripts.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/scripts.cpython-312.pyc index 92ad4c03..f678bf5a 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/scripts.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/scripts.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/util.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/util.cpython-312.pyc index cc321fcf..aca477be 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/util.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/util.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/distro/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/distro/__pycache__/__init__.cpython-312.pyc index 5bfa32fc..bf6696f0 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/distro/__pycache__/__init__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/distro/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/distro/__pycache__/__main__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/distro/__pycache__/__main__.cpython-312.pyc index 55088b05..6a414c16 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/distro/__pycache__/__main__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/distro/__pycache__/__main__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/distro/__pycache__/distro.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/distro/__pycache__/distro.cpython-312.pyc index 49e70431..f6d1ee42 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/distro/__pycache__/distro.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/distro/__pycache__/distro.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/__init__.cpython-312.pyc index 1aadbcaf..0b652dfe 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/__init__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/codec.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/codec.cpython-312.pyc index e8664cda..ccf33b61 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/codec.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/codec.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/compat.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/compat.cpython-312.pyc index 08929bc6..60e9c028 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/compat.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/compat.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/core.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/core.cpython-312.pyc index 68533ff6..dd2d11fc 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/core.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/core.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/idnadata.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/idnadata.cpython-312.pyc index 72d3779e..b1f9d69b 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/idnadata.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/idnadata.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/intranges.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/intranges.cpython-312.pyc index b50bc89c..caf91e57 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/intranges.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/intranges.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/package_data.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/package_data.cpython-312.pyc index 542c7b26..c6ee1a5c 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/package_data.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/package_data.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/uts46data.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/uts46data.cpython-312.pyc index 24ccdc8c..2fa91228 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/uts46data.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/uts46data.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/msgpack/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/msgpack/__pycache__/__init__.cpython-312.pyc index 5dea47e5..582b4b4f 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/msgpack/__pycache__/__init__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/msgpack/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/msgpack/__pycache__/exceptions.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/msgpack/__pycache__/exceptions.cpython-312.pyc index c2f21a2a..dc26f44f 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/msgpack/__pycache__/exceptions.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/msgpack/__pycache__/exceptions.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/msgpack/__pycache__/ext.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/msgpack/__pycache__/ext.cpython-312.pyc index dabca334..2ad222bb 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/msgpack/__pycache__/ext.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/msgpack/__pycache__/ext.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/msgpack/__pycache__/fallback.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/msgpack/__pycache__/fallback.cpython-312.pyc index e89b703b..3d0d47c3 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/msgpack/__pycache__/fallback.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/msgpack/__pycache__/fallback.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/__init__.cpython-312.pyc index 89c4d37f..682ded91 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/__init__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/_elffile.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/_elffile.cpython-312.pyc index faeb5e47..67ed4c4d 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/_elffile.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/_elffile.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/_manylinux.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/_manylinux.cpython-312.pyc index c2b840c7..fa33a35c 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/_manylinux.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/_manylinux.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/_musllinux.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/_musllinux.cpython-312.pyc index 5b9b4b4f..29207847 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/_musllinux.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/_musllinux.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/_parser.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/_parser.cpython-312.pyc index c6c8054d..127d8330 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/_parser.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/_parser.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/_structures.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/_structures.cpython-312.pyc index ce640db7..a03654b3 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/_structures.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/_structures.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/_tokenizer.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/_tokenizer.cpython-312.pyc index a3df5821..270daa65 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/_tokenizer.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/_tokenizer.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/markers.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/markers.cpython-312.pyc index 2f309ea3..bbd2a554 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/markers.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/markers.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/metadata.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/metadata.cpython-312.pyc index c99997be..93cc7ff3 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/metadata.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/metadata.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/requirements.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/requirements.cpython-312.pyc index c2eb9706..c0e99597 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/requirements.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/requirements.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/specifiers.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/specifiers.cpython-312.pyc index 597b255b..05ca010a 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/specifiers.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/specifiers.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/tags.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/tags.cpython-312.pyc index 0d6aba31..538d8d2d 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/tags.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/tags.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/utils.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/utils.cpython-312.pyc index 8635b546..42bde423 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/utils.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/utils.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/version.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/version.cpython-312.pyc index 86159d36..641aa7b3 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/version.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/version.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/packaging/licenses/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/packaging/licenses/__pycache__/__init__.cpython-312.pyc index 575a2199..04633ecf 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/packaging/licenses/__pycache__/__init__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/packaging/licenses/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/packaging/licenses/__pycache__/_spdx.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/packaging/licenses/__pycache__/_spdx.cpython-312.pyc index db229955..b9cdaefc 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/packaging/licenses/__pycache__/_spdx.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/packaging/licenses/__pycache__/_spdx.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pkg_resources/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pkg_resources/__pycache__/__init__.cpython-312.pyc index 439a592a..70e5e112 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pkg_resources/__pycache__/__init__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pkg_resources/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/__init__.cpython-312.pyc index 88f040ff..7a71430c 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/__init__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/__main__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/__main__.cpython-312.pyc index f4ccf640..d2970def 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/__main__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/__main__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/android.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/android.cpython-312.pyc index 2661e6fa..356de440 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/android.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/android.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/api.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/api.cpython-312.pyc index 12e96d98..94caa0b5 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/api.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/api.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/macos.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/macos.cpython-312.pyc index 528c5f0e..cc4fb52e 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/macos.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/macos.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/unix.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/unix.cpython-312.pyc index 5035c8fa..7100b4d5 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/unix.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/unix.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/version.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/version.cpython-312.pyc index 045739eb..d723c249 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/version.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/version.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/windows.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/windows.cpython-312.pyc index 172386cc..6d9d8693 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/windows.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/windows.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/__init__.cpython-312.pyc index 097bec2c..5e03fadf 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/__init__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/__main__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/__main__.cpython-312.pyc index f7ed307d..bfb2cc25 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/__main__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/__main__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/console.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/console.cpython-312.pyc index d99890af..ab73e8fd 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/console.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/console.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/filter.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/filter.cpython-312.pyc index f91a136c..1becadd1 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/filter.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/filter.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/formatter.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/formatter.cpython-312.pyc index 996f15b4..369d16de 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/formatter.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/formatter.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/lexer.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/lexer.cpython-312.pyc index 12b81bc6..7e7af94e 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/lexer.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/lexer.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/modeline.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/modeline.cpython-312.pyc index 52a44ed6..88393f13 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/modeline.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/modeline.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/plugin.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/plugin.cpython-312.pyc index df18d9f5..5f9591e2 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/plugin.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/plugin.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/regexopt.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/regexopt.cpython-312.pyc index b41fc7ba..1526b584 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/regexopt.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/regexopt.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/scanner.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/scanner.cpython-312.pyc index 93fc2202..60eed60b 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/scanner.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/scanner.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/sphinxext.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/sphinxext.cpython-312.pyc index 53ba9c2e..6c7bb3e0 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/sphinxext.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/sphinxext.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/style.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/style.cpython-312.pyc index 18c41c13..f1843027 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/style.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/style.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/token.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/token.cpython-312.pyc index 60028b42..63a7e2e9 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/token.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/token.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/unistring.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/unistring.cpython-312.pyc index d2ae7e63..2fa974ec 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/unistring.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/unistring.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/util.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/util.cpython-312.pyc index 15da8f18..2281eac4 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/util.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/util.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/filters/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/filters/__pycache__/__init__.cpython-312.pyc index 7c950910..20da6aed 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/filters/__pycache__/__init__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/filters/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/__pycache__/__init__.cpython-312.pyc index 1707812b..c9b55d91 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/__pycache__/__init__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/__pycache__/_mapping.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/__pycache__/_mapping.cpython-312.pyc index d1dfeb97..318510f0 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/__pycache__/_mapping.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/__pycache__/_mapping.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/lexers/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/lexers/__pycache__/__init__.cpython-312.pyc index 6498d502..b7e82b2b 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/lexers/__pycache__/__init__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/lexers/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/lexers/__pycache__/_mapping.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/lexers/__pycache__/_mapping.cpython-312.pyc index 8dd925c0..db55df76 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/lexers/__pycache__/_mapping.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/lexers/__pycache__/_mapping.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/lexers/__pycache__/python.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/lexers/__pycache__/python.cpython-312.pyc index f2132de9..053bdfcd 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/lexers/__pycache__/python.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/lexers/__pycache__/python.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/styles/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/styles/__pycache__/__init__.cpython-312.pyc index 30352d13..18d9aa31 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/styles/__pycache__/__init__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/styles/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/styles/__pycache__/_mapping.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/styles/__pycache__/_mapping.cpython-312.pyc index 23dfc780..1f7af8b9 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/styles/__pycache__/_mapping.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pygments/styles/__pycache__/_mapping.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/__pycache__/__init__.cpython-312.pyc index b4cc453a..b0a546ed 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/__pycache__/__init__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/__pycache__/_impl.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/__pycache__/_impl.cpython-312.pyc index 492a5e3c..bb549645 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/__pycache__/_impl.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/__pycache__/_impl.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/_in_process/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/_in_process/__pycache__/__init__.cpython-312.pyc index 05203416..9e47d6bd 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/_in_process/__pycache__/__init__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/_in_process/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/_in_process/__pycache__/_in_process.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/_in_process/__pycache__/_in_process.cpython-312.pyc index 880af3fe..4808f352 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/_in_process/__pycache__/_in_process.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/_in_process/__pycache__/_in_process.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/__init__.cpython-312.pyc index ee819f30..777fd771 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/__init__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/__version__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/__version__.cpython-312.pyc index e442d8a1..9f41f01e 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/__version__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/__version__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/_internal_utils.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/_internal_utils.cpython-312.pyc index aa90d8c9..9391ac70 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/_internal_utils.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/_internal_utils.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/adapters.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/adapters.cpython-312.pyc index 58e00877..410995ef 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/adapters.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/adapters.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/api.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/api.cpython-312.pyc index b83f5235..2a6d32f4 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/api.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/api.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/auth.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/auth.cpython-312.pyc index ffbd9304..7142ce47 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/auth.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/auth.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/certs.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/certs.cpython-312.pyc index 54ea7959..2437574d 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/certs.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/certs.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/compat.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/compat.cpython-312.pyc index a0911166..fe34d49b 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/compat.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/compat.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/cookies.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/cookies.cpython-312.pyc index 7f41a987..a8ffae9f 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/cookies.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/cookies.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/exceptions.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/exceptions.cpython-312.pyc index a843db28..820cf55b 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/exceptions.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/exceptions.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/help.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/help.cpython-312.pyc index 87f99189..92ffdb31 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/help.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/help.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/hooks.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/hooks.cpython-312.pyc index e93e9243..a4e2ca76 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/hooks.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/hooks.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/models.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/models.cpython-312.pyc index 609432fd..2794bb53 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/models.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/models.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/packages.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/packages.cpython-312.pyc index 3a7ade88..01925963 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/packages.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/packages.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/sessions.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/sessions.cpython-312.pyc index 5f2c8fc7..a30df361 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/sessions.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/sessions.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/status_codes.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/status_codes.cpython-312.pyc index 9bf18a8a..e499c7a5 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/status_codes.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/status_codes.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/structures.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/structures.cpython-312.pyc index 18fe133e..0fda36a3 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/structures.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/structures.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/utils.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/utils.cpython-312.pyc index ea796cfc..0b7a2d4e 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/utils.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/utils.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/__pycache__/__init__.cpython-312.pyc index e50c517f..199b3f7b 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/__pycache__/__init__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/__pycache__/providers.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/__pycache__/providers.cpython-312.pyc index 1fa77989..002e7e93 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/__pycache__/providers.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/__pycache__/providers.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/__pycache__/reporters.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/__pycache__/reporters.cpython-312.pyc index 86657bf2..b27122d0 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/__pycache__/reporters.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/__pycache__/reporters.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/__pycache__/structs.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/__pycache__/structs.cpython-312.pyc index 835d94f4..66bbc7fd 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/__pycache__/structs.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/__pycache__/structs.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/resolvers/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/resolvers/__pycache__/__init__.cpython-312.pyc index fcdf3fef..3b5c3912 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/resolvers/__pycache__/__init__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/resolvers/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/resolvers/__pycache__/abstract.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/resolvers/__pycache__/abstract.cpython-312.pyc index 14a83f96..d0cab2c2 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/resolvers/__pycache__/abstract.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/resolvers/__pycache__/abstract.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/resolvers/__pycache__/criterion.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/resolvers/__pycache__/criterion.cpython-312.pyc index 836b2a6b..cd326a93 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/resolvers/__pycache__/criterion.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/resolvers/__pycache__/criterion.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/resolvers/__pycache__/exceptions.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/resolvers/__pycache__/exceptions.cpython-312.pyc index e24ea85e..da945bbf 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/resolvers/__pycache__/exceptions.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/resolvers/__pycache__/exceptions.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/resolvers/__pycache__/resolution.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/resolvers/__pycache__/resolution.cpython-312.pyc index 27031dd0..45f8d22b 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/resolvers/__pycache__/resolution.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/resolvers/__pycache__/resolution.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/__init__.cpython-312.pyc index d2db6d07..b0053818 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/__init__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/__main__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/__main__.cpython-312.pyc index 5a858496..71fe4365 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/__main__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/__main__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_cell_widths.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_cell_widths.cpython-312.pyc index 5bb972ac..57992b68 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_cell_widths.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_cell_widths.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_emoji_codes.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_emoji_codes.cpython-312.pyc index 4c9cf9ef..5e6ad96d 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_emoji_codes.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_emoji_codes.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_emoji_replace.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_emoji_replace.cpython-312.pyc index a9e4d9f5..ab2c7516 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_emoji_replace.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_emoji_replace.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_export_format.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_export_format.cpython-312.pyc index 1f04234d..a7f5b84a 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_export_format.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_export_format.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_extension.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_extension.cpython-312.pyc index 4e81483b..dc2c0504 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_extension.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_extension.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_fileno.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_fileno.cpython-312.pyc index ef63b23b..af4d647a 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_fileno.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_fileno.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_inspect.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_inspect.cpython-312.pyc index 2d92474b..d707ee4c 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_inspect.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_inspect.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_log_render.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_log_render.cpython-312.pyc index 9442aca4..4438a793 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_log_render.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_log_render.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_loop.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_loop.cpython-312.pyc index 2de311b0..27636dcf 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_loop.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_loop.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_null_file.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_null_file.cpython-312.pyc index f6e780d4..17120939 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_null_file.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_null_file.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_palettes.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_palettes.cpython-312.pyc index 4826cb74..3daa5a33 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_palettes.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_palettes.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_pick.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_pick.cpython-312.pyc index a3b7bf8f..ddc63dad 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_pick.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_pick.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_ratio.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_ratio.cpython-312.pyc index 29fbbdc5..9e24b299 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_ratio.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_ratio.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_spinners.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_spinners.cpython-312.pyc index 821b5487..75f53077 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_spinners.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_spinners.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_stack.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_stack.cpython-312.pyc index 321d4c3c..e4b39fb3 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_stack.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_stack.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_timer.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_timer.cpython-312.pyc index 06f961ac..a91dc9e5 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_timer.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_timer.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_win32_console.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_win32_console.cpython-312.pyc index 708fb62d..1dffca98 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_win32_console.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_win32_console.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_windows.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_windows.cpython-312.pyc index db66afba..beebf0f8 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_windows.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_windows.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_windows_renderer.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_windows_renderer.cpython-312.pyc index 9b5104ec..3c88578e 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_windows_renderer.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_windows_renderer.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_wrap.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_wrap.cpython-312.pyc index 6f17409a..136ba0a0 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_wrap.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_wrap.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/abc.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/abc.cpython-312.pyc index 8d545d0b..a9846a3c 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/abc.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/abc.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/align.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/align.cpython-312.pyc index 3db2f2a2..5b2fee0f 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/align.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/align.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/ansi.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/ansi.cpython-312.pyc index 0335e5fc..c3160f83 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/ansi.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/ansi.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/bar.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/bar.cpython-312.pyc index 7ec0b608..564c21df 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/bar.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/bar.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/box.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/box.cpython-312.pyc index 8957de37..cf4ae3cc 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/box.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/box.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/cells.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/cells.cpython-312.pyc index e16c2584..28f4ffd5 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/cells.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/cells.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/color.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/color.cpython-312.pyc index e29508bc..20dcee3f 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/color.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/color.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/color_triplet.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/color_triplet.cpython-312.pyc index 026928be..3739b61b 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/color_triplet.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/color_triplet.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/columns.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/columns.cpython-312.pyc index 29567e52..c9dae992 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/columns.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/columns.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/console.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/console.cpython-312.pyc index 43ce1a80..e04b1515 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/console.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/console.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/constrain.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/constrain.cpython-312.pyc index 7f178dcd..70d400f8 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/constrain.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/constrain.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/containers.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/containers.cpython-312.pyc index 8a8d2e9b..8c87d328 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/containers.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/containers.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/control.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/control.cpython-312.pyc index 8224211f..106262d1 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/control.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/control.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/default_styles.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/default_styles.cpython-312.pyc index 728b7d65..19e2dea0 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/default_styles.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/default_styles.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/diagnose.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/diagnose.cpython-312.pyc index 78457ed4..26de12b4 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/diagnose.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/diagnose.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/emoji.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/emoji.cpython-312.pyc index fae5fcce..b9335386 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/emoji.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/emoji.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/errors.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/errors.cpython-312.pyc index 6351d0ee..e1b577bf 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/errors.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/errors.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/file_proxy.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/file_proxy.cpython-312.pyc index 03f8a90c..177916d3 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/file_proxy.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/file_proxy.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/filesize.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/filesize.cpython-312.pyc index e532f751..62f9b631 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/filesize.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/filesize.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/highlighter.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/highlighter.cpython-312.pyc index ea2e0937..351208cb 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/highlighter.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/highlighter.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/json.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/json.cpython-312.pyc index e3082933..d5d51645 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/json.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/json.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/jupyter.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/jupyter.cpython-312.pyc index affedddb..27ea0a87 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/jupyter.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/jupyter.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/layout.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/layout.cpython-312.pyc index f68d8f53..4da253e1 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/layout.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/layout.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/live.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/live.cpython-312.pyc index 1493ca5d..42303257 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/live.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/live.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/live_render.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/live_render.cpython-312.pyc index bc7a95b0..7f6f74af 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/live_render.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/live_render.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/logging.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/logging.cpython-312.pyc index 6cf2f9c7..d4bf2e4d 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/logging.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/logging.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/markup.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/markup.cpython-312.pyc index bc247803..e398eabd 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/markup.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/markup.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/measure.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/measure.cpython-312.pyc index 0695f710..fe2db59c 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/measure.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/measure.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/padding.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/padding.cpython-312.pyc index aaa2219c..d682754f 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/padding.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/padding.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/pager.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/pager.cpython-312.pyc index 35541c0e..129768a8 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/pager.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/pager.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/palette.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/palette.cpython-312.pyc index 9f9837f2..b63c45c0 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/palette.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/palette.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/panel.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/panel.cpython-312.pyc index 5f78a256..65f2a092 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/panel.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/panel.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/pretty.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/pretty.cpython-312.pyc index 105109c2..8c52d9eb 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/pretty.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/pretty.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/progress.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/progress.cpython-312.pyc index deabee30..e6224e06 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/progress.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/progress.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/progress_bar.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/progress_bar.cpython-312.pyc index 857dcdfb..d457175a 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/progress_bar.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/progress_bar.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/prompt.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/prompt.cpython-312.pyc index ef84c26d..29a77ee2 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/prompt.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/prompt.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/protocol.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/protocol.cpython-312.pyc index 4a9684f7..23eb67f4 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/protocol.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/protocol.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/region.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/region.cpython-312.pyc index 9c542b1e..e8ccceef 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/region.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/region.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/repr.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/repr.cpython-312.pyc index dbea2850..5921e6ad 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/repr.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/repr.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/rule.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/rule.cpython-312.pyc index 1d506f0b..59df5cdf 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/rule.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/rule.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/scope.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/scope.cpython-312.pyc index 1fd8d1fd..a0a38b62 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/scope.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/scope.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/screen.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/screen.cpython-312.pyc index 7ada7b8d..de10ef2c 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/screen.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/screen.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/segment.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/segment.cpython-312.pyc index b6bae75b..13b67f5c 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/segment.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/segment.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/spinner.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/spinner.cpython-312.pyc index 20b6f437..5a41c3e1 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/spinner.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/spinner.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/status.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/status.cpython-312.pyc index 4ac408c7..a44af0c2 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/status.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/status.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/style.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/style.cpython-312.pyc index c6f14008..7e1abbd1 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/style.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/style.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/styled.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/styled.cpython-312.pyc index 3e862cfb..839d5f9d 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/styled.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/styled.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/syntax.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/syntax.cpython-312.pyc index 6346be90..eb0a5470 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/syntax.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/syntax.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/table.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/table.cpython-312.pyc index a35e9b90..b140502e 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/table.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/table.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/terminal_theme.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/terminal_theme.cpython-312.pyc index 765429ce..5f42af1d 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/terminal_theme.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/terminal_theme.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/text.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/text.cpython-312.pyc index f1ea584b..45e6f43a 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/text.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/text.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/theme.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/theme.cpython-312.pyc index 81c548f5..cca5408c 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/theme.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/theme.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/themes.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/themes.cpython-312.pyc index ee8f9a4d..999f82d4 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/themes.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/themes.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/traceback.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/traceback.cpython-312.pyc index 8ef04e96..b8b538e0 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/traceback.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/traceback.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/tree.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/tree.cpython-312.pyc index 6fcb9805..5037c7f6 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/tree.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/tree.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/tomli/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/tomli/__pycache__/__init__.cpython-312.pyc index 61e208e4..b142e3d4 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/tomli/__pycache__/__init__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/tomli/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/tomli/__pycache__/_parser.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/tomli/__pycache__/_parser.cpython-312.pyc index d7f198e4..b4a76992 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/tomli/__pycache__/_parser.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/tomli/__pycache__/_parser.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/tomli/__pycache__/_re.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/tomli/__pycache__/_re.cpython-312.pyc index 98e93009..93d9355e 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/tomli/__pycache__/_re.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/tomli/__pycache__/_re.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/tomli/__pycache__/_types.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/tomli/__pycache__/_types.cpython-312.pyc index 78b92c61..2eca3eef 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/tomli/__pycache__/_types.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/tomli/__pycache__/_types.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/tomli_w/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/tomli_w/__pycache__/__init__.cpython-312.pyc index 87783b13..65030bde 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/tomli_w/__pycache__/__init__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/tomli_w/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/tomli_w/__pycache__/_writer.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/tomli_w/__pycache__/_writer.cpython-312.pyc index 43e9a75a..0b61bfb8 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/tomli_w/__pycache__/_writer.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/tomli_w/__pycache__/_writer.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/truststore/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/truststore/__pycache__/__init__.cpython-312.pyc index fa130eaf..275eff45 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/truststore/__pycache__/__init__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/truststore/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/truststore/__pycache__/_api.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/truststore/__pycache__/_api.cpython-312.pyc index d76eceeb..5808faa1 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/truststore/__pycache__/_api.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/truststore/__pycache__/_api.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/truststore/__pycache__/_macos.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/truststore/__pycache__/_macos.cpython-312.pyc index 42aef5ff..848025ca 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/truststore/__pycache__/_macos.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/truststore/__pycache__/_macos.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/truststore/__pycache__/_openssl.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/truststore/__pycache__/_openssl.cpython-312.pyc index 6d031fcb..1568f45d 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/truststore/__pycache__/_openssl.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/truststore/__pycache__/_openssl.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/truststore/__pycache__/_ssl_constants.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/truststore/__pycache__/_ssl_constants.cpython-312.pyc index 66d00472..8a812ed7 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/truststore/__pycache__/_ssl_constants.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/truststore/__pycache__/_ssl_constants.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/truststore/__pycache__/_windows.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/truststore/__pycache__/_windows.cpython-312.pyc index 888c9651..ebdb0975 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/truststore/__pycache__/_windows.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/truststore/__pycache__/_windows.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/__pycache__/__init__.cpython-312.pyc index 88d165fb..61b539b8 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/__pycache__/__init__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/__pycache__/_collections.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/__pycache__/_collections.cpython-312.pyc index 9628dd8b..0052f69f 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/__pycache__/_collections.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/__pycache__/_collections.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/__pycache__/_version.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/__pycache__/_version.cpython-312.pyc index b780e7a9..fa5a774e 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/__pycache__/_version.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/__pycache__/_version.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/__pycache__/connection.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/__pycache__/connection.cpython-312.pyc index 1dee2306..e8d5d924 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/__pycache__/connection.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/__pycache__/connection.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/__pycache__/connectionpool.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/__pycache__/connectionpool.cpython-312.pyc index 17fbcb6a..0ec988d2 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/__pycache__/connectionpool.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/__pycache__/connectionpool.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/__pycache__/exceptions.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/__pycache__/exceptions.cpython-312.pyc index 62b0411d..ac298f96 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/__pycache__/exceptions.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/__pycache__/exceptions.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/__pycache__/fields.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/__pycache__/fields.cpython-312.pyc index ed9d2ae2..8b973218 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/__pycache__/fields.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/__pycache__/fields.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/__pycache__/filepost.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/__pycache__/filepost.cpython-312.pyc index 33f21546..3f1b9732 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/__pycache__/filepost.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/__pycache__/filepost.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/__pycache__/poolmanager.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/__pycache__/poolmanager.cpython-312.pyc index ac02db7d..af6118a3 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/__pycache__/poolmanager.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/__pycache__/poolmanager.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/__pycache__/request.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/__pycache__/request.cpython-312.pyc index d12842f7..8776a91f 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/__pycache__/request.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/__pycache__/request.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/__pycache__/response.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/__pycache__/response.cpython-312.pyc index f6bb6956..7142faf8 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/__pycache__/response.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/__pycache__/response.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/contrib/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/contrib/__pycache__/__init__.cpython-312.pyc index 6eb293d8..ebaa1c35 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/contrib/__pycache__/__init__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/contrib/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/contrib/__pycache__/_appengine_environ.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/contrib/__pycache__/_appengine_environ.cpython-312.pyc index 07318225..d0afce5d 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/contrib/__pycache__/_appengine_environ.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/contrib/__pycache__/_appengine_environ.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/contrib/__pycache__/appengine.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/contrib/__pycache__/appengine.cpython-312.pyc index 5967f6f6..e66424fd 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/contrib/__pycache__/appengine.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/contrib/__pycache__/appengine.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/contrib/__pycache__/ntlmpool.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/contrib/__pycache__/ntlmpool.cpython-312.pyc index 80f25486..721ee15a 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/contrib/__pycache__/ntlmpool.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/contrib/__pycache__/ntlmpool.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/contrib/__pycache__/pyopenssl.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/contrib/__pycache__/pyopenssl.cpython-312.pyc index b38fee5b..8d212c33 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/contrib/__pycache__/pyopenssl.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/contrib/__pycache__/pyopenssl.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/contrib/__pycache__/securetransport.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/contrib/__pycache__/securetransport.cpython-312.pyc index c835cdbd..bf37e769 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/contrib/__pycache__/securetransport.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/contrib/__pycache__/securetransport.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/contrib/__pycache__/socks.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/contrib/__pycache__/socks.cpython-312.pyc index 6b6103cd..13eb7681 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/contrib/__pycache__/socks.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/contrib/__pycache__/socks.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__pycache__/__init__.cpython-312.pyc index 1e6de3b3..21c1688f 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__pycache__/__init__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__pycache__/bindings.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__pycache__/bindings.cpython-312.pyc index a4253332..3a805339 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__pycache__/bindings.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__pycache__/bindings.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__pycache__/low_level.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__pycache__/low_level.cpython-312.pyc index 36c8143b..7b521d66 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__pycache__/low_level.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__pycache__/low_level.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/packages/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/packages/__pycache__/__init__.cpython-312.pyc index b18cfdca..23fe7cb8 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/packages/__pycache__/__init__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/packages/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/packages/__pycache__/six.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/packages/__pycache__/six.cpython-312.pyc index bfb87ac5..7b57f306 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/packages/__pycache__/six.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/packages/__pycache__/six.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/packages/backports/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/packages/backports/__pycache__/__init__.cpython-312.pyc index 3c73e441..3d7741ac 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/packages/backports/__pycache__/__init__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/packages/backports/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/packages/backports/__pycache__/makefile.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/packages/backports/__pycache__/makefile.cpython-312.pyc index f417655f..42358509 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/packages/backports/__pycache__/makefile.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/packages/backports/__pycache__/makefile.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/packages/backports/__pycache__/weakref_finalize.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/packages/backports/__pycache__/weakref_finalize.cpython-312.pyc index dcc8458f..0e486cf6 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/packages/backports/__pycache__/weakref_finalize.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/packages/backports/__pycache__/weakref_finalize.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/util/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/util/__pycache__/__init__.cpython-312.pyc index 94477306..286e8a8a 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/util/__pycache__/__init__.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/util/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/util/__pycache__/connection.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/util/__pycache__/connection.cpython-312.pyc index 254d2ca3..d3eaa37f 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/util/__pycache__/connection.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/util/__pycache__/connection.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/util/__pycache__/proxy.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/util/__pycache__/proxy.cpython-312.pyc index e6effebe..fdf64476 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/util/__pycache__/proxy.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/util/__pycache__/proxy.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/util/__pycache__/queue.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/util/__pycache__/queue.cpython-312.pyc index c1703e9e..a15ef655 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/util/__pycache__/queue.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/util/__pycache__/queue.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/util/__pycache__/request.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/util/__pycache__/request.cpython-312.pyc index f5e031a6..0f0bf695 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/util/__pycache__/request.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/util/__pycache__/request.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/util/__pycache__/response.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/util/__pycache__/response.cpython-312.pyc index 1187ad13..445fef98 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/util/__pycache__/response.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/util/__pycache__/response.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/util/__pycache__/retry.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/util/__pycache__/retry.cpython-312.pyc index e958696e..c1b42a11 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/util/__pycache__/retry.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/util/__pycache__/retry.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/util/__pycache__/ssl_.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/util/__pycache__/ssl_.cpython-312.pyc index 509e5aeb..66834f2e 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/util/__pycache__/ssl_.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/util/__pycache__/ssl_.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/util/__pycache__/ssl_match_hostname.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/util/__pycache__/ssl_match_hostname.cpython-312.pyc index e9ac15a7..a4092b88 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/util/__pycache__/ssl_match_hostname.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/util/__pycache__/ssl_match_hostname.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/util/__pycache__/ssltransport.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/util/__pycache__/ssltransport.cpython-312.pyc index 617df44f..9c1a2fd7 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/util/__pycache__/ssltransport.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/util/__pycache__/ssltransport.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/util/__pycache__/timeout.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/util/__pycache__/timeout.cpython-312.pyc index f9d99c4b..302d209d 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/util/__pycache__/timeout.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/util/__pycache__/timeout.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/util/__pycache__/url.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/util/__pycache__/url.cpython-312.pyc index 31915db9..b0bd2987 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/util/__pycache__/url.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/util/__pycache__/url.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/util/__pycache__/wait.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/util/__pycache__/wait.cpython-312.pyc index 1fd540d3..cde806a3 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/util/__pycache__/wait.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pip/_vendor/urllib3/util/__pycache__/wait.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pluggy-1.6.0.dist-info/INSTALLER b/Backend/venv/lib/python3.12/site-packages/pluggy-1.6.0.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/pluggy-1.6.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/Backend/venv/lib/python3.12/site-packages/pluggy-1.6.0.dist-info/METADATA b/Backend/venv/lib/python3.12/site-packages/pluggy-1.6.0.dist-info/METADATA new file mode 100644 index 00000000..12345f88 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/pluggy-1.6.0.dist-info/METADATA @@ -0,0 +1,152 @@ +Metadata-Version: 2.4 +Name: pluggy +Version: 1.6.0 +Summary: plugin and hook calling mechanisms for python +Author-email: Holger Krekel +License: MIT +Classifier: Development Status :: 6 - Mature +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: POSIX +Classifier: Operating System :: Microsoft :: Windows +Classifier: Operating System :: MacOS :: MacOS X +Classifier: Topic :: Software Development :: Testing +Classifier: Topic :: Software Development :: Libraries +Classifier: Topic :: Utilities +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Requires-Python: >=3.9 +Description-Content-Type: text/x-rst +License-File: LICENSE +Provides-Extra: dev +Requires-Dist: pre-commit; extra == "dev" +Requires-Dist: tox; extra == "dev" +Provides-Extra: testing +Requires-Dist: pytest; extra == "testing" +Requires-Dist: pytest-benchmark; extra == "testing" +Requires-Dist: coverage; extra == "testing" +Dynamic: license-file + +==================================================== +pluggy - A minimalist production ready plugin system +==================================================== + +|pypi| |conda-forge| |versions| |github-actions| |gitter| |black| |codecov| + +This is the core framework used by the `pytest`_, `tox`_, and `devpi`_ projects. + +Please `read the docs`_ to learn more! + +A definitive example +==================== +.. code-block:: python + + import pluggy + + hookspec = pluggy.HookspecMarker("myproject") + hookimpl = pluggy.HookimplMarker("myproject") + + + class MySpec: + """A hook specification namespace.""" + + @hookspec + def myhook(self, arg1, arg2): + """My special little hook that you can customize.""" + + + class Plugin_1: + """A hook implementation namespace.""" + + @hookimpl + def myhook(self, arg1, arg2): + print("inside Plugin_1.myhook()") + return arg1 + arg2 + + + class Plugin_2: + """A 2nd hook implementation namespace.""" + + @hookimpl + def myhook(self, arg1, arg2): + print("inside Plugin_2.myhook()") + return arg1 - arg2 + + + # create a manager and add the spec + pm = pluggy.PluginManager("myproject") + pm.add_hookspecs(MySpec) + + # register plugins + pm.register(Plugin_1()) + pm.register(Plugin_2()) + + # call our ``myhook`` hook + results = pm.hook.myhook(arg1=1, arg2=2) + print(results) + + +Running this directly gets us:: + + $ python docs/examples/toy-example.py + inside Plugin_2.myhook() + inside Plugin_1.myhook() + [-1, 3] + + +.. badges + +.. |pypi| image:: https://img.shields.io/pypi/v/pluggy.svg + :target: https://pypi.org/pypi/pluggy + +.. |versions| image:: https://img.shields.io/pypi/pyversions/pluggy.svg + :target: https://pypi.org/pypi/pluggy + +.. |github-actions| image:: https://github.com/pytest-dev/pluggy/workflows/main/badge.svg + :target: https://github.com/pytest-dev/pluggy/actions + +.. |conda-forge| image:: https://img.shields.io/conda/vn/conda-forge/pluggy.svg + :target: https://anaconda.org/conda-forge/pytest + +.. |gitter| image:: https://badges.gitter.im/pytest-dev/pluggy.svg + :alt: Join the chat at https://gitter.im/pytest-dev/pluggy + :target: https://gitter.im/pytest-dev/pluggy?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge + +.. |black| image:: https://img.shields.io/badge/code%20style-black-000000.svg + :target: https://github.com/ambv/black + +.. |codecov| image:: https://codecov.io/gh/pytest-dev/pluggy/branch/master/graph/badge.svg + :target: https://codecov.io/gh/pytest-dev/pluggy + :alt: Code coverage Status + +.. links +.. _pytest: + http://pytest.org +.. _tox: + https://tox.readthedocs.org +.. _devpi: + http://doc.devpi.net +.. _read the docs: + https://pluggy.readthedocs.io/en/latest/ + + +Support pluggy +-------------- + +`Open Collective`_ is an online funding platform for open and transparent communities. +It provides tools to raise money and share your finances in full transparency. + +It is the platform of choice for individuals and companies that want to make one-time or +monthly donations directly to the project. + +``pluggy`` is part of the ``pytest-dev`` project, see more details in the `pytest collective`_. + +.. _Open Collective: https://opencollective.com +.. _pytest collective: https://opencollective.com/pytest diff --git a/Backend/venv/lib/python3.12/site-packages/pluggy-1.6.0.dist-info/RECORD b/Backend/venv/lib/python3.12/site-packages/pluggy-1.6.0.dist-info/RECORD new file mode 100644 index 00000000..fa76f4af --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/pluggy-1.6.0.dist-info/RECORD @@ -0,0 +1,23 @@ +pluggy-1.6.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +pluggy-1.6.0.dist-info/METADATA,sha256=dDjDXuJaCV63QW-EtGHC10Qlxec0rVTDkSRTxlJE4Bw,4811 +pluggy-1.6.0.dist-info/RECORD,, +pluggy-1.6.0.dist-info/WHEEL,sha256=Nw36Djuh_5VDukK0H78QzOX-_FQEo6V37m3nkm96gtU,91 +pluggy-1.6.0.dist-info/licenses/LICENSE,sha256=1rZebCE6XQtXeRHTTW5ZSbn1nXbCOMUHGi8_wWz7JgY,1110 +pluggy-1.6.0.dist-info/top_level.txt,sha256=xKSCRhai-v9MckvMuWqNz16c1tbsmOggoMSwTgcpYHE,7 +pluggy/__init__.py,sha256=D6dp1gmEDjtDp8hAwQc-qrgaulnL4iltrqkLDd-g9tg,811 +pluggy/__pycache__/__init__.cpython-312.pyc,, +pluggy/__pycache__/_callers.cpython-312.pyc,, +pluggy/__pycache__/_hooks.cpython-312.pyc,, +pluggy/__pycache__/_manager.cpython-312.pyc,, +pluggy/__pycache__/_result.cpython-312.pyc,, +pluggy/__pycache__/_tracing.cpython-312.pyc,, +pluggy/__pycache__/_version.cpython-312.pyc,, +pluggy/__pycache__/_warnings.cpython-312.pyc,, +pluggy/_callers.py,sha256=gEZllGaSYVssZ2UmpNfmYC0bdVgh2jYbAFeYKvuRMjY,5991 +pluggy/_hooks.py,sha256=E6f3nYcI6dbEuO0Gmy61ozgGU_59_e69kC08a06EBuo,25218 +pluggy/_manager.py,sha256=K4Ip_pkEjvT2oOIfQPp8CwAWoXVnENgQRcy9tlGii0o,20219 +pluggy/_result.py,sha256=3Xfy7DrjXbYb7puRquyY2VbidIWNq6Pp7QnuElMdj8Q,3098 +pluggy/_tracing.py,sha256=nXd2BCmDgf8jJxV-HO3PqxR-WV53eWnF8B4AF1nJGgo,2073 +pluggy/_version.py,sha256=5FGJNp9Lkk9uOxeCjXpoCGBF79Ar6LGPOR7-atBqb_4,511 +pluggy/_warnings.py,sha256=td0AvZBpfamriCC3OqsLwxMh-SzAMjfjmc58T5vP3lw,828 +pluggy/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/Backend/venv/lib/python3.12/site-packages/pluggy-1.6.0.dist-info/WHEEL b/Backend/venv/lib/python3.12/site-packages/pluggy-1.6.0.dist-info/WHEEL new file mode 100644 index 00000000..e9653ae0 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/pluggy-1.6.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: setuptools (80.7.1) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/Backend/venv/lib/python3.12/site-packages/pluggy-1.6.0.dist-info/licenses/LICENSE b/Backend/venv/lib/python3.12/site-packages/pluggy-1.6.0.dist-info/licenses/LICENSE new file mode 100644 index 00000000..85f4dd63 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/pluggy-1.6.0.dist-info/licenses/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015 holger krekel (rather uses bitbucket/hpk42) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/Backend/venv/lib/python3.12/site-packages/pluggy-1.6.0.dist-info/top_level.txt b/Backend/venv/lib/python3.12/site-packages/pluggy-1.6.0.dist-info/top_level.txt new file mode 100644 index 00000000..11bdb5c1 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/pluggy-1.6.0.dist-info/top_level.txt @@ -0,0 +1 @@ +pluggy diff --git a/Backend/venv/lib/python3.12/site-packages/pluggy/__init__.py b/Backend/venv/lib/python3.12/site-packages/pluggy/__init__.py new file mode 100644 index 00000000..8a651f49 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/pluggy/__init__.py @@ -0,0 +1,30 @@ +__all__ = [ + "__version__", + "PluginManager", + "PluginValidationError", + "HookCaller", + "HookCallError", + "HookspecOpts", + "HookimplOpts", + "HookImpl", + "HookRelay", + "HookspecMarker", + "HookimplMarker", + "Result", + "PluggyWarning", + "PluggyTeardownRaisedWarning", +] +from ._hooks import HookCaller +from ._hooks import HookImpl +from ._hooks import HookimplMarker +from ._hooks import HookimplOpts +from ._hooks import HookRelay +from ._hooks import HookspecMarker +from ._hooks import HookspecOpts +from ._manager import PluginManager +from ._manager import PluginValidationError +from ._result import HookCallError +from ._result import Result +from ._version import version as __version__ +from ._warnings import PluggyTeardownRaisedWarning +from ._warnings import PluggyWarning diff --git a/Backend/venv/lib/python3.12/site-packages/pluggy/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pluggy/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..e61f81b7 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/pluggy/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pluggy/__pycache__/_callers.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pluggy/__pycache__/_callers.cpython-312.pyc new file mode 100644 index 00000000..850e1dd2 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/pluggy/__pycache__/_callers.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pluggy/__pycache__/_hooks.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pluggy/__pycache__/_hooks.cpython-312.pyc new file mode 100644 index 00000000..7597b72e Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/pluggy/__pycache__/_hooks.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pluggy/__pycache__/_manager.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pluggy/__pycache__/_manager.cpython-312.pyc new file mode 100644 index 00000000..675a6b35 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/pluggy/__pycache__/_manager.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pluggy/__pycache__/_result.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pluggy/__pycache__/_result.cpython-312.pyc new file mode 100644 index 00000000..9e32453b Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/pluggy/__pycache__/_result.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pluggy/__pycache__/_tracing.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pluggy/__pycache__/_tracing.cpython-312.pyc new file mode 100644 index 00000000..bb57ea56 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/pluggy/__pycache__/_tracing.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pluggy/__pycache__/_version.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pluggy/__pycache__/_version.cpython-312.pyc new file mode 100644 index 00000000..481ac832 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/pluggy/__pycache__/_version.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pluggy/__pycache__/_warnings.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pluggy/__pycache__/_warnings.cpython-312.pyc new file mode 100644 index 00000000..81e3da7f Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/pluggy/__pycache__/_warnings.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pluggy/_callers.py b/Backend/venv/lib/python3.12/site-packages/pluggy/_callers.py new file mode 100644 index 00000000..472d5dd0 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/pluggy/_callers.py @@ -0,0 +1,169 @@ +""" +Call loop machinery +""" + +from __future__ import annotations + +from collections.abc import Generator +from collections.abc import Mapping +from collections.abc import Sequence +from typing import cast +from typing import NoReturn +import warnings + +from ._hooks import HookImpl +from ._result import HookCallError +from ._result import Result +from ._warnings import PluggyTeardownRaisedWarning + + +# Need to distinguish between old- and new-style hook wrappers. +# Wrapping with a tuple is the fastest type-safe way I found to do it. +Teardown = Generator[None, object, object] + + +def run_old_style_hookwrapper( + hook_impl: HookImpl, hook_name: str, args: Sequence[object] +) -> Teardown: + """ + backward compatibility wrapper to run a old style hookwrapper as a wrapper + """ + + teardown: Teardown = cast(Teardown, hook_impl.function(*args)) + try: + next(teardown) + except StopIteration: + _raise_wrapfail(teardown, "did not yield") + try: + res = yield + result = Result(res, None) + except BaseException as exc: + result = Result(None, exc) + try: + teardown.send(result) + except StopIteration: + pass + except BaseException as e: + _warn_teardown_exception(hook_name, hook_impl, e) + raise + else: + _raise_wrapfail(teardown, "has second yield") + finally: + teardown.close() + return result.get_result() + + +def _raise_wrapfail( + wrap_controller: Generator[None, object, object], + msg: str, +) -> NoReturn: + co = wrap_controller.gi_code # type: ignore[attr-defined] + raise RuntimeError( + f"wrap_controller at {co.co_name!r} {co.co_filename}:{co.co_firstlineno} {msg}" + ) + + +def _warn_teardown_exception( + hook_name: str, hook_impl: HookImpl, e: BaseException +) -> None: + msg = "A plugin raised an exception during an old-style hookwrapper teardown.\n" + msg += f"Plugin: {hook_impl.plugin_name}, Hook: {hook_name}\n" + msg += f"{type(e).__name__}: {e}\n" + msg += "For more information see https://pluggy.readthedocs.io/en/stable/api_reference.html#pluggy.PluggyTeardownRaisedWarning" # noqa: E501 + warnings.warn(PluggyTeardownRaisedWarning(msg), stacklevel=6) + + +def _multicall( + hook_name: str, + hook_impls: Sequence[HookImpl], + caller_kwargs: Mapping[str, object], + firstresult: bool, +) -> object | list[object]: + """Execute a call into multiple python functions/methods and return the + result(s). + + ``caller_kwargs`` comes from HookCaller.__call__(). + """ + __tracebackhide__ = True + results: list[object] = [] + exception = None + try: # run impl and wrapper setup functions in a loop + teardowns: list[Teardown] = [] + try: + for hook_impl in reversed(hook_impls): + try: + args = [caller_kwargs[argname] for argname in hook_impl.argnames] + except KeyError as e: + # coverage bug - this is tested + for argname in hook_impl.argnames: # pragma: no cover + if argname not in caller_kwargs: + raise HookCallError( + f"hook call must provide argument {argname!r}" + ) from e + + if hook_impl.hookwrapper: + function_gen = run_old_style_hookwrapper(hook_impl, hook_name, args) + + next(function_gen) # first yield + teardowns.append(function_gen) + + elif hook_impl.wrapper: + try: + # If this cast is not valid, a type error is raised below, + # which is the desired response. + res = hook_impl.function(*args) + function_gen = cast(Generator[None, object, object], res) + next(function_gen) # first yield + teardowns.append(function_gen) + except StopIteration: + _raise_wrapfail(function_gen, "did not yield") + else: + res = hook_impl.function(*args) + if res is not None: + results.append(res) + if firstresult: # halt further impl calls + break + except BaseException as exc: + exception = exc + finally: + if firstresult: # first result hooks return a single value + result = results[0] if results else None + else: + result = results + + # run all wrapper post-yield blocks + for teardown in reversed(teardowns): + try: + if exception is not None: + try: + teardown.throw(exception) + except RuntimeError as re: + # StopIteration from generator causes RuntimeError + # even for coroutine usage - see #544 + if ( + isinstance(exception, StopIteration) + and re.__cause__ is exception + ): + teardown.close() + continue + else: + raise + else: + teardown.send(result) + # Following is unreachable for a well behaved hook wrapper. + # Try to force finalizers otherwise postponed till GC action. + # Note: close() may raise if generator handles GeneratorExit. + teardown.close() + except StopIteration as si: + result = si.value + exception = None + continue + except BaseException as e: + exception = e + continue + _raise_wrapfail(teardown, "has second yield") + + if exception is not None: + raise exception + else: + return result diff --git a/Backend/venv/lib/python3.12/site-packages/pluggy/_hooks.py b/Backend/venv/lib/python3.12/site-packages/pluggy/_hooks.py new file mode 100644 index 00000000..97fef0d7 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/pluggy/_hooks.py @@ -0,0 +1,714 @@ +""" +Internal hook annotation, representation and calling machinery. +""" + +from __future__ import annotations + +from collections.abc import Generator +from collections.abc import Mapping +from collections.abc import Sequence +from collections.abc import Set +import inspect +import sys +from types import ModuleType +from typing import Any +from typing import Callable +from typing import Final +from typing import final +from typing import Optional +from typing import overload +from typing import TYPE_CHECKING +from typing import TypedDict +from typing import TypeVar +from typing import Union +import warnings + +from ._result import Result + + +_T = TypeVar("_T") +_F = TypeVar("_F", bound=Callable[..., object]) +_Namespace = Union[ModuleType, type] +_Plugin = object +_HookExec = Callable[ + [str, Sequence["HookImpl"], Mapping[str, object], bool], + Union[object, list[object]], +] +_HookImplFunction = Callable[..., Union[_T, Generator[None, Result[_T], None]]] + + +class HookspecOpts(TypedDict): + """Options for a hook specification.""" + + #: Whether the hook is :ref:`first result only `. + firstresult: bool + #: Whether the hook is :ref:`historic `. + historic: bool + #: Whether the hook :ref:`warns when implemented `. + warn_on_impl: Warning | None + #: Whether the hook warns when :ref:`certain arguments are requested + #: `. + #: + #: .. versionadded:: 1.5 + warn_on_impl_args: Mapping[str, Warning] | None + + +class HookimplOpts(TypedDict): + """Options for a hook implementation.""" + + #: Whether the hook implementation is a :ref:`wrapper `. + wrapper: bool + #: Whether the hook implementation is an :ref:`old-style wrapper + #: `. + hookwrapper: bool + #: Whether validation against a hook specification is :ref:`optional + #: `. + optionalhook: bool + #: Whether to try to order this hook implementation :ref:`first + #: `. + tryfirst: bool + #: Whether to try to order this hook implementation :ref:`last + #: `. + trylast: bool + #: The name of the hook specification to match, see :ref:`specname`. + specname: str | None + + +@final +class HookspecMarker: + """Decorator for marking functions as hook specifications. + + Instantiate it with a project_name to get a decorator. + Calling :meth:`PluginManager.add_hookspecs` later will discover all marked + functions if the :class:`PluginManager` uses the same project name. + """ + + __slots__ = ("project_name",) + + def __init__(self, project_name: str) -> None: + self.project_name: Final = project_name + + @overload + def __call__( + self, + function: _F, + firstresult: bool = False, + historic: bool = False, + warn_on_impl: Warning | None = None, + warn_on_impl_args: Mapping[str, Warning] | None = None, + ) -> _F: ... + + @overload # noqa: F811 + def __call__( # noqa: F811 + self, + function: None = ..., + firstresult: bool = ..., + historic: bool = ..., + warn_on_impl: Warning | None = ..., + warn_on_impl_args: Mapping[str, Warning] | None = ..., + ) -> Callable[[_F], _F]: ... + + def __call__( # noqa: F811 + self, + function: _F | None = None, + firstresult: bool = False, + historic: bool = False, + warn_on_impl: Warning | None = None, + warn_on_impl_args: Mapping[str, Warning] | None = None, + ) -> _F | Callable[[_F], _F]: + """If passed a function, directly sets attributes on the function + which will make it discoverable to :meth:`PluginManager.add_hookspecs`. + + If passed no function, returns a decorator which can be applied to a + function later using the attributes supplied. + + :param firstresult: + If ``True``, the 1:N hook call (N being the number of registered + hook implementation functions) will stop at I<=N when the I'th + function returns a non-``None`` result. See :ref:`firstresult`. + + :param historic: + If ``True``, every call to the hook will be memorized and replayed + on plugins registered after the call was made. See :ref:`historic`. + + :param warn_on_impl: + If given, every implementation of this hook will trigger the given + warning. See :ref:`warn_on_impl`. + + :param warn_on_impl_args: + If given, every implementation of this hook which requests one of + the arguments in the dict will trigger the corresponding warning. + See :ref:`warn_on_impl`. + + .. versionadded:: 1.5 + """ + + def setattr_hookspec_opts(func: _F) -> _F: + if historic and firstresult: + raise ValueError("cannot have a historic firstresult hook") + opts: HookspecOpts = { + "firstresult": firstresult, + "historic": historic, + "warn_on_impl": warn_on_impl, + "warn_on_impl_args": warn_on_impl_args, + } + setattr(func, self.project_name + "_spec", opts) + return func + + if function is not None: + return setattr_hookspec_opts(function) + else: + return setattr_hookspec_opts + + +@final +class HookimplMarker: + """Decorator for marking functions as hook implementations. + + Instantiate it with a ``project_name`` to get a decorator. + Calling :meth:`PluginManager.register` later will discover all marked + functions if the :class:`PluginManager` uses the same project name. + """ + + __slots__ = ("project_name",) + + def __init__(self, project_name: str) -> None: + self.project_name: Final = project_name + + @overload + def __call__( + self, + function: _F, + hookwrapper: bool = ..., + optionalhook: bool = ..., + tryfirst: bool = ..., + trylast: bool = ..., + specname: str | None = ..., + wrapper: bool = ..., + ) -> _F: ... + + @overload # noqa: F811 + def __call__( # noqa: F811 + self, + function: None = ..., + hookwrapper: bool = ..., + optionalhook: bool = ..., + tryfirst: bool = ..., + trylast: bool = ..., + specname: str | None = ..., + wrapper: bool = ..., + ) -> Callable[[_F], _F]: ... + + def __call__( # noqa: F811 + self, + function: _F | None = None, + hookwrapper: bool = False, + optionalhook: bool = False, + tryfirst: bool = False, + trylast: bool = False, + specname: str | None = None, + wrapper: bool = False, + ) -> _F | Callable[[_F], _F]: + """If passed a function, directly sets attributes on the function + which will make it discoverable to :meth:`PluginManager.register`. + + If passed no function, returns a decorator which can be applied to a + function later using the attributes supplied. + + :param optionalhook: + If ``True``, a missing matching hook specification will not result + in an error (by default it is an error if no matching spec is + found). See :ref:`optionalhook`. + + :param tryfirst: + If ``True``, this hook implementation will run as early as possible + in the chain of N hook implementations for a specification. See + :ref:`callorder`. + + :param trylast: + If ``True``, this hook implementation will run as late as possible + in the chain of N hook implementations for a specification. See + :ref:`callorder`. + + :param wrapper: + If ``True`` ("new-style hook wrapper"), the hook implementation + needs to execute exactly one ``yield``. The code before the + ``yield`` is run early before any non-hook-wrapper function is run. + The code after the ``yield`` is run after all non-hook-wrapper + functions have run. The ``yield`` receives the result value of the + inner calls, or raises the exception of inner calls (including + earlier hook wrapper calls). The return value of the function + becomes the return value of the hook, and a raised exception becomes + the exception of the hook. See :ref:`hookwrapper`. + + :param hookwrapper: + If ``True`` ("old-style hook wrapper"), the hook implementation + needs to execute exactly one ``yield``. The code before the + ``yield`` is run early before any non-hook-wrapper function is run. + The code after the ``yield`` is run after all non-hook-wrapper + function have run The ``yield`` receives a :class:`Result` object + representing the exception or result outcome of the inner calls + (including earlier hook wrapper calls). This option is mutually + exclusive with ``wrapper``. See :ref:`old_style_hookwrapper`. + + :param specname: + If provided, the given name will be used instead of the function + name when matching this hook implementation to a hook specification + during registration. See :ref:`specname`. + + .. versionadded:: 1.2.0 + The ``wrapper`` parameter. + """ + + def setattr_hookimpl_opts(func: _F) -> _F: + opts: HookimplOpts = { + "wrapper": wrapper, + "hookwrapper": hookwrapper, + "optionalhook": optionalhook, + "tryfirst": tryfirst, + "trylast": trylast, + "specname": specname, + } + setattr(func, self.project_name + "_impl", opts) + return func + + if function is None: + return setattr_hookimpl_opts + else: + return setattr_hookimpl_opts(function) + + +def normalize_hookimpl_opts(opts: HookimplOpts) -> None: + opts.setdefault("tryfirst", False) + opts.setdefault("trylast", False) + opts.setdefault("wrapper", False) + opts.setdefault("hookwrapper", False) + opts.setdefault("optionalhook", False) + opts.setdefault("specname", None) + + +_PYPY = hasattr(sys, "pypy_version_info") + + +def varnames(func: object) -> tuple[tuple[str, ...], tuple[str, ...]]: + """Return tuple of positional and keywrord argument names for a function, + method, class or callable. + + In case of a class, its ``__init__`` method is considered. + For methods the ``self`` parameter is not included. + """ + if inspect.isclass(func): + try: + func = func.__init__ + except AttributeError: # pragma: no cover - pypy special case + return (), () + elif not inspect.isroutine(func): # callable object? + try: + func = getattr(func, "__call__", func) + except Exception: # pragma: no cover - pypy special case + return (), () + + try: + # func MUST be a function or method here or we won't parse any args. + sig = inspect.signature( + func.__func__ if inspect.ismethod(func) else func # type:ignore[arg-type] + ) + except TypeError: # pragma: no cover + return (), () + + _valid_param_kinds = ( + inspect.Parameter.POSITIONAL_ONLY, + inspect.Parameter.POSITIONAL_OR_KEYWORD, + ) + _valid_params = { + name: param + for name, param in sig.parameters.items() + if param.kind in _valid_param_kinds + } + args = tuple(_valid_params) + defaults = ( + tuple( + param.default + for param in _valid_params.values() + if param.default is not param.empty + ) + or None + ) + + if defaults: + index = -len(defaults) + args, kwargs = args[:index], tuple(args[index:]) + else: + kwargs = () + + # strip any implicit instance arg + # pypy3 uses "obj" instead of "self" for default dunder methods + if not _PYPY: + implicit_names: tuple[str, ...] = ("self",) + else: # pragma: no cover + implicit_names = ("self", "obj") + if args: + qualname: str = getattr(func, "__qualname__", "") + if inspect.ismethod(func) or ("." in qualname and args[0] in implicit_names): + args = args[1:] + + return args, kwargs + + +@final +class HookRelay: + """Hook holder object for performing 1:N hook calls where N is the number + of registered plugins.""" + + __slots__ = ("__dict__",) + + def __init__(self) -> None: + """:meta private:""" + + if TYPE_CHECKING: + + def __getattr__(self, name: str) -> HookCaller: ... + + +# Historical name (pluggy<=1.2), kept for backward compatibility. +_HookRelay = HookRelay + + +_CallHistory = list[tuple[Mapping[str, object], Optional[Callable[[Any], None]]]] + + +class HookCaller: + """A caller of all registered implementations of a hook specification.""" + + __slots__ = ( + "name", + "spec", + "_hookexec", + "_hookimpls", + "_call_history", + ) + + def __init__( + self, + name: str, + hook_execute: _HookExec, + specmodule_or_class: _Namespace | None = None, + spec_opts: HookspecOpts | None = None, + ) -> None: + """:meta private:""" + #: Name of the hook getting called. + self.name: Final = name + self._hookexec: Final = hook_execute + # The hookimpls list. The caller iterates it *in reverse*. Format: + # 1. trylast nonwrappers + # 2. nonwrappers + # 3. tryfirst nonwrappers + # 4. trylast wrappers + # 5. wrappers + # 6. tryfirst wrappers + self._hookimpls: Final[list[HookImpl]] = [] + self._call_history: _CallHistory | None = None + # TODO: Document, or make private. + self.spec: HookSpec | None = None + if specmodule_or_class is not None: + assert spec_opts is not None + self.set_specification(specmodule_or_class, spec_opts) + + # TODO: Document, or make private. + def has_spec(self) -> bool: + return self.spec is not None + + # TODO: Document, or make private. + def set_specification( + self, + specmodule_or_class: _Namespace, + spec_opts: HookspecOpts, + ) -> None: + if self.spec is not None: + raise ValueError( + f"Hook {self.spec.name!r} is already registered " + f"within namespace {self.spec.namespace}" + ) + self.spec = HookSpec(specmodule_or_class, self.name, spec_opts) + if spec_opts.get("historic"): + self._call_history = [] + + def is_historic(self) -> bool: + """Whether this caller is :ref:`historic `.""" + return self._call_history is not None + + def _remove_plugin(self, plugin: _Plugin) -> None: + for i, method in enumerate(self._hookimpls): + if method.plugin == plugin: + del self._hookimpls[i] + return + raise ValueError(f"plugin {plugin!r} not found") + + def get_hookimpls(self) -> list[HookImpl]: + """Get all registered hook implementations for this hook.""" + return self._hookimpls.copy() + + def _add_hookimpl(self, hookimpl: HookImpl) -> None: + """Add an implementation to the callback chain.""" + for i, method in enumerate(self._hookimpls): + if method.hookwrapper or method.wrapper: + splitpoint = i + break + else: + splitpoint = len(self._hookimpls) + if hookimpl.hookwrapper or hookimpl.wrapper: + start, end = splitpoint, len(self._hookimpls) + else: + start, end = 0, splitpoint + + if hookimpl.trylast: + self._hookimpls.insert(start, hookimpl) + elif hookimpl.tryfirst: + self._hookimpls.insert(end, hookimpl) + else: + # find last non-tryfirst method + i = end - 1 + while i >= start and self._hookimpls[i].tryfirst: + i -= 1 + self._hookimpls.insert(i + 1, hookimpl) + + def __repr__(self) -> str: + return f"" + + def _verify_all_args_are_provided(self, kwargs: Mapping[str, object]) -> None: + # This is written to avoid expensive operations when not needed. + if self.spec: + for argname in self.spec.argnames: + if argname not in kwargs: + notincall = ", ".join( + repr(argname) + for argname in self.spec.argnames + # Avoid self.spec.argnames - kwargs.keys() + # it doesn't preserve order. + if argname not in kwargs.keys() + ) + warnings.warn( + f"Argument(s) {notincall} which are declared in the hookspec " + "cannot be found in this hook call", + stacklevel=2, + ) + break + + def __call__(self, **kwargs: object) -> Any: + """Call the hook. + + Only accepts keyword arguments, which should match the hook + specification. + + Returns the result(s) of calling all registered plugins, see + :ref:`calling`. + """ + assert not self.is_historic(), ( + "Cannot directly call a historic hook - use call_historic instead." + ) + self._verify_all_args_are_provided(kwargs) + firstresult = self.spec.opts.get("firstresult", False) if self.spec else False + # Copy because plugins may register other plugins during iteration (#438). + return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) + + def call_historic( + self, + result_callback: Callable[[Any], None] | None = None, + kwargs: Mapping[str, object] | None = None, + ) -> None: + """Call the hook with given ``kwargs`` for all registered plugins and + for all plugins which will be registered afterwards, see + :ref:`historic`. + + :param result_callback: + If provided, will be called for each non-``None`` result obtained + from a hook implementation. + """ + assert self._call_history is not None + kwargs = kwargs or {} + self._verify_all_args_are_provided(kwargs) + self._call_history.append((kwargs, result_callback)) + # Historizing hooks don't return results. + # Remember firstresult isn't compatible with historic. + # Copy because plugins may register other plugins during iteration (#438). + res = self._hookexec(self.name, self._hookimpls.copy(), kwargs, False) + if result_callback is None: + return + if isinstance(res, list): + for x in res: + result_callback(x) + + def call_extra( + self, methods: Sequence[Callable[..., object]], kwargs: Mapping[str, object] + ) -> Any: + """Call the hook with some additional temporarily participating + methods using the specified ``kwargs`` as call parameters, see + :ref:`call_extra`.""" + assert not self.is_historic(), ( + "Cannot directly call a historic hook - use call_historic instead." + ) + self._verify_all_args_are_provided(kwargs) + opts: HookimplOpts = { + "wrapper": False, + "hookwrapper": False, + "optionalhook": False, + "trylast": False, + "tryfirst": False, + "specname": None, + } + hookimpls = self._hookimpls.copy() + for method in methods: + hookimpl = HookImpl(None, "", method, opts) + # Find last non-tryfirst nonwrapper method. + i = len(hookimpls) - 1 + while i >= 0 and ( + # Skip wrappers. + (hookimpls[i].hookwrapper or hookimpls[i].wrapper) + # Skip tryfirst nonwrappers. + or hookimpls[i].tryfirst + ): + i -= 1 + hookimpls.insert(i + 1, hookimpl) + firstresult = self.spec.opts.get("firstresult", False) if self.spec else False + return self._hookexec(self.name, hookimpls, kwargs, firstresult) + + def _maybe_apply_history(self, method: HookImpl) -> None: + """Apply call history to a new hookimpl if it is marked as historic.""" + if self.is_historic(): + assert self._call_history is not None + for kwargs, result_callback in self._call_history: + res = self._hookexec(self.name, [method], kwargs, False) + if res and result_callback is not None: + # XXX: remember firstresult isn't compat with historic + assert isinstance(res, list) + result_callback(res[0]) + + +# Historical name (pluggy<=1.2), kept for backward compatibility. +_HookCaller = HookCaller + + +class _SubsetHookCaller(HookCaller): + """A proxy to another HookCaller which manages calls to all registered + plugins except the ones from remove_plugins.""" + + # This class is unusual: in inhertits from `HookCaller` so all of + # the *code* runs in the class, but it delegates all underlying *data* + # to the original HookCaller. + # `subset_hook_caller` used to be implemented by creating a full-fledged + # HookCaller, copying all hookimpls from the original. This had problems + # with memory leaks (#346) and historic calls (#347), which make a proxy + # approach better. + # An alternative implementation is to use a `_getattr__`/`__getattribute__` + # proxy, however that adds more overhead and is more tricky to implement. + + __slots__ = ( + "_orig", + "_remove_plugins", + ) + + def __init__(self, orig: HookCaller, remove_plugins: Set[_Plugin]) -> None: + self._orig = orig + self._remove_plugins = remove_plugins + self.name = orig.name # type: ignore[misc] + self._hookexec = orig._hookexec # type: ignore[misc] + + @property # type: ignore[misc] + def _hookimpls(self) -> list[HookImpl]: + return [ + impl + for impl in self._orig._hookimpls + if impl.plugin not in self._remove_plugins + ] + + @property + def spec(self) -> HookSpec | None: # type: ignore[override] + return self._orig.spec + + @property + def _call_history(self) -> _CallHistory | None: # type: ignore[override] + return self._orig._call_history + + def __repr__(self) -> str: + return f"<_SubsetHookCaller {self.name!r}>" + + +@final +class HookImpl: + """A hook implementation in a :class:`HookCaller`.""" + + __slots__ = ( + "function", + "argnames", + "kwargnames", + "plugin", + "opts", + "plugin_name", + "wrapper", + "hookwrapper", + "optionalhook", + "tryfirst", + "trylast", + ) + + def __init__( + self, + plugin: _Plugin, + plugin_name: str, + function: _HookImplFunction[object], + hook_impl_opts: HookimplOpts, + ) -> None: + """:meta private:""" + #: The hook implementation function. + self.function: Final = function + argnames, kwargnames = varnames(self.function) + #: The positional parameter names of ``function```. + self.argnames: Final = argnames + #: The keyword parameter names of ``function```. + self.kwargnames: Final = kwargnames + #: The plugin which defined this hook implementation. + self.plugin: Final = plugin + #: The :class:`HookimplOpts` used to configure this hook implementation. + self.opts: Final = hook_impl_opts + #: The name of the plugin which defined this hook implementation. + self.plugin_name: Final = plugin_name + #: Whether the hook implementation is a :ref:`wrapper `. + self.wrapper: Final = hook_impl_opts["wrapper"] + #: Whether the hook implementation is an :ref:`old-style wrapper + #: `. + self.hookwrapper: Final = hook_impl_opts["hookwrapper"] + #: Whether validation against a hook specification is :ref:`optional + #: `. + self.optionalhook: Final = hook_impl_opts["optionalhook"] + #: Whether to try to order this hook implementation :ref:`first + #: `. + self.tryfirst: Final = hook_impl_opts["tryfirst"] + #: Whether to try to order this hook implementation :ref:`last + #: `. + self.trylast: Final = hook_impl_opts["trylast"] + + def __repr__(self) -> str: + return f"" + + +@final +class HookSpec: + __slots__ = ( + "namespace", + "function", + "name", + "argnames", + "kwargnames", + "opts", + "warn_on_impl", + "warn_on_impl_args", + ) + + def __init__(self, namespace: _Namespace, name: str, opts: HookspecOpts) -> None: + self.namespace = namespace + self.function: Callable[..., object] = getattr(namespace, name) + self.name = name + self.argnames, self.kwargnames = varnames(self.function) + self.opts = opts + self.warn_on_impl = opts.get("warn_on_impl") + self.warn_on_impl_args = opts.get("warn_on_impl_args") diff --git a/Backend/venv/lib/python3.12/site-packages/pluggy/_manager.py b/Backend/venv/lib/python3.12/site-packages/pluggy/_manager.py new file mode 100644 index 00000000..ff1e3ce6 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/pluggy/_manager.py @@ -0,0 +1,523 @@ +from __future__ import annotations + +from collections.abc import Iterable +from collections.abc import Mapping +from collections.abc import Sequence +import inspect +import types +from typing import Any +from typing import Callable +from typing import cast +from typing import Final +from typing import TYPE_CHECKING +import warnings + +from . import _tracing +from ._callers import _multicall +from ._hooks import _HookImplFunction +from ._hooks import _Namespace +from ._hooks import _Plugin +from ._hooks import _SubsetHookCaller +from ._hooks import HookCaller +from ._hooks import HookImpl +from ._hooks import HookimplOpts +from ._hooks import HookRelay +from ._hooks import HookspecOpts +from ._hooks import normalize_hookimpl_opts +from ._result import Result + + +if TYPE_CHECKING: + # importtlib.metadata import is slow, defer it. + import importlib.metadata + + +_BeforeTrace = Callable[[str, Sequence[HookImpl], Mapping[str, Any]], None] +_AfterTrace = Callable[[Result[Any], str, Sequence[HookImpl], Mapping[str, Any]], None] + + +def _warn_for_function(warning: Warning, function: Callable[..., object]) -> None: + func = cast(types.FunctionType, function) + warnings.warn_explicit( + warning, + type(warning), + lineno=func.__code__.co_firstlineno, + filename=func.__code__.co_filename, + ) + + +class PluginValidationError(Exception): + """Plugin failed validation. + + :param plugin: The plugin which failed validation. + :param message: Error message. + """ + + def __init__(self, plugin: _Plugin, message: str) -> None: + super().__init__(message) + #: The plugin which failed validation. + self.plugin = plugin + + +class DistFacade: + """Emulate a pkg_resources Distribution""" + + def __init__(self, dist: importlib.metadata.Distribution) -> None: + self._dist = dist + + @property + def project_name(self) -> str: + name: str = self.metadata["name"] + return name + + def __getattr__(self, attr: str, default: Any | None = None) -> Any: + return getattr(self._dist, attr, default) + + def __dir__(self) -> list[str]: + return sorted(dir(self._dist) + ["_dist", "project_name"]) + + +class PluginManager: + """Core class which manages registration of plugin objects and 1:N hook + calling. + + You can register new hooks by calling :meth:`add_hookspecs(module_or_class) + `. + + You can register plugin objects (which contain hook implementations) by + calling :meth:`register(plugin) `. + + For debugging purposes you can call :meth:`PluginManager.enable_tracing` + which will subsequently send debug information to the trace helper. + + :param project_name: + The short project name. Prefer snake case. Make sure it's unique! + """ + + def __init__(self, project_name: str) -> None: + #: The project name. + self.project_name: Final = project_name + self._name2plugin: Final[dict[str, _Plugin]] = {} + self._plugin_distinfo: Final[list[tuple[_Plugin, DistFacade]]] = [] + #: The "hook relay", used to call a hook on all registered plugins. + #: See :ref:`calling`. + self.hook: Final = HookRelay() + #: The tracing entry point. See :ref:`tracing`. + self.trace: Final[_tracing.TagTracerSub] = _tracing.TagTracer().get( + "pluginmanage" + ) + self._inner_hookexec = _multicall + + def _hookexec( + self, + hook_name: str, + methods: Sequence[HookImpl], + kwargs: Mapping[str, object], + firstresult: bool, + ) -> object | list[object]: + # called from all hookcaller instances. + # enable_tracing will set its own wrapping function at self._inner_hookexec + return self._inner_hookexec(hook_name, methods, kwargs, firstresult) + + def register(self, plugin: _Plugin, name: str | None = None) -> str | None: + """Register a plugin and return its name. + + :param name: + The name under which to register the plugin. If not specified, a + name is generated using :func:`get_canonical_name`. + + :returns: + The plugin name. If the name is blocked from registering, returns + ``None``. + + If the plugin is already registered, raises a :exc:`ValueError`. + """ + plugin_name = name or self.get_canonical_name(plugin) + + if plugin_name in self._name2plugin: + if self._name2plugin.get(plugin_name, -1) is None: + return None # blocked plugin, return None to indicate no registration + raise ValueError( + "Plugin name already registered: " + f"{plugin_name}={plugin}\n{self._name2plugin}" + ) + + if plugin in self._name2plugin.values(): + raise ValueError( + "Plugin already registered under a different name: " + f"{plugin_name}={plugin}\n{self._name2plugin}" + ) + + # XXX if an error happens we should make sure no state has been + # changed at point of return + self._name2plugin[plugin_name] = plugin + + # register matching hook implementations of the plugin + for name in dir(plugin): + hookimpl_opts = self.parse_hookimpl_opts(plugin, name) + if hookimpl_opts is not None: + normalize_hookimpl_opts(hookimpl_opts) + method: _HookImplFunction[object] = getattr(plugin, name) + hookimpl = HookImpl(plugin, plugin_name, method, hookimpl_opts) + name = hookimpl_opts.get("specname") or name + hook: HookCaller | None = getattr(self.hook, name, None) + if hook is None: + hook = HookCaller(name, self._hookexec) + setattr(self.hook, name, hook) + elif hook.has_spec(): + self._verify_hook(hook, hookimpl) + hook._maybe_apply_history(hookimpl) + hook._add_hookimpl(hookimpl) + return plugin_name + + def parse_hookimpl_opts(self, plugin: _Plugin, name: str) -> HookimplOpts | None: + """Try to obtain a hook implementation from an item with the given name + in the given plugin which is being searched for hook impls. + + :returns: + The parsed hookimpl options, or None to skip the given item. + + This method can be overridden by ``PluginManager`` subclasses to + customize how hook implementation are picked up. By default, returns the + options for items decorated with :class:`HookimplMarker`. + """ + method: object = getattr(plugin, name) + if not inspect.isroutine(method): + return None + try: + res: HookimplOpts | None = getattr( + method, self.project_name + "_impl", None + ) + except Exception: # pragma: no cover + res = {} # type: ignore[assignment] #pragma: no cover + if res is not None and not isinstance(res, dict): + # false positive + res = None # type:ignore[unreachable] #pragma: no cover + return res + + def unregister( + self, plugin: _Plugin | None = None, name: str | None = None + ) -> Any | None: + """Unregister a plugin and all of its hook implementations. + + The plugin can be specified either by the plugin object or the plugin + name. If both are specified, they must agree. + + Returns the unregistered plugin, or ``None`` if not found. + """ + if name is None: + assert plugin is not None, "one of name or plugin needs to be specified" + name = self.get_name(plugin) + assert name is not None, "plugin is not registered" + + if plugin is None: + plugin = self.get_plugin(name) + if plugin is None: + return None + + hookcallers = self.get_hookcallers(plugin) + if hookcallers: + for hookcaller in hookcallers: + hookcaller._remove_plugin(plugin) + + # if self._name2plugin[name] == None registration was blocked: ignore + if self._name2plugin.get(name): + assert name is not None + del self._name2plugin[name] + + return plugin + + def set_blocked(self, name: str) -> None: + """Block registrations of the given name, unregister if already registered.""" + self.unregister(name=name) + self._name2plugin[name] = None + + def is_blocked(self, name: str) -> bool: + """Return whether the given plugin name is blocked.""" + return name in self._name2plugin and self._name2plugin[name] is None + + def unblock(self, name: str) -> bool: + """Unblocks a name. + + Returns whether the name was actually blocked. + """ + if self._name2plugin.get(name, -1) is None: + del self._name2plugin[name] + return True + return False + + def add_hookspecs(self, module_or_class: _Namespace) -> None: + """Add new hook specifications defined in the given ``module_or_class``. + + Functions are recognized as hook specifications if they have been + decorated with a matching :class:`HookspecMarker`. + """ + names = [] + for name in dir(module_or_class): + spec_opts = self.parse_hookspec_opts(module_or_class, name) + if spec_opts is not None: + hc: HookCaller | None = getattr(self.hook, name, None) + if hc is None: + hc = HookCaller(name, self._hookexec, module_or_class, spec_opts) + setattr(self.hook, name, hc) + else: + # Plugins registered this hook without knowing the spec. + hc.set_specification(module_or_class, spec_opts) + for hookfunction in hc.get_hookimpls(): + self._verify_hook(hc, hookfunction) + names.append(name) + + if not names: + raise ValueError( + f"did not find any {self.project_name!r} hooks in {module_or_class!r}" + ) + + def parse_hookspec_opts( + self, module_or_class: _Namespace, name: str + ) -> HookspecOpts | None: + """Try to obtain a hook specification from an item with the given name + in the given module or class which is being searched for hook specs. + + :returns: + The parsed hookspec options for defining a hook, or None to skip the + given item. + + This method can be overridden by ``PluginManager`` subclasses to + customize how hook specifications are picked up. By default, returns the + options for items decorated with :class:`HookspecMarker`. + """ + method = getattr(module_or_class, name) + opts: HookspecOpts | None = getattr(method, self.project_name + "_spec", None) + return opts + + def get_plugins(self) -> set[Any]: + """Return a set of all registered plugin objects.""" + return {x for x in self._name2plugin.values() if x is not None} + + def is_registered(self, plugin: _Plugin) -> bool: + """Return whether the plugin is already registered.""" + return any(plugin == val for val in self._name2plugin.values()) + + def get_canonical_name(self, plugin: _Plugin) -> str: + """Return a canonical name for a plugin object. + + Note that a plugin may be registered under a different name + specified by the caller of :meth:`register(plugin, name) `. + To obtain the name of a registered plugin use :meth:`get_name(plugin) + ` instead. + """ + name: str | None = getattr(plugin, "__name__", None) + return name or str(id(plugin)) + + def get_plugin(self, name: str) -> Any | None: + """Return the plugin registered under the given name, if any.""" + return self._name2plugin.get(name) + + def has_plugin(self, name: str) -> bool: + """Return whether a plugin with the given name is registered.""" + return self.get_plugin(name) is not None + + def get_name(self, plugin: _Plugin) -> str | None: + """Return the name the plugin is registered under, or ``None`` if + is isn't.""" + for name, val in self._name2plugin.items(): + if plugin == val: + return name + return None + + def _verify_hook(self, hook: HookCaller, hookimpl: HookImpl) -> None: + if hook.is_historic() and (hookimpl.hookwrapper or hookimpl.wrapper): + raise PluginValidationError( + hookimpl.plugin, + f"Plugin {hookimpl.plugin_name!r}\nhook {hook.name!r}\n" + "historic incompatible with yield/wrapper/hookwrapper", + ) + + assert hook.spec is not None + if hook.spec.warn_on_impl: + _warn_for_function(hook.spec.warn_on_impl, hookimpl.function) + + # positional arg checking + notinspec = set(hookimpl.argnames) - set(hook.spec.argnames) + if notinspec: + raise PluginValidationError( + hookimpl.plugin, + f"Plugin {hookimpl.plugin_name!r} for hook {hook.name!r}\n" + f"hookimpl definition: {_formatdef(hookimpl.function)}\n" + f"Argument(s) {notinspec} are declared in the hookimpl but " + "can not be found in the hookspec", + ) + + if hook.spec.warn_on_impl_args: + for hookimpl_argname in hookimpl.argnames: + argname_warning = hook.spec.warn_on_impl_args.get(hookimpl_argname) + if argname_warning is not None: + _warn_for_function(argname_warning, hookimpl.function) + + if ( + hookimpl.wrapper or hookimpl.hookwrapper + ) and not inspect.isgeneratorfunction(hookimpl.function): + raise PluginValidationError( + hookimpl.plugin, + f"Plugin {hookimpl.plugin_name!r} for hook {hook.name!r}\n" + f"hookimpl definition: {_formatdef(hookimpl.function)}\n" + "Declared as wrapper=True or hookwrapper=True " + "but function is not a generator function", + ) + + if hookimpl.wrapper and hookimpl.hookwrapper: + raise PluginValidationError( + hookimpl.plugin, + f"Plugin {hookimpl.plugin_name!r} for hook {hook.name!r}\n" + f"hookimpl definition: {_formatdef(hookimpl.function)}\n" + "The wrapper=True and hookwrapper=True options are mutually exclusive", + ) + + def check_pending(self) -> None: + """Verify that all hooks which have not been verified against a + hook specification are optional, otherwise raise + :exc:`PluginValidationError`.""" + for name in self.hook.__dict__: + if name[0] == "_": + continue + hook: HookCaller = getattr(self.hook, name) + if not hook.has_spec(): + for hookimpl in hook.get_hookimpls(): + if not hookimpl.optionalhook: + raise PluginValidationError( + hookimpl.plugin, + f"unknown hook {name!r} in plugin {hookimpl.plugin!r}", + ) + + def load_setuptools_entrypoints(self, group: str, name: str | None = None) -> int: + """Load modules from querying the specified setuptools ``group``. + + :param group: + Entry point group to load plugins. + :param name: + If given, loads only plugins with the given ``name``. + + :return: + The number of plugins loaded by this call. + """ + import importlib.metadata + + count = 0 + for dist in list(importlib.metadata.distributions()): + for ep in dist.entry_points: + if ( + ep.group != group + or (name is not None and ep.name != name) + # already registered + or self.get_plugin(ep.name) + or self.is_blocked(ep.name) + ): + continue + plugin = ep.load() + self.register(plugin, name=ep.name) + self._plugin_distinfo.append((plugin, DistFacade(dist))) + count += 1 + return count + + def list_plugin_distinfo(self) -> list[tuple[_Plugin, DistFacade]]: + """Return a list of (plugin, distinfo) pairs for all + setuptools-registered plugins.""" + return list(self._plugin_distinfo) + + def list_name_plugin(self) -> list[tuple[str, _Plugin]]: + """Return a list of (name, plugin) pairs for all registered plugins.""" + return list(self._name2plugin.items()) + + def get_hookcallers(self, plugin: _Plugin) -> list[HookCaller] | None: + """Get all hook callers for the specified plugin. + + :returns: + The hook callers, or ``None`` if ``plugin`` is not registered in + this plugin manager. + """ + if self.get_name(plugin) is None: + return None + hookcallers = [] + for hookcaller in self.hook.__dict__.values(): + for hookimpl in hookcaller.get_hookimpls(): + if hookimpl.plugin is plugin: + hookcallers.append(hookcaller) + return hookcallers + + def add_hookcall_monitoring( + self, before: _BeforeTrace, after: _AfterTrace + ) -> Callable[[], None]: + """Add before/after tracing functions for all hooks. + + Returns an undo function which, when called, removes the added tracers. + + ``before(hook_name, hook_impls, kwargs)`` will be called ahead + of all hook calls and receive a hookcaller instance, a list + of HookImpl instances and the keyword arguments for the hook call. + + ``after(outcome, hook_name, hook_impls, kwargs)`` receives the + same arguments as ``before`` but also a :class:`~pluggy.Result` object + which represents the result of the overall hook call. + """ + oldcall = self._inner_hookexec + + def traced_hookexec( + hook_name: str, + hook_impls: Sequence[HookImpl], + caller_kwargs: Mapping[str, object], + firstresult: bool, + ) -> object | list[object]: + before(hook_name, hook_impls, caller_kwargs) + outcome = Result.from_call( + lambda: oldcall(hook_name, hook_impls, caller_kwargs, firstresult) + ) + after(outcome, hook_name, hook_impls, caller_kwargs) + return outcome.get_result() + + self._inner_hookexec = traced_hookexec + + def undo() -> None: + self._inner_hookexec = oldcall + + return undo + + def enable_tracing(self) -> Callable[[], None]: + """Enable tracing of hook calls. + + Returns an undo function which, when called, removes the added tracing. + """ + hooktrace = self.trace.root.get("hook") + + def before( + hook_name: str, methods: Sequence[HookImpl], kwargs: Mapping[str, object] + ) -> None: + hooktrace.root.indent += 1 + hooktrace(hook_name, kwargs) + + def after( + outcome: Result[object], + hook_name: str, + methods: Sequence[HookImpl], + kwargs: Mapping[str, object], + ) -> None: + if outcome.exception is None: + hooktrace("finish", hook_name, "-->", outcome.get_result()) + hooktrace.root.indent -= 1 + + return self.add_hookcall_monitoring(before, after) + + def subset_hook_caller( + self, name: str, remove_plugins: Iterable[_Plugin] + ) -> HookCaller: + """Return a proxy :class:`~pluggy.HookCaller` instance for the named + method which manages calls to all registered plugins except the ones + from remove_plugins.""" + orig: HookCaller = getattr(self.hook, name) + plugins_to_remove = {plug for plug in remove_plugins if hasattr(plug, name)} + if plugins_to_remove: + return _SubsetHookCaller(orig, plugins_to_remove) + return orig + + +def _formatdef(func: Callable[..., object]) -> str: + return f"{func.__name__}{inspect.signature(func)}" diff --git a/Backend/venv/lib/python3.12/site-packages/pluggy/_result.py b/Backend/venv/lib/python3.12/site-packages/pluggy/_result.py new file mode 100644 index 00000000..656a5841 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/pluggy/_result.py @@ -0,0 +1,107 @@ +""" +Hook wrapper "result" utilities. +""" + +from __future__ import annotations + +from types import TracebackType +from typing import Callable +from typing import cast +from typing import final +from typing import Generic +from typing import Optional +from typing import TypeVar + + +_ExcInfo = tuple[type[BaseException], BaseException, Optional[TracebackType]] +ResultType = TypeVar("ResultType") + + +class HookCallError(Exception): + """Hook was called incorrectly.""" + + +@final +class Result(Generic[ResultType]): + """An object used to inspect and set the result in a :ref:`hook wrapper + `.""" + + __slots__ = ("_result", "_exception", "_traceback") + + def __init__( + self, + result: ResultType | None, + exception: BaseException | None, + ) -> None: + """:meta private:""" + self._result = result + self._exception = exception + # Exception __traceback__ is mutable, this keeps the original. + self._traceback = exception.__traceback__ if exception is not None else None + + @property + def excinfo(self) -> _ExcInfo | None: + """:meta private:""" + exc = self._exception + if exc is None: + return None + else: + return (type(exc), exc, self._traceback) + + @property + def exception(self) -> BaseException | None: + """:meta private:""" + return self._exception + + @classmethod + def from_call(cls, func: Callable[[], ResultType]) -> Result[ResultType]: + """:meta private:""" + __tracebackhide__ = True + result = exception = None + try: + result = func() + except BaseException as exc: + exception = exc + return cls(result, exception) + + def force_result(self, result: ResultType) -> None: + """Force the result(s) to ``result``. + + If the hook was marked as a ``firstresult`` a single value should + be set, otherwise set a (modified) list of results. Any exceptions + found during invocation will be deleted. + + This overrides any previous result or exception. + """ + self._result = result + self._exception = None + self._traceback = None + + def force_exception(self, exception: BaseException) -> None: + """Force the result to fail with ``exception``. + + This overrides any previous result or exception. + + .. versionadded:: 1.1.0 + """ + self._result = None + self._exception = exception + self._traceback = exception.__traceback__ if exception is not None else None + + def get_result(self) -> ResultType: + """Get the result(s) for this hook call. + + If the hook was marked as a ``firstresult`` only a single value + will be returned, otherwise a list of results. + """ + __tracebackhide__ = True + exc = self._exception + tb = self._traceback + if exc is None: + return cast(ResultType, self._result) + else: + raise exc.with_traceback(tb) + + +# Historical name (pluggy<=1.2), kept for backward compatibility. +_Result = Result diff --git a/Backend/venv/lib/python3.12/site-packages/pluggy/_tracing.py b/Backend/venv/lib/python3.12/site-packages/pluggy/_tracing.py new file mode 100644 index 00000000..f0b36db1 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/pluggy/_tracing.py @@ -0,0 +1,72 @@ +""" +Tracing utils +""" + +from __future__ import annotations + +from collections.abc import Sequence +from typing import Any +from typing import Callable + + +_Writer = Callable[[str], object] +_Processor = Callable[[tuple[str, ...], tuple[Any, ...]], object] + + +class TagTracer: + def __init__(self) -> None: + self._tags2proc: dict[tuple[str, ...], _Processor] = {} + self._writer: _Writer | None = None + self.indent = 0 + + def get(self, name: str) -> TagTracerSub: + return TagTracerSub(self, (name,)) + + def _format_message(self, tags: Sequence[str], args: Sequence[object]) -> str: + if isinstance(args[-1], dict): + extra = args[-1] + args = args[:-1] + else: + extra = {} + + content = " ".join(map(str, args)) + indent = " " * self.indent + + lines = ["{}{} [{}]\n".format(indent, content, ":".join(tags))] + + for name, value in extra.items(): + lines.append(f"{indent} {name}: {value}\n") + + return "".join(lines) + + def _processmessage(self, tags: tuple[str, ...], args: tuple[object, ...]) -> None: + if self._writer is not None and args: + self._writer(self._format_message(tags, args)) + try: + processor = self._tags2proc[tags] + except KeyError: + pass + else: + processor(tags, args) + + def setwriter(self, writer: _Writer | None) -> None: + self._writer = writer + + def setprocessor(self, tags: str | tuple[str, ...], processor: _Processor) -> None: + if isinstance(tags, str): + tags = tuple(tags.split(":")) + else: + assert isinstance(tags, tuple) + self._tags2proc[tags] = processor + + +class TagTracerSub: + def __init__(self, root: TagTracer, tags: tuple[str, ...]) -> None: + self.root = root + self.tags = tags + + def __call__(self, *args: object) -> None: + self.root._processmessage(self.tags, args) + + def get(self, name: str) -> TagTracerSub: + return self.__class__(self.root, self.tags + (name,)) diff --git a/Backend/venv/lib/python3.12/site-packages/pluggy/_version.py b/Backend/venv/lib/python3.12/site-packages/pluggy/_version.py new file mode 100644 index 00000000..6b8420c0 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/pluggy/_version.py @@ -0,0 +1,21 @@ +# file generated by setuptools-scm +# don't change, don't track in version control + +__all__ = ["__version__", "__version_tuple__", "version", "version_tuple"] + +TYPE_CHECKING = False +if TYPE_CHECKING: + from typing import Tuple + from typing import Union + + VERSION_TUPLE = Tuple[Union[int, str], ...] +else: + VERSION_TUPLE = object + +version: str +__version__: str +__version_tuple__: VERSION_TUPLE +version_tuple: VERSION_TUPLE + +__version__ = version = '1.6.0' +__version_tuple__ = version_tuple = (1, 6, 0) diff --git a/Backend/venv/lib/python3.12/site-packages/pluggy/_warnings.py b/Backend/venv/lib/python3.12/site-packages/pluggy/_warnings.py new file mode 100644 index 00000000..6356c770 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/pluggy/_warnings.py @@ -0,0 +1,27 @@ +from typing import final + + +class PluggyWarning(UserWarning): + """Base class for all warnings emitted by pluggy.""" + + __module__ = "pluggy" + + +@final +class PluggyTeardownRaisedWarning(PluggyWarning): + """A plugin raised an exception during an :ref:`old-style hookwrapper + ` teardown. + + Such exceptions are not handled by pluggy, and may cause subsequent + teardowns to be executed at unexpected times, or be skipped entirely. + + This is an issue in the plugin implementation. + + If the exception is unintended, fix the underlying cause. + + If the exception is intended, switch to :ref:`new-style hook wrappers + `, or use :func:`result.force_exception() + ` to set the exception instead of raising. + """ + + __module__ = "pluggy" diff --git a/Backend/venv/lib/python3.12/site-packages/pluggy/py.typed b/Backend/venv/lib/python3.12/site-packages/pluggy/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/Backend/venv/lib/python3.12/site-packages/py.py b/Backend/venv/lib/python3.12/site-packages/py.py new file mode 100644 index 00000000..5c661e66 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/py.py @@ -0,0 +1,15 @@ +# shim for pylib going away +# if pylib is installed this file will get skipped +# (`py/__init__.py` has higher precedence) +from __future__ import annotations + +import sys + +import _pytest._py.error as error +import _pytest._py.path as path + + +sys.modules["py.error"] = error +sys.modules["py.path"] = path + +__all__ = ["error", "path"] diff --git a/Backend/venv/lib/python3.12/site-packages/pydantic/__pycache__/functional_validators.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pydantic/__pycache__/functional_validators.cpython-312.pyc index c41c311e..99005068 100644 Binary files a/Backend/venv/lib/python3.12/site-packages/pydantic/__pycache__/functional_validators.cpython-312.pyc and b/Backend/venv/lib/python3.12/site-packages/pydantic/__pycache__/functional_validators.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pygments-2.19.2.dist-info/INSTALLER b/Backend/venv/lib/python3.12/site-packages/pygments-2.19.2.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/pygments-2.19.2.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/Backend/venv/lib/python3.12/site-packages/pygments-2.19.2.dist-info/METADATA b/Backend/venv/lib/python3.12/site-packages/pygments-2.19.2.dist-info/METADATA new file mode 100644 index 00000000..2eff6a0c --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/pygments-2.19.2.dist-info/METADATA @@ -0,0 +1,58 @@ +Metadata-Version: 2.4 +Name: Pygments +Version: 2.19.2 +Summary: Pygments is a syntax highlighting package written in Python. +Project-URL: Homepage, https://pygments.org +Project-URL: Documentation, https://pygments.org/docs +Project-URL: Source, https://github.com/pygments/pygments +Project-URL: Bug Tracker, https://github.com/pygments/pygments/issues +Project-URL: Changelog, https://github.com/pygments/pygments/blob/master/CHANGES +Author-email: Georg Brandl +Maintainer: Matthäus G. Chajdas +Maintainer-email: Georg Brandl , Jean Abou Samra +License: BSD-2-Clause +License-File: AUTHORS +License-File: LICENSE +Keywords: syntax highlighting +Classifier: Development Status :: 6 - Mature +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: End Users/Desktop +Classifier: Intended Audience :: System Administrators +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Text Processing :: Filters +Classifier: Topic :: Utilities +Requires-Python: >=3.8 +Provides-Extra: plugins +Provides-Extra: windows-terminal +Requires-Dist: colorama>=0.4.6; extra == 'windows-terminal' +Description-Content-Type: text/x-rst + +Pygments +~~~~~~~~ + +Pygments is a syntax highlighting package written in Python. + +It is a generic syntax highlighter suitable for use in code hosting, forums, +wikis or other applications that need to prettify source code. Highlights +are: + +* a wide range of over 500 languages and other text formats is supported +* special attention is paid to details, increasing quality by a fair amount +* support for new languages and formats are added easily +* a number of output formats, presently HTML, LaTeX, RTF, SVG, all image + formats that PIL supports and ANSI sequences +* it is usable as a command-line tool and as a library + +Copyright 2006-2025 by the Pygments team, see ``AUTHORS``. +Licensed under the BSD, see ``LICENSE`` for details. diff --git a/Backend/venv/lib/python3.12/site-packages/pygments-2.19.2.dist-info/RECORD b/Backend/venv/lib/python3.12/site-packages/pygments-2.19.2.dist-info/RECORD new file mode 100644 index 00000000..f4d51844 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/pygments-2.19.2.dist-info/RECORD @@ -0,0 +1,684 @@ +../../../bin/pygmentize,sha256=ghpZ_TR_z61Et86zpsyj0Putnh2sCbRNRxBuDwvzQhI,227 +pygments-2.19.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +pygments-2.19.2.dist-info/METADATA,sha256=euEA1n1nAGxkeYA92DX89HqbWfrHlEQeqOZqp_WYTYI,2512 +pygments-2.19.2.dist-info/RECORD,, +pygments-2.19.2.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87 +pygments-2.19.2.dist-info/entry_points.txt,sha256=uUXw-XhMKBEX4pWcCtpuTTnPhL3h7OEE2jWi51VQsa8,53 +pygments-2.19.2.dist-info/licenses/AUTHORS,sha256=BmDjGKbyFYAq3Icxq4XQxl_yfPzKP10oWX8wZHYZW9k,10824 +pygments-2.19.2.dist-info/licenses/LICENSE,sha256=qdZvHVJt8C4p3Oc0NtNOVuhjL0bCdbvf_HBWnogvnxc,1331 +pygments/__init__.py,sha256=_3UT86TGpHuW8FekdZ8uLidEZH1NhmcLiOy2KKNPCt4,2959 +pygments/__main__.py,sha256=p8AJyoyCOMYGvzWHdnq0_A9qaaVqaj02nIu3xhJp1_4,348 +pygments/__pycache__/__init__.cpython-312.pyc,, +pygments/__pycache__/__main__.cpython-312.pyc,, +pygments/__pycache__/cmdline.cpython-312.pyc,, +pygments/__pycache__/console.cpython-312.pyc,, +pygments/__pycache__/filter.cpython-312.pyc,, +pygments/__pycache__/formatter.cpython-312.pyc,, +pygments/__pycache__/lexer.cpython-312.pyc,, +pygments/__pycache__/modeline.cpython-312.pyc,, +pygments/__pycache__/plugin.cpython-312.pyc,, +pygments/__pycache__/regexopt.cpython-312.pyc,, +pygments/__pycache__/scanner.cpython-312.pyc,, +pygments/__pycache__/sphinxext.cpython-312.pyc,, +pygments/__pycache__/style.cpython-312.pyc,, +pygments/__pycache__/token.cpython-312.pyc,, +pygments/__pycache__/unistring.cpython-312.pyc,, +pygments/__pycache__/util.cpython-312.pyc,, +pygments/cmdline.py,sha256=4pL9Kpn2PUEKPobgrsQgg-vCx2NjsrapKzQ6LxQR7Q0,23536 +pygments/console.py,sha256=AagDWqwea2yBWf10KC9ptBgMpMjxKp8yABAmh-NQOVk,1718 +pygments/filter.py,sha256=YLtpTnZiu07nY3oK9nfR6E9Y1FBHhP5PX8gvkJWcfag,1910 +pygments/filters/__init__.py,sha256=B00KqPCQh5E0XhzaDK74Qa1E4fDSTlD6b0Pvr1v-vEQ,40344 +pygments/filters/__pycache__/__init__.cpython-312.pyc,, +pygments/formatter.py,sha256=H_4J-moKkKfRWUOW9J0u7hhw6n1LiO-2Xu1q2B0sE5w,4366 +pygments/formatters/__init__.py,sha256=7OuvmoYLyoPzoOQV_brHG8GSKYB_wjFSkAQng6x2y9g,5349 +pygments/formatters/__pycache__/__init__.cpython-312.pyc,, +pygments/formatters/__pycache__/_mapping.cpython-312.pyc,, +pygments/formatters/__pycache__/bbcode.cpython-312.pyc,, +pygments/formatters/__pycache__/groff.cpython-312.pyc,, +pygments/formatters/__pycache__/html.cpython-312.pyc,, +pygments/formatters/__pycache__/img.cpython-312.pyc,, +pygments/formatters/__pycache__/irc.cpython-312.pyc,, +pygments/formatters/__pycache__/latex.cpython-312.pyc,, +pygments/formatters/__pycache__/other.cpython-312.pyc,, +pygments/formatters/__pycache__/pangomarkup.cpython-312.pyc,, +pygments/formatters/__pycache__/rtf.cpython-312.pyc,, +pygments/formatters/__pycache__/svg.cpython-312.pyc,, +pygments/formatters/__pycache__/terminal.cpython-312.pyc,, +pygments/formatters/__pycache__/terminal256.cpython-312.pyc,, +pygments/formatters/_mapping.py,sha256=1Cw37FuQlNacnxRKmtlPX4nyLoX9_ttko5ZwscNUZZ4,4176 +pygments/formatters/bbcode.py,sha256=s0Ka35OKuIchoSgEAGf6rj0rl2a9ym9L31JVNSRbZFQ,3296 +pygments/formatters/groff.py,sha256=pLcIHj4jJS_lRAVFnyJODKDu1Xlyl9_AEIdOtbl3DT0,5082 +pygments/formatters/html.py,sha256=FrHJ69FUliEyPY0zTfab0C1gPf7LXsKgeRlhwkniqIs,35953 +pygments/formatters/img.py,sha256=aRpFo8mBmWTL3sBUjRCWkeS3rc6FZrSFC4EksDrl53g,23301 +pygments/formatters/irc.py,sha256=R0Js0TYWySlI2yE9sW6tN4d4X-x3k9ZmudsijGPnLmU,4945 +pygments/formatters/latex.py,sha256=BRYtbLeW_YD1kwhhnFInhJIKylurnri8CF1lP069KWE,19258 +pygments/formatters/other.py,sha256=8pYW27sU_7XicLUqOEt2yWSO0h1IEUM3TIv34KODLwo,4986 +pygments/formatters/pangomarkup.py,sha256=pcFvEC7K1Me0EjGeOZth4oCnEY85bfqc77XzZASEPpY,2206 +pygments/formatters/rtf.py,sha256=kcKMCxTXu-2-hpgEftlGJRm7Ss-yA_Sy8OsHH_qzykA,11921 +pygments/formatters/svg.py,sha256=R6A2ME6JsMQWFiyn8wcKwFUOD6vsu-HLwiIztLu-77E,7138 +pygments/formatters/terminal.py,sha256=J_F_dFXwR9LHWvatIDnwqRYJyjVmSo1Zx8K_XDh6SyM,4626 +pygments/formatters/terminal256.py,sha256=7GQFLE5cfmeu53CAzANO74-kBk2BFkXfn5phmZjYkhM,11717 +pygments/lexer.py,sha256=ib-F_0GxHkwGpb6vWP0DeLMLc7EYgjo3hWFKN5IgOq0,35109 +pygments/lexers/__init__.py,sha256=6YhzxGKlWk38P6JpIJUQ1rVvV0DEZjEmdYsdMQ58hSk,12067 +pygments/lexers/__pycache__/__init__.cpython-312.pyc,, +pygments/lexers/__pycache__/_ada_builtins.cpython-312.pyc,, +pygments/lexers/__pycache__/_asy_builtins.cpython-312.pyc,, +pygments/lexers/__pycache__/_cl_builtins.cpython-312.pyc,, +pygments/lexers/__pycache__/_cocoa_builtins.cpython-312.pyc,, +pygments/lexers/__pycache__/_csound_builtins.cpython-312.pyc,, +pygments/lexers/__pycache__/_css_builtins.cpython-312.pyc,, +pygments/lexers/__pycache__/_googlesql_builtins.cpython-312.pyc,, +pygments/lexers/__pycache__/_julia_builtins.cpython-312.pyc,, +pygments/lexers/__pycache__/_lasso_builtins.cpython-312.pyc,, +pygments/lexers/__pycache__/_lilypond_builtins.cpython-312.pyc,, +pygments/lexers/__pycache__/_lua_builtins.cpython-312.pyc,, +pygments/lexers/__pycache__/_luau_builtins.cpython-312.pyc,, +pygments/lexers/__pycache__/_mapping.cpython-312.pyc,, +pygments/lexers/__pycache__/_mql_builtins.cpython-312.pyc,, +pygments/lexers/__pycache__/_mysql_builtins.cpython-312.pyc,, +pygments/lexers/__pycache__/_openedge_builtins.cpython-312.pyc,, +pygments/lexers/__pycache__/_php_builtins.cpython-312.pyc,, +pygments/lexers/__pycache__/_postgres_builtins.cpython-312.pyc,, +pygments/lexers/__pycache__/_qlik_builtins.cpython-312.pyc,, +pygments/lexers/__pycache__/_scheme_builtins.cpython-312.pyc,, +pygments/lexers/__pycache__/_scilab_builtins.cpython-312.pyc,, +pygments/lexers/__pycache__/_sourcemod_builtins.cpython-312.pyc,, +pygments/lexers/__pycache__/_sql_builtins.cpython-312.pyc,, +pygments/lexers/__pycache__/_stan_builtins.cpython-312.pyc,, +pygments/lexers/__pycache__/_stata_builtins.cpython-312.pyc,, +pygments/lexers/__pycache__/_tsql_builtins.cpython-312.pyc,, +pygments/lexers/__pycache__/_usd_builtins.cpython-312.pyc,, +pygments/lexers/__pycache__/_vbscript_builtins.cpython-312.pyc,, +pygments/lexers/__pycache__/_vim_builtins.cpython-312.pyc,, +pygments/lexers/__pycache__/actionscript.cpython-312.pyc,, +pygments/lexers/__pycache__/ada.cpython-312.pyc,, +pygments/lexers/__pycache__/agile.cpython-312.pyc,, +pygments/lexers/__pycache__/algebra.cpython-312.pyc,, +pygments/lexers/__pycache__/ambient.cpython-312.pyc,, +pygments/lexers/__pycache__/amdgpu.cpython-312.pyc,, +pygments/lexers/__pycache__/ampl.cpython-312.pyc,, +pygments/lexers/__pycache__/apdlexer.cpython-312.pyc,, +pygments/lexers/__pycache__/apl.cpython-312.pyc,, +pygments/lexers/__pycache__/archetype.cpython-312.pyc,, +pygments/lexers/__pycache__/arrow.cpython-312.pyc,, +pygments/lexers/__pycache__/arturo.cpython-312.pyc,, +pygments/lexers/__pycache__/asc.cpython-312.pyc,, +pygments/lexers/__pycache__/asm.cpython-312.pyc,, +pygments/lexers/__pycache__/asn1.cpython-312.pyc,, +pygments/lexers/__pycache__/automation.cpython-312.pyc,, +pygments/lexers/__pycache__/bare.cpython-312.pyc,, +pygments/lexers/__pycache__/basic.cpython-312.pyc,, +pygments/lexers/__pycache__/bdd.cpython-312.pyc,, +pygments/lexers/__pycache__/berry.cpython-312.pyc,, +pygments/lexers/__pycache__/bibtex.cpython-312.pyc,, +pygments/lexers/__pycache__/blueprint.cpython-312.pyc,, +pygments/lexers/__pycache__/boa.cpython-312.pyc,, +pygments/lexers/__pycache__/bqn.cpython-312.pyc,, +pygments/lexers/__pycache__/business.cpython-312.pyc,, +pygments/lexers/__pycache__/c_cpp.cpython-312.pyc,, +pygments/lexers/__pycache__/c_like.cpython-312.pyc,, +pygments/lexers/__pycache__/capnproto.cpython-312.pyc,, +pygments/lexers/__pycache__/carbon.cpython-312.pyc,, +pygments/lexers/__pycache__/cddl.cpython-312.pyc,, +pygments/lexers/__pycache__/chapel.cpython-312.pyc,, +pygments/lexers/__pycache__/clean.cpython-312.pyc,, +pygments/lexers/__pycache__/codeql.cpython-312.pyc,, +pygments/lexers/__pycache__/comal.cpython-312.pyc,, +pygments/lexers/__pycache__/compiled.cpython-312.pyc,, +pygments/lexers/__pycache__/configs.cpython-312.pyc,, +pygments/lexers/__pycache__/console.cpython-312.pyc,, +pygments/lexers/__pycache__/cplint.cpython-312.pyc,, +pygments/lexers/__pycache__/crystal.cpython-312.pyc,, +pygments/lexers/__pycache__/csound.cpython-312.pyc,, +pygments/lexers/__pycache__/css.cpython-312.pyc,, +pygments/lexers/__pycache__/d.cpython-312.pyc,, +pygments/lexers/__pycache__/dalvik.cpython-312.pyc,, +pygments/lexers/__pycache__/data.cpython-312.pyc,, +pygments/lexers/__pycache__/dax.cpython-312.pyc,, +pygments/lexers/__pycache__/devicetree.cpython-312.pyc,, +pygments/lexers/__pycache__/diff.cpython-312.pyc,, +pygments/lexers/__pycache__/dns.cpython-312.pyc,, +pygments/lexers/__pycache__/dotnet.cpython-312.pyc,, +pygments/lexers/__pycache__/dsls.cpython-312.pyc,, +pygments/lexers/__pycache__/dylan.cpython-312.pyc,, +pygments/lexers/__pycache__/ecl.cpython-312.pyc,, +pygments/lexers/__pycache__/eiffel.cpython-312.pyc,, +pygments/lexers/__pycache__/elm.cpython-312.pyc,, +pygments/lexers/__pycache__/elpi.cpython-312.pyc,, +pygments/lexers/__pycache__/email.cpython-312.pyc,, +pygments/lexers/__pycache__/erlang.cpython-312.pyc,, +pygments/lexers/__pycache__/esoteric.cpython-312.pyc,, +pygments/lexers/__pycache__/ezhil.cpython-312.pyc,, +pygments/lexers/__pycache__/factor.cpython-312.pyc,, +pygments/lexers/__pycache__/fantom.cpython-312.pyc,, +pygments/lexers/__pycache__/felix.cpython-312.pyc,, +pygments/lexers/__pycache__/fift.cpython-312.pyc,, +pygments/lexers/__pycache__/floscript.cpython-312.pyc,, +pygments/lexers/__pycache__/forth.cpython-312.pyc,, +pygments/lexers/__pycache__/fortran.cpython-312.pyc,, +pygments/lexers/__pycache__/foxpro.cpython-312.pyc,, +pygments/lexers/__pycache__/freefem.cpython-312.pyc,, +pygments/lexers/__pycache__/func.cpython-312.pyc,, +pygments/lexers/__pycache__/functional.cpython-312.pyc,, +pygments/lexers/__pycache__/futhark.cpython-312.pyc,, +pygments/lexers/__pycache__/gcodelexer.cpython-312.pyc,, +pygments/lexers/__pycache__/gdscript.cpython-312.pyc,, +pygments/lexers/__pycache__/gleam.cpython-312.pyc,, +pygments/lexers/__pycache__/go.cpython-312.pyc,, +pygments/lexers/__pycache__/grammar_notation.cpython-312.pyc,, +pygments/lexers/__pycache__/graph.cpython-312.pyc,, +pygments/lexers/__pycache__/graphics.cpython-312.pyc,, +pygments/lexers/__pycache__/graphql.cpython-312.pyc,, +pygments/lexers/__pycache__/graphviz.cpython-312.pyc,, +pygments/lexers/__pycache__/gsql.cpython-312.pyc,, +pygments/lexers/__pycache__/hare.cpython-312.pyc,, +pygments/lexers/__pycache__/haskell.cpython-312.pyc,, +pygments/lexers/__pycache__/haxe.cpython-312.pyc,, +pygments/lexers/__pycache__/hdl.cpython-312.pyc,, +pygments/lexers/__pycache__/hexdump.cpython-312.pyc,, +pygments/lexers/__pycache__/html.cpython-312.pyc,, +pygments/lexers/__pycache__/idl.cpython-312.pyc,, +pygments/lexers/__pycache__/igor.cpython-312.pyc,, +pygments/lexers/__pycache__/inferno.cpython-312.pyc,, +pygments/lexers/__pycache__/installers.cpython-312.pyc,, +pygments/lexers/__pycache__/int_fiction.cpython-312.pyc,, +pygments/lexers/__pycache__/iolang.cpython-312.pyc,, +pygments/lexers/__pycache__/j.cpython-312.pyc,, +pygments/lexers/__pycache__/javascript.cpython-312.pyc,, +pygments/lexers/__pycache__/jmespath.cpython-312.pyc,, +pygments/lexers/__pycache__/jslt.cpython-312.pyc,, +pygments/lexers/__pycache__/json5.cpython-312.pyc,, +pygments/lexers/__pycache__/jsonnet.cpython-312.pyc,, +pygments/lexers/__pycache__/jsx.cpython-312.pyc,, +pygments/lexers/__pycache__/julia.cpython-312.pyc,, +pygments/lexers/__pycache__/jvm.cpython-312.pyc,, +pygments/lexers/__pycache__/kuin.cpython-312.pyc,, +pygments/lexers/__pycache__/kusto.cpython-312.pyc,, +pygments/lexers/__pycache__/ldap.cpython-312.pyc,, +pygments/lexers/__pycache__/lean.cpython-312.pyc,, +pygments/lexers/__pycache__/lilypond.cpython-312.pyc,, +pygments/lexers/__pycache__/lisp.cpython-312.pyc,, +pygments/lexers/__pycache__/macaulay2.cpython-312.pyc,, +pygments/lexers/__pycache__/make.cpython-312.pyc,, +pygments/lexers/__pycache__/maple.cpython-312.pyc,, +pygments/lexers/__pycache__/markup.cpython-312.pyc,, +pygments/lexers/__pycache__/math.cpython-312.pyc,, +pygments/lexers/__pycache__/matlab.cpython-312.pyc,, +pygments/lexers/__pycache__/maxima.cpython-312.pyc,, +pygments/lexers/__pycache__/meson.cpython-312.pyc,, +pygments/lexers/__pycache__/mime.cpython-312.pyc,, +pygments/lexers/__pycache__/minecraft.cpython-312.pyc,, +pygments/lexers/__pycache__/mips.cpython-312.pyc,, +pygments/lexers/__pycache__/ml.cpython-312.pyc,, +pygments/lexers/__pycache__/modeling.cpython-312.pyc,, +pygments/lexers/__pycache__/modula2.cpython-312.pyc,, +pygments/lexers/__pycache__/mojo.cpython-312.pyc,, +pygments/lexers/__pycache__/monte.cpython-312.pyc,, +pygments/lexers/__pycache__/mosel.cpython-312.pyc,, +pygments/lexers/__pycache__/ncl.cpython-312.pyc,, +pygments/lexers/__pycache__/nimrod.cpython-312.pyc,, +pygments/lexers/__pycache__/nit.cpython-312.pyc,, +pygments/lexers/__pycache__/nix.cpython-312.pyc,, +pygments/lexers/__pycache__/numbair.cpython-312.pyc,, +pygments/lexers/__pycache__/oberon.cpython-312.pyc,, +pygments/lexers/__pycache__/objective.cpython-312.pyc,, +pygments/lexers/__pycache__/ooc.cpython-312.pyc,, +pygments/lexers/__pycache__/openscad.cpython-312.pyc,, +pygments/lexers/__pycache__/other.cpython-312.pyc,, +pygments/lexers/__pycache__/parasail.cpython-312.pyc,, +pygments/lexers/__pycache__/parsers.cpython-312.pyc,, +pygments/lexers/__pycache__/pascal.cpython-312.pyc,, +pygments/lexers/__pycache__/pawn.cpython-312.pyc,, +pygments/lexers/__pycache__/pddl.cpython-312.pyc,, +pygments/lexers/__pycache__/perl.cpython-312.pyc,, +pygments/lexers/__pycache__/phix.cpython-312.pyc,, +pygments/lexers/__pycache__/php.cpython-312.pyc,, +pygments/lexers/__pycache__/pointless.cpython-312.pyc,, +pygments/lexers/__pycache__/pony.cpython-312.pyc,, +pygments/lexers/__pycache__/praat.cpython-312.pyc,, +pygments/lexers/__pycache__/procfile.cpython-312.pyc,, +pygments/lexers/__pycache__/prolog.cpython-312.pyc,, +pygments/lexers/__pycache__/promql.cpython-312.pyc,, +pygments/lexers/__pycache__/prql.cpython-312.pyc,, +pygments/lexers/__pycache__/ptx.cpython-312.pyc,, +pygments/lexers/__pycache__/python.cpython-312.pyc,, +pygments/lexers/__pycache__/q.cpython-312.pyc,, +pygments/lexers/__pycache__/qlik.cpython-312.pyc,, +pygments/lexers/__pycache__/qvt.cpython-312.pyc,, +pygments/lexers/__pycache__/r.cpython-312.pyc,, +pygments/lexers/__pycache__/rdf.cpython-312.pyc,, +pygments/lexers/__pycache__/rebol.cpython-312.pyc,, +pygments/lexers/__pycache__/rego.cpython-312.pyc,, +pygments/lexers/__pycache__/resource.cpython-312.pyc,, +pygments/lexers/__pycache__/ride.cpython-312.pyc,, +pygments/lexers/__pycache__/rita.cpython-312.pyc,, +pygments/lexers/__pycache__/rnc.cpython-312.pyc,, +pygments/lexers/__pycache__/roboconf.cpython-312.pyc,, +pygments/lexers/__pycache__/robotframework.cpython-312.pyc,, +pygments/lexers/__pycache__/ruby.cpython-312.pyc,, +pygments/lexers/__pycache__/rust.cpython-312.pyc,, +pygments/lexers/__pycache__/sas.cpython-312.pyc,, +pygments/lexers/__pycache__/savi.cpython-312.pyc,, +pygments/lexers/__pycache__/scdoc.cpython-312.pyc,, +pygments/lexers/__pycache__/scripting.cpython-312.pyc,, +pygments/lexers/__pycache__/sgf.cpython-312.pyc,, +pygments/lexers/__pycache__/shell.cpython-312.pyc,, +pygments/lexers/__pycache__/sieve.cpython-312.pyc,, +pygments/lexers/__pycache__/slash.cpython-312.pyc,, +pygments/lexers/__pycache__/smalltalk.cpython-312.pyc,, +pygments/lexers/__pycache__/smithy.cpython-312.pyc,, +pygments/lexers/__pycache__/smv.cpython-312.pyc,, +pygments/lexers/__pycache__/snobol.cpython-312.pyc,, +pygments/lexers/__pycache__/solidity.cpython-312.pyc,, +pygments/lexers/__pycache__/soong.cpython-312.pyc,, +pygments/lexers/__pycache__/sophia.cpython-312.pyc,, +pygments/lexers/__pycache__/special.cpython-312.pyc,, +pygments/lexers/__pycache__/spice.cpython-312.pyc,, +pygments/lexers/__pycache__/sql.cpython-312.pyc,, +pygments/lexers/__pycache__/srcinfo.cpython-312.pyc,, +pygments/lexers/__pycache__/stata.cpython-312.pyc,, +pygments/lexers/__pycache__/supercollider.cpython-312.pyc,, +pygments/lexers/__pycache__/tablegen.cpython-312.pyc,, +pygments/lexers/__pycache__/tact.cpython-312.pyc,, +pygments/lexers/__pycache__/tal.cpython-312.pyc,, +pygments/lexers/__pycache__/tcl.cpython-312.pyc,, +pygments/lexers/__pycache__/teal.cpython-312.pyc,, +pygments/lexers/__pycache__/templates.cpython-312.pyc,, +pygments/lexers/__pycache__/teraterm.cpython-312.pyc,, +pygments/lexers/__pycache__/testing.cpython-312.pyc,, +pygments/lexers/__pycache__/text.cpython-312.pyc,, +pygments/lexers/__pycache__/textedit.cpython-312.pyc,, +pygments/lexers/__pycache__/textfmts.cpython-312.pyc,, +pygments/lexers/__pycache__/theorem.cpython-312.pyc,, +pygments/lexers/__pycache__/thingsdb.cpython-312.pyc,, +pygments/lexers/__pycache__/tlb.cpython-312.pyc,, +pygments/lexers/__pycache__/tls.cpython-312.pyc,, +pygments/lexers/__pycache__/tnt.cpython-312.pyc,, +pygments/lexers/__pycache__/trafficscript.cpython-312.pyc,, +pygments/lexers/__pycache__/typoscript.cpython-312.pyc,, +pygments/lexers/__pycache__/typst.cpython-312.pyc,, +pygments/lexers/__pycache__/ul4.cpython-312.pyc,, +pygments/lexers/__pycache__/unicon.cpython-312.pyc,, +pygments/lexers/__pycache__/urbi.cpython-312.pyc,, +pygments/lexers/__pycache__/usd.cpython-312.pyc,, +pygments/lexers/__pycache__/varnish.cpython-312.pyc,, +pygments/lexers/__pycache__/verification.cpython-312.pyc,, +pygments/lexers/__pycache__/verifpal.cpython-312.pyc,, +pygments/lexers/__pycache__/vip.cpython-312.pyc,, +pygments/lexers/__pycache__/vyper.cpython-312.pyc,, +pygments/lexers/__pycache__/web.cpython-312.pyc,, +pygments/lexers/__pycache__/webassembly.cpython-312.pyc,, +pygments/lexers/__pycache__/webidl.cpython-312.pyc,, +pygments/lexers/__pycache__/webmisc.cpython-312.pyc,, +pygments/lexers/__pycache__/wgsl.cpython-312.pyc,, +pygments/lexers/__pycache__/whiley.cpython-312.pyc,, +pygments/lexers/__pycache__/wowtoc.cpython-312.pyc,, +pygments/lexers/__pycache__/wren.cpython-312.pyc,, +pygments/lexers/__pycache__/x10.cpython-312.pyc,, +pygments/lexers/__pycache__/xorg.cpython-312.pyc,, +pygments/lexers/__pycache__/yang.cpython-312.pyc,, +pygments/lexers/__pycache__/yara.cpython-312.pyc,, +pygments/lexers/__pycache__/zig.cpython-312.pyc,, +pygments/lexers/_ada_builtins.py,sha256=CA_OnShtdc7wWh9oYcRlcrkDAQwYUKl6w7tdSbALQd4,1543 +pygments/lexers/_asy_builtins.py,sha256=cd9M00YH19w5ZL7aqucmC3nwpJGTS04U-01NLy5E2_4,27287 +pygments/lexers/_cl_builtins.py,sha256=kQeUIyZjP4kX0frkICDcKxBYQCLqzIDXa5WV5cevhDo,13994 +pygments/lexers/_cocoa_builtins.py,sha256=Ka1lLJe7JfWtdho4IFIB82X9yBvrbfHCCmEG-peXXhQ,105173 +pygments/lexers/_csound_builtins.py,sha256=qnQYKeI26ZHim316uqy_hDiRiCoHo2RHjD3sYBALyXs,18414 +pygments/lexers/_css_builtins.py,sha256=aD-dhLFXVd1Atn_bZd7gEdQn7Mhe60_VHpvZ340WzDI,12446 +pygments/lexers/_googlesql_builtins.py,sha256=IkrOk-T2v1yzbGzUEEQh5_Cf4uC_cmL_uuhwDpZlTug,16132 +pygments/lexers/_julia_builtins.py,sha256=N2WdSw5zgI2fhDat_i4YeVqurRTC_P8x71ez00SCN6U,11883 +pygments/lexers/_lasso_builtins.py,sha256=8q1gbsrMJeaeUhxIYKhaOxC9j_B-NBpq_XFj2Ze41X0,134510 +pygments/lexers/_lilypond_builtins.py,sha256=XTbGL1z1oKMoqWLEktG33jx5GdGTI9CpeO5NheEi4Y0,108094 +pygments/lexers/_lua_builtins.py,sha256=PhFdZV5-Tzz2j_q4lvG9lr84ELGfL41BhnrSDNNTaG4,8108 +pygments/lexers/_luau_builtins.py,sha256=-IDrU04kUVfjXwSQzMMpXmMYhNsQxZVVZk8cuAA0Lo0,955 +pygments/lexers/_mapping.py,sha256=9fv7xYOUAOr6LzfdFS4MDbPu78o4OQQH-2nsI1bNZf4,70438 +pygments/lexers/_mql_builtins.py,sha256=ybRQjlb7Cul0sDstnzxJl3h0qS6Ieqsr811fqrxyumU,24713 +pygments/lexers/_mysql_builtins.py,sha256=y0kAWZVAs0z2dTFJJV42OZpILgRnd8T3zSlBFv-g_oA,25838 +pygments/lexers/_openedge_builtins.py,sha256=Sz4j9-CPWIaxMa-2fZgY66j7igcu1ob1GR2UtI8zAkg,49398 +pygments/lexers/_php_builtins.py,sha256=Jd4BZpjMDELPi4EVoSxK1-8BFTc63HUwYfm1rLrGj0M,107922 +pygments/lexers/_postgres_builtins.py,sha256=Pqh4z0RBRbnW6rCQtWUdzWCJxNyqpJ7_0HOktxHDxk4,13343 +pygments/lexers/_qlik_builtins.py,sha256=xuJy9c9uZDXv6h8z582P5PrxqkxTZ_nS8gPl9OD9VN8,12595 +pygments/lexers/_scheme_builtins.py,sha256=2hNtJOJmP21lUsikpqMJ2gAmLT3Rwn_KEeqhXwCjgfk,32564 +pygments/lexers/_scilab_builtins.py,sha256=oZYPB1XPdIEz3pII11pFDe6extRRyWGA7pY06X8KZ8w,52411 +pygments/lexers/_sourcemod_builtins.py,sha256=H8AFLsNDdEpymIWOpDwbDJGCP1w-x-1gSlzPDioMF4o,26777 +pygments/lexers/_sql_builtins.py,sha256=oe8F9wWuO2iS6nEsZAdJtCUChBTjgM1Sq_aipu74jXM,6767 +pygments/lexers/_stan_builtins.py,sha256=dwi1hllM_NsaCv-aXJy7lEi57X5Hh5gSD97aCQyT9KM,13445 +pygments/lexers/_stata_builtins.py,sha256=Hqrr6j77zWU3cGGpBPohwexZci43YA4_sVYE4E1sNow,27227 +pygments/lexers/_tsql_builtins.py,sha256=Pi2RhTXcLE3glI9oxNhyVsOMn-fK_1TRxJ-EsYP5LcI,15460 +pygments/lexers/_usd_builtins.py,sha256=c9hbU1cwqBUCFIhNfu_Dob8ywv1rlPhi9w2OTj3kR8s,1658 +pygments/lexers/_vbscript_builtins.py,sha256=MqJ2ABywD21aSRtWYZRG64CCbGstC1kfsiHGJmZzxiw,4225 +pygments/lexers/_vim_builtins.py,sha256=bA4mH8t1mPPQfEiUCKEqRO1O0rL2DUG0Ux1Bt8ZSu0E,57066 +pygments/lexers/actionscript.py,sha256=JBngCe5UhYT_0dLD2j7PnPO0xRRJhmypEuQ-C5in8pY,11727 +pygments/lexers/ada.py,sha256=58k5ra1vGS4iLpW3h1ItY9ftzF3WevaeAAXzAYTiYkQ,5353 +pygments/lexers/agile.py,sha256=DN-7AVIqtG1MshA94rtSGYI_884hVHgzq405wD0_dl8,896 +pygments/lexers/algebra.py,sha256=yGTu9Tt-cQzAISQYIC5MS5a3z4QmL-tGcXnd_pkWGbk,9952 +pygments/lexers/ambient.py,sha256=UnzKpIlfSm3iitHvMd7XTMSY8TjZYYhKOC3AiARS_cE,2605 +pygments/lexers/amdgpu.py,sha256=S8qjn2UMLhBFm3Yn_c06XAGf8cl5x_ZeluelWG_-JAw,1723 +pygments/lexers/ampl.py,sha256=ZBRfDXm760gR1a1gqItnsHuoO3JdUcTBjJ5tFY9UtPA,4176 +pygments/lexers/apdlexer.py,sha256=Zr5-jgjxC8PKzRlEeclakZXPHci7FHBZghQ6wwiuT7A,30800 +pygments/lexers/apl.py,sha256=PTQMp-bxT5P-DbrEvFha10HBTcsDJ5srL3I1s9ljz58,3404 +pygments/lexers/archetype.py,sha256=pQVlP1Fb5OA8nn7QwmFaaaOSvvpoIsQVw43FVCQCve4,11538 +pygments/lexers/arrow.py,sha256=2PKdbWq3xQLF1KoDbWvSxpjwKRrznnDiArTflRGZzBo,3564 +pygments/lexers/arturo.py,sha256=U5MtRNHJtnBn4ZOeWmW6MKlVRG7SX6KhTRamDqzn9tA,11414 +pygments/lexers/asc.py,sha256=-DgZl9jccBDHPlDmjCsrEqx0-Q7ap7XVdNKtxLNWG1w,1693 +pygments/lexers/asm.py,sha256=xm2Y5mcT-sF3oQvair4SWs9EWTyndoaUoSsDy5v6shI,41967 +pygments/lexers/asn1.py,sha256=BlcloIX2bu6Q7BxGcksuhYFHGsXLVKyB4B9mFd4Pj6E,4262 +pygments/lexers/automation.py,sha256=Q61qon8EwpfakMh_2MS2E2zUUT16rG3UNIKPYjITeTs,19831 +pygments/lexers/bare.py,sha256=tWoei86JJX1k-ADhaXd5TgX6ItDTici9yFWpkTPhnfM,3020 +pygments/lexers/basic.py,sha256=qpVe5h8Fa7NJo1EihN-4R_UZpHO6my2Ssgkb-BktkKs,27989 +pygments/lexers/bdd.py,sha256=yysefcOFAEyk9kJ2y4EXmzJTecgLYUHlWixt_3YzPMU,1641 +pygments/lexers/berry.py,sha256=zxGowFb8HMIyN15-m8nmWnW6bPRR4esKtSEVugc9uXM,3209 +pygments/lexers/bibtex.py,sha256=yuNoPxwrJf9DCGUT17hxfDzbq_HtCLkQkRbBtiTVmeQ,4811 +pygments/lexers/blueprint.py,sha256=NzvWHMxCLDWt8hc6gB5jokltxVJgNa7Jwh4c61ng388,6188 +pygments/lexers/boa.py,sha256=dOot1XWNZThPIio2UyAX67K6EpISjSRCFjotD7dcnwE,3921 +pygments/lexers/bqn.py,sha256=nJiwrPKKbRF-qdai5tfqipwBkkko2P3weiZAjHUMimY,3671 +pygments/lexers/business.py,sha256=lRtekOJfsDkb12AGbuz10-G67OJrVJgCBtihTQ8_aoY,28345 +pygments/lexers/c_cpp.py,sha256=D7ZIswaHASlGBgoTlwnSqTQHf8_JyvvSt2L2q1W-F6g,18059 +pygments/lexers/c_like.py,sha256=FTGp17ds6X2rDZOHup2hH6BEn3gKK4nLm9pydNEhm0E,32021 +pygments/lexers/capnproto.py,sha256=XQJAh1WS-0ulqbTn9TdzR6gEgWLcuBqb4sj3jNsrhsY,2174 +pygments/lexers/carbon.py,sha256=av12YuTGZGpOa1Cmxp3lppx3LfSJUWbvOu0ixmUVll0,3211 +pygments/lexers/cddl.py,sha256=MKa70IwABgjBjYu15_Q9v8rsu2sr1a-i2jkiaPTI6sM,5076 +pygments/lexers/chapel.py,sha256=0n_fL3ehLC4pw4YKnmq9jxIXOJcxGPka1Wr1t1zsXPc,5156 +pygments/lexers/clean.py,sha256=dkDPAwF5BTALPeuKFoRKOSD3RfsKcGWbaRo6_G8LHng,6418 +pygments/lexers/codeql.py,sha256=ebvghn2zbrnETV4buVozMDmRCVKSdGiIN8ycLlHpGsE,2576 +pygments/lexers/comal.py,sha256=TC3NzcJ58ew5jw7qwK0kJ-okTA47psZje0yAIS39HR4,3179 +pygments/lexers/compiled.py,sha256=Slfo1sjWqcPawUwf0dIIZLBCL5pkOIoAX2S8Lxs02Mc,1426 +pygments/lexers/configs.py,sha256=wW8pY0Sa5a10pnAeTLGf48HhixQTVageIyHEf1aYMCc,50913 +pygments/lexers/console.py,sha256=-jAG120dupvV3kG3zC70brLJvSLwTFqMubBQuj_GVnU,4180 +pygments/lexers/cplint.py,sha256=DkbyE5EKydLgf6BRr1FhQrK-IeQPL7Zmjk0DVdlRFnQ,1389 +pygments/lexers/crystal.py,sha256=xU-RnpIkpjrquoxtOuOcP8fcesSJl4xhU7kO9m42LZY,15754 +pygments/lexers/csound.py,sha256=ioSw4Q04wdwjUAbnTZ1qLhUq1vxdWFxhh3QtEl5RAJc,16998 +pygments/lexers/css.py,sha256=JN1RBYsee-jrpHWrSmhN3TKc4TkOBn-_BEGpgTCzcqE,25376 +pygments/lexers/d.py,sha256=piOy0EJeiAwPHugiM3gVv0z7HNh3u2gZQoCUSASRbY4,9920 +pygments/lexers/dalvik.py,sha256=deFg2JPBktJ9mEGb9EgxNkmd6vaMjJFQVzUHo8NKIa8,4606 +pygments/lexers/data.py,sha256=o0x0SmB5ms_CPUPljEEEenOON4IQWn86DkwFjkJYCOg,27026 +pygments/lexers/dax.py,sha256=ASi73qmr7OA7cVZXF2GTYGt01Ly1vY8CgD_Pnpm8k-4,8098 +pygments/lexers/devicetree.py,sha256=RecSQCidt8DRE1QFCPUbwwR0hiRlNtsFihdGldeUn3k,4019 +pygments/lexers/diff.py,sha256=F6vxZ64wm5Nag_97de1H_3F700ZwCVnYjKvtT5jilww,5382 +pygments/lexers/dns.py,sha256=Hh5hJ7MXfrq36KgfyIRwK3X8o1LdR98IKERcV4eZ7HY,3891 +pygments/lexers/dotnet.py,sha256=NDE0kOmpe96GLO-zwNLazmj77E9ORGmKpa4ZMCXDXxQ,39441 +pygments/lexers/dsls.py,sha256=GnHKhGL5GxsRFnqC7-65NTPZLOZdmnllNrGP86x_fQE,36746 +pygments/lexers/dylan.py,sha256=7zZ1EbHWXeVHqTD36AqykKqo3fhuIh4sM-whcxUaH_Y,10409 +pygments/lexers/ecl.py,sha256=vhmpa2LBrHxsPkYcf3kPZ1ItVaLRDTebi186wY0xGZA,6371 +pygments/lexers/eiffel.py,sha256=5ydYIEFcgcMoEj4BlK31hZ0aJb8OX0RdAvuCNdlxwqw,2690 +pygments/lexers/elm.py,sha256=uRCddU8jK5vVkH6Y66y8KOsDJprIfrOgeYq3hv1PxAM,3152 +pygments/lexers/elpi.py,sha256=O9j_WKBPyvNFjCRuPciVpW4etVSnILm_T79BhCPZYmo,6877 +pygments/lexers/email.py,sha256=ZZL6yvwCRl1CEQyysuOu0lbabp5tjMutS7f3efFKGR4,4804 +pygments/lexers/erlang.py,sha256=bU11eVHvooLwmVknzN6Xkb2DMk7HbenqdNlYSzhThDM,19147 +pygments/lexers/esoteric.py,sha256=Jfp8UUKyKYsqLaqXRZT3GSM9dzkF65zduwfnH1GoGhU,10500 +pygments/lexers/ezhil.py,sha256=22r-xjvvBVpExTqCI-HycAwunDb1p5gY4tIfDmM0vDw,3272 +pygments/lexers/factor.py,sha256=urZ4En4uKFCLXdEkXLWg9EYUFGHQTTDCwNXtyq-ngok,19530 +pygments/lexers/fantom.py,sha256=JJ13-NwykD-iIESnuzCefCYeQDO95cHMJA8TasF4gHA,10231 +pygments/lexers/felix.py,sha256=F-v0si4zPtRelqzDQWXI1-tarCE-BvawziODxRU7378,9655 +pygments/lexers/fift.py,sha256=rOCwp3v5ocK5YOWvt7Td3Md--97_8e-7Sonx52uS8mA,1644 +pygments/lexers/floscript.py,sha256=aHh82k52jMuDuzl9LatrcSANJiXTCyjGU3SO53bwbb0,2667 +pygments/lexers/forth.py,sha256=ZMtsHdNbnS_0IdSYlfAlfTSPEr0MEsRo-YZriQNueTQ,7193 +pygments/lexers/fortran.py,sha256=1PE5dTxf4Df6LUeXFcmNtyeXWsC8tSiK5dYwPHIJeeQ,10382 +pygments/lexers/foxpro.py,sha256=CBkW62Fuibz3yfyelZCaEO8GGdFJWsuRhqwtsSeBwLM,26295 +pygments/lexers/freefem.py,sha256=LFBQk-m1-nNCgrl-VDH3QwnVWurvb7W29i06LoT207A,26913 +pygments/lexers/func.py,sha256=OR2rkM7gf9fKvad5WcFQln-_U_pb-RUCM9eQatToF4A,3700 +pygments/lexers/functional.py,sha256=fYT2AGZ642cRkIAId0rnXFBsx1c8LLEDRN_VuCEkUyM,693 +pygments/lexers/futhark.py,sha256=Vf1i4t-tR3zqaktVjhTzFNg_ts_9CcyA4ZDfDizbCmk,3743 +pygments/lexers/gcodelexer.py,sha256=4Xs9ax4-JZGupW_qSnHon39wQGpb-tNA3xorMKg841E,874 +pygments/lexers/gdscript.py,sha256=Ws7JKxy0M0IyZ_1iMfRvJPrizEwmeCNLDoeMIFaM-CU,7566 +pygments/lexers/gleam.py,sha256=XIlTcq6cB743pCqbNYo8PocSkjZyDPR6hHgdaJNJ1Vc,2392 +pygments/lexers/go.py,sha256=4LezefgyuqZWHzLZHieUkKTi-ssY6aHJxx7Z-LFaLK0,3783 +pygments/lexers/grammar_notation.py,sha256=LvzhRQHgwZzq9oceukZS_hwnKK58ee7Z5d0cwXOR734,8043 +pygments/lexers/graph.py,sha256=WFqoPA1c_hHYrV0i_F7-eUw3Co4_HmZY3GJ-TyDr670,4108 +pygments/lexers/graphics.py,sha256=tmF9NNALnvPnax8ywYC3pLOla45YXtp9UA0H-5EiTQY,39145 +pygments/lexers/graphql.py,sha256=O_zcrGrBaDaKTlUoJGRruxqk7CJi-NR92Y0Cs-KkCvw,5601 +pygments/lexers/graphviz.py,sha256=mzdXOMpwz9_V-be1eTAMyhkKCBl6UxCIXuq6C2yrtsw,1934 +pygments/lexers/gsql.py,sha256=VPZk9sb26-DumRkWfEaSTeoc0lx5xt5n-6eDDLezMtc,3990 +pygments/lexers/hare.py,sha256=PGCOuILktJsmtTpCZZKkMFtObfJuBpei8HM8HHuq1Tw,2649 +pygments/lexers/haskell.py,sha256=MYr74-PAC8kGJRX-dZmvZsHTc7a2u6yFS2B19LfDD7g,33262 +pygments/lexers/haxe.py,sha256=WHCy_nrXHnfLITfbdp3Ji3lqQU4HAsTUpXsLCp2_4sk,30974 +pygments/lexers/hdl.py,sha256=MOWxhmAuE4Ei0CKDqqaON7T8tl43geancrNYM136Z0U,22738 +pygments/lexers/hexdump.py,sha256=1lj9oJ-KiZXSVYvTMfGmEAQzNEW08WlMcC2I5aYvHK4,3653 +pygments/lexers/html.py,sha256=MxYTI4EeT7QxoGleCAyQq-8n_Sgly6tD95H5zanCNmk,21977 +pygments/lexers/idl.py,sha256=rcihUAGhfuGEaSW6pgFq6NzplT_pv0DagUoefg4zAmk,15449 +pygments/lexers/igor.py,sha256=wVefbUjb3ftaW3LCKGtX1JgLgiY4EmRor5gVOn8vQA8,31633 +pygments/lexers/inferno.py,sha256=ChE_5y5SLH_75Uv7D2dKWQMk2dlN6z1gY1IDjlJZ8rU,3135 +pygments/lexers/installers.py,sha256=ZHliit4Pxz1tYKOIjKkDXI5djTkpzYUMVIPR1xvUrL8,14435 +pygments/lexers/int_fiction.py,sha256=0ZzIa1sZDUQsltd1oHuS-BoNiOF8zKQfcVuDyK1Ttv8,56544 +pygments/lexers/iolang.py,sha256=L6dNDCLH0kxkIUi00fI4Z14QnRu79UcNDrgv02c5Zw8,1905 +pygments/lexers/j.py,sha256=DqNdwQGFLiZW3mCNLRg81gpmsy4Hgcai_9NP3LbWhNU,4853 +pygments/lexers/javascript.py,sha256=TGKQLSrCprCKfhLLGAq_0EOdvqvJKX9pOdKo7tCRurQ,63243 +pygments/lexers/jmespath.py,sha256=R5yA5LJ2nTIaDwnFIpSNGAThd0sAYFccwawA9xBptlg,2082 +pygments/lexers/jslt.py,sha256=OeYQf8O2_9FCaf9W6Q3a7rPdAFLthePCtVSgCrOTcl8,3700 +pygments/lexers/json5.py,sha256=8JZbc8EiTEZdKaIdQg3hXEh0mHWSzPlwd473a0nUuT0,2502 +pygments/lexers/jsonnet.py,sha256=bx2G6J4tJqGrJV1PyZrIWzWHXcoefCX-4lIxxtbn2gw,5636 +pygments/lexers/jsx.py,sha256=wGsoGSB40qAJrVfXwRPtan7OcK0O87RVsHHk0m6gogk,2693 +pygments/lexers/julia.py,sha256=0ZDJ9X83V5GqJzA6T6p0TTN8WHy2JAjvu-FSBXvfXdc,11710 +pygments/lexers/jvm.py,sha256=Yt1iQ3QodXRY-x_HUOGedhyuBBHn5jYH-I8NzOzHTlE,72667 +pygments/lexers/kuin.py,sha256=3dKKJVJlskgrvMKv2tY9NOsFfDjyo-3MLcJ1lFKdXSg,11405 +pygments/lexers/kusto.py,sha256=kaxkoPpEBDsBTCvCOkZZx7oGfv0jk_UNIRIRbfVAsBE,3477 +pygments/lexers/ldap.py,sha256=77vF4t_19x9V522cxRCM5d3HW8Ne3giYsFsMPVYYBw4,6551 +pygments/lexers/lean.py,sha256=7HWRgxFsxS1N9XKqw0vfKwaxl27s5YiVYtZeRUoTHFo,8570 +pygments/lexers/lilypond.py,sha256=yd2Tuv67um6EyCIr-VwBnlPhTHxMaQsBJ4nGgO5fjIk,9752 +pygments/lexers/lisp.py,sha256=EHUy1g4pzEsYPE-zGj2rAXm3YATE1j9dCQOr5-JPSkU,157668 +pygments/lexers/macaulay2.py,sha256=zkV-vxjQYa0Jj9TGfFP1iMgpTZ4ApQuAAIdJVGWb2is,33366 +pygments/lexers/make.py,sha256=YMI5DBCrxWca-pz9cVXcyfuHLcikPx9R_3pW_98Myqo,7831 +pygments/lexers/maple.py,sha256=Rs0dEmOMD3C1YQPd0mntN-vzReq4XfHegH6xV4lvJWo,7960 +pygments/lexers/markup.py,sha256=zWtxsyIx_1OxQzS6wLe8bEqglePv4RqvJjbia8AvV5c,65088 +pygments/lexers/math.py,sha256=P3ZK1ePd8ZnLdlmHezo2irCA8T2-nlHBoSaBoT5mEVI,695 +pygments/lexers/matlab.py,sha256=F9KO4qowIhfP8oVhCRRzE_1sqg4zmQbsB2NZH193PiM,133027 +pygments/lexers/maxima.py,sha256=a0h9Ggs9JEovTrzbJT-BLVbOqI29yPnaMZlkU5f_FeY,2715 +pygments/lexers/meson.py,sha256=BMrsDo6BH2lzTFw7JDwQ9SDNMTrRkXCNRDVf4aFHdsI,4336 +pygments/lexers/mime.py,sha256=yGrf3h37LK4b6ERBpFiL_qzn3JgOfGR5KLagnbWFl6c,7582 +pygments/lexers/minecraft.py,sha256=Nu88snDDPzM0D-742fFdUriczL-EE911pAd4_I4-pAw,13696 +pygments/lexers/mips.py,sha256=STKiZT67b3QERXXn7XKVxlPBu7vwbPC5EyCpuf3Jfbw,4656 +pygments/lexers/ml.py,sha256=t8sCv4BjvuBq6AihKKUwStEONIgdXCC2RMtO0RopNbM,35390 +pygments/lexers/modeling.py,sha256=M7B58bGB-Zwd1EmPxKqtRvg7TgNCyem3MVUHv0_H2SQ,13683 +pygments/lexers/modula2.py,sha256=NtpXBRoUCeHfflgB39LknSkCwhBHBKv2Er_pinjVsNE,53072 +pygments/lexers/mojo.py,sha256=8JRVoftN1E-W2woG0K-4n8PQXTUM9iY6Sl5sWb2uGNg,24233 +pygments/lexers/monte.py,sha256=baWU6zlXloenw9MO1MtEVGE9i3CfiXAYhqU621MIjRk,6289 +pygments/lexers/mosel.py,sha256=gjRdedhA1jTjoYoM1Gpaoog_I9o7TRbYMHk97N1TXwg,9297 +pygments/lexers/ncl.py,sha256=zJ6ahlitit4S0pBXc7Wu96PB7xOn59MwfR2HdY5_C60,63999 +pygments/lexers/nimrod.py,sha256=Q1NSqEkLC5wWt7xJyKC-vzWw_Iw2SfDNP_pyMFBuIfA,6413 +pygments/lexers/nit.py,sha256=p_hVD8GzMRl3CABVKHtYgnXFUQk0i5F2FbWFA6WXm6s,2725 +pygments/lexers/nix.py,sha256=NOrv20gdq-2A7eZ6c2gElPHv1Xx2pvv20-qOymL9GMg,4421 +pygments/lexers/numbair.py,sha256=fxkp2CXeXWKBMewfi1H4JSYkmm4kU58wZ2Sh9BDYAWQ,1758 +pygments/lexers/oberon.py,sha256=jw403qUUs7zpTHAs5CbLjb8qiuwtxLk0spDIYqGZwAw,4210 +pygments/lexers/objective.py,sha256=Fo1WB3JMj8sNeYnvB84H4_qwhOt4WNJtJWjVEOwrJGk,23297 +pygments/lexers/ooc.py,sha256=kD1XaJZaihDF_s-Vyu1Bx68S_9zFt2rhox7NF8LpOZM,3002 +pygments/lexers/openscad.py,sha256=h9I1k8kiuQmhX5vZm6VDSr2fa5Finy0sN8ZDIE-jx1c,3700 +pygments/lexers/other.py,sha256=WLVyqPsvm9oSXIbZwbfyJloS6HGgoFW5nVTaU1uQpTw,1763 +pygments/lexers/parasail.py,sha256=DWMGhtyQgGTXbIgQl_mID6CKqi-Dhbvs_dTkmvrZXfE,2719 +pygments/lexers/parsers.py,sha256=feNgxroPoWRf0NEsON2mtmKDUfslIQppukw6ndEsQ3M,26596 +pygments/lexers/pascal.py,sha256=N2tRAjlXnTxggAzzk2tOOAVzeC2MBzrXy97_HQl5n44,30989 +pygments/lexers/pawn.py,sha256=LWUYQYsebMMt2d5oxX1HYWvBqbakR1h7Av_z8Vw94Wg,8253 +pygments/lexers/pddl.py,sha256=Mk4_BzlROJCd0xR4KKRRSrbj0F7LLQcBRjmsmtWmrCg,2989 +pygments/lexers/perl.py,sha256=9BXn3tyHMA49NvzbM9E2czSCHjeU7bvaPLUcoZrhz-4,39192 +pygments/lexers/phix.py,sha256=hZqychqo5sFMBDESzDPXg1DYHQe_9sn294UfbjihaFk,23249 +pygments/lexers/php.py,sha256=l4hzQrlm0525i5dSw9Vmjcai3TzbPT6DkjzxPg9l6Zc,13061 +pygments/lexers/pointless.py,sha256=WSDjqQyGrNIGmTCdaMxl4zk7OZTlJAMzeUZ02kfgcTI,1974 +pygments/lexers/pony.py,sha256=EXrMkacqMZblI7v4AvBRQe-3Py8__bx5FOgjCLdfXxQ,3279 +pygments/lexers/praat.py,sha256=4UFK-nbC6WkZBhJgcQqEGqq9CocJkW7AmT_OJQbjWzk,12676 +pygments/lexers/procfile.py,sha256=05W2fyofLTP-FbEdSXD1eles-PPqVNfF6RWXjQdW2us,1155 +pygments/lexers/prolog.py,sha256=9Kc5YNUFqkfWu2sYoyzC3RX65abf1bm7oHr86z1s4kQ,12866 +pygments/lexers/promql.py,sha256=n-0vo-o8-ZasqP3Va4ujs562UfZSLfZF-RzT71yL0Tk,4738 +pygments/lexers/prql.py,sha256=PFReuvhbv4K5aeu6lvDfw4m-3hULkB3r43bKAy948os,8747 +pygments/lexers/ptx.py,sha256=KSHAvbiNVUntKilQ6EPYoLFocmJpRsBy_7fW6_Nrs1Y,4501 +pygments/lexers/python.py,sha256=WZe7fBAHKZ_BxPg8qIU26UGhk8qwUYyENJ3IyPW64mc,53805 +pygments/lexers/q.py,sha256=WQFUh3JrpK2j-VGW_Ytn3uJ5frUNmQIFnLtMVGRA9DI,6936 +pygments/lexers/qlik.py,sha256=2wqwdfIjrAz6RNBsP4MyeLX8Z7QpIGzxtf1CvaOlr_g,3693 +pygments/lexers/qvt.py,sha256=XMBnsWRrvCDf989OuDeb-KpszAkeETiACyaghZeL1ns,6103 +pygments/lexers/r.py,sha256=B6WgrD9SY1UTCV1fQBSlZbezPfpYsARn3FQIHcFYOiM,6474 +pygments/lexers/rdf.py,sha256=qUzxLna9v071bHhZAjdsBi8dKaJNk_h9g1ZRUAYCfoo,16056 +pygments/lexers/rebol.py,sha256=4u3N4kzui55HapopXDu3Kt0jczxDZ4buzwR7Mt4tQiM,18259 +pygments/lexers/rego.py,sha256=Rx5Gphbktr9ojg5DbqlyxHeQqqtF7g8W-oF0rmloDNY,1748 +pygments/lexers/resource.py,sha256=ioEzgWksB5HCjoz85XNkQPSd7n5kL0SZiuPkJP1hunQ,2927 +pygments/lexers/ride.py,sha256=kCWdxuR3PclVi4wiA0uUx4CYEFwuTqoMsKjhSW4X3yg,5035 +pygments/lexers/rita.py,sha256=Mj1QNxx1sWAZYC02kw8piVckaiw9B0MqQtiIiDFH0pA,1127 +pygments/lexers/rnc.py,sha256=g7ZD334PMGUqy_Ij64laSN1vJerwHqVkegfMCa3E-y8,1972 +pygments/lexers/roboconf.py,sha256=HbYuK5CqmQdd63SRY2nle01r7-p7mil0SnoauYDmEOY,2074 +pygments/lexers/robotframework.py,sha256=c4U1B9Q9ITBCTohqJTZOvkfyeVbenN4xhzSWIoZh5eU,18448 +pygments/lexers/ruby.py,sha256=uG617E5abBZcECRCqkhIfc-IbZcRb5cGuUZq_xpax90,22753 +pygments/lexers/rust.py,sha256=ZY-9vtsreBP0NfDd0WCouLSp_9MChAL8U8Abe-m9PB8,8260 +pygments/lexers/sas.py,sha256=C1Uz2s9DU6_s2kL-cB_PAGPtpyK5THlmhNmCumC1l48,9456 +pygments/lexers/savi.py,sha256=jrmruK0GnXktgBTWXW3oN3TXtofn3HBbkMlHnR84cko,4878 +pygments/lexers/scdoc.py,sha256=DXRmFDmYuc7h3gPAAVhfcL1OEbNBK5RdPpJqQzF3ZTk,2524 +pygments/lexers/scripting.py,sha256=eaYlkDK-_cAwTcCBHP6QXBCz8n6OzbhzdkRe0uV0xWY,81814 +pygments/lexers/sgf.py,sha256=w6C513ENaO2YCnqrduK7k03NaMDf-pgygvfzq2NaSRk,1985 +pygments/lexers/shell.py,sha256=dCS1zwkf5KwTog4__MnMC7h3Xmwv4_d3fnEV29tSwXI,36381 +pygments/lexers/sieve.py,sha256=eob-L84yf2jmhdNyYZUlbUJozdcd6GXcHW68lmAe8WE,2514 +pygments/lexers/slash.py,sha256=I-cRepmaxhL1SgYvD1hHX3gNBFI8NPszdU7hn1o5JlA,8484 +pygments/lexers/smalltalk.py,sha256=ue2PmqDK2sw0j75WdseiiENJBdZ1OwysH2Op1QN1r24,7204 +pygments/lexers/smithy.py,sha256=VREWoeuz7ANap_Uiopn7rs0Tnsfc-xBisDJKRGQY_y8,2659 +pygments/lexers/smv.py,sha256=He_VBSMbWONMWZmkrB5RYR0cfHVnMyKIXz68IFYl-a8,2805 +pygments/lexers/snobol.py,sha256=qDzb41xQQWMNmjB2MtZs23pFoFgZ2gbRZhK_Ir03r7I,2778 +pygments/lexers/solidity.py,sha256=Tixfnwku4Yezj6nNm8xVaw7EdV1qgAgdwahdTFP0St8,3163 +pygments/lexers/soong.py,sha256=Vm18vV4g6T8UPgjjY2yTRlSXGDpZowmuqQUBFfm4A9A,2339 +pygments/lexers/sophia.py,sha256=2YtYIT8iwAoW0B7TZuuoG_ZILhJV-2A7oBGat-98naE,3376 +pygments/lexers/special.py,sha256=8JuR2Vex8X-RWnC36S0HXTHWp2qmZclc90-TrLUWyaY,3585 +pygments/lexers/spice.py,sha256=m4nK0q4Sq_OFQez7kGWfki0No4ZV24YrONfHVj1Piqs,2790 +pygments/lexers/sql.py,sha256=WSG6vOsR87EEEwSQefP_Z7TauUG_BjqMHUFmPaSOVj4,41476 +pygments/lexers/srcinfo.py,sha256=B8vDs-sJogG3mWa5Hp_7JfHHUMyYRwGvKv6cKbFQXLM,1746 +pygments/lexers/stata.py,sha256=Zr9BC52D5O_3BbdW0N-tzoUmy0NTguL2sC-saXRVM-c,6415 +pygments/lexers/supercollider.py,sha256=_H5wDrn0DiGnlhB_cz6Rt_lo2TvqjSm0o6NPTd9R4Ko,3697 +pygments/lexers/tablegen.py,sha256=1JjedXYY18BNiY9JtNGLOtGfiwduNDZpQLBGTeQ6jAw,3987 +pygments/lexers/tact.py,sha256=X_lsxjFUMaC1TmYysXJq9tmAGifRnil83Bt1zA86Xdo,10809 +pygments/lexers/tal.py,sha256=xS9PlaWQOPj8MVr56fUNq31vUQKRWoLTlyWj9ZHm8AM,2904 +pygments/lexers/tcl.py,sha256=lK97ju4nikkt-oGOzIeyFEM98yq4dZSI8uEmYsq0R6c,5512 +pygments/lexers/teal.py,sha256=t3dqy_Arwv8_yExbX_xiFxv1TqJLPv4vh1MVKjKwS4Y,3522 +pygments/lexers/templates.py,sha256=BVdjYeoacIUuFyHTG39j4PxeNCe5E1oUURjH1rITrI4,75731 +pygments/lexers/teraterm.py,sha256=ciwztagW5Drg2gr17Qykrh6GwMsKy7e4xdQshX95GyQ,9718 +pygments/lexers/testing.py,sha256=YZgDgUEaLEYKSKEqpDsUi3Bn-Db_D42IlyiSsr1oX8U,10810 +pygments/lexers/text.py,sha256=nOCQPssIlKdVWU3PKxZiBPkf_KFM2V48IOssSyqhFY8,1068 +pygments/lexers/textedit.py,sha256=ttT4Ph-hIdgFLG6maRy_GskkziTFK0Wcg28yU0s6lek,7760 +pygments/lexers/textfmts.py,sha256=mi9KLEq4mrzDJbEc8G3VM-mSki_Tylkzodu47yH6z84,15524 +pygments/lexers/theorem.py,sha256=51ppBAEdhJmwU_lC916zMyjEoKLXqf89VAE_Lr0PNCc,17855 +pygments/lexers/thingsdb.py,sha256=x_fHNkLA-hIJyeIs6rg_X8n5OLYvFqaSu1FhI3apI5Y,6017 +pygments/lexers/tlb.py,sha256=ue2gqm45BI512lM13O8skAky9zAb7pLMrxZ8pbt5zRU,1450 +pygments/lexers/tls.py,sha256=_uQUVuMRDOhN-XUyGR5DIlVCk1CUZ1fIOSN4_WQYPKk,1540 +pygments/lexers/tnt.py,sha256=pK4LgoKON7u1xF66JYFncAPSbD8DZaeI_WTZ9HqEFlY,10456 +pygments/lexers/trafficscript.py,sha256=X3B8kgxS54ecuok9ic6Hkp-UMn5DvOmCK0p70Tz27Cw,1506 +pygments/lexers/typoscript.py,sha256=mBuePiVZUoAORPKsHwrx6fBWiy3fAIqG-2O67QmMiFI,8332 +pygments/lexers/typst.py,sha256=zIJBEhUXtWp5OiyAmvFA5m8d1EQG-ocwrJ677dvTUAk,7167 +pygments/lexers/ul4.py,sha256=rCaw0J9j3cdql9lX_HTilg65k9-9S118zOA6TAYfxaM,10499 +pygments/lexers/unicon.py,sha256=RAqoCnAAJBYOAGdR8ng0g6FtB39bGemLRlIqv5mcg9E,18625 +pygments/lexers/urbi.py,sha256=ajNP70NJg32jNnFDZsLvr_-4TToSGqRGkFyAPIJLfCU,6082 +pygments/lexers/usd.py,sha256=2eEGouolodYS402P_gtBrn4lLzpg1z8uHwPCKqjUb_k,3304 +pygments/lexers/varnish.py,sha256=dSh0Ku9SrjmlB29Fi_mWdWavN7M0cMKeepR4a34sOyI,7473 +pygments/lexers/verification.py,sha256=Qu433Q_h3EK3uS4bJoLRFZK0kIVwzX5AFKsa4Z-qnxA,3934 +pygments/lexers/verifpal.py,sha256=buyOOzCo_dGnoC40h0tthylHVVpgDt8qXu4olLvYy_4,2661 +pygments/lexers/vip.py,sha256=2lEV4cLV9p4E37wctBL7zkZ4ZU4p3HVsiLJFzB1bie0,5711 +pygments/lexers/vyper.py,sha256=Zq6sQIUBk6mBdpgOVgu3A6swGoBne0kDlRyjZznm2BY,5615 +pygments/lexers/web.py,sha256=4W9a7vcskrGJnxt4KmoE3SZydWB1qLq7lP2XS85J_m8,913 +pygments/lexers/webassembly.py,sha256=zgcMouzLawcbeFr6w_SOvGoUR68ZtqnnsbOcWEVleLk,5698 +pygments/lexers/webidl.py,sha256=ODtVmw4gVzI8HQWxuEckP6KMwm8WP2G2lSZEjagDXts,10516 +pygments/lexers/webmisc.py,sha256=-_-INDVdk47e2jlj-9bFcuLtntqVorBqIjlnwPfZFdI,40564 +pygments/lexers/wgsl.py,sha256=9igd9dzixGIgNewruv9mPnFms-c9BahkZcCCrZygv84,11880 +pygments/lexers/whiley.py,sha256=lMr750lA4MZsB4xqzVsIRtVMJIC3_dArhFYTHvOPwvA,4017 +pygments/lexers/wowtoc.py,sha256=8xxvf0xGeYtf4PE7KtkHZ_ly9xY_XXHrpCitdKE42Ro,4076 +pygments/lexers/wren.py,sha256=goGXnAMKKa13LLL40ybT3aMGPrk3gCRwZQFYAkKB_w0,3229 +pygments/lexers/x10.py,sha256=Q-AmgdF2E-N7mtOPpZ07CsxrTVnikyqC4uRRv6H75sk,1943 +pygments/lexers/xorg.py,sha256=9ttrBd3_Y2nXANsqtMposSgblYmMYqWXQ-Iz5RH9RsU,925 +pygments/lexers/yang.py,sha256=13CWbSaNr9giOHz4o0SXSklh0bfWt0ah14jJGpTvcn0,4499 +pygments/lexers/yara.py,sha256=jUSv78KTDfguCoAoAZKbYzQERkkyxBBWv5dInVrkDxo,2427 +pygments/lexers/zig.py,sha256=f-80MVOSp1KnczAMokQLVM-_wAEOD16EcGFnaCNlsN0,3976 +pygments/modeline.py,sha256=K5eSkR8GS1r5OkXXTHOcV0aM_6xpk9eWNEIAW-OOJ2g,1005 +pygments/plugin.py,sha256=tPx0rJCTIZ9ioRgLNYG4pifCbAwTRUZddvLw-NfAk2w,1891 +pygments/regexopt.py,sha256=wXaP9Gjp_hKAdnICqoDkRxAOQJSc4v3X6mcxx3z-TNs,3072 +pygments/scanner.py,sha256=nNcETRR1tRuiTaHmHSTTECVYFPcLf6mDZu1e4u91A9E,3092 +pygments/sphinxext.py,sha256=VEe_oHNgLoEGMHc2ROfbee2mF2PPREFyE6_m_JN5FvQ,7898 +pygments/style.py,sha256=Cpw9dCAyW3_JAwFRXOJXmtKb5ZwO2_5KSmlq6q4fZw4,6408 +pygments/styles/__init__.py,sha256=f9KCQXN4uKbe8aI8-L3qTC-_XPfT563FwTg6VTGVfwI,2006 +pygments/styles/__pycache__/__init__.cpython-312.pyc,, +pygments/styles/__pycache__/_mapping.cpython-312.pyc,, +pygments/styles/__pycache__/abap.cpython-312.pyc,, +pygments/styles/__pycache__/algol.cpython-312.pyc,, +pygments/styles/__pycache__/algol_nu.cpython-312.pyc,, +pygments/styles/__pycache__/arduino.cpython-312.pyc,, +pygments/styles/__pycache__/autumn.cpython-312.pyc,, +pygments/styles/__pycache__/borland.cpython-312.pyc,, +pygments/styles/__pycache__/bw.cpython-312.pyc,, +pygments/styles/__pycache__/coffee.cpython-312.pyc,, +pygments/styles/__pycache__/colorful.cpython-312.pyc,, +pygments/styles/__pycache__/default.cpython-312.pyc,, +pygments/styles/__pycache__/dracula.cpython-312.pyc,, +pygments/styles/__pycache__/emacs.cpython-312.pyc,, +pygments/styles/__pycache__/friendly.cpython-312.pyc,, +pygments/styles/__pycache__/friendly_grayscale.cpython-312.pyc,, +pygments/styles/__pycache__/fruity.cpython-312.pyc,, +pygments/styles/__pycache__/gh_dark.cpython-312.pyc,, +pygments/styles/__pycache__/gruvbox.cpython-312.pyc,, +pygments/styles/__pycache__/igor.cpython-312.pyc,, +pygments/styles/__pycache__/inkpot.cpython-312.pyc,, +pygments/styles/__pycache__/lightbulb.cpython-312.pyc,, +pygments/styles/__pycache__/lilypond.cpython-312.pyc,, +pygments/styles/__pycache__/lovelace.cpython-312.pyc,, +pygments/styles/__pycache__/manni.cpython-312.pyc,, +pygments/styles/__pycache__/material.cpython-312.pyc,, +pygments/styles/__pycache__/monokai.cpython-312.pyc,, +pygments/styles/__pycache__/murphy.cpython-312.pyc,, +pygments/styles/__pycache__/native.cpython-312.pyc,, +pygments/styles/__pycache__/nord.cpython-312.pyc,, +pygments/styles/__pycache__/onedark.cpython-312.pyc,, +pygments/styles/__pycache__/paraiso_dark.cpython-312.pyc,, +pygments/styles/__pycache__/paraiso_light.cpython-312.pyc,, +pygments/styles/__pycache__/pastie.cpython-312.pyc,, +pygments/styles/__pycache__/perldoc.cpython-312.pyc,, +pygments/styles/__pycache__/rainbow_dash.cpython-312.pyc,, +pygments/styles/__pycache__/rrt.cpython-312.pyc,, +pygments/styles/__pycache__/sas.cpython-312.pyc,, +pygments/styles/__pycache__/solarized.cpython-312.pyc,, +pygments/styles/__pycache__/staroffice.cpython-312.pyc,, +pygments/styles/__pycache__/stata_dark.cpython-312.pyc,, +pygments/styles/__pycache__/stata_light.cpython-312.pyc,, +pygments/styles/__pycache__/tango.cpython-312.pyc,, +pygments/styles/__pycache__/trac.cpython-312.pyc,, +pygments/styles/__pycache__/vim.cpython-312.pyc,, +pygments/styles/__pycache__/vs.cpython-312.pyc,, +pygments/styles/__pycache__/xcode.cpython-312.pyc,, +pygments/styles/__pycache__/zenburn.cpython-312.pyc,, +pygments/styles/_mapping.py,sha256=6lovFUE29tz6EsV3XYY4hgozJ7q1JL7cfO3UOlgnS8w,3312 +pygments/styles/abap.py,sha256=64Uwr8uPdEdcT-tE-Y2VveTXfH3SkqH9qdMgY49YHQI,749 +pygments/styles/algol.py,sha256=fCuk8ITTehvbJSufiaKlgnFsKbl-xFxxR82xhltc-cQ,2262 +pygments/styles/algol_nu.py,sha256=Gv9WfHJvYegGcUk1zcufQgsdXPNjCUNk8sAHyrSGGh4,2283 +pygments/styles/arduino.py,sha256=NoUB8xk7M1HGPoLfuySOLU0sVwoTuLcZqllXl2EO_iE,4557 +pygments/styles/autumn.py,sha256=fLLfjHXjxCl6crBAxEsBLH372ALMkFacA2bG6KFbJi4,2195 +pygments/styles/borland.py,sha256=_0ySKp4KGCSgtYjPe8uzD6gQhlmAIR4T43i-FoRYNOM,1611 +pygments/styles/bw.py,sha256=vhk8Xoj64fLPdA9IQU6mUVsYMel255jR-FDU7BjIHtI,1406 +pygments/styles/coffee.py,sha256=NqLt-fc7LONma1BGggbceVRY9uDE70WBuZXqK4zwaco,2308 +pygments/styles/colorful.py,sha256=mYcSbehtH7itH_QV9NqJp4Wna1X4lrwl2wkVXS2u-5A,2832 +pygments/styles/default.py,sha256=RTgG2zKWWUxPTDCFxhTnyZI_WZBIVgu5XsUpNvFisCA,2588 +pygments/styles/dracula.py,sha256=vRJmixBoSKV9o8NVQhXGViQqchhIYugfikLmvX0DoBw,2182 +pygments/styles/emacs.py,sha256=TiOG9oc83qToMCRMnJrXtWYqnzAqYycRz_50OoCKtxc,2535 +pygments/styles/friendly.py,sha256=oAi-l9anQTs9STDmUzXGDlOegatEOH4hpD0j6o6dZGM,2604 +pygments/styles/friendly_grayscale.py,sha256=a7Cqkzt6-uTiXvj6GoYBXzRvX5_zviCjjRB04Kf_-Q0,2828 +pygments/styles/fruity.py,sha256=GfSUTG0stlJr5Ow_saCaxbI2IB4-34Dp2TuRTpfUJBs,1324 +pygments/styles/gh_dark.py,sha256=ruNX3d4rf22rx-8HnwvGbNbXRQpXCNcHU1HNq6N4uNg,3590 +pygments/styles/gruvbox.py,sha256=KrFoHEoVnZW6XM9udyXncPomeGyZgIDsNWOH3kCrxFQ,3387 +pygments/styles/igor.py,sha256=fYYPhM0dRCvcDTMVrMVO5oFKnYm-8YVlsuVBoczFLtY,737 +pygments/styles/inkpot.py,sha256=jggSeX9NV15eOL2oJaVmZ6vmV7LWRzXJQRUqcWEqGRs,2404 +pygments/styles/lightbulb.py,sha256=Y8u1qdvlHfBqI2jJex55SkvVatVo_FjEUzE6h-X7m-0,3172 +pygments/styles/lilypond.py,sha256=Y6fp_sEL-zESmxAaMxzjtrKk90cuDC_DalNdC8wj0nw,2066 +pygments/styles/lovelace.py,sha256=cA9uhmbnzY04MccsiYSgMY7fvb4WMRbegWBUrGvXh1M,3178 +pygments/styles/manni.py,sha256=g9FyO7plTwfMm2cU4iiKgdlkMlvQLG6l2Lwkgz5ITS4,2443 +pygments/styles/material.py,sha256=LDmgomAbgtJDZhbv446_zIwgYh50UAqEEtgYNUns1rQ,4201 +pygments/styles/monokai.py,sha256=lrxTJpkBarV9gTLkBQryZ6oNSjekAVheJueKJP5iEYA,5184 +pygments/styles/murphy.py,sha256=-AKZiLkpiWej-otjHMsYCE-I-_IzCOLJY-_GBdKRZRw,2805 +pygments/styles/native.py,sha256=l6tezGSQTB8p_SyOXJ0PWI7KzCeEdtsPmVc4Yn4_CwU,2043 +pygments/styles/nord.py,sha256=GDt3WAaqaWsiCeqpIBPxd8TEUX708fGfwaA7S0w0oy0,5391 +pygments/styles/onedark.py,sha256=k80cZEppCEF-HLoxy_FEA0QmQDZze68nHVMNGyUVa28,1719 +pygments/styles/paraiso_dark.py,sha256=Jkrg4nUKIVNF8U4fPNV_Smq_g9NFbb9eiUrjYpVgQZg,5662 +pygments/styles/paraiso_light.py,sha256=MxN964ZEpze3wF0ss-igaa2I7E684MHe-Zq0rWPH3wo,5668 +pygments/styles/pastie.py,sha256=ZvAs9UpBNYFC-5PFrCRGYnm3FoPKb-eKR-ozbWZP-4g,2525 +pygments/styles/perldoc.py,sha256=HSxB93e4UpQkZspReQ34FeJbZ-59ksGvdaH-hToehi8,2230 +pygments/styles/rainbow_dash.py,sha256=4ugL18Or7aNtaLfPfCLFRiFy0Gu2RA4a9G2LQUE9SrM,2390 +pygments/styles/rrt.py,sha256=fgzfpC0PC_SCcLOMCNEIQTjPUMOncRe7SR10GfSRbXY,1006 +pygments/styles/sas.py,sha256=yzoXmbfQ2ND1WWq93b4vVGYkQSZHPqb4ymes9YYRT3w,1440 +pygments/styles/solarized.py,sha256=qupILFZn02WspnAF5SPYb-W8guo9xnUtjb1HeLw3XgE,4247 +pygments/styles/staroffice.py,sha256=CLbBeMoxay21Xyu3Af2p4xUXyG1_6ydCbvs5RJKYe5w,831 +pygments/styles/stata_dark.py,sha256=vX8SwHV__sG92F4CKribG08MJfSVq98dgs7gEA_n9yc,1257 +pygments/styles/stata_light.py,sha256=uV3GE-ylvffQ0yN3py1YAVqBB5wflIKZbceyK1Lqvrc,1289 +pygments/styles/tango.py,sha256=O2wcM4hHuU1Yt071M9CK7JPtiiSCqyxtT9tbiQICV28,7137 +pygments/styles/trac.py,sha256=9kMv1ZZyMKACWlx2fQVjRP0I2pgcRYCNrd7iGGZg9qk,1981 +pygments/styles/vim.py,sha256=J7_TqvrGkTX_XuTHW0In5wqPLAUPRWyr1122XueZWmM,2019 +pygments/styles/vs.py,sha256=s7YnzbIPuFU3LIke27mc4lAQSn2R3vbbHc1baMGSU_U,1130 +pygments/styles/xcode.py,sha256=PbQdzgGaA4a9LAU1i58alY9kM4IFlQX5jHQwOYmf_Rk,1504 +pygments/styles/zenburn.py,sha256=suZEKzBTCYdhf2cxNwcY7UATJK1tq5eYhGdBcXdf6MU,2203 +pygments/token.py,sha256=WbdWGhYm_Vosb0DDxW9lHNPgITXfWTsQmHt6cy9RbcM,6226 +pygments/unistring.py,sha256=al-_rBemRuGvinsrM6atNsHTmJ6DUbw24q2O2Ru1cBc,63208 +pygments/util.py,sha256=oRtSpiAo5jM9ulntkvVbgXUdiAW57jnuYGB7t9fYuhc,10031 diff --git a/Backend/venv/lib/python3.12/site-packages/pygments-2.19.2.dist-info/WHEEL b/Backend/venv/lib/python3.12/site-packages/pygments-2.19.2.dist-info/WHEEL new file mode 100644 index 00000000..12228d41 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/pygments-2.19.2.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: hatchling 1.27.0 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/Backend/venv/lib/python3.12/site-packages/pygments-2.19.2.dist-info/entry_points.txt b/Backend/venv/lib/python3.12/site-packages/pygments-2.19.2.dist-info/entry_points.txt new file mode 100644 index 00000000..15498e35 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/pygments-2.19.2.dist-info/entry_points.txt @@ -0,0 +1,2 @@ +[console_scripts] +pygmentize = pygments.cmdline:main diff --git a/Backend/venv/lib/python3.12/site-packages/pygments-2.19.2.dist-info/licenses/AUTHORS b/Backend/venv/lib/python3.12/site-packages/pygments-2.19.2.dist-info/licenses/AUTHORS new file mode 100644 index 00000000..811c66ae --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/pygments-2.19.2.dist-info/licenses/AUTHORS @@ -0,0 +1,291 @@ +Pygments is written and maintained by Georg Brandl . + +Major developers are Tim Hatch and Armin Ronacher +. + +Other contributors, listed alphabetically, are: + +* Sam Aaron -- Ioke lexer +* Jean Abou Samra -- LilyPond lexer +* João Abecasis -- JSLT lexer +* Ali Afshar -- image formatter +* Thomas Aglassinger -- Easytrieve, JCL, Rexx, Transact-SQL and VBScript + lexers +* Maxence Ahlouche -- PostgreSQL Explain lexer +* Muthiah Annamalai -- Ezhil lexer +* Nikolay Antipov -- OpenSCAD lexer +* Kumar Appaiah -- Debian control lexer +* Andreas Amann -- AppleScript lexer +* Timothy Armstrong -- Dart lexer fixes +* Jeffrey Arnold -- R/S, Rd, BUGS, Jags, and Stan lexers +* Eiríkr Åsheim -- Uxntal lexer +* Jeremy Ashkenas -- CoffeeScript lexer +* José Joaquín Atria -- Praat lexer +* Stefan Matthias Aust -- Smalltalk lexer +* Lucas Bajolet -- Nit lexer +* Ben Bangert -- Mako lexers +* Max Battcher -- Darcs patch lexer +* Thomas Baruchel -- APL lexer +* Tim Baumann -- (Literate) Agda lexer +* Paul Baumgart, 280 North, Inc. -- Objective-J lexer +* Michael Bayer -- Myghty lexers +* Thomas Beale -- Archetype lexers +* John Benediktsson -- Factor lexer +* David Benjamin, Google LLC -- TLS lexer +* Trevor Bergeron -- mIRC formatter +* Vincent Bernat -- LessCSS lexer +* Christopher Bertels -- Fancy lexer +* Sébastien Bigaret -- QVT Operational lexer +* Jarrett Billingsley -- MiniD lexer +* Adam Blinkinsop -- Haskell, Redcode lexers +* Stéphane Blondon -- Procfile, SGF and Sieve lexers +* Frits van Bommel -- assembler lexers +* Pierre Bourdon -- bugfixes +* Martijn Braam -- Kernel log lexer, BARE lexer +* JD Browne, Google LLC -- GoogleSQL lexer +* Matthias Bussonnier -- ANSI style handling for terminal-256 formatter +* chebee7i -- Python traceback lexer improvements +* Hiram Chirino -- Scaml and Jade lexers +* Mauricio Caceres -- SAS and Stata lexers. +* Michael Camilleri, John Gabriele, sogaiu -- Janet lexer +* Daren Chandisingh -- Gleam lexer +* Ian Cooper -- VGL lexer +* David Corbett -- Inform, Jasmin, JSGF, Snowball, and TADS 3 lexers +* Leaf Corcoran -- MoonScript lexer +* Fraser Cormack -- TableGen lexer +* Gabriel Corona -- ASN.1 lexer +* Christopher Creutzig -- MuPAD lexer +* Daniël W. Crompton -- Pike lexer +* Pete Curry -- bugfixes +* Bryan Davis -- EBNF lexer +* Bruno Deferrari -- Shen lexer +* Walter Dörwald -- UL4 lexer +* Luke Drummond -- Meson lexer +* Giedrius Dubinskas -- HTML formatter improvements +* Owen Durni -- Haxe lexer +* Alexander Dutton, Oxford University Computing Services -- SPARQL lexer +* James Edwards -- Terraform lexer +* Nick Efford -- Python 3 lexer +* Sven Efftinge -- Xtend lexer +* Artem Egorkine -- terminal256 formatter +* Matthew Fernandez -- CAmkES lexer +* Paweł Fertyk -- GDScript lexer, HTML formatter improvements +* Michael Ficarra -- CPSA lexer +* James H. Fisher -- PostScript lexer +* Amanda Fitch, Google LLC -- GoogleSQL lexer +* William S. Fulton -- SWIG lexer +* Carlos Galdino -- Elixir and Elixir Console lexers +* Michael Galloy -- IDL lexer +* Naveen Garg -- Autohotkey lexer +* Simon Garnotel -- FreeFem++ lexer +* Laurent Gautier -- R/S lexer +* Alex Gaynor -- PyPy log lexer +* Richard Gerkin -- Igor Pro lexer +* Alain Gilbert -- TypeScript lexer +* Alex Gilding -- BlitzBasic lexer +* GitHub, Inc -- DASM16, Augeas, TOML, and Slash lexers +* Bertrand Goetzmann -- Groovy lexer +* Krzysiek Goj -- Scala lexer +* Rostyslav Golda -- FloScript lexer +* Andrey Golovizin -- BibTeX lexers +* Matt Good -- Genshi, Cheetah lexers +* Michał Górny -- vim modeline support +* Alex Gosse -- TrafficScript lexer +* Patrick Gotthardt -- PHP namespaces support +* Hubert Gruniaux -- C and C++ lexer improvements +* Olivier Guibe -- Asymptote lexer +* Phil Hagelberg -- Fennel lexer +* Florian Hahn -- Boogie lexer +* Martin Harriman -- SNOBOL lexer +* Matthew Harrison -- SVG formatter +* Steven Hazel -- Tcl lexer +* Dan Michael Heggø -- Turtle lexer +* Aslak Hellesøy -- Gherkin lexer +* Greg Hendershott -- Racket lexer +* Justin Hendrick -- ParaSail lexer +* Jordi Gutiérrez Hermoso -- Octave lexer +* David Hess, Fish Software, Inc. -- Objective-J lexer +* Ken Hilton -- Typographic Number Theory and Arrow lexers +* Varun Hiremath -- Debian control lexer +* Rob Hoelz -- Perl 6 lexer +* Doug Hogan -- Mscgen lexer +* Ben Hollis -- Mason lexer +* Max Horn -- GAP lexer +* Fred Hornsey -- OMG IDL Lexer +* Alastair Houghton -- Lexer inheritance facility +* Tim Howard -- BlitzMax lexer +* Dustin Howett -- Logos lexer +* Ivan Inozemtsev -- Fantom lexer +* Hiroaki Itoh -- Shell console rewrite, Lexers for PowerShell session, + MSDOS session, BC, WDiff +* Brian R. Jackson -- Tea lexer +* Christian Jann -- ShellSession lexer +* Jonas Camillus Jeppesen -- Line numbers and line highlighting for + RTF-formatter +* Dennis Kaarsemaker -- sources.list lexer +* Dmitri Kabak -- Inferno Limbo lexer +* Igor Kalnitsky -- vhdl lexer +* Colin Kennedy - USD lexer +* Alexander Kit -- MaskJS lexer +* Pekka Klärck -- Robot Framework lexer +* Gerwin Klein -- Isabelle lexer +* Eric Knibbe -- Lasso lexer +* Stepan Koltsov -- Clay lexer +* Oliver Kopp - Friendly grayscale style +* Adam Koprowski -- Opa lexer +* Benjamin Kowarsch -- Modula-2 lexer +* Domen Kožar -- Nix lexer +* Oleh Krekel -- Emacs Lisp lexer +* Alexander Kriegisch -- Kconfig and AspectJ lexers +* Marek Kubica -- Scheme lexer +* Jochen Kupperschmidt -- Markdown processor +* Gerd Kurzbach -- Modelica lexer +* Jon Larimer, Google Inc. -- Smali lexer +* Olov Lassus -- Dart lexer +* Matt Layman -- TAP lexer +* Dan Lazin, Google LLC -- GoogleSQL lexer +* Kristian Lyngstøl -- Varnish lexers +* Sylvestre Ledru -- Scilab lexer +* Chee Sing Lee -- Flatline lexer +* Mark Lee -- Vala lexer +* Thomas Linder Puls -- Visual Prolog lexer +* Pete Lomax -- Phix lexer +* Valentin Lorentz -- C++ lexer improvements +* Ben Mabey -- Gherkin lexer +* Angus MacArthur -- QML lexer +* Louis Mandel -- X10 lexer +* Louis Marchand -- Eiffel lexer +* Simone Margaritelli -- Hybris lexer +* Tim Martin - World of Warcraft TOC lexer +* Kirk McDonald -- D lexer +* Gordon McGregor -- SystemVerilog lexer +* Stephen McKamey -- Duel/JBST lexer +* Brian McKenna -- F# lexer +* Charles McLaughlin -- Puppet lexer +* Kurt McKee -- Tera Term macro lexer, PostgreSQL updates, MySQL overhaul, JSON lexer +* Joe Eli McIlvain -- Savi lexer +* Lukas Meuser -- BBCode formatter, Lua lexer +* Cat Miller -- Pig lexer +* Paul Miller -- LiveScript lexer +* Hong Minhee -- HTTP lexer +* Michael Mior -- Awk lexer +* Bruce Mitchener -- Dylan lexer rewrite +* Reuben Morais -- SourcePawn lexer +* Jon Morton -- Rust lexer +* Paulo Moura -- Logtalk lexer +* Mher Movsisyan -- DTD lexer +* Dejan Muhamedagic -- Crmsh lexer +* Adrien Nayrat -- PostgreSQL Explain lexer +* Ana Nelson -- Ragel, ANTLR, R console lexers +* David Neto, Google LLC -- WebGPU Shading Language lexer +* Kurt Neufeld -- Markdown lexer +* Nam T. Nguyen -- Monokai style +* Jesper Noehr -- HTML formatter "anchorlinenos" +* Mike Nolta -- Julia lexer +* Avery Nortonsmith -- Pointless lexer +* Jonas Obrist -- BBCode lexer +* Edward O'Callaghan -- Cryptol lexer +* David Oliva -- Rebol lexer +* Pat Pannuto -- nesC lexer +* Jon Parise -- Protocol buffers and Thrift lexers +* Benjamin Peterson -- Test suite refactoring +* Ronny Pfannschmidt -- BBCode lexer +* Dominik Picheta -- Nimrod lexer +* Andrew Pinkham -- RTF Formatter Refactoring +* Clément Prévost -- UrbiScript lexer +* Tanner Prynn -- cmdline -x option and loading lexers from files +* Oleh Prypin -- Crystal lexer (based on Ruby lexer) +* Nick Psaris -- K and Q lexers +* Xidorn Quan -- Web IDL lexer +* Elias Rabel -- Fortran fixed form lexer +* raichoo -- Idris lexer +* Daniel Ramirez -- GDScript lexer +* Kashif Rasul -- CUDA lexer +* Nathan Reed -- HLSL lexer +* Justin Reidy -- MXML lexer +* Jonathon Reinhart, Google LLC -- Soong lexer +* Norman Richards -- JSON lexer +* Corey Richardson -- Rust lexer updates +* Fabrizio Riguzzi -- cplint leder +* Lubomir Rintel -- GoodData MAQL and CL lexers +* Andre Roberge -- Tango style +* Georg Rollinger -- HSAIL lexer +* Michiel Roos -- TypoScript lexer +* Konrad Rudolph -- LaTeX formatter enhancements +* Mario Ruggier -- Evoque lexers +* Miikka Salminen -- Lovelace style, Hexdump lexer, lexer enhancements +* Stou Sandalski -- NumPy, FORTRAN, tcsh and XSLT lexers +* Matteo Sasso -- Common Lisp lexer +* Joe Schafer -- Ada lexer +* Max Schillinger -- TiddlyWiki5 lexer +* Andrew Schmidt -- X++ lexer +* Ken Schutte -- Matlab lexers +* René Schwaiger -- Rainbow Dash style +* Sebastian Schweizer -- Whiley lexer +* Tassilo Schweyer -- Io, MOOCode lexers +* Pablo Seminario -- PromQL lexer +* Ted Shaw -- AutoIt lexer +* Joerg Sieker -- ABAP lexer +* Robert Simmons -- Standard ML lexer +* Kirill Simonov -- YAML lexer +* Corbin Simpson -- Monte lexer +* Ville Skyttä -- ASCII armored lexer +* Alexander Smishlajev -- Visual FoxPro lexer +* Steve Spigarelli -- XQuery lexer +* Jerome St-Louis -- eC lexer +* Camil Staps -- Clean and NuSMV lexers; Solarized style +* James Strachan -- Kotlin lexer +* Tom Stuart -- Treetop lexer +* Colin Sullivan -- SuperCollider lexer +* Ben Swift -- Extempore lexer +* tatt61880 -- Kuin lexer +* Edoardo Tenani -- Arduino lexer +* Tiberius Teng -- default style overhaul +* Jeremy Thurgood -- Erlang, Squid config lexers +* Brian Tiffin -- OpenCOBOL lexer +* Bob Tolbert -- Hy lexer +* Doug Torrance -- Macaulay2 lexer +* Matthias Trute -- Forth lexer +* Tuoa Spi T4 -- Bdd lexer +* Erick Tryzelaar -- Felix lexer +* Alexander Udalov -- Kotlin lexer improvements +* Thomas Van Doren -- Chapel lexer +* Dave Van Ee -- Uxntal lexer updates +* Daniele Varrazzo -- PostgreSQL lexers +* Abe Voelker -- OpenEdge ABL lexer +* Pepijn de Vos -- HTML formatter CTags support +* Matthias Vallentin -- Bro lexer +* Benoît Vinot -- AMPL lexer +* Linh Vu Hong -- RSL lexer +* Taavi Väänänen -- Debian control lexer +* Immanuel Washington -- Smithy lexer +* Nathan Weizenbaum -- Haml and Sass lexers +* Nathan Whetsell -- Csound lexers +* Dietmar Winkler -- Modelica lexer +* Nils Winter -- Smalltalk lexer +* Davy Wybiral -- Clojure lexer +* Whitney Young -- ObjectiveC lexer +* Diego Zamboni -- CFengine3 lexer +* Enrique Zamudio -- Ceylon lexer +* Alex Zimin -- Nemerle lexer +* Rob Zimmerman -- Kal lexer +* Evgenii Zheltonozhskii -- Maple lexer +* Vincent Zurczak -- Roboconf lexer +* Hubert Gruniaux -- C and C++ lexer improvements +* Thomas Symalla -- AMDGPU Lexer +* 15b3 -- Image Formatter improvements +* Fabian Neumann -- CDDL lexer +* Thomas Duboucher -- CDDL lexer +* Philipp Imhof -- Pango Markup formatter +* Thomas Voss -- Sed lexer +* Martin Fischer -- WCAG contrast testing +* Marc Auberer -- Spice lexer +* Amr Hesham -- Carbon lexer +* diskdance -- Wikitext lexer +* vanillajonathan -- PRQL lexer +* Nikolay Antipov -- OpenSCAD lexer +* Markus Meyer, Nextron Systems -- YARA lexer +* Hannes Römer -- Mojo lexer +* Jan Frederik Schaefer -- PDDL lexer + +Many thanks for all contributions! diff --git a/Backend/venv/lib/python3.12/site-packages/pygments-2.19.2.dist-info/licenses/LICENSE b/Backend/venv/lib/python3.12/site-packages/pygments-2.19.2.dist-info/licenses/LICENSE new file mode 100644 index 00000000..446a1a80 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/pygments-2.19.2.dist-info/licenses/LICENSE @@ -0,0 +1,25 @@ +Copyright (c) 2006-2022 by the respective authors (see AUTHORS file). +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +* Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/__init__.py b/Backend/venv/lib/python3.12/site-packages/pygments/__init__.py new file mode 100644 index 00000000..2a391c3e --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/pygments/__init__.py @@ -0,0 +1,82 @@ +""" + Pygments + ~~~~~~~~ + + Pygments is a syntax highlighting package written in Python. + + It is a generic syntax highlighter for general use in all kinds of software + such as forum systems, wikis or other applications that need to prettify + source code. Highlights are: + + * a wide range of common languages and markup formats is supported + * special attention is paid to details, increasing quality by a fair amount + * support for new languages and formats are added easily + * a number of output formats, presently HTML, LaTeX, RTF, SVG, all image + formats that PIL supports, and ANSI sequences + * it is usable as a command-line tool and as a library + * ... and it highlights even Brainfuck! + + The `Pygments master branch`_ is installable with ``easy_install Pygments==dev``. + + .. _Pygments master branch: + https://github.com/pygments/pygments/archive/master.zip#egg=Pygments-dev + + :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" +from io import StringIO, BytesIO + +__version__ = '2.19.2' +__docformat__ = 'restructuredtext' + +__all__ = ['lex', 'format', 'highlight'] + + +def lex(code, lexer): + """ + Lex `code` with the `lexer` (must be a `Lexer` instance) + and return an iterable of tokens. Currently, this only calls + `lexer.get_tokens()`. + """ + try: + return lexer.get_tokens(code) + except TypeError: + # Heuristic to catch a common mistake. + from pygments.lexer import RegexLexer + if isinstance(lexer, type) and issubclass(lexer, RegexLexer): + raise TypeError('lex() argument must be a lexer instance, ' + 'not a class') + raise + + +def format(tokens, formatter, outfile=None): # pylint: disable=redefined-builtin + """ + Format ``tokens`` (an iterable of tokens) with the formatter ``formatter`` + (a `Formatter` instance). + + If ``outfile`` is given and a valid file object (an object with a + ``write`` method), the result will be written to it, otherwise it + is returned as a string. + """ + try: + if not outfile: + realoutfile = getattr(formatter, 'encoding', None) and BytesIO() or StringIO() + formatter.format(tokens, realoutfile) + return realoutfile.getvalue() + else: + formatter.format(tokens, outfile) + except TypeError: + # Heuristic to catch a common mistake. + from pygments.formatter import Formatter + if isinstance(formatter, type) and issubclass(formatter, Formatter): + raise TypeError('format() argument must be a formatter instance, ' + 'not a class') + raise + + +def highlight(code, lexer, formatter, outfile=None): + """ + This is the most high-level highlighting function. It combines `lex` and + `format` in one function. + """ + return format(lex(code, lexer), formatter, outfile) diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/__main__.py b/Backend/venv/lib/python3.12/site-packages/pygments/__main__.py new file mode 100644 index 00000000..4890a6c7 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/pygments/__main__.py @@ -0,0 +1,17 @@ +""" + pygments.__main__ + ~~~~~~~~~~~~~~~~~ + + Main entry point for ``python -m pygments``. + + :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +import sys +import pygments.cmdline + +try: + sys.exit(pygments.cmdline.main(sys.argv)) +except KeyboardInterrupt: + sys.exit(1) diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pygments/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..acda9a50 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/pygments/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/__pycache__/__main__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pygments/__pycache__/__main__.cpython-312.pyc new file mode 100644 index 00000000..554ff0e9 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/pygments/__pycache__/__main__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/__pycache__/cmdline.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pygments/__pycache__/cmdline.cpython-312.pyc new file mode 100644 index 00000000..e4dd09b0 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/pygments/__pycache__/cmdline.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/__pycache__/console.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pygments/__pycache__/console.cpython-312.pyc new file mode 100644 index 00000000..b96ba098 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/pygments/__pycache__/console.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/__pycache__/filter.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pygments/__pycache__/filter.cpython-312.pyc new file mode 100644 index 00000000..99ea4bea Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/pygments/__pycache__/filter.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/__pycache__/formatter.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pygments/__pycache__/formatter.cpython-312.pyc new file mode 100644 index 00000000..36a6b748 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/pygments/__pycache__/formatter.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/__pycache__/lexer.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pygments/__pycache__/lexer.cpython-312.pyc new file mode 100644 index 00000000..643111c4 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/pygments/__pycache__/lexer.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/__pycache__/modeline.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pygments/__pycache__/modeline.cpython-312.pyc new file mode 100644 index 00000000..80b50db4 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/pygments/__pycache__/modeline.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/__pycache__/plugin.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pygments/__pycache__/plugin.cpython-312.pyc new file mode 100644 index 00000000..95c3ef5c Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/pygments/__pycache__/plugin.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/__pycache__/regexopt.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pygments/__pycache__/regexopt.cpython-312.pyc new file mode 100644 index 00000000..e5c554d3 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/pygments/__pycache__/regexopt.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/__pycache__/scanner.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pygments/__pycache__/scanner.cpython-312.pyc new file mode 100644 index 00000000..42dc67f3 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/pygments/__pycache__/scanner.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/__pycache__/sphinxext.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pygments/__pycache__/sphinxext.cpython-312.pyc new file mode 100644 index 00000000..45a20358 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/pygments/__pycache__/sphinxext.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/__pycache__/style.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pygments/__pycache__/style.cpython-312.pyc new file mode 100644 index 00000000..3a25edb0 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/pygments/__pycache__/style.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/__pycache__/token.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pygments/__pycache__/token.cpython-312.pyc new file mode 100644 index 00000000..50b426df Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/pygments/__pycache__/token.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/__pycache__/unistring.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pygments/__pycache__/unistring.cpython-312.pyc new file mode 100644 index 00000000..1c419411 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/pygments/__pycache__/unistring.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/__pycache__/util.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pygments/__pycache__/util.cpython-312.pyc new file mode 100644 index 00000000..a02e7512 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/pygments/__pycache__/util.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/cmdline.py b/Backend/venv/lib/python3.12/site-packages/pygments/cmdline.py new file mode 100644 index 00000000..2878fd55 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/pygments/cmdline.py @@ -0,0 +1,668 @@ +""" + pygments.cmdline + ~~~~~~~~~~~~~~~~ + + Command line interface. + + :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +import os +import sys +import shutil +import argparse +from textwrap import dedent + +from pygments import __version__, highlight +from pygments.util import ClassNotFound, OptionError, docstring_headline, \ + guess_decode, guess_decode_from_terminal, terminal_encoding, \ + UnclosingTextIOWrapper +from pygments.lexers import get_all_lexers, get_lexer_by_name, guess_lexer, \ + load_lexer_from_file, get_lexer_for_filename, find_lexer_class_for_filename +from pygments.lexers.special import TextLexer +from pygments.formatters.latex import LatexEmbeddedLexer, LatexFormatter +from pygments.formatters import get_all_formatters, get_formatter_by_name, \ + load_formatter_from_file, get_formatter_for_filename, find_formatter_class +from pygments.formatters.terminal import TerminalFormatter +from pygments.formatters.terminal256 import Terminal256Formatter, TerminalTrueColorFormatter +from pygments.filters import get_all_filters, find_filter_class +from pygments.styles import get_all_styles, get_style_by_name + + +def _parse_options(o_strs): + opts = {} + if not o_strs: + return opts + for o_str in o_strs: + if not o_str.strip(): + continue + o_args = o_str.split(',') + for o_arg in o_args: + o_arg = o_arg.strip() + try: + o_key, o_val = o_arg.split('=', 1) + o_key = o_key.strip() + o_val = o_val.strip() + except ValueError: + opts[o_arg] = True + else: + opts[o_key] = o_val + return opts + + +def _parse_filters(f_strs): + filters = [] + if not f_strs: + return filters + for f_str in f_strs: + if ':' in f_str: + fname, fopts = f_str.split(':', 1) + filters.append((fname, _parse_options([fopts]))) + else: + filters.append((f_str, {})) + return filters + + +def _print_help(what, name): + try: + if what == 'lexer': + cls = get_lexer_by_name(name) + print(f"Help on the {cls.name} lexer:") + print(dedent(cls.__doc__)) + elif what == 'formatter': + cls = find_formatter_class(name) + print(f"Help on the {cls.name} formatter:") + print(dedent(cls.__doc__)) + elif what == 'filter': + cls = find_filter_class(name) + print(f"Help on the {name} filter:") + print(dedent(cls.__doc__)) + return 0 + except (AttributeError, ValueError): + print(f"{what} not found!", file=sys.stderr) + return 1 + + +def _print_list(what): + if what == 'lexer': + print() + print("Lexers:") + print("~~~~~~~") + + info = [] + for fullname, names, exts, _ in get_all_lexers(): + tup = (', '.join(names)+':', fullname, + exts and '(filenames ' + ', '.join(exts) + ')' or '') + info.append(tup) + info.sort() + for i in info: + print(('* {}\n {} {}').format(*i)) + + elif what == 'formatter': + print() + print("Formatters:") + print("~~~~~~~~~~~") + + info = [] + for cls in get_all_formatters(): + doc = docstring_headline(cls) + tup = (', '.join(cls.aliases) + ':', doc, cls.filenames and + '(filenames ' + ', '.join(cls.filenames) + ')' or '') + info.append(tup) + info.sort() + for i in info: + print(('* {}\n {} {}').format(*i)) + + elif what == 'filter': + print() + print("Filters:") + print("~~~~~~~~") + + for name in get_all_filters(): + cls = find_filter_class(name) + print("* " + name + ':') + print(f" {docstring_headline(cls)}") + + elif what == 'style': + print() + print("Styles:") + print("~~~~~~~") + + for name in get_all_styles(): + cls = get_style_by_name(name) + print("* " + name + ':') + print(f" {docstring_headline(cls)}") + + +def _print_list_as_json(requested_items): + import json + result = {} + if 'lexer' in requested_items: + info = {} + for fullname, names, filenames, mimetypes in get_all_lexers(): + info[fullname] = { + 'aliases': names, + 'filenames': filenames, + 'mimetypes': mimetypes + } + result['lexers'] = info + + if 'formatter' in requested_items: + info = {} + for cls in get_all_formatters(): + doc = docstring_headline(cls) + info[cls.name] = { + 'aliases': cls.aliases, + 'filenames': cls.filenames, + 'doc': doc + } + result['formatters'] = info + + if 'filter' in requested_items: + info = {} + for name in get_all_filters(): + cls = find_filter_class(name) + info[name] = { + 'doc': docstring_headline(cls) + } + result['filters'] = info + + if 'style' in requested_items: + info = {} + for name in get_all_styles(): + cls = get_style_by_name(name) + info[name] = { + 'doc': docstring_headline(cls) + } + result['styles'] = info + + json.dump(result, sys.stdout) + +def main_inner(parser, argns): + if argns.help: + parser.print_help() + return 0 + + if argns.V: + print(f'Pygments version {__version__}, (c) 2006-2024 by Georg Brandl, Matthäus ' + 'Chajdas and contributors.') + return 0 + + def is_only_option(opt): + return not any(v for (k, v) in vars(argns).items() if k != opt) + + # handle ``pygmentize -L`` + if argns.L is not None: + arg_set = set() + for k, v in vars(argns).items(): + if v: + arg_set.add(k) + + arg_set.discard('L') + arg_set.discard('json') + + if arg_set: + parser.print_help(sys.stderr) + return 2 + + # print version + if not argns.json: + main(['', '-V']) + allowed_types = {'lexer', 'formatter', 'filter', 'style'} + largs = [arg.rstrip('s') for arg in argns.L] + if any(arg not in allowed_types for arg in largs): + parser.print_help(sys.stderr) + return 0 + if not largs: + largs = allowed_types + if not argns.json: + for arg in largs: + _print_list(arg) + else: + _print_list_as_json(largs) + return 0 + + # handle ``pygmentize -H`` + if argns.H: + if not is_only_option('H'): + parser.print_help(sys.stderr) + return 2 + what, name = argns.H + if what not in ('lexer', 'formatter', 'filter'): + parser.print_help(sys.stderr) + return 2 + return _print_help(what, name) + + # parse -O options + parsed_opts = _parse_options(argns.O or []) + + # parse -P options + for p_opt in argns.P or []: + try: + name, value = p_opt.split('=', 1) + except ValueError: + parsed_opts[p_opt] = True + else: + parsed_opts[name] = value + + # encodings + inencoding = parsed_opts.get('inencoding', parsed_opts.get('encoding')) + outencoding = parsed_opts.get('outencoding', parsed_opts.get('encoding')) + + # handle ``pygmentize -N`` + if argns.N: + lexer = find_lexer_class_for_filename(argns.N) + if lexer is None: + lexer = TextLexer + + print(lexer.aliases[0]) + return 0 + + # handle ``pygmentize -C`` + if argns.C: + inp = sys.stdin.buffer.read() + try: + lexer = guess_lexer(inp, inencoding=inencoding) + except ClassNotFound: + lexer = TextLexer + + print(lexer.aliases[0]) + return 0 + + # handle ``pygmentize -S`` + S_opt = argns.S + a_opt = argns.a + if S_opt is not None: + f_opt = argns.f + if not f_opt: + parser.print_help(sys.stderr) + return 2 + if argns.l or argns.INPUTFILE: + parser.print_help(sys.stderr) + return 2 + + try: + parsed_opts['style'] = S_opt + fmter = get_formatter_by_name(f_opt, **parsed_opts) + except ClassNotFound as err: + print(err, file=sys.stderr) + return 1 + + print(fmter.get_style_defs(a_opt or '')) + return 0 + + # if no -S is given, -a is not allowed + if argns.a is not None: + parser.print_help(sys.stderr) + return 2 + + # parse -F options + F_opts = _parse_filters(argns.F or []) + + # -x: allow custom (eXternal) lexers and formatters + allow_custom_lexer_formatter = bool(argns.x) + + # select lexer + lexer = None + + # given by name? + lexername = argns.l + if lexername: + # custom lexer, located relative to user's cwd + if allow_custom_lexer_formatter and '.py' in lexername: + try: + filename = None + name = None + if ':' in lexername: + filename, name = lexername.rsplit(':', 1) + + if '.py' in name: + # This can happen on Windows: If the lexername is + # C:\lexer.py -- return to normal load path in that case + name = None + + if filename and name: + lexer = load_lexer_from_file(filename, name, + **parsed_opts) + else: + lexer = load_lexer_from_file(lexername, **parsed_opts) + except ClassNotFound as err: + print('Error:', err, file=sys.stderr) + return 1 + else: + try: + lexer = get_lexer_by_name(lexername, **parsed_opts) + except (OptionError, ClassNotFound) as err: + print('Error:', err, file=sys.stderr) + return 1 + + # read input code + code = None + + if argns.INPUTFILE: + if argns.s: + print('Error: -s option not usable when input file specified', + file=sys.stderr) + return 2 + + infn = argns.INPUTFILE + try: + with open(infn, 'rb') as infp: + code = infp.read() + except Exception as err: + print('Error: cannot read infile:', err, file=sys.stderr) + return 1 + if not inencoding: + code, inencoding = guess_decode(code) + + # do we have to guess the lexer? + if not lexer: + try: + lexer = get_lexer_for_filename(infn, code, **parsed_opts) + except ClassNotFound as err: + if argns.g: + try: + lexer = guess_lexer(code, **parsed_opts) + except ClassNotFound: + lexer = TextLexer(**parsed_opts) + else: + print('Error:', err, file=sys.stderr) + return 1 + except OptionError as err: + print('Error:', err, file=sys.stderr) + return 1 + + elif not argns.s: # treat stdin as full file (-s support is later) + # read code from terminal, always in binary mode since we want to + # decode ourselves and be tolerant with it + code = sys.stdin.buffer.read() # use .buffer to get a binary stream + if not inencoding: + code, inencoding = guess_decode_from_terminal(code, sys.stdin) + # else the lexer will do the decoding + if not lexer: + try: + lexer = guess_lexer(code, **parsed_opts) + except ClassNotFound: + lexer = TextLexer(**parsed_opts) + + else: # -s option needs a lexer with -l + if not lexer: + print('Error: when using -s a lexer has to be selected with -l', + file=sys.stderr) + return 2 + + # process filters + for fname, fopts in F_opts: + try: + lexer.add_filter(fname, **fopts) + except ClassNotFound as err: + print('Error:', err, file=sys.stderr) + return 1 + + # select formatter + outfn = argns.o + fmter = argns.f + if fmter: + # custom formatter, located relative to user's cwd + if allow_custom_lexer_formatter and '.py' in fmter: + try: + filename = None + name = None + if ':' in fmter: + # Same logic as above for custom lexer + filename, name = fmter.rsplit(':', 1) + + if '.py' in name: + name = None + + if filename and name: + fmter = load_formatter_from_file(filename, name, + **parsed_opts) + else: + fmter = load_formatter_from_file(fmter, **parsed_opts) + except ClassNotFound as err: + print('Error:', err, file=sys.stderr) + return 1 + else: + try: + fmter = get_formatter_by_name(fmter, **parsed_opts) + except (OptionError, ClassNotFound) as err: + print('Error:', err, file=sys.stderr) + return 1 + + if outfn: + if not fmter: + try: + fmter = get_formatter_for_filename(outfn, **parsed_opts) + except (OptionError, ClassNotFound) as err: + print('Error:', err, file=sys.stderr) + return 1 + try: + outfile = open(outfn, 'wb') + except Exception as err: + print('Error: cannot open outfile:', err, file=sys.stderr) + return 1 + else: + if not fmter: + if os.environ.get('COLORTERM','') in ('truecolor', '24bit'): + fmter = TerminalTrueColorFormatter(**parsed_opts) + elif '256' in os.environ.get('TERM', ''): + fmter = Terminal256Formatter(**parsed_opts) + else: + fmter = TerminalFormatter(**parsed_opts) + outfile = sys.stdout.buffer + + # determine output encoding if not explicitly selected + if not outencoding: + if outfn: + # output file? use lexer encoding for now (can still be None) + fmter.encoding = inencoding + else: + # else use terminal encoding + fmter.encoding = terminal_encoding(sys.stdout) + + # provide coloring under Windows, if possible + if not outfn and sys.platform in ('win32', 'cygwin') and \ + fmter.name in ('Terminal', 'Terminal256'): # pragma: no cover + # unfortunately colorama doesn't support binary streams on Py3 + outfile = UnclosingTextIOWrapper(outfile, encoding=fmter.encoding) + fmter.encoding = None + try: + import colorama.initialise + except ImportError: + pass + else: + outfile = colorama.initialise.wrap_stream( + outfile, convert=None, strip=None, autoreset=False, wrap=True) + + # When using the LaTeX formatter and the option `escapeinside` is + # specified, we need a special lexer which collects escaped text + # before running the chosen language lexer. + escapeinside = parsed_opts.get('escapeinside', '') + if len(escapeinside) == 2 and isinstance(fmter, LatexFormatter): + left = escapeinside[0] + right = escapeinside[1] + lexer = LatexEmbeddedLexer(left, right, lexer) + + # ... and do it! + if not argns.s: + # process whole input as per normal... + try: + highlight(code, lexer, fmter, outfile) + finally: + if outfn: + outfile.close() + return 0 + else: + # line by line processing of stdin (eg: for 'tail -f')... + try: + while 1: + line = sys.stdin.buffer.readline() + if not line: + break + if not inencoding: + line = guess_decode_from_terminal(line, sys.stdin)[0] + highlight(line, lexer, fmter, outfile) + if hasattr(outfile, 'flush'): + outfile.flush() + return 0 + except KeyboardInterrupt: # pragma: no cover + return 0 + finally: + if outfn: + outfile.close() + + +class HelpFormatter(argparse.HelpFormatter): + def __init__(self, prog, indent_increment=2, max_help_position=16, width=None): + if width is None: + try: + width = shutil.get_terminal_size().columns - 2 + except Exception: + pass + argparse.HelpFormatter.__init__(self, prog, indent_increment, + max_help_position, width) + + +def main(args=sys.argv): + """ + Main command line entry point. + """ + desc = "Highlight an input file and write the result to an output file." + parser = argparse.ArgumentParser(description=desc, add_help=False, + formatter_class=HelpFormatter) + + operation = parser.add_argument_group('Main operation') + lexersel = operation.add_mutually_exclusive_group() + lexersel.add_argument( + '-l', metavar='LEXER', + help='Specify the lexer to use. (Query names with -L.) If not ' + 'given and -g is not present, the lexer is guessed from the filename.') + lexersel.add_argument( + '-g', action='store_true', + help='Guess the lexer from the file contents, or pass through ' + 'as plain text if nothing can be guessed.') + operation.add_argument( + '-F', metavar='FILTER[:options]', action='append', + help='Add a filter to the token stream. (Query names with -L.) ' + 'Filter options are given after a colon if necessary.') + operation.add_argument( + '-f', metavar='FORMATTER', + help='Specify the formatter to use. (Query names with -L.) ' + 'If not given, the formatter is guessed from the output filename, ' + 'and defaults to the terminal formatter if the output is to the ' + 'terminal or an unknown file extension.') + operation.add_argument( + '-O', metavar='OPTION=value[,OPTION=value,...]', action='append', + help='Give options to the lexer and formatter as a comma-separated ' + 'list of key-value pairs. ' + 'Example: `-O bg=light,python=cool`.') + operation.add_argument( + '-P', metavar='OPTION=value', action='append', + help='Give a single option to the lexer and formatter - with this ' + 'you can pass options whose value contains commas and equal signs. ' + 'Example: `-P "heading=Pygments, the Python highlighter"`.') + operation.add_argument( + '-o', metavar='OUTPUTFILE', + help='Where to write the output. Defaults to standard output.') + + operation.add_argument( + 'INPUTFILE', nargs='?', + help='Where to read the input. Defaults to standard input.') + + flags = parser.add_argument_group('Operation flags') + flags.add_argument( + '-v', action='store_true', + help='Print a detailed traceback on unhandled exceptions, which ' + 'is useful for debugging and bug reports.') + flags.add_argument( + '-s', action='store_true', + help='Process lines one at a time until EOF, rather than waiting to ' + 'process the entire file. This only works for stdin, only for lexers ' + 'with no line-spanning constructs, and is intended for streaming ' + 'input such as you get from `tail -f`. ' + 'Example usage: `tail -f sql.log | pygmentize -s -l sql`.') + flags.add_argument( + '-x', action='store_true', + help='Allow custom lexers and formatters to be loaded from a .py file ' + 'relative to the current working directory. For example, ' + '`-l ./customlexer.py -x`. By default, this option expects a file ' + 'with a class named CustomLexer or CustomFormatter; you can also ' + 'specify your own class name with a colon (`-l ./lexer.py:MyLexer`). ' + 'Users should be very careful not to use this option with untrusted ' + 'files, because it will import and run them.') + flags.add_argument('--json', help='Output as JSON. This can ' + 'be only used in conjunction with -L.', + default=False, + action='store_true') + + special_modes_group = parser.add_argument_group( + 'Special modes - do not do any highlighting') + special_modes = special_modes_group.add_mutually_exclusive_group() + special_modes.add_argument( + '-S', metavar='STYLE -f formatter', + help='Print style definitions for STYLE for a formatter ' + 'given with -f. The argument given by -a is formatter ' + 'dependent.') + special_modes.add_argument( + '-L', nargs='*', metavar='WHAT', + help='List lexers, formatters, styles or filters -- ' + 'give additional arguments for the thing(s) you want to list ' + '(e.g. "styles"), or omit them to list everything.') + special_modes.add_argument( + '-N', metavar='FILENAME', + help='Guess and print out a lexer name based solely on the given ' + 'filename. Does not take input or highlight anything. If no specific ' + 'lexer can be determined, "text" is printed.') + special_modes.add_argument( + '-C', action='store_true', + help='Like -N, but print out a lexer name based solely on ' + 'a given content from standard input.') + special_modes.add_argument( + '-H', action='store', nargs=2, metavar=('NAME', 'TYPE'), + help='Print detailed help for the object of type , ' + 'where is one of "lexer", "formatter" or "filter".') + special_modes.add_argument( + '-V', action='store_true', + help='Print the package version.') + special_modes.add_argument( + '-h', '--help', action='store_true', + help='Print this help.') + special_modes_group.add_argument( + '-a', metavar='ARG', + help='Formatter-specific additional argument for the -S (print ' + 'style sheet) mode.') + + argns = parser.parse_args(args[1:]) + + try: + return main_inner(parser, argns) + except BrokenPipeError: + # someone closed our stdout, e.g. by quitting a pager. + return 0 + except Exception: + if argns.v: + print(file=sys.stderr) + print('*' * 65, file=sys.stderr) + print('An unhandled exception occurred while highlighting.', + file=sys.stderr) + print('Please report the whole traceback to the issue tracker at', + file=sys.stderr) + print('.', + file=sys.stderr) + print('*' * 65, file=sys.stderr) + print(file=sys.stderr) + raise + import traceback + info = traceback.format_exception(*sys.exc_info()) + msg = info[-1].strip() + if len(info) >= 3: + # extract relevant file and position info + msg += '\n (f{})'.format(info[-2].split('\n')[0].strip()[1:]) + print(file=sys.stderr) + print('*** Error while highlighting:', file=sys.stderr) + print(msg, file=sys.stderr) + print('*** If this is a bug you want to report, please rerun with -v.', + file=sys.stderr) + return 1 diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/console.py b/Backend/venv/lib/python3.12/site-packages/pygments/console.py new file mode 100644 index 00000000..ee1ac27a --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/pygments/console.py @@ -0,0 +1,70 @@ +""" + pygments.console + ~~~~~~~~~~~~~~~~ + + Format colored console output. + + :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +esc = "\x1b[" + +codes = {} +codes[""] = "" +codes["reset"] = esc + "39;49;00m" + +codes["bold"] = esc + "01m" +codes["faint"] = esc + "02m" +codes["standout"] = esc + "03m" +codes["underline"] = esc + "04m" +codes["blink"] = esc + "05m" +codes["overline"] = esc + "06m" + +dark_colors = ["black", "red", "green", "yellow", "blue", + "magenta", "cyan", "gray"] +light_colors = ["brightblack", "brightred", "brightgreen", "brightyellow", "brightblue", + "brightmagenta", "brightcyan", "white"] + +x = 30 +for dark, light in zip(dark_colors, light_colors): + codes[dark] = esc + "%im" % x + codes[light] = esc + "%im" % (60 + x) + x += 1 + +del dark, light, x + +codes["white"] = codes["bold"] + + +def reset_color(): + return codes["reset"] + + +def colorize(color_key, text): + return codes[color_key] + text + codes["reset"] + + +def ansiformat(attr, text): + """ + Format ``text`` with a color and/or some attributes:: + + color normal color + *color* bold color + _color_ underlined color + +color+ blinking color + """ + result = [] + if attr[:1] == attr[-1:] == '+': + result.append(codes['blink']) + attr = attr[1:-1] + if attr[:1] == attr[-1:] == '*': + result.append(codes['bold']) + attr = attr[1:-1] + if attr[:1] == attr[-1:] == '_': + result.append(codes['underline']) + attr = attr[1:-1] + result.append(codes[attr]) + result.append(text) + result.append(codes['reset']) + return ''.join(result) diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/filter.py b/Backend/venv/lib/python3.12/site-packages/pygments/filter.py new file mode 100644 index 00000000..5efff438 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/pygments/filter.py @@ -0,0 +1,70 @@ +""" + pygments.filter + ~~~~~~~~~~~~~~~ + + Module that implements the default filter. + + :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + + +def apply_filters(stream, filters, lexer=None): + """ + Use this method to apply an iterable of filters to + a stream. If lexer is given it's forwarded to the + filter, otherwise the filter receives `None`. + """ + def _apply(filter_, stream): + yield from filter_.filter(lexer, stream) + for filter_ in filters: + stream = _apply(filter_, stream) + return stream + + +def simplefilter(f): + """ + Decorator that converts a function into a filter:: + + @simplefilter + def lowercase(self, lexer, stream, options): + for ttype, value in stream: + yield ttype, value.lower() + """ + return type(f.__name__, (FunctionFilter,), { + '__module__': getattr(f, '__module__'), + '__doc__': f.__doc__, + 'function': f, + }) + + +class Filter: + """ + Default filter. Subclass this class or use the `simplefilter` + decorator to create own filters. + """ + + def __init__(self, **options): + self.options = options + + def filter(self, lexer, stream): + raise NotImplementedError() + + +class FunctionFilter(Filter): + """ + Abstract class used by `simplefilter` to create simple + function filters on the fly. The `simplefilter` decorator + automatically creates subclasses of this class for + functions passed to it. + """ + function = None + + def __init__(self, **options): + if not hasattr(self, 'function'): + raise TypeError(f'{self.__class__.__name__!r} used without bound function') + Filter.__init__(self, **options) + + def filter(self, lexer, stream): + # pylint: disable=not-callable + yield from self.function(lexer, stream, self.options) diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/filters/__init__.py b/Backend/venv/lib/python3.12/site-packages/pygments/filters/__init__.py new file mode 100644 index 00000000..2fed761a --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/pygments/filters/__init__.py @@ -0,0 +1,940 @@ +""" + pygments.filters + ~~~~~~~~~~~~~~~~ + + Module containing filter lookup functions and default + filters. + + :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +import re + +from pygments.token import String, Comment, Keyword, Name, Error, Whitespace, \ + string_to_tokentype +from pygments.filter import Filter +from pygments.util import get_list_opt, get_int_opt, get_bool_opt, \ + get_choice_opt, ClassNotFound, OptionError +from pygments.plugin import find_plugin_filters + + +def find_filter_class(filtername): + """Lookup a filter by name. Return None if not found.""" + if filtername in FILTERS: + return FILTERS[filtername] + for name, cls in find_plugin_filters(): + if name == filtername: + return cls + return None + + +def get_filter_by_name(filtername, **options): + """Return an instantiated filter. + + Options are passed to the filter initializer if wanted. + Raise a ClassNotFound if not found. + """ + cls = find_filter_class(filtername) + if cls: + return cls(**options) + else: + raise ClassNotFound(f'filter {filtername!r} not found') + + +def get_all_filters(): + """Return a generator of all filter names.""" + yield from FILTERS + for name, _ in find_plugin_filters(): + yield name + + +def _replace_special(ttype, value, regex, specialttype, + replacefunc=lambda x: x): + last = 0 + for match in regex.finditer(value): + start, end = match.start(), match.end() + if start != last: + yield ttype, value[last:start] + yield specialttype, replacefunc(value[start:end]) + last = end + if last != len(value): + yield ttype, value[last:] + + +class CodeTagFilter(Filter): + """Highlight special code tags in comments and docstrings. + + Options accepted: + + `codetags` : list of strings + A list of strings that are flagged as code tags. The default is to + highlight ``XXX``, ``TODO``, ``FIXME``, ``BUG`` and ``NOTE``. + + .. versionchanged:: 2.13 + Now recognizes ``FIXME`` by default. + """ + + def __init__(self, **options): + Filter.__init__(self, **options) + tags = get_list_opt(options, 'codetags', + ['XXX', 'TODO', 'FIXME', 'BUG', 'NOTE']) + self.tag_re = re.compile(r'\b({})\b'.format('|'.join([ + re.escape(tag) for tag in tags if tag + ]))) + + def filter(self, lexer, stream): + regex = self.tag_re + for ttype, value in stream: + if ttype in String.Doc or \ + ttype in Comment and \ + ttype not in Comment.Preproc: + yield from _replace_special(ttype, value, regex, Comment.Special) + else: + yield ttype, value + + +class SymbolFilter(Filter): + """Convert mathematical symbols such as \\ in Isabelle + or \\longrightarrow in LaTeX into Unicode characters. + + This is mostly useful for HTML or console output when you want to + approximate the source rendering you'd see in an IDE. + + Options accepted: + + `lang` : string + The symbol language. Must be one of ``'isabelle'`` or + ``'latex'``. The default is ``'isabelle'``. + """ + + latex_symbols = { + '\\alpha' : '\U000003b1', + '\\beta' : '\U000003b2', + '\\gamma' : '\U000003b3', + '\\delta' : '\U000003b4', + '\\varepsilon' : '\U000003b5', + '\\zeta' : '\U000003b6', + '\\eta' : '\U000003b7', + '\\vartheta' : '\U000003b8', + '\\iota' : '\U000003b9', + '\\kappa' : '\U000003ba', + '\\lambda' : '\U000003bb', + '\\mu' : '\U000003bc', + '\\nu' : '\U000003bd', + '\\xi' : '\U000003be', + '\\pi' : '\U000003c0', + '\\varrho' : '\U000003c1', + '\\sigma' : '\U000003c3', + '\\tau' : '\U000003c4', + '\\upsilon' : '\U000003c5', + '\\varphi' : '\U000003c6', + '\\chi' : '\U000003c7', + '\\psi' : '\U000003c8', + '\\omega' : '\U000003c9', + '\\Gamma' : '\U00000393', + '\\Delta' : '\U00000394', + '\\Theta' : '\U00000398', + '\\Lambda' : '\U0000039b', + '\\Xi' : '\U0000039e', + '\\Pi' : '\U000003a0', + '\\Sigma' : '\U000003a3', + '\\Upsilon' : '\U000003a5', + '\\Phi' : '\U000003a6', + '\\Psi' : '\U000003a8', + '\\Omega' : '\U000003a9', + '\\leftarrow' : '\U00002190', + '\\longleftarrow' : '\U000027f5', + '\\rightarrow' : '\U00002192', + '\\longrightarrow' : '\U000027f6', + '\\Leftarrow' : '\U000021d0', + '\\Longleftarrow' : '\U000027f8', + '\\Rightarrow' : '\U000021d2', + '\\Longrightarrow' : '\U000027f9', + '\\leftrightarrow' : '\U00002194', + '\\longleftrightarrow' : '\U000027f7', + '\\Leftrightarrow' : '\U000021d4', + '\\Longleftrightarrow' : '\U000027fa', + '\\mapsto' : '\U000021a6', + '\\longmapsto' : '\U000027fc', + '\\relbar' : '\U00002500', + '\\Relbar' : '\U00002550', + '\\hookleftarrow' : '\U000021a9', + '\\hookrightarrow' : '\U000021aa', + '\\leftharpoondown' : '\U000021bd', + '\\rightharpoondown' : '\U000021c1', + '\\leftharpoonup' : '\U000021bc', + '\\rightharpoonup' : '\U000021c0', + '\\rightleftharpoons' : '\U000021cc', + '\\leadsto' : '\U0000219d', + '\\downharpoonleft' : '\U000021c3', + '\\downharpoonright' : '\U000021c2', + '\\upharpoonleft' : '\U000021bf', + '\\upharpoonright' : '\U000021be', + '\\restriction' : '\U000021be', + '\\uparrow' : '\U00002191', + '\\Uparrow' : '\U000021d1', + '\\downarrow' : '\U00002193', + '\\Downarrow' : '\U000021d3', + '\\updownarrow' : '\U00002195', + '\\Updownarrow' : '\U000021d5', + '\\langle' : '\U000027e8', + '\\rangle' : '\U000027e9', + '\\lceil' : '\U00002308', + '\\rceil' : '\U00002309', + '\\lfloor' : '\U0000230a', + '\\rfloor' : '\U0000230b', + '\\flqq' : '\U000000ab', + '\\frqq' : '\U000000bb', + '\\bot' : '\U000022a5', + '\\top' : '\U000022a4', + '\\wedge' : '\U00002227', + '\\bigwedge' : '\U000022c0', + '\\vee' : '\U00002228', + '\\bigvee' : '\U000022c1', + '\\forall' : '\U00002200', + '\\exists' : '\U00002203', + '\\nexists' : '\U00002204', + '\\neg' : '\U000000ac', + '\\Box' : '\U000025a1', + '\\Diamond' : '\U000025c7', + '\\vdash' : '\U000022a2', + '\\models' : '\U000022a8', + '\\dashv' : '\U000022a3', + '\\surd' : '\U0000221a', + '\\le' : '\U00002264', + '\\ge' : '\U00002265', + '\\ll' : '\U0000226a', + '\\gg' : '\U0000226b', + '\\lesssim' : '\U00002272', + '\\gtrsim' : '\U00002273', + '\\lessapprox' : '\U00002a85', + '\\gtrapprox' : '\U00002a86', + '\\in' : '\U00002208', + '\\notin' : '\U00002209', + '\\subset' : '\U00002282', + '\\supset' : '\U00002283', + '\\subseteq' : '\U00002286', + '\\supseteq' : '\U00002287', + '\\sqsubset' : '\U0000228f', + '\\sqsupset' : '\U00002290', + '\\sqsubseteq' : '\U00002291', + '\\sqsupseteq' : '\U00002292', + '\\cap' : '\U00002229', + '\\bigcap' : '\U000022c2', + '\\cup' : '\U0000222a', + '\\bigcup' : '\U000022c3', + '\\sqcup' : '\U00002294', + '\\bigsqcup' : '\U00002a06', + '\\sqcap' : '\U00002293', + '\\Bigsqcap' : '\U00002a05', + '\\setminus' : '\U00002216', + '\\propto' : '\U0000221d', + '\\uplus' : '\U0000228e', + '\\bigplus' : '\U00002a04', + '\\sim' : '\U0000223c', + '\\doteq' : '\U00002250', + '\\simeq' : '\U00002243', + '\\approx' : '\U00002248', + '\\asymp' : '\U0000224d', + '\\cong' : '\U00002245', + '\\equiv' : '\U00002261', + '\\Join' : '\U000022c8', + '\\bowtie' : '\U00002a1d', + '\\prec' : '\U0000227a', + '\\succ' : '\U0000227b', + '\\preceq' : '\U0000227c', + '\\succeq' : '\U0000227d', + '\\parallel' : '\U00002225', + '\\mid' : '\U000000a6', + '\\pm' : '\U000000b1', + '\\mp' : '\U00002213', + '\\times' : '\U000000d7', + '\\div' : '\U000000f7', + '\\cdot' : '\U000022c5', + '\\star' : '\U000022c6', + '\\circ' : '\U00002218', + '\\dagger' : '\U00002020', + '\\ddagger' : '\U00002021', + '\\lhd' : '\U000022b2', + '\\rhd' : '\U000022b3', + '\\unlhd' : '\U000022b4', + '\\unrhd' : '\U000022b5', + '\\triangleleft' : '\U000025c3', + '\\triangleright' : '\U000025b9', + '\\triangle' : '\U000025b3', + '\\triangleq' : '\U0000225c', + '\\oplus' : '\U00002295', + '\\bigoplus' : '\U00002a01', + '\\otimes' : '\U00002297', + '\\bigotimes' : '\U00002a02', + '\\odot' : '\U00002299', + '\\bigodot' : '\U00002a00', + '\\ominus' : '\U00002296', + '\\oslash' : '\U00002298', + '\\dots' : '\U00002026', + '\\cdots' : '\U000022ef', + '\\sum' : '\U00002211', + '\\prod' : '\U0000220f', + '\\coprod' : '\U00002210', + '\\infty' : '\U0000221e', + '\\int' : '\U0000222b', + '\\oint' : '\U0000222e', + '\\clubsuit' : '\U00002663', + '\\diamondsuit' : '\U00002662', + '\\heartsuit' : '\U00002661', + '\\spadesuit' : '\U00002660', + '\\aleph' : '\U00002135', + '\\emptyset' : '\U00002205', + '\\nabla' : '\U00002207', + '\\partial' : '\U00002202', + '\\flat' : '\U0000266d', + '\\natural' : '\U0000266e', + '\\sharp' : '\U0000266f', + '\\angle' : '\U00002220', + '\\copyright' : '\U000000a9', + '\\textregistered' : '\U000000ae', + '\\textonequarter' : '\U000000bc', + '\\textonehalf' : '\U000000bd', + '\\textthreequarters' : '\U000000be', + '\\textordfeminine' : '\U000000aa', + '\\textordmasculine' : '\U000000ba', + '\\euro' : '\U000020ac', + '\\pounds' : '\U000000a3', + '\\yen' : '\U000000a5', + '\\textcent' : '\U000000a2', + '\\textcurrency' : '\U000000a4', + '\\textdegree' : '\U000000b0', + } + + isabelle_symbols = { + '\\' : '\U0001d7ec', + '\\' : '\U0001d7ed', + '\\' : '\U0001d7ee', + '\\' : '\U0001d7ef', + '\\' : '\U0001d7f0', + '\\' : '\U0001d7f1', + '\\' : '\U0001d7f2', + '\\' : '\U0001d7f3', + '\\' : '\U0001d7f4', + '\\' : '\U0001d7f5', + '\\' : '\U0001d49c', + '\\' : '\U0000212c', + '\\' : '\U0001d49e', + '\\' : '\U0001d49f', + '\\' : '\U00002130', + '\\' : '\U00002131', + '\\' : '\U0001d4a2', + '\\' : '\U0000210b', + '\\' : '\U00002110', + '\\' : '\U0001d4a5', + '\\' : '\U0001d4a6', + '\\' : '\U00002112', + '\\' : '\U00002133', + '\\' : '\U0001d4a9', + '\\' : '\U0001d4aa', + '\\

' : '\U0001d5c9', + '\\' : '\U0001d5ca', + '\\' : '\U0001d5cb', + '\\' : '\U0001d5cc', + '\\' : '\U0001d5cd', + '\\' : '\U0001d5ce', + '\\' : '\U0001d5cf', + '\\' : '\U0001d5d0', + '\\' : '\U0001d5d1', + '\\' : '\U0001d5d2', + '\\' : '\U0001d5d3', + '\\' : '\U0001d504', + '\\' : '\U0001d505', + '\\' : '\U0000212d', + '\\

' : '\U0001d507', + '\\' : '\U0001d508', + '\\' : '\U0001d509', + '\\' : '\U0001d50a', + '\\' : '\U0000210c', + '\\' : '\U00002111', + '\\' : '\U0001d50d', + '\\' : '\U0001d50e', + '\\' : '\U0001d50f', + '\\' : '\U0001d510', + '\\' : '\U0001d511', + '\\' : '\U0001d512', + '\\' : '\U0001d513', + '\\' : '\U0001d514', + '\\' : '\U0000211c', + '\\' : '\U0001d516', + '\\' : '\U0001d517', + '\\' : '\U0001d518', + '\\' : '\U0001d519', + '\\' : '\U0001d51a', + '\\' : '\U0001d51b', + '\\' : '\U0001d51c', + '\\' : '\U00002128', + '\\' : '\U0001d51e', + '\\' : '\U0001d51f', + '\\' : '\U0001d520', + '\\
' : '\U0001d521', + '\\' : '\U0001d522', + '\\' : '\U0001d523', + '\\' : '\U0001d524', + '\\' : '\U0001d525', + '\\' : '\U0001d526', + '\\' : '\U0001d527', + '\\' : '\U0001d528', + '\\' : '\U0001d529', + '\\' : '\U0001d52a', + '\\' : '\U0001d52b', + '\\' : '\U0001d52c', + '\\' : '\U0001d52d', + '\\' : '\U0001d52e', + '\\' : '\U0001d52f', + '\\' : '\U0001d530', + '\\' : '\U0001d531', + '\\' : '\U0001d532', + '\\' : '\U0001d533', + '\\' : '\U0001d534', + '\\' : '\U0001d535', + '\\' : '\U0001d536', + '\\' : '\U0001d537', + '\\' : '\U000003b1', + '\\' : '\U000003b2', + '\\' : '\U000003b3', + '\\' : '\U000003b4', + '\\' : '\U000003b5', + '\\' : '\U000003b6', + '\\' : '\U000003b7', + '\\' : '\U000003b8', + '\\' : '\U000003b9', + '\\' : '\U000003ba', + '\\' : '\U000003bb', + '\\' : '\U000003bc', + '\\' : '\U000003bd', + '\\' : '\U000003be', + '\\' : '\U000003c0', + '\\' : '\U000003c1', + '\\' : '\U000003c3', + '\\' : '\U000003c4', + '\\' : '\U000003c5', + '\\' : '\U000003c6', + '\\' : '\U000003c7', + '\\' : '\U000003c8', + '\\' : '\U000003c9', + '\\' : '\U00000393', + '\\' : '\U00000394', + '\\' : '\U00000398', + '\\' : '\U0000039b', + '\\' : '\U0000039e', + '\\' : '\U000003a0', + '\\' : '\U000003a3', + '\\' : '\U000003a5', + '\\' : '\U000003a6', + '\\' : '\U000003a8', + '\\' : '\U000003a9', + '\\' : '\U0001d539', + '\\' : '\U00002102', + '\\' : '\U00002115', + '\\' : '\U0000211a', + '\\' : '\U0000211d', + '\\' : '\U00002124', + '\\' : '\U00002190', + '\\' : '\U000027f5', + '\\' : '\U00002192', + '\\' : '\U000027f6', + '\\' : '\U000021d0', + '\\' : '\U000027f8', + '\\' : '\U000021d2', + '\\' : '\U000027f9', + '\\' : '\U00002194', + '\\' : '\U000027f7', + '\\' : '\U000021d4', + '\\' : '\U000027fa', + '\\' : '\U000021a6', + '\\' : '\U000027fc', + '\\' : '\U00002500', + '\\' : '\U00002550', + '\\' : '\U000021a9', + '\\' : '\U000021aa', + '\\' : '\U000021bd', + '\\' : '\U000021c1', + '\\' : '\U000021bc', + '\\' : '\U000021c0', + '\\' : '\U000021cc', + '\\' : '\U0000219d', + '\\' : '\U000021c3', + '\\' : '\U000021c2', + '\\' : '\U000021bf', + '\\' : '\U000021be', + '\\' : '\U000021be', + '\\' : '\U00002237', + '\\' : '\U00002191', + '\\' : '\U000021d1', + '\\' : '\U00002193', + '\\' : '\U000021d3', + '\\' : '\U00002195', + '\\' : '\U000021d5', + '\\' : '\U000027e8', + '\\' : '\U000027e9', + '\\' : '\U00002308', + '\\' : '\U00002309', + '\\' : '\U0000230a', + '\\' : '\U0000230b', + '\\' : '\U00002987', + '\\' : '\U00002988', + '\\' : '\U000027e6', + '\\' : '\U000027e7', + '\\' : '\U00002983', + '\\' : '\U00002984', + '\\' : '\U000000ab', + '\\' : '\U000000bb', + '\\' : '\U000022a5', + '\\' : '\U000022a4', + '\\' : '\U00002227', + '\\' : '\U000022c0', + '\\' : '\U00002228', + '\\' : '\U000022c1', + '\\' : '\U00002200', + '\\' : '\U00002203', + '\\' : '\U00002204', + '\\' : '\U000000ac', + '\\' : '\U000025a1', + '\\' : '\U000025c7', + '\\' : '\U000022a2', + '\\' : '\U000022a8', + '\\' : '\U000022a9', + '\\' : '\U000022ab', + '\\' : '\U000022a3', + '\\' : '\U0000221a', + '\\' : '\U00002264', + '\\' : '\U00002265', + '\\' : '\U0000226a', + '\\' : '\U0000226b', + '\\' : '\U00002272', + '\\' : '\U00002273', + '\\' : '\U00002a85', + '\\' : '\U00002a86', + '\\' : '\U00002208', + '\\' : '\U00002209', + '\\' : '\U00002282', + '\\' : '\U00002283', + '\\' : '\U00002286', + '\\' : '\U00002287', + '\\' : '\U0000228f', + '\\' : '\U00002290', + '\\' : '\U00002291', + '\\' : '\U00002292', + '\\' : '\U00002229', + '\\' : '\U000022c2', + '\\' : '\U0000222a', + '\\' : '\U000022c3', + '\\' : '\U00002294', + '\\' : '\U00002a06', + '\\' : '\U00002293', + '\\' : '\U00002a05', + '\\' : '\U00002216', + '\\' : '\U0000221d', + '\\' : '\U0000228e', + '\\' : '\U00002a04', + '\\' : '\U00002260', + '\\' : '\U0000223c', + '\\' : '\U00002250', + '\\' : '\U00002243', + '\\' : '\U00002248', + '\\' : '\U0000224d', + '\\' : '\U00002245', + '\\' : '\U00002323', + '\\' : '\U00002261', + '\\' : '\U00002322', + '\\' : '\U000022c8', + '\\' : '\U00002a1d', + '\\' : '\U0000227a', + '\\' : '\U0000227b', + '\\' : '\U0000227c', + '\\' : '\U0000227d', + '\\' : '\U00002225', + '\\' : '\U000000a6', + '\\' : '\U000000b1', + '\\' : '\U00002213', + '\\' : '\U000000d7', + '\\
' : '\U000000f7', + '\\' : '\U000022c5', + '\\' : '\U000022c6', + '\\' : '\U00002219', + '\\' : '\U00002218', + '\\' : '\U00002020', + '\\' : '\U00002021', + '\\' : '\U000022b2', + '\\' : '\U000022b3', + '\\' : '\U000022b4', + '\\' : '\U000022b5', + '\\' : '\U000025c3', + '\\' : '\U000025b9', + '\\' : '\U000025b3', + '\\' : '\U0000225c', + '\\' : '\U00002295', + '\\' : '\U00002a01', + '\\' : '\U00002297', + '\\' : '\U00002a02', + '\\' : '\U00002299', + '\\' : '\U00002a00', + '\\' : '\U00002296', + '\\' : '\U00002298', + '\\' : '\U00002026', + '\\' : '\U000022ef', + '\\' : '\U00002211', + '\\' : '\U0000220f', + '\\' : '\U00002210', + '\\' : '\U0000221e', + '\\' : '\U0000222b', + '\\' : '\U0000222e', + '\\' : '\U00002663', + '\\' : '\U00002662', + '\\' : '\U00002661', + '\\' : '\U00002660', + '\\' : '\U00002135', + '\\' : '\U00002205', + '\\' : '\U00002207', + '\\' : '\U00002202', + '\\' : '\U0000266d', + '\\' : '\U0000266e', + '\\' : '\U0000266f', + '\\' : '\U00002220', + '\\' : '\U000000a9', + '\\' : '\U000000ae', + '\\' : '\U000000ad', + '\\' : '\U000000af', + '\\' : '\U000000bc', + '\\' : '\U000000bd', + '\\' : '\U000000be', + '\\' : '\U000000aa', + '\\' : '\U000000ba', + '\\
' : '\U000000a7', + '\\' : '\U000000b6', + '\\' : '\U000000a1', + '\\' : '\U000000bf', + '\\' : '\U000020ac', + '\\' : '\U000000a3', + '\\' : '\U000000a5', + '\\' : '\U000000a2', + '\\' : '\U000000a4', + '\\' : '\U000000b0', + '\\' : '\U00002a3f', + '\\' : '\U00002127', + '\\' : '\U000025ca', + '\\' : '\U00002118', + '\\' : '\U00002240', + '\\' : '\U000022c4', + '\\' : '\U000000b4', + '\\' : '\U00000131', + '\\' : '\U000000a8', + '\\' : '\U000000b8', + '\\' : '\U000002dd', + '\\' : '\U000003f5', + '\\' : '\U000023ce', + '\\' : '\U00002039', + '\\' : '\U0000203a', + '\\' : '\U00002302', + '\\<^sub>' : '\U000021e9', + '\\<^sup>' : '\U000021e7', + '\\<^bold>' : '\U00002759', + '\\<^bsub>' : '\U000021d8', + '\\<^esub>' : '\U000021d9', + '\\<^bsup>' : '\U000021d7', + '\\<^esup>' : '\U000021d6', + } + + lang_map = {'isabelle' : isabelle_symbols, 'latex' : latex_symbols} + + def __init__(self, **options): + Filter.__init__(self, **options) + lang = get_choice_opt(options, 'lang', + ['isabelle', 'latex'], 'isabelle') + self.symbols = self.lang_map[lang] + + def filter(self, lexer, stream): + for ttype, value in stream: + if value in self.symbols: + yield ttype, self.symbols[value] + else: + yield ttype, value + + +class KeywordCaseFilter(Filter): + """Convert keywords to lowercase or uppercase or capitalize them, which + means first letter uppercase, rest lowercase. + + This can be useful e.g. if you highlight Pascal code and want to adapt the + code to your styleguide. + + Options accepted: + + `case` : string + The casing to convert keywords to. Must be one of ``'lower'``, + ``'upper'`` or ``'capitalize'``. The default is ``'lower'``. + """ + + def __init__(self, **options): + Filter.__init__(self, **options) + case = get_choice_opt(options, 'case', + ['lower', 'upper', 'capitalize'], 'lower') + self.convert = getattr(str, case) + + def filter(self, lexer, stream): + for ttype, value in stream: + if ttype in Keyword: + yield ttype, self.convert(value) + else: + yield ttype, value + + +class NameHighlightFilter(Filter): + """Highlight a normal Name (and Name.*) token with a different token type. + + Example:: + + filter = NameHighlightFilter( + names=['foo', 'bar', 'baz'], + tokentype=Name.Function, + ) + + This would highlight the names "foo", "bar" and "baz" + as functions. `Name.Function` is the default token type. + + Options accepted: + + `names` : list of strings + A list of names that should be given the different token type. + There is no default. + `tokentype` : TokenType or string + A token type or a string containing a token type name that is + used for highlighting the strings in `names`. The default is + `Name.Function`. + """ + + def __init__(self, **options): + Filter.__init__(self, **options) + self.names = set(get_list_opt(options, 'names', [])) + tokentype = options.get('tokentype') + if tokentype: + self.tokentype = string_to_tokentype(tokentype) + else: + self.tokentype = Name.Function + + def filter(self, lexer, stream): + for ttype, value in stream: + if ttype in Name and value in self.names: + yield self.tokentype, value + else: + yield ttype, value + + +class ErrorToken(Exception): + pass + + +class RaiseOnErrorTokenFilter(Filter): + """Raise an exception when the lexer generates an error token. + + Options accepted: + + `excclass` : Exception class + The exception class to raise. + The default is `pygments.filters.ErrorToken`. + + .. versionadded:: 0.8 + """ + + def __init__(self, **options): + Filter.__init__(self, **options) + self.exception = options.get('excclass', ErrorToken) + try: + # issubclass() will raise TypeError if first argument is not a class + if not issubclass(self.exception, Exception): + raise TypeError + except TypeError: + raise OptionError('excclass option is not an exception class') + + def filter(self, lexer, stream): + for ttype, value in stream: + if ttype is Error: + raise self.exception(value) + yield ttype, value + + +class VisibleWhitespaceFilter(Filter): + """Convert tabs, newlines and/or spaces to visible characters. + + Options accepted: + + `spaces` : string or bool + If this is a one-character string, spaces will be replaces by this string. + If it is another true value, spaces will be replaced by ``·`` (unicode + MIDDLE DOT). If it is a false value, spaces will not be replaced. The + default is ``False``. + `tabs` : string or bool + The same as for `spaces`, but the default replacement character is ``»`` + (unicode RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK). The default value + is ``False``. Note: this will not work if the `tabsize` option for the + lexer is nonzero, as tabs will already have been expanded then. + `tabsize` : int + If tabs are to be replaced by this filter (see the `tabs` option), this + is the total number of characters that a tab should be expanded to. + The default is ``8``. + `newlines` : string or bool + The same as for `spaces`, but the default replacement character is ``¶`` + (unicode PILCROW SIGN). The default value is ``False``. + `wstokentype` : bool + If true, give whitespace the special `Whitespace` token type. This allows + styling the visible whitespace differently (e.g. greyed out), but it can + disrupt background colors. The default is ``True``. + + .. versionadded:: 0.8 + """ + + def __init__(self, **options): + Filter.__init__(self, **options) + for name, default in [('spaces', '·'), + ('tabs', '»'), + ('newlines', '¶')]: + opt = options.get(name, False) + if isinstance(opt, str) and len(opt) == 1: + setattr(self, name, opt) + else: + setattr(self, name, (opt and default or '')) + tabsize = get_int_opt(options, 'tabsize', 8) + if self.tabs: + self.tabs += ' ' * (tabsize - 1) + if self.newlines: + self.newlines += '\n' + self.wstt = get_bool_opt(options, 'wstokentype', True) + + def filter(self, lexer, stream): + if self.wstt: + spaces = self.spaces or ' ' + tabs = self.tabs or '\t' + newlines = self.newlines or '\n' + regex = re.compile(r'\s') + + def replacefunc(wschar): + if wschar == ' ': + return spaces + elif wschar == '\t': + return tabs + elif wschar == '\n': + return newlines + return wschar + + for ttype, value in stream: + yield from _replace_special(ttype, value, regex, Whitespace, + replacefunc) + else: + spaces, tabs, newlines = self.spaces, self.tabs, self.newlines + # simpler processing + for ttype, value in stream: + if spaces: + value = value.replace(' ', spaces) + if tabs: + value = value.replace('\t', tabs) + if newlines: + value = value.replace('\n', newlines) + yield ttype, value + + +class GobbleFilter(Filter): + """Gobbles source code lines (eats initial characters). + + This filter drops the first ``n`` characters off every line of code. This + may be useful when the source code fed to the lexer is indented by a fixed + amount of space that isn't desired in the output. + + Options accepted: + + `n` : int + The number of characters to gobble. + + .. versionadded:: 1.2 + """ + def __init__(self, **options): + Filter.__init__(self, **options) + self.n = get_int_opt(options, 'n', 0) + + def gobble(self, value, left): + if left < len(value): + return value[left:], 0 + else: + return '', left - len(value) + + def filter(self, lexer, stream): + n = self.n + left = n # How many characters left to gobble. + for ttype, value in stream: + # Remove ``left`` tokens from first line, ``n`` from all others. + parts = value.split('\n') + (parts[0], left) = self.gobble(parts[0], left) + for i in range(1, len(parts)): + (parts[i], left) = self.gobble(parts[i], n) + value = '\n'.join(parts) + + if value != '': + yield ttype, value + + +class TokenMergeFilter(Filter): + """Merges consecutive tokens with the same token type in the output + stream of a lexer. + + .. versionadded:: 1.2 + """ + def __init__(self, **options): + Filter.__init__(self, **options) + + def filter(self, lexer, stream): + current_type = None + current_value = None + for ttype, value in stream: + if ttype is current_type: + current_value += value + else: + if current_type is not None: + yield current_type, current_value + current_type = ttype + current_value = value + if current_type is not None: + yield current_type, current_value + + +FILTERS = { + 'codetagify': CodeTagFilter, + 'keywordcase': KeywordCaseFilter, + 'highlight': NameHighlightFilter, + 'raiseonerror': RaiseOnErrorTokenFilter, + 'whitespace': VisibleWhitespaceFilter, + 'gobble': GobbleFilter, + 'tokenmerge': TokenMergeFilter, + 'symbols': SymbolFilter, +} diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/filters/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pygments/filters/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..a6864bf1 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/pygments/filters/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/formatter.py b/Backend/venv/lib/python3.12/site-packages/pygments/formatter.py new file mode 100644 index 00000000..a20d3039 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/pygments/formatter.py @@ -0,0 +1,129 @@ +""" + pygments.formatter + ~~~~~~~~~~~~~~~~~~ + + Base formatter class. + + :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +import codecs + +from pygments.util import get_bool_opt +from pygments.styles import get_style_by_name + +__all__ = ['Formatter'] + + +def _lookup_style(style): + if isinstance(style, str): + return get_style_by_name(style) + return style + + +class Formatter: + """ + Converts a token stream to text. + + Formatters should have attributes to help selecting them. These + are similar to the corresponding :class:`~pygments.lexer.Lexer` + attributes. + + .. autoattribute:: name + :no-value: + + .. autoattribute:: aliases + :no-value: + + .. autoattribute:: filenames + :no-value: + + You can pass options as keyword arguments to the constructor. + All formatters accept these basic options: + + ``style`` + The style to use, can be a string or a Style subclass + (default: "default"). Not used by e.g. the + TerminalFormatter. + ``full`` + Tells the formatter to output a "full" document, i.e. + a complete self-contained document. This doesn't have + any effect for some formatters (default: false). + ``title`` + If ``full`` is true, the title that should be used to + caption the document (default: ''). + ``encoding`` + If given, must be an encoding name. This will be used to + convert the Unicode token strings to byte strings in the + output. If it is "" or None, Unicode strings will be written + to the output file, which most file-like objects do not + support (default: None). + ``outencoding`` + Overrides ``encoding`` if given. + + """ + + #: Full name for the formatter, in human-readable form. + name = None + + #: A list of short, unique identifiers that can be used to lookup + #: the formatter from a list, e.g. using :func:`.get_formatter_by_name()`. + aliases = [] + + #: A list of fnmatch patterns that match filenames for which this + #: formatter can produce output. The patterns in this list should be unique + #: among all formatters. + filenames = [] + + #: If True, this formatter outputs Unicode strings when no encoding + #: option is given. + unicodeoutput = True + + def __init__(self, **options): + """ + As with lexers, this constructor takes arbitrary optional arguments, + and if you override it, you should first process your own options, then + call the base class implementation. + """ + self.style = _lookup_style(options.get('style', 'default')) + self.full = get_bool_opt(options, 'full', False) + self.title = options.get('title', '') + self.encoding = options.get('encoding', None) or None + if self.encoding in ('guess', 'chardet'): + # can happen for e.g. pygmentize -O encoding=guess + self.encoding = 'utf-8' + self.encoding = options.get('outencoding') or self.encoding + self.options = options + + def get_style_defs(self, arg=''): + """ + This method must return statements or declarations suitable to define + the current style for subsequent highlighted text (e.g. CSS classes + in the `HTMLFormatter`). + + The optional argument `arg` can be used to modify the generation and + is formatter dependent (it is standardized because it can be given on + the command line). + + This method is called by the ``-S`` :doc:`command-line option `, + the `arg` is then given by the ``-a`` option. + """ + return '' + + def format(self, tokensource, outfile): + """ + This method must format the tokens from the `tokensource` iterable and + write the formatted version to the file object `outfile`. + + Formatter options can control how exactly the tokens are converted. + """ + if self.encoding: + # wrap the outfile in a StreamWriter + outfile = codecs.lookup(self.encoding)[3](outfile) + return self.format_unencoded(tokensource, outfile) + + # Allow writing Formatter[str] or Formatter[bytes]. That's equivalent to + # Formatter. This helps when using third-party type stubs from typeshed. + def __class_getitem__(cls, name): + return cls diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/formatters/__init__.py b/Backend/venv/lib/python3.12/site-packages/pygments/formatters/__init__.py new file mode 100644 index 00000000..b24931cd --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/pygments/formatters/__init__.py @@ -0,0 +1,157 @@ +""" + pygments.formatters + ~~~~~~~~~~~~~~~~~~~ + + Pygments formatters. + + :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +import re +import sys +import types +import fnmatch +from os.path import basename + +from pygments.formatters._mapping import FORMATTERS +from pygments.plugin import find_plugin_formatters +from pygments.util import ClassNotFound + +__all__ = ['get_formatter_by_name', 'get_formatter_for_filename', + 'get_all_formatters', 'load_formatter_from_file'] + list(FORMATTERS) + +_formatter_cache = {} # classes by name +_pattern_cache = {} + + +def _fn_matches(fn, glob): + """Return whether the supplied file name fn matches pattern filename.""" + if glob not in _pattern_cache: + pattern = _pattern_cache[glob] = re.compile(fnmatch.translate(glob)) + return pattern.match(fn) + return _pattern_cache[glob].match(fn) + + +def _load_formatters(module_name): + """Load a formatter (and all others in the module too).""" + mod = __import__(module_name, None, None, ['__all__']) + for formatter_name in mod.__all__: + cls = getattr(mod, formatter_name) + _formatter_cache[cls.name] = cls + + +def get_all_formatters(): + """Return a generator for all formatter classes.""" + # NB: this returns formatter classes, not info like get_all_lexers(). + for info in FORMATTERS.values(): + if info[1] not in _formatter_cache: + _load_formatters(info[0]) + yield _formatter_cache[info[1]] + for _, formatter in find_plugin_formatters(): + yield formatter + + +def find_formatter_class(alias): + """Lookup a formatter by alias. + + Returns None if not found. + """ + for module_name, name, aliases, _, _ in FORMATTERS.values(): + if alias in aliases: + if name not in _formatter_cache: + _load_formatters(module_name) + return _formatter_cache[name] + for _, cls in find_plugin_formatters(): + if alias in cls.aliases: + return cls + + +def get_formatter_by_name(_alias, **options): + """ + Return an instance of a :class:`.Formatter` subclass that has `alias` in its + aliases list. The formatter is given the `options` at its instantiation. + + Will raise :exc:`pygments.util.ClassNotFound` if no formatter with that + alias is found. + """ + cls = find_formatter_class(_alias) + if cls is None: + raise ClassNotFound(f"no formatter found for name {_alias!r}") + return cls(**options) + + +def load_formatter_from_file(filename, formattername="CustomFormatter", **options): + """ + Return a `Formatter` subclass instance loaded from the provided file, relative + to the current directory. + + The file is expected to contain a Formatter class named ``formattername`` + (by default, CustomFormatter). Users should be very careful with the input, because + this method is equivalent to running ``eval()`` on the input file. The formatter is + given the `options` at its instantiation. + + :exc:`pygments.util.ClassNotFound` is raised if there are any errors loading + the formatter. + + .. versionadded:: 2.2 + """ + try: + # This empty dict will contain the namespace for the exec'd file + custom_namespace = {} + with open(filename, 'rb') as f: + exec(f.read(), custom_namespace) + # Retrieve the class `formattername` from that namespace + if formattername not in custom_namespace: + raise ClassNotFound(f'no valid {formattername} class found in {filename}') + formatter_class = custom_namespace[formattername] + # And finally instantiate it with the options + return formatter_class(**options) + except OSError as err: + raise ClassNotFound(f'cannot read {filename}: {err}') + except ClassNotFound: + raise + except Exception as err: + raise ClassNotFound(f'error when loading custom formatter: {err}') + + +def get_formatter_for_filename(fn, **options): + """ + Return a :class:`.Formatter` subclass instance that has a filename pattern + matching `fn`. The formatter is given the `options` at its instantiation. + + Will raise :exc:`pygments.util.ClassNotFound` if no formatter for that filename + is found. + """ + fn = basename(fn) + for modname, name, _, filenames, _ in FORMATTERS.values(): + for filename in filenames: + if _fn_matches(fn, filename): + if name not in _formatter_cache: + _load_formatters(modname) + return _formatter_cache[name](**options) + for _name, cls in find_plugin_formatters(): + for filename in cls.filenames: + if _fn_matches(fn, filename): + return cls(**options) + raise ClassNotFound(f"no formatter found for file name {fn!r}") + + +class _automodule(types.ModuleType): + """Automatically import formatters.""" + + def __getattr__(self, name): + info = FORMATTERS.get(name) + if info: + _load_formatters(info[0]) + cls = _formatter_cache[info[1]] + setattr(self, name, cls) + return cls + raise AttributeError(name) + + +oldmod = sys.modules[__name__] +newmod = _automodule(__name__) +newmod.__dict__.update(oldmod.__dict__) +sys.modules[__name__] = newmod +del newmod.newmod, newmod.oldmod, newmod.sys, newmod.types diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/formatters/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pygments/formatters/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..50178757 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/pygments/formatters/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/formatters/__pycache__/_mapping.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pygments/formatters/__pycache__/_mapping.cpython-312.pyc new file mode 100644 index 00000000..ef792eb3 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/pygments/formatters/__pycache__/_mapping.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/formatters/__pycache__/bbcode.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pygments/formatters/__pycache__/bbcode.cpython-312.pyc new file mode 100644 index 00000000..b88976d7 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/pygments/formatters/__pycache__/bbcode.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/formatters/__pycache__/groff.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pygments/formatters/__pycache__/groff.cpython-312.pyc new file mode 100644 index 00000000..29cb0632 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/pygments/formatters/__pycache__/groff.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/formatters/__pycache__/html.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pygments/formatters/__pycache__/html.cpython-312.pyc new file mode 100644 index 00000000..f6e18bc0 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/pygments/formatters/__pycache__/html.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/formatters/__pycache__/img.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pygments/formatters/__pycache__/img.cpython-312.pyc new file mode 100644 index 00000000..79b661be Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/pygments/formatters/__pycache__/img.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/formatters/__pycache__/irc.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pygments/formatters/__pycache__/irc.cpython-312.pyc new file mode 100644 index 00000000..d80740d5 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/pygments/formatters/__pycache__/irc.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/formatters/__pycache__/latex.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pygments/formatters/__pycache__/latex.cpython-312.pyc new file mode 100644 index 00000000..6cf2d3ad Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/pygments/formatters/__pycache__/latex.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/formatters/__pycache__/other.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pygments/formatters/__pycache__/other.cpython-312.pyc new file mode 100644 index 00000000..1f7ee4b8 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/pygments/formatters/__pycache__/other.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/formatters/__pycache__/pangomarkup.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pygments/formatters/__pycache__/pangomarkup.cpython-312.pyc new file mode 100644 index 00000000..4486c54e Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/pygments/formatters/__pycache__/pangomarkup.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/formatters/__pycache__/rtf.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pygments/formatters/__pycache__/rtf.cpython-312.pyc new file mode 100644 index 00000000..bf9d128d Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/pygments/formatters/__pycache__/rtf.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/formatters/__pycache__/svg.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pygments/formatters/__pycache__/svg.cpython-312.pyc new file mode 100644 index 00000000..4c836b0c Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/pygments/formatters/__pycache__/svg.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/formatters/__pycache__/terminal.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pygments/formatters/__pycache__/terminal.cpython-312.pyc new file mode 100644 index 00000000..f174d1d1 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/pygments/formatters/__pycache__/terminal.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/formatters/__pycache__/terminal256.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/pygments/formatters/__pycache__/terminal256.cpython-312.pyc new file mode 100644 index 00000000..cd1f881f Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/pygments/formatters/__pycache__/terminal256.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/formatters/_mapping.py b/Backend/venv/lib/python3.12/site-packages/pygments/formatters/_mapping.py new file mode 100644 index 00000000..72ca8404 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/pygments/formatters/_mapping.py @@ -0,0 +1,23 @@ +# Automatically generated by scripts/gen_mapfiles.py. +# DO NOT EDIT BY HAND; run `tox -e mapfiles` instead. + +FORMATTERS = { + 'BBCodeFormatter': ('pygments.formatters.bbcode', 'BBCode', ('bbcode', 'bb'), (), 'Format tokens with BBcodes. These formatting codes are used by many bulletin boards, so you can highlight your sourcecode with pygments before posting it there.'), + 'BmpImageFormatter': ('pygments.formatters.img', 'img_bmp', ('bmp', 'bitmap'), ('*.bmp',), 'Create a bitmap image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'), + 'GifImageFormatter': ('pygments.formatters.img', 'img_gif', ('gif',), ('*.gif',), 'Create a GIF image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'), + 'GroffFormatter': ('pygments.formatters.groff', 'groff', ('groff', 'troff', 'roff'), (), 'Format tokens with groff escapes to change their color and font style.'), + 'HtmlFormatter': ('pygments.formatters.html', 'HTML', ('html',), ('*.html', '*.htm'), "Format tokens as HTML 4 ```` tags. By default, the content is enclosed in a ``
`` tag, itself wrapped in a ``
`` tag (but see the `nowrap` option). The ``
``'s CSS class can be set by the `cssclass` option."), + 'IRCFormatter': ('pygments.formatters.irc', 'IRC', ('irc', 'IRC'), (), 'Format tokens with IRC color sequences'), + 'ImageFormatter': ('pygments.formatters.img', 'img', ('img', 'IMG', 'png'), ('*.png',), 'Create a PNG image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'), + 'JpgImageFormatter': ('pygments.formatters.img', 'img_jpg', ('jpg', 'jpeg'), ('*.jpg',), 'Create a JPEG image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'), + 'LatexFormatter': ('pygments.formatters.latex', 'LaTeX', ('latex', 'tex'), ('*.tex',), 'Format tokens as LaTeX code. This needs the `fancyvrb` and `color` standard packages.'), + 'NullFormatter': ('pygments.formatters.other', 'Text only', ('text', 'null'), ('*.txt',), 'Output the text unchanged without any formatting.'), + 'PangoMarkupFormatter': ('pygments.formatters.pangomarkup', 'Pango Markup', ('pango', 'pangomarkup'), (), 'Format tokens as Pango Markup code. It can then be rendered to an SVG.'), + 'RawTokenFormatter': ('pygments.formatters.other', 'Raw tokens', ('raw', 'tokens'), ('*.raw',), 'Format tokens as a raw representation for storing token streams.'), + 'RtfFormatter': ('pygments.formatters.rtf', 'RTF', ('rtf',), ('*.rtf',), 'Format tokens as RTF markup. This formatter automatically outputs full RTF documents with color information and other useful stuff. Perfect for Copy and Paste into Microsoft(R) Word(R) documents.'), + 'SvgFormatter': ('pygments.formatters.svg', 'SVG', ('svg',), ('*.svg',), 'Format tokens as an SVG graphics file. This formatter is still experimental. Each line of code is a ```` element with explicit ``x`` and ``y`` coordinates containing ```` elements with the individual token styles.'), + 'Terminal256Formatter': ('pygments.formatters.terminal256', 'Terminal256', ('terminal256', 'console256', '256'), (), 'Format tokens with ANSI color sequences, for output in a 256-color terminal or console. Like in `TerminalFormatter` color sequences are terminated at newlines, so that paging the output works correctly.'), + 'TerminalFormatter': ('pygments.formatters.terminal', 'Terminal', ('terminal', 'console'), (), 'Format tokens with ANSI color sequences, for output in a text console. Color sequences are terminated at newlines, so that paging the output works correctly.'), + 'TerminalTrueColorFormatter': ('pygments.formatters.terminal256', 'TerminalTrueColor', ('terminal16m', 'console16m', '16m'), (), 'Format tokens with ANSI color sequences, for output in a true-color terminal or console. Like in `TerminalFormatter` color sequences are terminated at newlines, so that paging the output works correctly.'), + 'TestcaseFormatter': ('pygments.formatters.other', 'Testcase', ('testcase',), (), 'Format tokens as appropriate for a new testcase.'), +} diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/formatters/bbcode.py b/Backend/venv/lib/python3.12/site-packages/pygments/formatters/bbcode.py new file mode 100644 index 00000000..339edf9d --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/pygments/formatters/bbcode.py @@ -0,0 +1,108 @@ +""" + pygments.formatters.bbcode + ~~~~~~~~~~~~~~~~~~~~~~~~~~ + + BBcode formatter. + + :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + + +from pygments.formatter import Formatter +from pygments.util import get_bool_opt + +__all__ = ['BBCodeFormatter'] + + +class BBCodeFormatter(Formatter): + """ + Format tokens with BBcodes. These formatting codes are used by many + bulletin boards, so you can highlight your sourcecode with pygments before + posting it there. + + This formatter has no support for background colors and borders, as there + are no common BBcode tags for that. + + Some board systems (e.g. phpBB) don't support colors in their [code] tag, + so you can't use the highlighting together with that tag. + Text in a [code] tag usually is shown with a monospace font (which this + formatter can do with the ``monofont`` option) and no spaces (which you + need for indentation) are removed. + + Additional options accepted: + + `style` + The style to use, can be a string or a Style subclass (default: + ``'default'``). + + `codetag` + If set to true, put the output into ``[code]`` tags (default: + ``false``) + + `monofont` + If set to true, add a tag to show the code with a monospace font + (default: ``false``). + """ + name = 'BBCode' + aliases = ['bbcode', 'bb'] + filenames = [] + + def __init__(self, **options): + Formatter.__init__(self, **options) + self._code = get_bool_opt(options, 'codetag', False) + self._mono = get_bool_opt(options, 'monofont', False) + + self.styles = {} + self._make_styles() + + def _make_styles(self): + for ttype, ndef in self.style: + start = end = '' + if ndef['color']: + start += '[color=#{}]'.format(ndef['color']) + end = '[/color]' + end + if ndef['bold']: + start += '[b]' + end = '[/b]' + end + if ndef['italic']: + start += '[i]' + end = '[/i]' + end + if ndef['underline']: + start += '[u]' + end = '[/u]' + end + # there are no common BBcodes for background-color and border + + self.styles[ttype] = start, end + + def format_unencoded(self, tokensource, outfile): + if self._code: + outfile.write('[code]') + if self._mono: + outfile.write('[font=monospace]') + + lastval = '' + lasttype = None + + for ttype, value in tokensource: + while ttype not in self.styles: + ttype = ttype.parent + if ttype == lasttype: + lastval += value + else: + if lastval: + start, end = self.styles[lasttype] + outfile.write(''.join((start, lastval, end))) + lastval = value + lasttype = ttype + + if lastval: + start, end = self.styles[lasttype] + outfile.write(''.join((start, lastval, end))) + + if self._mono: + outfile.write('[/font]') + if self._code: + outfile.write('[/code]') + if self._code or self._mono: + outfile.write('\n') diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/formatters/groff.py b/Backend/venv/lib/python3.12/site-packages/pygments/formatters/groff.py new file mode 100644 index 00000000..028fec4e --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/pygments/formatters/groff.py @@ -0,0 +1,170 @@ +""" + pygments.formatters.groff + ~~~~~~~~~~~~~~~~~~~~~~~~~ + + Formatter for groff output. + + :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +import math +from pygments.formatter import Formatter +from pygments.util import get_bool_opt, get_int_opt + +__all__ = ['GroffFormatter'] + + +class GroffFormatter(Formatter): + """ + Format tokens with groff escapes to change their color and font style. + + .. versionadded:: 2.11 + + Additional options accepted: + + `style` + The style to use, can be a string or a Style subclass (default: + ``'default'``). + + `monospaced` + If set to true, monospace font will be used (default: ``true``). + + `linenos` + If set to true, print the line numbers (default: ``false``). + + `wrap` + Wrap lines to the specified number of characters. Disabled if set to 0 + (default: ``0``). + """ + + name = 'groff' + aliases = ['groff','troff','roff'] + filenames = [] + + def __init__(self, **options): + Formatter.__init__(self, **options) + + self.monospaced = get_bool_opt(options, 'monospaced', True) + self.linenos = get_bool_opt(options, 'linenos', False) + self._lineno = 0 + self.wrap = get_int_opt(options, 'wrap', 0) + self._linelen = 0 + + self.styles = {} + self._make_styles() + + + def _make_styles(self): + regular = '\\f[CR]' if self.monospaced else '\\f[R]' + bold = '\\f[CB]' if self.monospaced else '\\f[B]' + italic = '\\f[CI]' if self.monospaced else '\\f[I]' + + for ttype, ndef in self.style: + start = end = '' + if ndef['color']: + start += '\\m[{}]'.format(ndef['color']) + end = '\\m[]' + end + if ndef['bold']: + start += bold + end = regular + end + if ndef['italic']: + start += italic + end = regular + end + if ndef['bgcolor']: + start += '\\M[{}]'.format(ndef['bgcolor']) + end = '\\M[]' + end + + self.styles[ttype] = start, end + + + def _define_colors(self, outfile): + colors = set() + for _, ndef in self.style: + if ndef['color'] is not None: + colors.add(ndef['color']) + + for color in sorted(colors): + outfile.write('.defcolor ' + color + ' rgb #' + color + '\n') + + + def _write_lineno(self, outfile): + self._lineno += 1 + outfile.write("%s% 4d " % (self._lineno != 1 and '\n' or '', self._lineno)) + + + def _wrap_line(self, line): + length = len(line.rstrip('\n')) + space = ' ' if self.linenos else '' + newline = '' + + if length > self.wrap: + for i in range(0, math.floor(length / self.wrap)): + chunk = line[i*self.wrap:i*self.wrap+self.wrap] + newline += (chunk + '\n' + space) + remainder = length % self.wrap + if remainder > 0: + newline += line[-remainder-1:] + self._linelen = remainder + elif self._linelen + length > self.wrap: + newline = ('\n' + space) + line + self._linelen = length + else: + newline = line + self._linelen += length + + return newline + + + def _escape_chars(self, text): + text = text.replace('\\', '\\[u005C]'). \ + replace('.', '\\[char46]'). \ + replace('\'', '\\[u0027]'). \ + replace('`', '\\[u0060]'). \ + replace('~', '\\[u007E]') + copy = text + + for char in copy: + if len(char) != len(char.encode()): + uni = char.encode('unicode_escape') \ + .decode()[1:] \ + .replace('x', 'u00') \ + .upper() + text = text.replace(char, '\\[u' + uni[1:] + ']') + + return text + + + def format_unencoded(self, tokensource, outfile): + self._define_colors(outfile) + + outfile.write('.nf\n\\f[CR]\n') + + if self.linenos: + self._write_lineno(outfile) + + for ttype, value in tokensource: + while ttype not in self.styles: + ttype = ttype.parent + start, end = self.styles[ttype] + + for line in value.splitlines(True): + if self.wrap > 0: + line = self._wrap_line(line) + + if start and end: + text = self._escape_chars(line.rstrip('\n')) + if text != '': + outfile.write(''.join((start, text, end))) + else: + outfile.write(self._escape_chars(line.rstrip('\n'))) + + if line.endswith('\n'): + if self.linenos: + self._write_lineno(outfile) + self._linelen = 0 + else: + outfile.write('\n') + self._linelen = 0 + + outfile.write('\n.fi') diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/formatters/html.py b/Backend/venv/lib/python3.12/site-packages/pygments/formatters/html.py new file mode 100644 index 00000000..4ef18368 --- /dev/null +++ b/Backend/venv/lib/python3.12/site-packages/pygments/formatters/html.py @@ -0,0 +1,995 @@ +""" + pygments.formatters.html + ~~~~~~~~~~~~~~~~~~~~~~~~ + + Formatter for HTML output. + + :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +import functools +import os +import sys +import os.path +from io import StringIO + +from pygments.formatter import Formatter +from pygments.token import Token, Text, STANDARD_TYPES +from pygments.util import get_bool_opt, get_int_opt, get_list_opt + +try: + import ctags +except ImportError: + ctags = None + +__all__ = ['HtmlFormatter'] + + +_escape_html_table = { + ord('&'): '&', + ord('<'): '<', + ord('>'): '>', + ord('"'): '"', + ord("'"): ''', +} + + +def escape_html(text, table=_escape_html_table): + """Escape &, <, > as well as single and double quotes for HTML.""" + return text.translate(table) + + +def webify(color): + if color.startswith('calc') or color.startswith('var'): + return color + else: + # Check if the color can be shortened from 6 to 3 characters + color = color.upper() + if (len(color) == 6 and + ( color[0] == color[1] + and color[2] == color[3] + and color[4] == color[5])): + return f'#{color[0]}{color[2]}{color[4]}' + else: + return f'#{color}' + + +def _get_ttype_class(ttype): + fname = STANDARD_TYPES.get(ttype) + if fname: + return fname + aname = '' + while fname is None: + aname = '-' + ttype[-1] + aname + ttype = ttype.parent + fname = STANDARD_TYPES.get(ttype) + return fname + aname + + +CSSFILE_TEMPLATE = '''\ +/* +generated by Pygments +Copyright 2006-2025 by the Pygments team. +Licensed under the BSD license, see LICENSE for details. +*/ +%(styledefs)s +''' + +DOC_HEADER = '''\ + + + + + %(title)s + + + + +

%(title)s

+ +''' + +DOC_HEADER_EXTERNALCSS = '''\ + + + + + %(title)s + + + + +

%(title)s

+ +''' + +DOC_FOOTER = '''\ + + +''' + + +class HtmlFormatter(Formatter): + r""" + Format tokens as HTML 4 ```` tags. By default, the content is enclosed + in a ``
`` tag, itself wrapped in a ``
`` tag (but see the `nowrap` option). + The ``
``'s CSS class can be set by the `cssclass` option. + + If the `linenos` option is set to ``"table"``, the ``
`` is
+    additionally wrapped inside a ```` which has one row and two
+    cells: one containing the line numbers and one containing the code.
+    Example:
+
+    .. sourcecode:: html
+
+        
+
+ + +
+
1
+            2
+
+
def foo(bar):
+              pass
+            
+
+ + (whitespace added to improve clarity). + + A list of lines can be specified using the `hl_lines` option to make these + lines highlighted (as of Pygments 0.11). + + With the `full` option, a complete HTML 4 document is output, including + the style definitions inside a ``$)', _handle_cssblock), + + include('keywords'), + include('inline'), + ], + 'keywords': [ + (words(( + '\\define', '\\end', 'caption', 'created', 'modified', 'tags', + 'title', 'type'), prefix=r'^', suffix=r'\b'), + Keyword), + ], + 'inline': [ + # escape + (r'\\.', Text), + # created or modified date + (r'\d{17}', Number.Integer), + # italics + (r'(\s)(//[^/]+//)((?=\W|\n))', + bygroups(Text, Generic.Emph, Text)), + # superscript + (r'(\s)(\^\^[^\^]+\^\^)', bygroups(Text, Generic.Emph)), + # subscript + (r'(\s)(,,[^,]+,,)', bygroups(Text, Generic.Emph)), + # underscore + (r'(\s)(__[^_]+__)', bygroups(Text, Generic.Strong)), + # bold + (r"(\s)(''[^']+'')((?=\W|\n))", + bygroups(Text, Generic.Strong, Text)), + # strikethrough + (r'(\s)(~~[^~]+~~)((?=\W|\n))', + bygroups(Text, Generic.Deleted, Text)), + # TiddlyWiki variables + (r'<<[^>]+>>', Name.Tag), + (r'\$\$[^$]+\$\$', Name.Tag), + (r'\$\([^)]+\)\$', Name.Tag), + # TiddlyWiki style or class + (r'^@@.*$', Name.Tag), + # HTML tags + (r']+>', Name.Tag), + # inline code + (r'`[^`]+`', String.Backtick), + # HTML escaped symbols + (r'&\S*?;', String.Regex), + # Wiki links + (r'(\[{2})([^]\|]+)(\]{2})', bygroups(Text, Name.Tag, Text)), + # External links + (r'(\[{2})([^]\|]+)(\|)([^]\|]+)(\]{2})', + bygroups(Text, Name.Tag, Text, Name.Attribute, Text)), + # Transclusion + (r'(\{{2})([^}]+)(\}{2})', bygroups(Text, Name.Tag, Text)), + # URLs + (r'(\b.?.?tps?://[^\s"]+)', bygroups(Name.Attribute)), + + # general text, must come last! + (r'[\w]+', Text), + (r'.', Text) + ], + } + + def __init__(self, **options): + self.handlecodeblocks = get_bool_opt(options, 'handlecodeblocks', True) + RegexLexer.__init__(self, **options) + + +class WikitextLexer(RegexLexer): + """ + For MediaWiki Wikitext. + + Parsing Wikitext is tricky, and results vary between different MediaWiki + installations, so we only highlight common syntaxes (built-in or from + popular extensions), and also assume templates produce no unbalanced + syntaxes. + """ + name = 'Wikitext' + url = 'https://www.mediawiki.org/wiki/Wikitext' + aliases = ['wikitext', 'mediawiki'] + filenames = [] + mimetypes = ['text/x-wiki'] + version_added = '2.15' + flags = re.MULTILINE + + def nowiki_tag_rules(tag_name): + return [ + (rf'(?i)()', bygroups(Punctuation, + Name.Tag, Whitespace, Punctuation), '#pop'), + include('entity'), + include('text'), + ] + + def plaintext_tag_rules(tag_name): + return [ + (rf'(?si)(.*?)()', bygroups(Text, + Punctuation, Name.Tag, Whitespace, Punctuation), '#pop'), + ] + + def delegate_tag_rules(tag_name, lexer, **lexer_kwargs): + return [ + (rf'(?i)()', bygroups(Punctuation, + Name.Tag, Whitespace, Punctuation), '#pop'), + (rf'(?si).+?(?=)', using(lexer, **lexer_kwargs)), + ] + + def text_rules(token): + return [ + (r'\w+', token), + (r'[^\S\n]+', token), + (r'(?s).', token), + ] + + def handle_syntaxhighlight(self, match, ctx): + from pygments.lexers import get_lexer_by_name + + attr_content = match.group() + start = 0 + index = 0 + while True: + index = attr_content.find('>', start) + # Exclude comment end (-->) + if attr_content[index-2:index] != '--': + break + start = index + 1 + + if index == -1: + # No tag end + yield from self.get_tokens_unprocessed(attr_content, stack=['root', 'attr']) + return + attr = attr_content[:index] + yield from self.get_tokens_unprocessed(attr, stack=['root', 'attr']) + yield match.start(3) + index, Punctuation, '>' + + lexer = None + content = attr_content[index+1:] + lang_match = re.findall(r'\blang=("|\'|)(\w+)(\1)', attr) + + if len(lang_match) >= 1: + # Pick the last match in case of multiple matches + lang = lang_match[-1][1] + try: + lexer = get_lexer_by_name(lang) + except ClassNotFound: + pass + + if lexer is None: + yield match.start() + index + 1, Text, content + else: + yield from lexer.get_tokens_unprocessed(content) + + def handle_score(self, match, ctx): + attr_content = match.group() + start = 0 + index = 0 + while True: + index = attr_content.find('>', start) + # Exclude comment end (-->) + if attr_content[index-2:index] != '--': + break + start = index + 1 + + if index == -1: + # No tag end + yield from self.get_tokens_unprocessed(attr_content, stack=['root', 'attr']) + return + attr = attr_content[:index] + content = attr_content[index+1:] + yield from self.get_tokens_unprocessed(attr, stack=['root', 'attr']) + yield match.start(3) + index, Punctuation, '>' + + lang_match = re.findall(r'\blang=("|\'|)(\w+)(\1)', attr) + # Pick the last match in case of multiple matches + lang = lang_match[-1][1] if len(lang_match) >= 1 else 'lilypond' + + if lang == 'lilypond': # Case sensitive + yield from LilyPondLexer().get_tokens_unprocessed(content) + else: # ABC + # FIXME: Use ABC lexer in the future + yield match.start() + index + 1, Text, content + + # a-z removed to prevent linter from complaining, REMEMBER to use (?i) + title_char = r' %!"$&\'()*,\-./0-9:;=?@A-Z\\\^_`~+\u0080-\uFFFF' + nbsp_char = r'(?:\t| |&\#0*160;|&\#[Xx]0*[Aa]0;|[ \xA0\u1680\u2000-\u200A\u202F\u205F\u3000])' + link_address = r'(?:[0-9.]+|\[[0-9a-f:.]+\]|[^\x00-\x20"<>\[\]\x7F\xA0\u1680\u2000-\u200A\u202F\u205F\u3000\uFFFD])' + link_char_class = r'[^\x00-\x20"<>\[\]\x7F\xA0\u1680\u2000-\u200A\u202F\u205F\u3000\uFFFD]' + double_slashes_i = { + '__FORCETOC__', '__NOCONTENTCONVERT__', '__NOCC__', '__NOEDITSECTION__', '__NOGALLERY__', + '__NOTITLECONVERT__', '__NOTC__', '__NOTOC__', '__TOC__', + } + double_slashes = { + '__EXPECTUNUSEDCATEGORY__', '__HIDDENCAT__', '__INDEX__', '__NEWSECTIONLINK__', + '__NOINDEX__', '__NONEWSECTIONLINK__', '__STATICREDIRECT__', '__NOGLOBAL__', + '__DISAMBIG__', '__EXPECTED_UNCONNECTED_PAGE__', + } + protocols = { + 'bitcoin:', 'ftp://', 'ftps://', 'geo:', 'git://', 'gopher://', 'http://', 'https://', + 'irc://', 'ircs://', 'magnet:', 'mailto:', 'mms://', 'news:', 'nntp://', 'redis://', + 'sftp://', 'sip:', 'sips:', 'sms:', 'ssh://', 'svn://', 'tel:', 'telnet://', 'urn:', + 'worldwind://', 'xmpp:', '//', + } + non_relative_protocols = protocols - {'//'} + html_tags = { + 'abbr', 'b', 'bdi', 'bdo', 'big', 'blockquote', 'br', 'caption', 'center', 'cite', 'code', + 'data', 'dd', 'del', 'dfn', 'div', 'dl', 'dt', 'em', 'font', 'h1', 'h2', 'h3', 'h4', 'h5', + 'h6', 'hr', 'i', 'ins', 'kbd', 'li', 'link', 'mark', 'meta', 'ol', 'p', 'q', 'rb', 'rp', + 'rt', 'rtc', 'ruby', 's', 'samp', 'small', 'span', 'strike', 'strong', 'sub', 'sup', + 'table', 'td', 'th', 'time', 'tr', 'tt', 'u', 'ul', 'var', 'wbr', + } + parser_tags = { + 'graph', 'charinsert', 'rss', 'chem', 'categorytree', 'nowiki', 'inputbox', 'math', + 'hiero', 'score', 'pre', 'ref', 'translate', 'imagemap', 'templatestyles', 'languages', + 'noinclude', 'mapframe', 'section', 'poem', 'syntaxhighlight', 'includeonly', 'tvar', + 'onlyinclude', 'templatedata', 'langconvert', 'timeline', 'dynamicpagelist', 'gallery', + 'maplink', 'ce', 'references', + } + variant_langs = { + # ZhConverter.php + 'zh', 'zh-hans', 'zh-hant', 'zh-cn', 'zh-hk', 'zh-mo', 'zh-my', 'zh-sg', 'zh-tw', + # WuuConverter.php + 'wuu', 'wuu-hans', 'wuu-hant', + # UzConverter.php + 'uz', 'uz-latn', 'uz-cyrl', + # TlyConverter.php + 'tly', 'tly-cyrl', + # TgConverter.php + 'tg', 'tg-latn', + # SrConverter.php + 'sr', 'sr-ec', 'sr-el', + # ShiConverter.php + 'shi', 'shi-tfng', 'shi-latn', + # ShConverter.php + 'sh-latn', 'sh-cyrl', + # KuConverter.php + 'ku', 'ku-arab', 'ku-latn', + # IuConverter.php + 'iu', 'ike-cans', 'ike-latn', + # GanConverter.php + 'gan', 'gan-hans', 'gan-hant', + # EnConverter.php + 'en', 'en-x-piglatin', + # CrhConverter.php + 'crh', 'crh-cyrl', 'crh-latn', + # BanConverter.php + 'ban', 'ban-bali', 'ban-x-dharma', 'ban-x-palmleaf', 'ban-x-pku', + } + magic_vars_i = { + 'ARTICLEPATH', 'INT', 'PAGEID', 'SCRIPTPATH', 'SERVER', 'SERVERNAME', 'STYLEPATH', + } + magic_vars = { + '!', '=', 'BASEPAGENAME', 'BASEPAGENAMEE', 'CASCADINGSOURCES', 'CONTENTLANGUAGE', + 'CONTENTLANG', 'CURRENTDAY', 'CURRENTDAY2', 'CURRENTDAYNAME', 'CURRENTDOW', 'CURRENTHOUR', + 'CURRENTMONTH', 'CURRENTMONTH2', 'CURRENTMONTH1', 'CURRENTMONTHABBREV', 'CURRENTMONTHNAME', + 'CURRENTMONTHNAMEGEN', 'CURRENTTIME', 'CURRENTTIMESTAMP', 'CURRENTVERSION', 'CURRENTWEEK', + 'CURRENTYEAR', 'DIRECTIONMARK', 'DIRMARK', 'FULLPAGENAME', 'FULLPAGENAMEE', 'LOCALDAY', + 'LOCALDAY2', 'LOCALDAYNAME', 'LOCALDOW', 'LOCALHOUR', 'LOCALMONTH', 'LOCALMONTH2', + 'LOCALMONTH1', 'LOCALMONTHABBREV', 'LOCALMONTHNAME', 'LOCALMONTHNAMEGEN', 'LOCALTIME', + 'LOCALTIMESTAMP', 'LOCALWEEK', 'LOCALYEAR', 'NAMESPACE', 'NAMESPACEE', 'NAMESPACENUMBER', + 'NUMBEROFACTIVEUSERS', 'NUMBEROFADMINS', 'NUMBEROFARTICLES', 'NUMBEROFEDITS', + 'NUMBEROFFILES', 'NUMBEROFPAGES', 'NUMBEROFUSERS', 'PAGELANGUAGE', 'PAGENAME', 'PAGENAMEE', + 'REVISIONDAY', 'REVISIONDAY2', 'REVISIONID', 'REVISIONMONTH', 'REVISIONMONTH1', + 'REVISIONSIZE', 'REVISIONTIMESTAMP', 'REVISIONUSER', 'REVISIONYEAR', 'ROOTPAGENAME', + 'ROOTPAGENAMEE', 'SITENAME', 'SUBJECTPAGENAME', 'ARTICLEPAGENAME', 'SUBJECTPAGENAMEE', + 'ARTICLEPAGENAMEE', 'SUBJECTSPACE', 'ARTICLESPACE', 'SUBJECTSPACEE', 'ARTICLESPACEE', + 'SUBPAGENAME', 'SUBPAGENAMEE', 'TALKPAGENAME', 'TALKPAGENAMEE', 'TALKSPACE', 'TALKSPACEE', + } + parser_functions_i = { + 'ANCHORENCODE', 'BIDI', 'CANONICALURL', 'CANONICALURLE', 'FILEPATH', 'FORMATNUM', + 'FULLURL', 'FULLURLE', 'GENDER', 'GRAMMAR', 'INT', r'\#LANGUAGE', 'LC', 'LCFIRST', 'LOCALURL', + 'LOCALURLE', 'NS', 'NSE', 'PADLEFT', 'PADRIGHT', 'PAGEID', 'PLURAL', 'UC', 'UCFIRST', + 'URLENCODE', + } + parser_functions = { + 'BASEPAGENAME', 'BASEPAGENAMEE', 'CASCADINGSOURCES', 'DEFAULTSORT', 'DEFAULTSORTKEY', + 'DEFAULTCATEGORYSORT', 'FULLPAGENAME', 'FULLPAGENAMEE', 'NAMESPACE', 'NAMESPACEE', + 'NAMESPACENUMBER', 'NUMBERINGROUP', 'NUMINGROUP', 'NUMBEROFACTIVEUSERS', 'NUMBEROFADMINS', + 'NUMBEROFARTICLES', 'NUMBEROFEDITS', 'NUMBEROFFILES', 'NUMBEROFPAGES', 'NUMBEROFUSERS', + 'PAGENAME', 'PAGENAMEE', 'PAGESINCATEGORY', 'PAGESINCAT', 'PAGESIZE', 'PROTECTIONEXPIRY', + 'PROTECTIONLEVEL', 'REVISIONDAY', 'REVISIONDAY2', 'REVISIONID', 'REVISIONMONTH', + 'REVISIONMONTH1', 'REVISIONTIMESTAMP', 'REVISIONUSER', 'REVISIONYEAR', 'ROOTPAGENAME', + 'ROOTPAGENAMEE', 'SUBJECTPAGENAME', 'ARTICLEPAGENAME', 'SUBJECTPAGENAMEE', + 'ARTICLEPAGENAMEE', 'SUBJECTSPACE', 'ARTICLESPACE', 'SUBJECTSPACEE', 'ARTICLESPACEE', + 'SUBPAGENAME', 'SUBPAGENAMEE', 'TALKPAGENAME', 'TALKPAGENAMEE', 'TALKSPACE', 'TALKSPACEE', + 'INT', 'DISPLAYTITLE', 'PAGESINNAMESPACE', 'PAGESINNS', + } + + tokens = { + 'root': [ + # Redirects + (r"""(?xi) + (\A\s*?)(\#REDIRECT:?) # may contain a colon + (\s+)(\[\[) (?=[^\]\n]* \]\]$) + """, + bygroups(Whitespace, Keyword, Whitespace, Punctuation), 'redirect-inner'), + # Subheadings + (r'^(={2,6})(.+?)(\1)(\s*$\n)', + bygroups(Generic.Subheading, Generic.Subheading, Generic.Subheading, Whitespace)), + # Headings + (r'^(=.+?=)(\s*$\n)', + bygroups(Generic.Heading, Whitespace)), + # Double-slashed magic words + (words(double_slashes_i, prefix=r'(?i)'), Name.Function.Magic), + (words(double_slashes), Name.Function.Magic), + # Raw URLs + (r'(?i)\b(?:{}){}{}*'.format('|'.join(protocols), + link_address, link_char_class), Name.Label), + # Magic links + (rf'\b(?:RFC|PMID){nbsp_char}+[0-9]+\b', + Name.Function.Magic), + (r"""(?x) + \bISBN {nbsp_char} + (?: 97[89] {nbsp_dash}? )? + (?: [0-9] {nbsp_dash}? ){{9}} # escape format() + [0-9Xx]\b + """.format(nbsp_char=nbsp_char, nbsp_dash=f'(?:-|{nbsp_char})'), Name.Function.Magic), + include('list'), + include('inline'), + include('text'), + ], + 'redirect-inner': [ + (r'(\]\])(\s*?\n)', bygroups(Punctuation, Whitespace), '#pop'), + (r'(\#)([^#]*?)', bygroups(Punctuation, Name.Label)), + (rf'(?i)[{title_char}]+', Name.Tag), + ], + 'list': [ + # Description lists + (r'^;', Keyword, 'dt'), + # Ordered lists, unordered lists and indents + (r'^[#:*]+', Keyword), + # Horizontal rules + (r'^-{4,}', Keyword), + ], + 'inline': [ + # Signatures + (r'~{3,5}', Keyword), + # Entities + include('entity'), + # Bold & italic + (r"('')(''')(?!')", bygroups(Generic.Emph, + Generic.EmphStrong), 'inline-italic-bold'), + (r"'''(?!')", Generic.Strong, 'inline-bold'), + (r"''(?!')", Generic.Emph, 'inline-italic'), + # Comments & parameters & templates + include('replaceable'), + # Media links + ( + r"""(?xi) + (\[\[) + (File|Image) (:) + ((?: [{}] | \{{{{2,3}}[^{{}}]*?\}}{{2,3}} | )*) + (?: (\#) ([{}]*?) )? + """.format(title_char, f'{title_char}#'), + bygroups(Punctuation, Name.Namespace, Punctuation, + using(this, state=['wikilink-name']), Punctuation, Name.Label), + 'medialink-inner' + ), + # Wikilinks + ( + r"""(?xi) + (\[\[)(?!{}) # Should not contain URLs + (?: ([{}]*) (:))? + ((?: [{}] | \{{{{2,3}}[^{{}}]*?\}}{{2,3}} | )*?) + (?: (\#) ([{}]*?) )? + (\]\]) + """.format('|'.join(protocols), title_char.replace('/', ''), + title_char, f'{title_char}#'), + bygroups(Punctuation, Name.Namespace, Punctuation, + using(this, state=['wikilink-name']), Punctuation, Name.Label, Punctuation) + ), + ( + r"""(?xi) + (\[\[)(?!{}) + (?: ([{}]*) (:))? + ((?: [{}] | \{{{{2,3}}[^{{}}]*?\}}{{2,3}} | )*?) + (?: (\#) ([{}]*?) )? + (\|) + """.format('|'.join(protocols), title_char.replace('/', ''), + title_char, f'{title_char}#'), + bygroups(Punctuation, Name.Namespace, Punctuation, + using(this, state=['wikilink-name']), Punctuation, Name.Label, Punctuation), + 'wikilink-inner' + ), + # External links + ( + r"""(?xi) + (\[) + ((?:{}) {} {}*) + (\s*) + """.format('|'.join(protocols), link_address, link_char_class), + bygroups(Punctuation, Name.Label, Whitespace), + 'extlink-inner' + ), + # Tables + (r'^(:*)(\s*?)(\{\|)([^\n]*)$', bygroups(Keyword, + Whitespace, Punctuation, using(this, state=['root', 'attr'])), 'table'), + # HTML tags + (r'(?i)(<)({})\b'.format('|'.join(html_tags)), + bygroups(Punctuation, Name.Tag), 'tag-inner-ordinary'), + (r'(?i)()'.format('|'.join(html_tags)), + bygroups(Punctuation, Name.Tag, Whitespace, Punctuation)), + # + (r'(?i)(<)(nowiki)\b', bygroups(Punctuation, + Name.Tag), ('tag-nowiki', 'tag-inner')), + #
+            (r'(?i)(<)(pre)\b', bygroups(Punctuation,
+             Name.Tag), ('tag-pre', 'tag-inner')),
+            # 
+            (r'(?i)(<)(categorytree)\b', bygroups(
+                Punctuation, Name.Tag), ('tag-categorytree', 'tag-inner')),
+            # 
+            (r'(?i)(<)(hiero)\b', bygroups(Punctuation,
+             Name.Tag), ('tag-hiero', 'tag-inner')),
+            # 
+            (r'(?i)(<)(math)\b', bygroups(Punctuation,
+             Name.Tag), ('tag-math', 'tag-inner')),
+            # 
+            (r'(?i)(<)(chem)\b', bygroups(Punctuation,
+             Name.Tag), ('tag-chem', 'tag-inner')),
+            # 
+            (r'(?i)(<)(ce)\b', bygroups(Punctuation,
+             Name.Tag), ('tag-ce', 'tag-inner')),
+            # 
+            (r'(?i)(<)(charinsert)\b', bygroups(
+                Punctuation, Name.Tag), ('tag-charinsert', 'tag-inner')),
+            # 
+            (r'(?i)(<)(templatedata)\b', bygroups(
+                Punctuation, Name.Tag), ('tag-templatedata', 'tag-inner')),
+            # 
+            (r'(?i)(<)(gallery)\b', bygroups(
+                Punctuation, Name.Tag), ('tag-gallery', 'tag-inner')),
+            # 
+            (r'(?i)(<)(gallery)\b', bygroups(
+                Punctuation, Name.Tag), ('tag-graph', 'tag-inner')),
+            # 
+            (r'(?i)(<)(dynamicpagelist)\b', bygroups(
+                Punctuation, Name.Tag), ('tag-dynamicpagelist', 'tag-inner')),
+            # 
+            (r'(?i)(<)(inputbox)\b', bygroups(
+                Punctuation, Name.Tag), ('tag-inputbox', 'tag-inner')),
+            # 
+            (r'(?i)(<)(rss)\b', bygroups(
+                Punctuation, Name.Tag), ('tag-rss', 'tag-inner')),
+            # 
+            (r'(?i)(<)(imagemap)\b', bygroups(
+                Punctuation, Name.Tag), ('tag-imagemap', 'tag-inner')),
+            # 
+            (r'(?i)()',
+             bygroups(Punctuation, Name.Tag, Whitespace, Punctuation)),
+            (r'(?si)(<)(syntaxhighlight)\b([^>]*?(?.*?)(?=)',
+             bygroups(Punctuation, Name.Tag, handle_syntaxhighlight)),
+            # : Fallback case for self-closing tags
+            (r'(?i)(<)(syntaxhighlight)\b(\s*?)((?:[^>]|-->)*?)(/\s*?(?)*?)(/\s*?(?)*?)(/\s*?(?|\Z)', Comment.Multiline),
+            # Parameters
+            (
+                r"""(?x)
+                (\{{3})
+                    ([^|]*?)
+                    (?=\}{3}|\|)
+                """,
+                bygroups(Punctuation, Name.Variable),
+                'parameter-inner',
+            ),
+            # Magic variables
+            (r'(?i)(\{{\{{)(\s*)({})(\s*)(\}}\}})'.format('|'.join(magic_vars_i)),
+             bygroups(Punctuation, Whitespace, Name.Function, Whitespace, Punctuation)),
+            (r'(\{{\{{)(\s*)({})(\s*)(\}}\}})'.format('|'.join(magic_vars)),
+                bygroups(Punctuation, Whitespace, Name.Function, Whitespace, Punctuation)),
+            # Parser functions & templates
+            (r'\{\{', Punctuation, 'template-begin-space'),
+            #  legacy syntax
+            (r'(?i)(<)(tvar)\b(\|)([^>]*?)(>)', bygroups(Punctuation,
+             Name.Tag, Punctuation, String, Punctuation)),
+            (r'', Punctuation, '#pop'),
+            # 
+            (r'(?i)(<)(tvar)\b', bygroups(Punctuation, Name.Tag), 'tag-inner-ordinary'),
+            (r'(?i)()',
+             bygroups(Punctuation, Name.Tag, Whitespace, Punctuation)),
+        ],
+        'parameter-inner': [
+            (r'\}{3}', Punctuation, '#pop'),
+            (r'\|', Punctuation),
+            include('inline'),
+            include('text'),
+        ],
+        'template-begin-space': [
+            # Templates allow line breaks at the beginning, and due to how MediaWiki handles
+            # comments, an extra state is required to handle things like {{\n\n name}}
+            (r'|\Z)', Comment.Multiline),
+            (r'\s+', Whitespace),
+            # Parser functions
+            (
+                r'(?i)(\#[{}]*?|{})(:)'.format(title_char,
+                                           '|'.join(parser_functions_i)),
+                bygroups(Name.Function, Punctuation), ('#pop', 'template-inner')
+            ),
+            (
+                r'({})(:)'.format('|'.join(parser_functions)),
+                bygroups(Name.Function, Punctuation), ('#pop', 'template-inner')
+            ),
+            # Templates
+            (
+                rf'(?i)([{title_char}]*?)(:)',
+                bygroups(Name.Namespace, Punctuation), ('#pop', 'template-name')
+            ),
+            default(('#pop', 'template-name'),),
+        ],
+        'template-name': [
+            (r'(\s*?)(\|)', bygroups(Text, Punctuation), ('#pop', 'template-inner')),
+            (r'\}\}', Punctuation, '#pop'),
+            (r'\n', Text, '#pop'),
+            include('replaceable'),
+            *text_rules(Name.Tag),
+        ],
+        'template-inner': [
+            (r'\}\}', Punctuation, '#pop'),
+            (r'\|', Punctuation),
+            (
+                r"""(?x)
+                    (?<=\|)
+                    ( (?: (?! \{\{ | \}\} )[^=\|<])*? ) # Exclude templates and tags
+                    (=)
+                """,
+                bygroups(Name.Label, Operator)
+            ),
+            include('inline'),
+            include('text'),
+        ],
+        'table': [
+            # Use [ \t\n\r\0\x0B] instead of \s to follow PHP trim() behavior
+            # Endings
+            (r'^([ \t\n\r\0\x0B]*?)(\|\})',
+             bygroups(Whitespace, Punctuation), '#pop'),
+            # Table rows
+            (r'^([ \t\n\r\0\x0B]*?)(\|-+)(.*)$', bygroups(Whitespace, Punctuation,
+             using(this, state=['root', 'attr']))),
+            # Captions
+            (
+                r"""(?x)
+                ^([ \t\n\r\0\x0B]*?)(\|\+)
+                # Exclude links, template and tags
+                (?: ( (?: (?! \[\[ | \{\{ )[^|\n<] )*? )(\|) )?
+                (.*?)$
+                """,
+                bygroups(Whitespace, Punctuation, using(this, state=[
+                         'root', 'attr']), Punctuation, Generic.Heading),
+            ),
+            # Table data
+            (
+                r"""(?x)
+                ( ^(?:[ \t\n\r\0\x0B]*?)\| | \|\| )
+                (?: ( (?: (?! \[\[ | \{\{ )[^|\n<] )*? )(\|)(?!\|) )?
+                """,
+                bygroups(Punctuation, using(this, state=[
+                         'root', 'attr']), Punctuation),
+            ),
+            # Table headers
+            (
+                r"""(?x)
+                ( ^(?:[ \t\n\r\0\x0B]*?)!  )
+                (?: ( (?: (?! \[\[ | \{\{ )[^|\n<] )*? )(\|)(?!\|) )?
+                """,
+                bygroups(Punctuation, using(this, state=[
+                         'root', 'attr']), Punctuation),
+                'table-header',
+            ),
+            include('list'),
+            include('inline'),
+            include('text'),
+        ],
+        'table-header': [
+            # Requires another state for || handling inside headers
+            (r'\n', Text, '#pop'),
+            (
+                r"""(?x)
+                (!!|\|\|)
+                (?:
+                    ( (?: (?! \[\[ | \{\{ )[^|\n<] )*? )
+                    (\|)(?!\|)
+                )?
+                """,
+                bygroups(Punctuation, using(this, state=[
+                         'root', 'attr']), Punctuation)
+            ),
+            *text_rules(Generic.Subheading),
+        ],
+        'entity': [
+            (r'&\S*?;', Name.Entity),
+        ],
+        'dt': [
+            (r'\n', Text, '#pop'),
+            include('inline'),
+            (r':', Keyword, '#pop'),
+            include('text'),
+        ],
+        'extlink-inner': [
+            (r'\]', Punctuation, '#pop'),
+            include('inline'),
+            include('text'),
+        ],
+        'nowiki-ish': [
+            include('entity'),
+            include('text'),
+        ],
+        'attr': [
+            include('replaceable'),
+            (r'\s+', Whitespace),
+            (r'(=)(\s*)(")', bygroups(Operator, Whitespace, String.Double), 'attr-val-2'),
+            (r"(=)(\s*)(')", bygroups(Operator, Whitespace, String.Single), 'attr-val-1'),
+            (r'(=)(\s*)', bygroups(Operator, Whitespace), 'attr-val-0'),
+            (r'[\w:-]+', Name.Attribute),
+
+        ],
+        'attr-val-0': [
+            (r'\s', Whitespace, '#pop'),
+            include('replaceable'),
+            *text_rules(String),
+        ],
+        'attr-val-1': [
+            (r"'", String.Single, '#pop'),
+            include('replaceable'),
+            *text_rules(String.Single),
+        ],
+        'attr-val-2': [
+            (r'"', String.Double, '#pop'),
+            include('replaceable'),
+            *text_rules(String.Double),
+        ],
+        'tag-inner-ordinary': [
+            (r'/?\s*>', Punctuation, '#pop'),
+            include('tag-attr'),
+        ],
+        'tag-inner': [
+            # Return to root state for self-closing tags
+            (r'/\s*>', Punctuation, '#pop:2'),
+            (r'\s*>', Punctuation, '#pop'),
+            include('tag-attr'),
+        ],
+        # There states below are just like their non-tag variants, the key difference is
+        # they forcibly quit when encountering tag closing markup
+        'tag-attr': [
+            include('replaceable'),
+            (r'\s+', Whitespace),
+            (r'(=)(\s*)(")', bygroups(Operator,
+             Whitespace, String.Double), 'tag-attr-val-2'),
+            (r"(=)(\s*)(')", bygroups(Operator,
+             Whitespace, String.Single), 'tag-attr-val-1'),
+            (r'(=)(\s*)', bygroups(Operator, Whitespace), 'tag-attr-val-0'),
+            (r'[\w:-]+', Name.Attribute),
+
+        ],
+        'tag-attr-val-0': [
+            (r'\s', Whitespace, '#pop'),
+            (r'/?>', Punctuation, '#pop:2'),
+            include('replaceable'),
+            *text_rules(String),
+        ],
+        'tag-attr-val-1': [
+            (r"'", String.Single, '#pop'),
+            (r'/?>', Punctuation, '#pop:2'),
+            include('replaceable'),
+            *text_rules(String.Single),
+        ],
+        'tag-attr-val-2': [
+            (r'"', String.Double, '#pop'),
+            (r'/?>', Punctuation, '#pop:2'),
+            include('replaceable'),
+            *text_rules(String.Double),
+        ],
+        'tag-nowiki': nowiki_tag_rules('nowiki'),
+        'tag-pre': nowiki_tag_rules('pre'),
+        'tag-categorytree': plaintext_tag_rules('categorytree'),
+        'tag-dynamicpagelist': plaintext_tag_rules('dynamicpagelist'),
+        'tag-hiero': plaintext_tag_rules('hiero'),
+        'tag-inputbox': plaintext_tag_rules('inputbox'),
+        'tag-imagemap': plaintext_tag_rules('imagemap'),
+        'tag-charinsert': plaintext_tag_rules('charinsert'),
+        'tag-timeline': plaintext_tag_rules('timeline'),
+        'tag-gallery': plaintext_tag_rules('gallery'),
+        'tag-graph': plaintext_tag_rules('graph'),
+        'tag-rss': plaintext_tag_rules('rss'),
+        'tag-math': delegate_tag_rules('math', TexLexer, state='math'),
+        'tag-chem': delegate_tag_rules('chem', TexLexer, state='math'),
+        'tag-ce': delegate_tag_rules('ce', TexLexer, state='math'),
+        'tag-templatedata': delegate_tag_rules('templatedata', JsonLexer),
+        'text-italic': text_rules(Generic.Emph),
+        'text-bold': text_rules(Generic.Strong),
+        'text-bold-italic': text_rules(Generic.EmphStrong),
+        'text': text_rules(Text),
+    }
diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/lexers/math.py b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/math.py
new file mode 100644
index 00000000..b225ffcf
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/math.py
@@ -0,0 +1,21 @@
+"""
+    pygments.lexers.math
+    ~~~~~~~~~~~~~~~~~~~~
+
+    Just export lexers that were contained in this module.
+
+    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+# ruff: noqa: F401
+from pygments.lexers.python import NumPyLexer
+from pygments.lexers.matlab import MatlabLexer, MatlabSessionLexer, \
+    OctaveLexer, ScilabLexer
+from pygments.lexers.julia import JuliaLexer, JuliaConsoleLexer
+from pygments.lexers.r import RConsoleLexer, SLexer, RdLexer
+from pygments.lexers.modeling import BugsLexer, JagsLexer, StanLexer
+from pygments.lexers.idl import IDLLexer
+from pygments.lexers.algebra import MuPADLexer
+
+__all__ = []
diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/lexers/matlab.py b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/matlab.py
new file mode 100644
index 00000000..8eeffc9d
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/matlab.py
@@ -0,0 +1,3307 @@
+"""
+    pygments.lexers.matlab
+    ~~~~~~~~~~~~~~~~~~~~~~
+
+    Lexers for Matlab and related languages.
+
+    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import Lexer, RegexLexer, bygroups, default, words, \
+    do_insertions, include
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+    Number, Punctuation, Generic, Whitespace
+
+from pygments.lexers import _scilab_builtins
+
+__all__ = ['MatlabLexer', 'MatlabSessionLexer', 'OctaveLexer', 'ScilabLexer']
+
+
+class MatlabLexer(RegexLexer):
+    """
+    For Matlab source code.
+    """
+    name = 'Matlab'
+    aliases = ['matlab']
+    filenames = ['*.m']
+    mimetypes = ['text/matlab']
+    url = 'https://www.mathworks.com/products/matlab.html'
+    version_added = '0.10'
+
+    _operators = r'-|==|~=|<=|>=|<|>|&&|&|~|\|\|?|\.\*|\*|\+|\.\^|\^|\.\\|\./|/|\\'
+
+    tokens = {
+        'expressions': [
+            # operators:
+            (_operators, Operator),
+
+            # numbers (must come before punctuation to handle `.5`; cannot use
+            # `\b` due to e.g. `5. + .5`).  The negative lookahead on operators
+            # avoids including the dot in `1./x` (the dot is part of `./`).
+            (rf'(? and then
+            # (equal | open-parenthesis |  | ).
+            (rf'(?:^|(?<=;))(\s*)(\w+)(\s+)(?!=|\(|{_operators}\s|\s)',
+             bygroups(Whitespace, Name, Whitespace), 'commandargs'),
+
+            include('expressions')
+        ],
+        'blockcomment': [
+            (r'^\s*%\}', Comment.Multiline, '#pop'),
+            (r'^.*\n', Comment.Multiline),
+            (r'.', Comment.Multiline),
+        ],
+        'deffunc': [
+            (r'(\s*)(?:(\S+)(\s*)(=)(\s*))?(.+)(\()(.*)(\))(\s*)',
+             bygroups(Whitespace, Text, Whitespace, Punctuation,
+                      Whitespace, Name.Function, Punctuation, Text,
+                      Punctuation, Whitespace), '#pop'),
+            # function with no args
+            (r'(\s*)([a-zA-Z_]\w*)',
+             bygroups(Whitespace, Name.Function), '#pop'),
+        ],
+        'propattrs': [
+            (r'(\w+)(\s*)(=)(\s*)(\d+)',
+             bygroups(Name.Builtin, Whitespace, Punctuation, Whitespace,
+                      Number)),
+            (r'(\w+)(\s*)(=)(\s*)([a-zA-Z]\w*)',
+             bygroups(Name.Builtin, Whitespace, Punctuation, Whitespace,
+                      Keyword)),
+            (r',', Punctuation),
+            (r'\)', Punctuation, '#pop'),
+            (r'\s+', Whitespace),
+            (r'.', Text),
+        ],
+        'defprops': [
+            (r'%\{\s*\n', Comment.Multiline, 'blockcomment'),
+            (r'%.*$', Comment),
+            (r'(?.
+    """
+    name = 'Matlab session'
+    aliases = ['matlabsession']
+    url = 'https://www.mathworks.com/products/matlab.html'
+    version_added = '0.10'
+    _example = "matlabsession/matlabsession_sample.txt"
+
+    def get_tokens_unprocessed(self, text):
+        mlexer = MatlabLexer(**self.options)
+
+        curcode = ''
+        insertions = []
+        continuation = False
+
+        for match in line_re.finditer(text):
+            line = match.group()
+
+            if line.startswith('>> '):
+                insertions.append((len(curcode),
+                                   [(0, Generic.Prompt, line[:3])]))
+                curcode += line[3:]
+
+            elif line.startswith('>>'):
+                insertions.append((len(curcode),
+                                   [(0, Generic.Prompt, line[:2])]))
+                curcode += line[2:]
+
+            elif line.startswith('???'):
+
+                idx = len(curcode)
+
+                # without is showing error on same line as before...?
+                # line = "\n" + line
+                token = (0, Generic.Traceback, line)
+                insertions.append((idx, [token]))
+            elif continuation and insertions:
+                # line_start is the length of the most recent prompt symbol
+                line_start = len(insertions[-1][-1][-1])
+                # Set leading spaces with the length of the prompt to be a generic prompt
+                # This keeps code aligned when prompts are removed, say with some Javascript
+                if line.startswith(' '*line_start):
+                    insertions.append(
+                        (len(curcode), [(0, Generic.Prompt, line[:line_start])]))
+                    curcode += line[line_start:]
+                else:
+                    curcode += line
+            else:
+                if curcode:
+                    yield from do_insertions(
+                        insertions, mlexer.get_tokens_unprocessed(curcode))
+                    curcode = ''
+                    insertions = []
+
+                yield match.start(), Generic.Output, line
+
+            # Does not allow continuation if a comment is included after the ellipses.
+            # Continues any line that ends with ..., even comments (lines that start with %)
+            if line.strip().endswith('...'):
+                continuation = True
+            else:
+                continuation = False
+
+        if curcode:  # or item:
+            yield from do_insertions(
+                insertions, mlexer.get_tokens_unprocessed(curcode))
+
+
+class OctaveLexer(RegexLexer):
+    """
+    For GNU Octave source code.
+    """
+    name = 'Octave'
+    url = 'https://www.gnu.org/software/octave/index'
+    aliases = ['octave']
+    filenames = ['*.m']
+    mimetypes = ['text/octave']
+    version_added = '1.5'
+
+    # These lists are generated automatically.
+    # Run the following in bash shell:
+    #
+    # First dump all of the Octave manual into a plain text file:
+    #
+    #   $ info octave --subnodes -o octave-manual
+    #
+    # Now grep through it:
+
+    # for i in \
+    #     "Built-in Function" "Command" "Function File" \
+    #     "Loadable Function" "Mapping Function";
+    # do
+    #     perl -e '@name = qw('"$i"');
+    #              print lc($name[0]),"_kw = [\n"';
+    #
+    #     perl -n -e 'print "\"$1\",\n" if /-- '"$i"': .* (\w*) \(/;' \
+    #         octave-manual | sort | uniq ;
+    #     echo "]" ;
+    #     echo;
+    # done
+
+    # taken from Octave Mercurial changeset 8cc154f45e37 (30-jan-2011)
+
+    builtin_kw = (
+        "addlistener", "addpath", "addproperty", "all",
+        "and", "any", "argnames", "argv", "assignin",
+        "atexit", "autoload",
+        "available_graphics_toolkits", "beep_on_error",
+        "bitand", "bitmax", "bitor", "bitshift", "bitxor",
+        "cat", "cell", "cellstr", "char", "class", "clc",
+        "columns", "command_line_path",
+        "completion_append_char", "completion_matches",
+        "complex", "confirm_recursive_rmdir", "cputime",
+        "crash_dumps_octave_core", "ctranspose", "cumprod",
+        "cumsum", "debug_on_error", "debug_on_interrupt",
+        "debug_on_warning", "default_save_options",
+        "dellistener", "diag", "diff", "disp",
+        "doc_cache_file", "do_string_escapes", "double",
+        "drawnow", "e", "echo_executing_commands", "eps",
+        "eq", "errno", "errno_list", "error", "eval",
+        "evalin", "exec", "exist", "exit", "eye", "false",
+        "fclear", "fclose", "fcntl", "fdisp", "feof",
+        "ferror", "feval", "fflush", "fgetl", "fgets",
+        "fieldnames", "file_in_loadpath", "file_in_path",
+        "filemarker", "filesep", "find_dir_in_path",
+        "fixed_point_format", "fnmatch", "fopen", "fork",
+        "formula", "fprintf", "fputs", "fread", "freport",
+        "frewind", "fscanf", "fseek", "fskipl", "ftell",
+        "functions", "fwrite", "ge", "genpath", "get",
+        "getegid", "getenv", "geteuid", "getgid",
+        "getpgrp", "getpid", "getppid", "getuid", "glob",
+        "gt", "gui_mode", "history_control",
+        "history_file", "history_size",
+        "history_timestamp_format_string", "home",
+        "horzcat", "hypot", "ifelse",
+        "ignore_function_time_stamp", "inferiorto",
+        "info_file", "info_program", "inline", "input",
+        "intmax", "intmin", "ipermute",
+        "is_absolute_filename", "isargout", "isbool",
+        "iscell", "iscellstr", "ischar", "iscomplex",
+        "isempty", "isfield", "isfloat", "isglobal",
+        "ishandle", "isieee", "isindex", "isinteger",
+        "islogical", "ismatrix", "ismethod", "isnull",
+        "isnumeric", "isobject", "isreal",
+        "is_rooted_relative_filename", "issorted",
+        "isstruct", "isvarname", "kbhit", "keyboard",
+        "kill", "lasterr", "lasterror", "lastwarn",
+        "ldivide", "le", "length", "link", "linspace",
+        "logical", "lstat", "lt", "make_absolute_filename",
+        "makeinfo_program", "max_recursion_depth", "merge",
+        "methods", "mfilename", "minus", "mislocked",
+        "mkdir", "mkfifo", "mkstemp", "mldivide", "mlock",
+        "mouse_wheel_zoom", "mpower", "mrdivide", "mtimes",
+        "munlock", "nargin", "nargout",
+        "native_float_format", "ndims", "ne", "nfields",
+        "nnz", "norm", "not", "numel", "nzmax",
+        "octave_config_info", "octave_core_file_limit",
+        "octave_core_file_name",
+        "octave_core_file_options", "ones", "or",
+        "output_max_field_width", "output_precision",
+        "page_output_immediately", "page_screen_output",
+        "path", "pathsep", "pause", "pclose", "permute",
+        "pi", "pipe", "plus", "popen", "power",
+        "print_empty_dimensions", "printf",
+        "print_struct_array_contents", "prod",
+        "program_invocation_name", "program_name",
+        "putenv", "puts", "pwd", "quit", "rats", "rdivide",
+        "readdir", "readlink", "read_readline_init_file",
+        "realmax", "realmin", "rehash", "rename",
+        "repelems", "re_read_readline_init_file", "reset",
+        "reshape", "resize", "restoredefaultpath",
+        "rethrow", "rmdir", "rmfield", "rmpath", "rows",
+        "save_header_format_string", "save_precision",
+        "saving_history", "scanf", "set", "setenv",
+        "shell_cmd", "sighup_dumps_octave_core",
+        "sigterm_dumps_octave_core", "silent_functions",
+        "single", "size", "size_equal", "sizemax",
+        "sizeof", "sleep", "source", "sparse_auto_mutate",
+        "split_long_rows", "sprintf", "squeeze", "sscanf",
+        "stat", "stderr", "stdin", "stdout", "strcmp",
+        "strcmpi", "string_fill_char", "strncmp",
+        "strncmpi", "struct", "struct_levels_to_print",
+        "strvcat", "subsasgn", "subsref", "sum", "sumsq",
+        "superiorto", "suppress_verbose_help_message",
+        "symlink", "system", "tic", "tilde_expand",
+        "times", "tmpfile", "tmpnam", "toc", "toupper",
+        "transpose", "true", "typeinfo", "umask", "uminus",
+        "uname", "undo_string_escapes", "unlink", "uplus",
+        "upper", "usage", "usleep", "vec", "vectorize",
+        "vertcat", "waitpid", "warning", "warranty",
+        "whos_line_format", "yes_or_no", "zeros",
+        "inf", "Inf", "nan", "NaN")
+
+    command_kw = ("close", "load", "who", "whos")
+
+    function_kw = (
+        "accumarray", "accumdim", "acosd", "acotd",
+        "acscd", "addtodate", "allchild", "ancestor",
+        "anova", "arch_fit", "arch_rnd", "arch_test",
+        "area", "arma_rnd", "arrayfun", "ascii", "asctime",
+        "asecd", "asind", "assert", "atand",
+        "autoreg_matrix", "autumn", "axes", "axis", "bar",
+        "barh", "bartlett", "bartlett_test", "beep",
+        "betacdf", "betainv", "betapdf", "betarnd",
+        "bicgstab", "bicubic", "binary", "binocdf",
+        "binoinv", "binopdf", "binornd", "bitcmp",
+        "bitget", "bitset", "blackman", "blanks",
+        "blkdiag", "bone", "box", "brighten", "calendar",
+        "cast", "cauchy_cdf", "cauchy_inv", "cauchy_pdf",
+        "cauchy_rnd", "caxis", "celldisp", "center", "cgs",
+        "chisquare_test_homogeneity",
+        "chisquare_test_independence", "circshift", "cla",
+        "clabel", "clf", "clock", "cloglog", "closereq",
+        "colon", "colorbar", "colormap", "colperm",
+        "comet", "common_size", "commutation_matrix",
+        "compan", "compare_versions", "compass",
+        "computer", "cond", "condest", "contour",
+        "contourc", "contourf", "contrast", "conv",
+        "convhull", "cool", "copper", "copyfile", "cor",
+        "corrcoef", "cor_test", "cosd", "cotd", "cov",
+        "cplxpair", "cross", "cscd", "cstrcat", "csvread",
+        "csvwrite", "ctime", "cumtrapz", "curl", "cut",
+        "cylinder", "date", "datenum", "datestr",
+        "datetick", "datevec", "dblquad", "deal",
+        "deblank", "deconv", "delaunay", "delaunayn",
+        "delete", "demo", "detrend", "diffpara", "diffuse",
+        "dir", "discrete_cdf", "discrete_inv",
+        "discrete_pdf", "discrete_rnd", "display",
+        "divergence", "dlmwrite", "dos", "dsearch",
+        "dsearchn", "duplication_matrix", "durbinlevinson",
+        "ellipsoid", "empirical_cdf", "empirical_inv",
+        "empirical_pdf", "empirical_rnd", "eomday",
+        "errorbar", "etime", "etreeplot", "example",
+        "expcdf", "expinv", "expm", "exppdf", "exprnd",
+        "ezcontour", "ezcontourf", "ezmesh", "ezmeshc",
+        "ezplot", "ezpolar", "ezsurf", "ezsurfc", "factor",
+        "factorial", "fail", "fcdf", "feather", "fftconv",
+        "fftfilt", "fftshift", "figure", "fileattrib",
+        "fileparts", "fill", "findall", "findobj",
+        "findstr", "finv", "flag", "flipdim", "fliplr",
+        "flipud", "fpdf", "fplot", "fractdiff", "freqz",
+        "freqz_plot", "frnd", "fsolve",
+        "f_test_regression", "ftp", "fullfile", "fzero",
+        "gamcdf", "gaminv", "gampdf", "gamrnd", "gca",
+        "gcbf", "gcbo", "gcf", "genvarname", "geocdf",
+        "geoinv", "geopdf", "geornd", "getfield", "ginput",
+        "glpk", "gls", "gplot", "gradient",
+        "graphics_toolkit", "gray", "grid", "griddata",
+        "griddatan", "gtext", "gunzip", "gzip", "hadamard",
+        "hamming", "hankel", "hanning", "hggroup",
+        "hidden", "hilb", "hist", "histc", "hold", "hot",
+        "hotelling_test", "housh", "hsv", "hurst",
+        "hygecdf", "hygeinv", "hygepdf", "hygernd",
+        "idivide", "ifftshift", "image", "imagesc",
+        "imfinfo", "imread", "imshow", "imwrite", "index",
+        "info", "inpolygon", "inputname", "interpft",
+        "interpn", "intersect", "invhilb", "iqr", "isa",
+        "isdefinite", "isdir", "is_duplicate_entry",
+        "isequal", "isequalwithequalnans", "isfigure",
+        "ishermitian", "ishghandle", "is_leap_year",
+        "isletter", "ismac", "ismember", "ispc", "isprime",
+        "isprop", "isscalar", "issquare", "isstrprop",
+        "issymmetric", "isunix", "is_valid_file_id",
+        "isvector", "jet", "kendall",
+        "kolmogorov_smirnov_cdf",
+        "kolmogorov_smirnov_test", "kruskal_wallis_test",
+        "krylov", "kurtosis", "laplace_cdf", "laplace_inv",
+        "laplace_pdf", "laplace_rnd", "legend", "legendre",
+        "license", "line", "linkprop", "list_primes",
+        "loadaudio", "loadobj", "logistic_cdf",
+        "logistic_inv", "logistic_pdf", "logistic_rnd",
+        "logit", "loglog", "loglogerr", "logm", "logncdf",
+        "logninv", "lognpdf", "lognrnd", "logspace",
+        "lookfor", "ls_command", "lsqnonneg", "magic",
+        "mahalanobis", "manova", "matlabroot",
+        "mcnemar_test", "mean", "meansq", "median", "menu",
+        "mesh", "meshc", "meshgrid", "meshz", "mexext",
+        "mget", "mkpp", "mode", "moment", "movefile",
+        "mpoles", "mput", "namelengthmax", "nargchk",
+        "nargoutchk", "nbincdf", "nbininv", "nbinpdf",
+        "nbinrnd", "nchoosek", "ndgrid", "newplot", "news",
+        "nonzeros", "normcdf", "normest", "norminv",
+        "normpdf", "normrnd", "now", "nthroot", "null",
+        "ocean", "ols", "onenormest", "optimget",
+        "optimset", "orderfields", "orient", "orth",
+        "pack", "pareto", "parseparams", "pascal", "patch",
+        "pathdef", "pcg", "pchip", "pcolor", "pcr",
+        "peaks", "periodogram", "perl", "perms", "pie",
+        "pink", "planerot", "playaudio", "plot",
+        "plotmatrix", "plotyy", "poisscdf", "poissinv",
+        "poisspdf", "poissrnd", "polar", "poly",
+        "polyaffine", "polyarea", "polyderiv", "polyfit",
+        "polygcd", "polyint", "polyout", "polyreduce",
+        "polyval", "polyvalm", "postpad", "powerset",
+        "ppder", "ppint", "ppjumps", "ppplot", "ppval",
+        "pqpnonneg", "prepad", "primes", "print",
+        "print_usage", "prism", "probit", "qp", "qqplot",
+        "quadcc", "quadgk", "quadl", "quadv", "quiver",
+        "qzhess", "rainbow", "randi", "range", "rank",
+        "ranks", "rat", "reallog", "realpow", "realsqrt",
+        "record", "rectangle_lw", "rectangle_sw",
+        "rectint", "refresh", "refreshdata",
+        "regexptranslate", "repmat", "residue", "ribbon",
+        "rindex", "roots", "rose", "rosser", "rotdim",
+        "rref", "run", "run_count", "rundemos", "run_test",
+        "runtests", "saveas", "saveaudio", "saveobj",
+        "savepath", "scatter", "secd", "semilogx",
+        "semilogxerr", "semilogy", "semilogyerr",
+        "setaudio", "setdiff", "setfield", "setxor",
+        "shading", "shift", "shiftdim", "sign_test",
+        "sinc", "sind", "sinetone", "sinewave", "skewness",
+        "slice", "sombrero", "sortrows", "spaugment",
+        "spconvert", "spdiags", "spearman", "spectral_adf",
+        "spectral_xdf", "specular", "speed", "spencer",
+        "speye", "spfun", "sphere", "spinmap", "spline",
+        "spones", "sprand", "sprandn", "sprandsym",
+        "spring", "spstats", "spy", "sqp", "stairs",
+        "statistics", "std", "stdnormal_cdf",
+        "stdnormal_inv", "stdnormal_pdf", "stdnormal_rnd",
+        "stem", "stft", "strcat", "strchr", "strjust",
+        "strmatch", "strread", "strsplit", "strtok",
+        "strtrim", "strtrunc", "structfun", "studentize",
+        "subplot", "subsindex", "subspace", "substr",
+        "substruct", "summer", "surf", "surface", "surfc",
+        "surfl", "surfnorm", "svds", "swapbytes",
+        "sylvester_matrix", "symvar", "synthesis", "table",
+        "tand", "tar", "tcdf", "tempdir", "tempname",
+        "test", "text", "textread", "textscan", "tinv",
+        "title", "toeplitz", "tpdf", "trace", "trapz",
+        "treelayout", "treeplot", "triangle_lw",
+        "triangle_sw", "tril", "trimesh", "triplequad",
+        "triplot", "trisurf", "triu", "trnd", "tsearchn",
+        "t_test", "t_test_regression", "type", "unidcdf",
+        "unidinv", "unidpdf", "unidrnd", "unifcdf",
+        "unifinv", "unifpdf", "unifrnd", "union", "unique",
+        "unix", "unmkpp", "unpack", "untabify", "untar",
+        "unwrap", "unzip", "u_test", "validatestring",
+        "vander", "var", "var_test", "vech", "ver",
+        "version", "view", "voronoi", "voronoin",
+        "waitforbuttonpress", "wavread", "wavwrite",
+        "wblcdf", "wblinv", "wblpdf", "wblrnd", "weekday",
+        "welch_test", "what", "white", "whitebg",
+        "wienrnd", "wilcoxon_test", "wilkinson", "winter",
+        "xlabel", "xlim", "ylabel", "yulewalker", "zip",
+        "zlabel", "z_test")
+
+    loadable_kw = (
+        "airy", "amd", "balance", "besselh", "besseli",
+        "besselj", "besselk", "bessely", "bitpack",
+        "bsxfun", "builtin", "ccolamd", "cellfun",
+        "cellslices", "chol", "choldelete", "cholinsert",
+        "cholinv", "cholshift", "cholupdate", "colamd",
+        "colloc", "convhulln", "convn", "csymamd",
+        "cummax", "cummin", "daspk", "daspk_options",
+        "dasrt", "dasrt_options", "dassl", "dassl_options",
+        "dbclear", "dbdown", "dbstack", "dbstatus",
+        "dbstop", "dbtype", "dbup", "dbwhere", "det",
+        "dlmread", "dmperm", "dot", "eig", "eigs",
+        "endgrent", "endpwent", "etree", "fft", "fftn",
+        "fftw", "filter", "find", "full", "gcd",
+        "getgrent", "getgrgid", "getgrnam", "getpwent",
+        "getpwnam", "getpwuid", "getrusage", "givens",
+        "gmtime", "gnuplot_binary", "hess", "ifft",
+        "ifftn", "inv", "isdebugmode", "issparse", "kron",
+        "localtime", "lookup", "lsode", "lsode_options",
+        "lu", "luinc", "luupdate", "matrix_type", "max",
+        "min", "mktime", "pinv", "qr", "qrdelete",
+        "qrinsert", "qrshift", "qrupdate", "quad",
+        "quad_options", "qz", "rand", "rande", "randg",
+        "randn", "randp", "randperm", "rcond", "regexp",
+        "regexpi", "regexprep", "schur", "setgrent",
+        "setpwent", "sort", "spalloc", "sparse", "spparms",
+        "sprank", "sqrtm", "strfind", "strftime",
+        "strptime", "strrep", "svd", "svd_driver", "syl",
+        "symamd", "symbfact", "symrcm", "time", "tsearch",
+        "typecast", "urlread", "urlwrite")
+
+    mapping_kw = (
+        "abs", "acos", "acosh", "acot", "acoth", "acsc",
+        "acsch", "angle", "arg", "asec", "asech", "asin",
+        "asinh", "atan", "atanh", "beta", "betainc",
+        "betaln", "bincoeff", "cbrt", "ceil", "conj", "cos",
+        "cosh", "cot", "coth", "csc", "csch", "erf", "erfc",
+        "erfcx", "erfinv", "exp", "finite", "fix", "floor",
+        "fmod", "gamma", "gammainc", "gammaln", "imag",
+        "isalnum", "isalpha", "isascii", "iscntrl",
+        "isdigit", "isfinite", "isgraph", "isinf",
+        "islower", "isna", "isnan", "isprint", "ispunct",
+        "isspace", "isupper", "isxdigit", "lcm", "lgamma",
+        "log", "lower", "mod", "real", "rem", "round",
+        "roundb", "sec", "sech", "sign", "sin", "sinh",
+        "sqrt", "tan", "tanh", "toascii", "tolower", "xor")
+
+    builtin_consts = (
+        "EDITOR", "EXEC_PATH", "I", "IMAGE_PATH", "NA",
+        "OCTAVE_HOME", "OCTAVE_VERSION", "PAGER",
+        "PAGER_FLAGS", "SEEK_CUR", "SEEK_END", "SEEK_SET",
+        "SIG", "S_ISBLK", "S_ISCHR", "S_ISDIR", "S_ISFIFO",
+        "S_ISLNK", "S_ISREG", "S_ISSOCK", "WCONTINUE",
+        "WCOREDUMP", "WEXITSTATUS", "WIFCONTINUED",
+        "WIFEXITED", "WIFSIGNALED", "WIFSTOPPED", "WNOHANG",
+        "WSTOPSIG", "WTERMSIG", "WUNTRACED")
+
+    tokens = {
+        'root': [
+            (r'%\{\s*\n', Comment.Multiline, 'percentblockcomment'),
+            (r'#\{\s*\n', Comment.Multiline, 'hashblockcomment'),
+            (r'[%#].*$', Comment),
+            (r'^\s*function\b', Keyword, 'deffunc'),
+
+            # from 'iskeyword' on hg changeset 8cc154f45e37
+            (words((
+                '__FILE__', '__LINE__', 'break', 'case', 'catch', 'classdef',
+                'continue', 'do', 'else', 'elseif', 'end', 'end_try_catch',
+                'end_unwind_protect', 'endclassdef', 'endevents', 'endfor',
+                'endfunction', 'endif', 'endmethods', 'endproperties', 'endswitch',
+                'endwhile', 'events', 'for', 'function', 'get', 'global', 'if',
+                'methods', 'otherwise', 'persistent', 'properties', 'return',
+                'set', 'static', 'switch', 'try', 'until', 'unwind_protect',
+                'unwind_protect_cleanup', 'while'), suffix=r'\b'),
+             Keyword),
+
+            (words(builtin_kw + command_kw + function_kw + loadable_kw + mapping_kw,
+                   suffix=r'\b'),  Name.Builtin),
+
+            (words(builtin_consts, suffix=r'\b'), Name.Constant),
+
+            # operators in Octave but not Matlab:
+            (r'-=|!=|!|/=|--', Operator),
+            # operators:
+            (r'-|==|~=|<|>|<=|>=|&&|&|~|\|\|?', Operator),
+            # operators in Octave but not Matlab requiring escape for re:
+            (r'\*=|\+=|\^=|\/=|\\=|\*\*|\+\+|\.\*\*', Operator),
+            # operators requiring escape for re:
+            (r'\.\*|\*|\+|\.\^|\^|\.\\|\.\/|\/|\\', Operator),
+
+
+            # punctuation:
+            (r'[\[\](){}:@.,]', Punctuation),
+            (r'=|:|;', Punctuation),
+
+            (r'"[^"]*"', String),
+
+            (r'(\d+\.\d*|\d*\.\d+)([eEf][+-]?[0-9]+)?', Number.Float),
+            (r'\d+[eEf][+-]?[0-9]+', Number.Float),
+            (r'\d+', Number.Integer),
+
+            # quote can be transpose, instead of string:
+            # (not great, but handles common cases...)
+            (r'(?<=[\w)\].])\'+', Operator),
+            (r'(?|<=|>=|&&|&|~|\|\|?', Operator),
+            # operators requiring escape for re:
+            (r'\.\*|\*|\+|\.\^|\^|\.\\|\.\/|\/|\\', Operator),
+
+            # punctuation:
+            (r'[\[\](){}@.,=:;]+', Punctuation),
+
+            (r'"[^"]*"', String),
+
+            # quote can be transpose, instead of string:
+            # (not great, but handles common cases...)
+            (r'(?<=[\w)\].])\'+', Operator),
+            (r'(?', r'<', r'|', r'!', r"'")
+
+    operator_words = ('and', 'or', 'not')
+
+    tokens = {
+        'root': [
+            (r'/\*', Comment.Multiline, 'comment'),
+            (r'"(?:[^"\\]|\\.)*"', String),
+            (r'\(|\)|\[|\]|\{|\}', Punctuation),
+            (r'[,;$]', Punctuation),
+            (words (constants), Name.Constant),
+            (words (keywords), Keyword),
+            (words (operators), Operator),
+            (words (operator_words), Operator.Word),
+            (r'''(?x)
+              ((?:[a-zA-Z_#][\w#]*|`[^`]*`)
+              (?:::[a-zA-Z_#][\w#]*|`[^`]*`)*)(\s*)([(])''',
+             bygroups(Name.Function, Text.Whitespace, Punctuation)),
+            (r'''(?x)
+              (?:[a-zA-Z_#%][\w#%]*|`[^`]*`)
+              (?:::[a-zA-Z_#%][\w#%]*|`[^`]*`)*''', Name.Variable),
+            (r'[-+]?(\d*\.\d+([bdefls][-+]?\d+)?|\d+(\.\d*)?[bdefls][-+]?\d+)', Number.Float),
+            (r'[-+]?\d+', Number.Integer),
+            (r'\s+', Text.Whitespace),
+            (r'.', Text)
+        ],
+        'comment': [
+            (r'[^*/]+', Comment.Multiline),
+            (r'/\*', Comment.Multiline, '#push'),
+            (r'\*/', Comment.Multiline, '#pop'),
+            (r'[*/]', Comment.Multiline)
+        ]
+    }
+
+    def analyse_text (text):
+        strength = 0.0
+        # Input expression terminator.
+        if re.search (r'\$\s*$', text, re.MULTILINE):
+            strength += 0.05
+        # Function definition operator.
+        if ':=' in text:
+            strength += 0.02
+        return strength
diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/lexers/meson.py b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/meson.py
new file mode 100644
index 00000000..6f2c6da3
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/meson.py
@@ -0,0 +1,139 @@
+"""
+    pygments.lexers.meson
+    ~~~~~~~~~~~~~~~~~~~~~
+
+    Pygments lexer for the Meson build system
+
+    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, words, include
+from pygments.token import Comment, Name, Number, Punctuation, Operator, \
+    Keyword, String, Whitespace
+
+__all__ = ['MesonLexer']
+
+
+class MesonLexer(RegexLexer):
+    """Meson language lexer.
+
+    The grammar definition use to transcribe the syntax was retrieved from
+    https://mesonbuild.com/Syntax.html#grammar for version 0.58.
+    Some of those definitions are improperly transcribed, so the Meson++
+    implementation was also checked: https://github.com/dcbaker/meson-plus-plus.
+    """
+
+    # TODO String interpolation @VARNAME@ inner matches
+    # TODO keyword_arg: value inner matches
+
+    name = 'Meson'
+    url = 'https://mesonbuild.com/'
+    aliases = ['meson', 'meson.build']
+    filenames = ['meson.build', 'meson_options.txt']
+    mimetypes = ['text/x-meson']
+    version_added = '2.10'
+
+    tokens = {
+        'root': [
+            (r'#.*?$', Comment),
+            (r"'''.*'''", String.Single),
+            (r'[1-9][0-9]*', Number.Integer),
+            (r'0o[0-7]+', Number.Oct),
+            (r'0x[a-fA-F0-9]+', Number.Hex),
+            include('string'),
+            include('keywords'),
+            include('expr'),
+            (r'[a-zA-Z_][a-zA-Z_0-9]*', Name),
+            (r'\s+', Whitespace),
+        ],
+        'string': [
+            (r"[']{3}([']{0,2}([^\\']|\\(.|\n)))*[']{3}", String),
+            (r"'.*?(?`_.
+    """
+
+    name = "MCFunction"
+    url = "https://minecraft.wiki/w/Commands"
+    aliases = ["mcfunction", "mcf"]
+    filenames = ["*.mcfunction"]
+    mimetypes = ["text/mcfunction"]
+    version_added = '2.12'
+
+    # Used to denotate the start of a block comment, borrowed from Github's mcfunction
+    _block_comment_prefix = "[>!]"
+
+    tokens = {
+        "root": [
+            include("names"),
+            include("comments"),
+            include("literals"),
+            include("whitespace"),
+            include("property"),
+            include("operators"),
+            include("selectors"),
+        ],
+
+        "names": [
+            # The start of a command (either beginning of line OR after the run keyword)
+            #  We don't encode a list of keywords since mods, plugins, or even pre-processors
+            #  may add new commands, so we have a 'close-enough' regex which catches them.
+            (r"^(\s*)([a-z_]+)", bygroups(Whitespace, Name.Builtin)),
+            (r"(?<=run)\s+[a-z_]+", Name.Builtin),
+
+            # UUID
+            (r"\b[0-9a-fA-F]+(?:-[0-9a-fA-F]+){4}\b", Name.Variable),
+            include("resource-name"),
+            # normal command names and scoreboards
+            #  there's no way to know the differences unfortuntely
+            (r"[A-Za-z_][\w.#%$]+", Keyword.Constant),
+            (r"[#%$][\w.#%$]+", Name.Variable.Magic),
+        ],
+
+        "resource-name": [
+            # resource names have to be lowercase
+            (r"#?[a-z_][a-z_.-]*:[a-z0-9_./-]+", Name.Function),
+            # similar to above except optional `:``
+            #  a `/` must be present "somewhere"
+            (r"#?[a-z0-9_\.\-]+\/[a-z0-9_\.\-\/]+", Name.Function),
+        ],
+
+        "whitespace": [
+            (r"\s+", Whitespace),
+        ],
+
+        "comments": [
+            (rf"^\s*(#{_block_comment_prefix})", Comment.Multiline,
+             ("comments.block", "comments.block.emphasized")),
+            (r"#.*$", Comment.Single),
+        ],
+        "comments.block": [
+            (rf"^\s*#{_block_comment_prefix}", Comment.Multiline,
+             "comments.block.emphasized"),
+            (r"^\s*#", Comment.Multiline, "comments.block.normal"),
+            default("#pop"),
+        ],
+        "comments.block.normal": [
+            include("comments.block.special"),
+            (r"\S+", Comment.Multiline),
+            (r"\n", Text, "#pop"),
+            include("whitespace"),
+        ],
+        "comments.block.emphasized": [
+            include("comments.block.special"),
+            (r"\S+", String.Doc),
+            (r"\n", Text, "#pop"),
+            include("whitespace"),
+        ],
+        "comments.block.special": [
+            # Params
+            (r"@\S+", Name.Decorator),
+
+            include("resource-name"),
+
+            # Scoreboard player names
+            (r"[#%$][\w.#%$]+", Name.Variable.Magic),
+        ],
+
+        "operators": [
+            (r"[\-~%^?!+*<>\\/|&=.]", Operator),
+        ],
+
+        "literals": [
+            (r"\.\.", Literal),
+            (r"(true|false)", Keyword.Pseudo),
+
+            # these are like unquoted strings and appear in many places
+            (r"[A-Za-z_]+", Name.Variable.Class),
+
+            (r"[0-7]b", Number.Byte),
+            (r"[+-]?\d*\.?\d+([eE]?[+-]?\d+)?[df]?\b", Number.Float),
+            (r"[+-]?\d+\b", Number.Integer),
+            (r'"', String.Double, "literals.string-double"),
+            (r"'", String.Single, "literals.string-single"),
+        ],
+        "literals.string-double": [
+            (r"\\.", String.Escape),
+            (r'[^\\"\n]+', String.Double),
+            (r'"', String.Double, "#pop"),
+        ],
+        "literals.string-single": [
+            (r"\\.", String.Escape),
+            (r"[^\\'\n]+", String.Single),
+            (r"'", String.Single, "#pop"),
+        ],
+
+        "selectors": [
+            (r"@[a-z]", Name.Variable),
+        ],
+
+
+        ## Generic Property Container
+        # There are several, differing instances where the language accepts
+        #  specific contained keys or contained key, value pairings.
+        #
+        # Property Maps:
+        # - Starts with either `[` or `{`
+        # - Key separated by `:` or `=`
+        # - Deliminated by `,`
+        #
+        # Property Lists:
+        # - Starts with `[`
+        # - Deliminated by `,`
+        #
+        # For simplicity, these patterns match a generic, nestable structure
+        #  which follow a key, value pattern. For normal lists, there's only keys.
+        # This allow some "illegal" structures, but we'll accept those for
+        #  sake of simplicity
+        #
+        # Examples:
+        # - `[facing=up, powered=true]` (blockstate)
+        # - `[name="hello world", nbt={key: 1b}]` (selector + nbt)
+        # - `[{"text": "value"}, "literal"]` (json)
+        ##
+        "property": [
+            # This state gets included in root and also several substates
+            # We do this to shortcut the starting of new properties
+            #  within other properties. Lists can have sublists and compounds
+            #  and values can start a new property (see the `difficult_1.txt`
+            #  snippet).
+            (r"\{", Punctuation, ("property.curly", "property.key")),
+            (r"\[", Punctuation, ("property.square", "property.key")),
+        ],
+        "property.curly": [
+            include("whitespace"),
+            include("property"),
+            (r"\}", Punctuation, "#pop"),
+        ],
+        "property.square": [
+            include("whitespace"),
+            include("property"),
+            (r"\]", Punctuation, "#pop"),
+
+            # lists can have sequences of items
+            (r",", Punctuation),
+        ],
+        "property.key": [
+            include("whitespace"),
+
+            # resource names (for advancements)
+            #  can omit `:` to default `minecraft:`
+            # must check if there is a future equals sign if `:` is in the name
+            (r"#?[a-z_][a-z_\.\-]*\:[a-z0-9_\.\-/]+(?=\s*\=)", Name.Attribute, "property.delimiter"),
+            (r"#?[a-z_][a-z0-9_\.\-/]+", Name.Attribute, "property.delimiter"),
+
+            # unquoted NBT key
+            (r"[A-Za-z_\-\+]+", Name.Attribute, "property.delimiter"),
+
+            # quoted JSON or NBT key
+            (r'"', Name.Attribute, "property.delimiter", "literals.string-double"),
+            (r"'", Name.Attribute, "property.delimiter", "literals.string-single"),
+
+            # index for a list
+            (r"-?\d+", Number.Integer, "property.delimiter"),
+
+            default("#pop"),
+        ],
+        "property.key.string-double": [
+            (r"\\.", String.Escape),
+            (r'[^\\"\n]+', Name.Attribute),
+            (r'"', Name.Attribute, "#pop"),
+        ],
+        "property.key.string-single": [
+            (r"\\.", String.Escape),
+            (r"[^\\'\n]+", Name.Attribute),
+            (r"'", Name.Attribute, "#pop"),
+        ],
+        "property.delimiter": [
+            include("whitespace"),
+
+            (r"[:=]!?", Punctuation, "property.value"),
+            (r",", Punctuation),
+
+            default("#pop"),
+        ],
+        "property.value": [
+            include("whitespace"),
+
+            # unquoted resource names are valid literals here
+            (r"#?[a-z_][a-z_\.\-]*\:[a-z0-9_\.\-/]+", Name.Tag),
+            (r"#?[a-z_][a-z0-9_\.\-/]+", Name.Tag),
+
+            include("literals"),
+            include("property"),
+
+            default("#pop"),
+        ],
+    }
+
+
+class MCSchemaLexer(RegexLexer):
+    """Lexer for Minecraft Add-ons data Schemas, an interface structure standard used in Minecraft
+    """
+
+    name = 'MCSchema'
+    url = 'https://learn.microsoft.com/en-us/minecraft/creator/reference/content/schemasreference/'
+    aliases = ['mcschema']
+    filenames = ['*.mcschema']
+    mimetypes = ['text/mcschema']
+    version_added = '2.14'
+
+    tokens = {
+        'commentsandwhitespace': [
+            (r'\s+', Whitespace),
+            (r'//.*?$', Comment.Single),
+            (r'/\*.*?\*/', Comment.Multiline)
+        ],
+        'slashstartsregex': [
+            include('commentsandwhitespace'),
+            (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
+             r'([gimuysd]+\b|\B)', String.Regex, '#pop'),
+            (r'(?=/)', Text, ('#pop', 'badregex')),
+            default('#pop')
+        ],
+        'badregex': [
+            (r'\n', Whitespace, '#pop')
+        ],
+        'singlestring': [
+            (r'\\.', String.Escape),
+            (r"'", String.Single, '#pop'),
+            (r"[^\\']+", String.Single),
+        ],
+        'doublestring': [
+            (r'\\.', String.Escape),
+            (r'"', String.Double, '#pop'),
+            (r'[^\\"]+', String.Double),
+        ],
+        'root': [
+            (r'^(?=\s|/|', Comment, '#pop'),
+            (r'[^\-]+|-', Comment),
+        ],
+    }
+
+
+class ReasonLexer(RegexLexer):
+    """
+    For the ReasonML language.
+    """
+
+    name = 'ReasonML'
+    url = 'https://reasonml.github.io/'
+    aliases = ['reasonml', 'reason']
+    filenames = ['*.re', '*.rei']
+    mimetypes = ['text/x-reasonml']
+    version_added = '2.6'
+
+    keywords = (
+        'as', 'assert', 'begin', 'class', 'constraint', 'do', 'done', 'downto',
+        'else', 'end', 'exception', 'external', 'false', 'for', 'fun', 'esfun',
+        'function', 'functor', 'if', 'in', 'include', 'inherit', 'initializer', 'lazy',
+        'let', 'switch', 'module', 'pub', 'mutable', 'new', 'nonrec', 'object', 'of',
+        'open', 'pri', 'rec', 'sig', 'struct', 'then', 'to', 'true', 'try',
+        'type', 'val', 'virtual', 'when', 'while', 'with',
+    )
+    keyopts = (
+        '!=', '#', '&', '&&', r'\(', r'\)', r'\*', r'\+', ',', '-',
+        r'-\.', '=>', r'\.', r'\.\.', r'\.\.\.', ':', '::', ':=', ':>', ';', ';;', '<',
+        '<-', '=', '>', '>]', r'>\}', r'\?', r'\?\?', r'\[', r'\[<', r'\[>',
+        r'\[\|', ']', '_', '`', r'\{', r'\{<', r'\|', r'\|\|', r'\|]', r'\}', '~'
+    )
+
+    operators = r'[!$%&*+\./:<=>?@^|~-]'
+    word_operators = ('and', 'asr', 'land', 'lor', 'lsl', 'lsr', 'lxor', 'mod', 'or')
+    prefix_syms = r'[!?~]'
+    infix_syms = r'[=<>@^|&+\*/$%-]'
+    primitives = ('unit', 'int', 'float', 'bool', 'string', 'char', 'list', 'array')
+
+    tokens = {
+        'escape-sequence': [
+            (r'\\[\\"\'ntbr]', String.Escape),
+            (r'\\[0-9]{3}', String.Escape),
+            (r'\\x[0-9a-fA-F]{2}', String.Escape),
+        ],
+        'root': [
+            (r'\s+', Text),
+            (r'false|true|\(\)|\[\]', Name.Builtin.Pseudo),
+            (r'\b([A-Z][\w\']*)(?=\s*\.)', Name.Namespace, 'dotted'),
+            (r'\b([A-Z][\w\']*)', Name.Class),
+            (r'//.*?\n', Comment.Single),
+            (r'\/\*(?!/)', Comment.Multiline, 'comment'),
+            (r'\b({})\b'.format('|'.join(keywords)), Keyword),
+            (r'({})'.format('|'.join(keyopts[::-1])), Operator.Word),
+            (rf'({infix_syms}|{prefix_syms})?{operators}', Operator),
+            (r'\b({})\b'.format('|'.join(word_operators)), Operator.Word),
+            (r'\b({})\b'.format('|'.join(primitives)), Keyword.Type),
+
+            (r"[^\W\d][\w']*", Name),
+
+            (r'-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)', Number.Float),
+            (r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex),
+            (r'0[oO][0-7][0-7_]*', Number.Oct),
+            (r'0[bB][01][01_]*', Number.Bin),
+            (r'\d[\d_]*', Number.Integer),
+
+            (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'",
+             String.Char),
+            (r"'.'", String.Char),
+            (r"'", Keyword),
+
+            (r'"', String.Double, 'string'),
+
+            (r'[~?][a-z][\w\']*:', Name.Variable),
+        ],
+        'comment': [
+            (r'[^/*]+', Comment.Multiline),
+            (r'\/\*', Comment.Multiline, '#push'),
+            (r'\*\/', Comment.Multiline, '#pop'),
+            (r'\*', Comment.Multiline),
+        ],
+        'string': [
+            (r'[^\\"]+', String.Double),
+            include('escape-sequence'),
+            (r'\\\n', String.Double),
+            (r'"', String.Double, '#pop'),
+        ],
+        'dotted': [
+            (r'\s+', Text),
+            (r'\.', Punctuation),
+            (r'[A-Z][\w\']*(?=\s*\.)', Name.Namespace),
+            (r'[A-Z][\w\']*', Name.Class, '#pop'),
+            (r'[a-z_][\w\']*', Name, '#pop'),
+            default('#pop'),
+        ],
+    }
+
+
+class FStarLexer(RegexLexer):
+    """
+    For the F* language.
+    """
+
+    name = 'FStar'
+    url = 'https://www.fstar-lang.org/'
+    aliases = ['fstar']
+    filenames = ['*.fst', '*.fsti']
+    mimetypes = ['text/x-fstar']
+    version_added = '2.7'
+
+    keywords = (
+        'abstract', 'attributes', 'noeq', 'unopteq', 'and'
+        'begin', 'by', 'default', 'effect', 'else', 'end', 'ensures',
+        'exception', 'exists', 'false', 'forall', 'fun', 'function', 'if',
+        'in', 'include', 'inline', 'inline_for_extraction', 'irreducible',
+        'logic', 'match', 'module', 'mutable', 'new', 'new_effect', 'noextract',
+        'of', 'open', 'opaque', 'private', 'range_of', 'reifiable',
+        'reify', 'reflectable', 'requires', 'set_range_of', 'sub_effect',
+        'synth', 'then', 'total', 'true', 'try', 'type', 'unfold', 'unfoldable',
+        'val', 'when', 'with', 'not'
+    )
+    decl_keywords = ('let', 'rec')
+    assume_keywords = ('assume', 'admit', 'assert', 'calc')
+    keyopts = (
+        r'~', r'-', r'/\\', r'\\/', r'<:', r'<@', r'\(\|', r'\|\)', r'#', r'u#',
+        r'&', r'\(', r'\)', r'\(\)', r',', r'~>', r'->', r'<-', r'<--', r'<==>',
+        r'==>', r'\.', r'\?', r'\?\.', r'\.\[', r'\.\(', r'\.\(\|', r'\.\[\|',
+        r'\{:pattern', r':', r'::', r':=', r';', r';;', r'=', r'%\[', r'!\{',
+        r'\[', r'\[@', r'\[\|', r'\|>', r'\]', r'\|\]', r'\{', r'\|', r'\}', r'\$'
+    )
+
+    operators = r'[!$%&*+\./:<=>?@^|~-]'
+    prefix_syms = r'[!?~]'
+    infix_syms = r'[=<>@^|&+\*/$%-]'
+    primitives = ('unit', 'int', 'float', 'bool', 'string', 'char', 'list', 'array')
+
+    tokens = {
+        'escape-sequence': [
+            (r'\\[\\"\'ntbr]', String.Escape),
+            (r'\\[0-9]{3}', String.Escape),
+            (r'\\x[0-9a-fA-F]{2}', String.Escape),
+        ],
+        'root': [
+            (r'\s+', Text),
+            (r'false|true|False|True|\(\)|\[\]', Name.Builtin.Pseudo),
+            (r'\b([A-Z][\w\']*)(?=\s*\.)', Name.Namespace, 'dotted'),
+            (r'\b([A-Z][\w\']*)', Name.Class),
+            (r'\(\*(?![)])', Comment, 'comment'),
+            (r'\/\/.+$', Comment),
+            (r'\b({})\b'.format('|'.join(keywords)), Keyword),
+            (r'\b({})\b'.format('|'.join(assume_keywords)), Name.Exception),
+            (r'\b({})\b'.format('|'.join(decl_keywords)), Keyword.Declaration),
+            (r'({})'.format('|'.join(keyopts[::-1])), Operator),
+            (rf'({infix_syms}|{prefix_syms})?{operators}', Operator),
+            (r'\b({})\b'.format('|'.join(primitives)), Keyword.Type),
+
+            (r"[^\W\d][\w']*", Name),
+
+            (r'-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)', Number.Float),
+            (r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex),
+            (r'0[oO][0-7][0-7_]*', Number.Oct),
+            (r'0[bB][01][01_]*', Number.Bin),
+            (r'\d[\d_]*', Number.Integer),
+
+            (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'",
+             String.Char),
+            (r"'.'", String.Char),
+            (r"'", Keyword),  # a stray quote is another syntax element
+            (r"\`([\w\'.]+)\`", Operator.Word),  # for infix applications
+            (r"\`", Keyword),  # for quoting
+            (r'"', String.Double, 'string'),
+
+            (r'[~?][a-z][\w\']*:', Name.Variable),
+        ],
+        'comment': [
+            (r'[^(*)]+', Comment),
+            (r'\(\*', Comment, '#push'),
+            (r'\*\)', Comment, '#pop'),
+            (r'[(*)]', Comment),
+        ],
+        'string': [
+            (r'[^\\"]+', String.Double),
+            include('escape-sequence'),
+            (r'\\\n', String.Double),
+            (r'"', String.Double, '#pop'),
+        ],
+        'dotted': [
+            (r'\s+', Text),
+            (r'\.', Punctuation),
+            (r'[A-Z][\w\']*(?=\s*\.)', Name.Namespace),
+            (r'[A-Z][\w\']*', Name.Class, '#pop'),
+            (r'[a-z_][\w\']*', Name, '#pop'),
+            default('#pop'),
+        ],
+    }
diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/lexers/modeling.py b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/modeling.py
new file mode 100644
index 00000000..d681e7f3
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/modeling.py
@@ -0,0 +1,366 @@
+"""
+    pygments.lexers.modeling
+    ~~~~~~~~~~~~~~~~~~~~~~~~
+
+    Lexers for modeling languages.
+
+    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups, using, default
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+    Number, Punctuation, Whitespace
+
+from pygments.lexers.html import HtmlLexer
+from pygments.lexers import _stan_builtins
+
+__all__ = ['ModelicaLexer', 'BugsLexer', 'JagsLexer', 'StanLexer']
+
+
+class ModelicaLexer(RegexLexer):
+    """
+    For Modelica source code.
+    """
+    name = 'Modelica'
+    url = 'http://www.modelica.org/'
+    aliases = ['modelica']
+    filenames = ['*.mo']
+    mimetypes = ['text/x-modelica']
+    version_added = '1.1'
+
+    flags = re.DOTALL | re.MULTILINE
+
+    _name = r"(?:'(?:[^\\']|\\.)+'|[a-zA-Z_]\w*)"
+
+    tokens = {
+        'whitespace': [
+            (r'[\s\ufeff]+', Text),
+            (r'//[^\n]*\n?', Comment.Single),
+            (r'/\*.*?\*/', Comment.Multiline)
+        ],
+        'root': [
+            include('whitespace'),
+            (r'"', String.Double, 'string'),
+            (r'[()\[\]{},;]+', Punctuation),
+            (r'\.?[*^/+-]|\.|<>|[<>:=]=?', Operator),
+            (r'\d+(\.?\d*[eE][-+]?\d+|\.\d*)', Number.Float),
+            (r'\d+', Number.Integer),
+            (r'(abs|acos|actualStream|array|asin|assert|AssertionLevel|atan|'
+             r'atan2|backSample|Boolean|cardinality|cat|ceil|change|Clock|'
+             r'Connections|cos|cosh|cross|delay|diagonal|div|edge|exp|'
+             r'ExternalObject|fill|floor|getInstanceName|hold|homotopy|'
+             r'identity|inStream|integer|Integer|interval|inverse|isPresent|'
+             r'linspace|log|log10|matrix|max|min|mod|ndims|noClock|noEvent|'
+             r'ones|outerProduct|pre|previous|product|Real|reinit|rem|rooted|'
+             r'sample|scalar|semiLinear|shiftSample|sign|sin|sinh|size|skew|'
+             r'smooth|spatialDistribution|sqrt|StateSelect|String|subSample|'
+             r'sum|superSample|symmetric|tan|tanh|terminal|terminate|time|'
+             r'transpose|vector|zeros)\b', Name.Builtin),
+            (r'(algorithm|annotation|break|connect|constant|constrainedby|der|'
+             r'discrete|each|else|elseif|elsewhen|encapsulated|enumeration|'
+             r'equation|exit|expandable|extends|external|firstTick|final|flow|for|if|'
+             r'import|impure|in|initial|inner|input|interval|loop|nondiscrete|outer|'
+             r'output|parameter|partial|protected|public|pure|redeclare|'
+             r'replaceable|return|stream|then|when|while)\b',
+             Keyword.Reserved),
+            (r'(and|not|or)\b', Operator.Word),
+            (r'(block|class|connector|end|function|model|operator|package|'
+             r'record|type)\b', Keyword.Reserved, 'class'),
+            (r'(false|true)\b', Keyword.Constant),
+            (r'within\b', Keyword.Reserved, 'package-prefix'),
+            (_name, Name)
+        ],
+        'class': [
+            include('whitespace'),
+            (r'(function|record)\b', Keyword.Reserved),
+            (r'(if|for|when|while)\b', Keyword.Reserved, '#pop'),
+            (_name, Name.Class, '#pop'),
+            default('#pop')
+        ],
+        'package-prefix': [
+            include('whitespace'),
+            (_name, Name.Namespace, '#pop'),
+            default('#pop')
+        ],
+        'string': [
+            (r'"', String.Double, '#pop'),
+            (r'\\[\'"?\\abfnrtv]', String.Escape),
+            (r'(?i)<\s*html\s*>([^\\"]|\\.)+?(<\s*/\s*html\s*>|(?="))',
+             using(HtmlLexer)),
+            (r'<|\\?[^"\\<]+', String.Double)
+        ]
+    }
+
+
+class BugsLexer(RegexLexer):
+    """
+    Pygments Lexer for OpenBugs and WinBugs
+    models.
+    """
+
+    name = 'BUGS'
+    aliases = ['bugs', 'winbugs', 'openbugs']
+    filenames = ['*.bug']
+    url = 'https://www.mrc-bsu.cam.ac.uk/software/bugs/openbugs'
+    version_added = '1.6'
+
+    _FUNCTIONS = (
+        # Scalar functions
+        'abs', 'arccos', 'arccosh', 'arcsin', 'arcsinh', 'arctan', 'arctanh',
+        'cloglog', 'cos', 'cosh', 'cumulative', 'cut', 'density', 'deviance',
+        'equals', 'expr', 'gammap', 'ilogit', 'icloglog', 'integral', 'log',
+        'logfact', 'loggam', 'logit', 'max', 'min', 'phi', 'post.p.value',
+        'pow', 'prior.p.value', 'probit', 'replicate.post', 'replicate.prior',
+        'round', 'sin', 'sinh', 'solution', 'sqrt', 'step', 'tan', 'tanh',
+        'trunc',
+        # Vector functions
+        'inprod', 'interp.lin', 'inverse', 'logdet', 'mean', 'eigen.vals',
+        'ode', 'prod', 'p.valueM', 'rank', 'ranked', 'replicate.postM',
+        'sd', 'sort', 'sum',
+        # Special
+        'D', 'I', 'F', 'T', 'C')
+    """ OpenBUGS built-in functions
+
+    From http://www.openbugs.info/Manuals/ModelSpecification.html#ContentsAII
+
+    This also includes
+
+    - T, C, I : Truncation and censoring.
+      ``T`` and ``C`` are in OpenBUGS. ``I`` in WinBUGS.
+    - D : ODE
+    - F : Functional http://www.openbugs.info/Examples/Functionals.html
+
+    """
+
+    _DISTRIBUTIONS = ('dbern', 'dbin', 'dcat', 'dnegbin', 'dpois',
+                      'dhyper', 'dbeta', 'dchisqr', 'ddexp', 'dexp',
+                      'dflat', 'dgamma', 'dgev', 'df', 'dggamma', 'dgpar',
+                      'dloglik', 'dlnorm', 'dlogis', 'dnorm', 'dpar',
+                      'dt', 'dunif', 'dweib', 'dmulti', 'ddirch', 'dmnorm',
+                      'dmt', 'dwish')
+    """ OpenBUGS built-in distributions
+
+    Functions from
+    http://www.openbugs.info/Manuals/ModelSpecification.html#ContentsAI
+    """
+
+    tokens = {
+        'whitespace': [
+            (r"\s+", Text),
+        ],
+        'comments': [
+            # Comments
+            (r'#.*$', Comment.Single),
+        ],
+        'root': [
+            # Comments
+            include('comments'),
+            include('whitespace'),
+            # Block start
+            (r'(model)(\s+)(\{)',
+             bygroups(Keyword.Namespace, Text, Punctuation)),
+            # Reserved Words
+            (r'(for|in)(?![\w.])', Keyword.Reserved),
+            # Built-in Functions
+            (r'({})(?=\s*\()'.format(r'|'.join(_FUNCTIONS + _DISTRIBUTIONS)),
+             Name.Builtin),
+            # Regular variable names
+            (r'[A-Za-z][\w.]*', Name),
+            # Number Literals
+            (r'[-+]?[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)?', Number),
+            # Punctuation
+            (r'\[|\]|\(|\)|:|,|;', Punctuation),
+            # Assignment operators
+            # SLexer makes these tokens Operators.
+            (r'<-|~', Operator),
+            # Infix and prefix operators
+            (r'\+|-|\*|/', Operator),
+            # Block
+            (r'[{}]', Punctuation),
+        ]
+    }
+
+    def analyse_text(text):
+        if re.search(r"^\s*model\s*{", text, re.M):
+            return 0.7
+        else:
+            return 0.0
+
+
+class JagsLexer(RegexLexer):
+    """
+    Pygments Lexer for JAGS.
+    """
+
+    name = 'JAGS'
+    aliases = ['jags']
+    filenames = ['*.jag', '*.bug']
+    url = 'https://mcmc-jags.sourceforge.io'
+    version_added = '1.6'
+
+    # JAGS
+    _FUNCTIONS = (
+        'abs', 'arccos', 'arccosh', 'arcsin', 'arcsinh', 'arctan', 'arctanh',
+        'cos', 'cosh', 'cloglog',
+        'equals', 'exp', 'icloglog', 'ifelse', 'ilogit', 'log', 'logfact',
+        'loggam', 'logit', 'phi', 'pow', 'probit', 'round', 'sin', 'sinh',
+        'sqrt', 'step', 'tan', 'tanh', 'trunc', 'inprod', 'interp.lin',
+        'logdet', 'max', 'mean', 'min', 'prod', 'sum', 'sd', 'inverse',
+        'rank', 'sort', 't', 'acos', 'acosh', 'asin', 'asinh', 'atan',
+        # Truncation/Censoring (should I include)
+        'T', 'I')
+    # Distributions with density, probability and quartile functions
+    _DISTRIBUTIONS = tuple(f'[dpq]{x}' for x in
+                           ('bern', 'beta', 'dchiqsqr', 'ddexp', 'dexp',
+                            'df', 'gamma', 'gen.gamma', 'logis', 'lnorm',
+                            'negbin', 'nchisqr', 'norm', 'par', 'pois', 'weib'))
+    # Other distributions without density and probability
+    _OTHER_DISTRIBUTIONS = (
+        'dt', 'dunif', 'dbetabin', 'dbern', 'dbin', 'dcat', 'dhyper',
+        'ddirch', 'dmnorm', 'dwish', 'dmt', 'dmulti', 'dbinom', 'dchisq',
+        'dnbinom', 'dweibull', 'ddirich')
+
+    tokens = {
+        'whitespace': [
+            (r"\s+", Text),
+        ],
+        'names': [
+            # Regular variable names
+            (r'[a-zA-Z][\w.]*\b', Name),
+        ],
+        'comments': [
+            # do not use stateful comments
+            (r'(?s)/\*.*?\*/', Comment.Multiline),
+            # Comments
+            (r'#.*$', Comment.Single),
+        ],
+        'root': [
+            # Comments
+            include('comments'),
+            include('whitespace'),
+            # Block start
+            (r'(model|data)(\s+)(\{)',
+             bygroups(Keyword.Namespace, Text, Punctuation)),
+            (r'var(?![\w.])', Keyword.Declaration),
+            # Reserved Words
+            (r'(for|in)(?![\w.])', Keyword.Reserved),
+            # Builtins
+            # Need to use lookahead because . is a valid char
+            (r'({})(?=\s*\()'.format(r'|'.join(_FUNCTIONS
+                                          + _DISTRIBUTIONS
+                                          + _OTHER_DISTRIBUTIONS)),
+             Name.Builtin),
+            # Names
+            include('names'),
+            # Number Literals
+            (r'[-+]?[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)?', Number),
+            (r'\[|\]|\(|\)|:|,|;', Punctuation),
+            # Assignment operators
+            (r'<-|~', Operator),
+            # # JAGS includes many more than OpenBUGS
+            (r'\+|-|\*|\/|\|\|[&]{2}|[<>=]=?|\^|%.*?%', Operator),
+            (r'[{}]', Punctuation),
+        ]
+    }
+
+    def analyse_text(text):
+        if re.search(r'^\s*model\s*\{', text, re.M):
+            if re.search(r'^\s*data\s*\{', text, re.M):
+                return 0.9
+            elif re.search(r'^\s*var', text, re.M):
+                return 0.9
+            else:
+                return 0.3
+        else:
+            return 0
+
+
+class StanLexer(RegexLexer):
+    """Pygments Lexer for Stan models.
+
+    The Stan modeling language is specified in the *Stan Modeling Language
+    User's Guide and Reference Manual, v2.17.0*,
+    `pdf `__.
+    """
+
+    name = 'Stan'
+    aliases = ['stan']
+    filenames = ['*.stan']
+    url = 'https://mc-stan.org'
+    version_added = '1.6'
+
+    tokens = {
+        'whitespace': [
+            (r"\s+", Text),
+        ],
+        'comments': [
+            (r'(?s)/\*.*?\*/', Comment.Multiline),
+            # Comments
+            (r'(//|#).*$', Comment.Single),
+        ],
+        'root': [
+            (r'"[^"]*"', String),
+            # Comments
+            include('comments'),
+            # block start
+            include('whitespace'),
+            # Block start
+            (r'({})(\s*)(\{{)'.format(r'|'.join(('functions', 'data', r'transformed\s+?data',
+                        'parameters', r'transformed\s+parameters',
+                        'model', r'generated\s+quantities'))),
+             bygroups(Keyword.Namespace, Text, Punctuation)),
+            # target keyword
+            (r'target\s*\+=', Keyword),
+            # Reserved Words
+            (r'({})\b'.format(r'|'.join(_stan_builtins.KEYWORDS)), Keyword),
+            # Truncation
+            (r'T(?=\s*\[)', Keyword),
+            # Data types
+            (r'({})\b'.format(r'|'.join(_stan_builtins.TYPES)), Keyword.Type),
+             # < should be punctuation, but elsewhere I can't tell if it is in
+             # a range constraint
+            (r'(<)(\s*)(upper|lower|offset|multiplier)(\s*)(=)',
+             bygroups(Operator, Whitespace, Keyword, Whitespace, Punctuation)),
+            (r'(,)(\s*)(upper)(\s*)(=)',
+             bygroups(Punctuation, Whitespace, Keyword, Whitespace, Punctuation)),
+            # Punctuation
+            (r"[;,\[\]()]", Punctuation),
+            # Builtin
+            (r'({})(?=\s*\()'.format('|'.join(_stan_builtins.FUNCTIONS)), Name.Builtin),
+            (r'(~)(\s*)({})(?=\s*\()'.format('|'.join(_stan_builtins.DISTRIBUTIONS)),
+                bygroups(Operator, Whitespace, Name.Builtin)),
+            # Special names ending in __, like lp__
+            (r'[A-Za-z]\w*__\b', Name.Builtin.Pseudo),
+            (r'({})\b'.format(r'|'.join(_stan_builtins.RESERVED)), Keyword.Reserved),
+            # user-defined functions
+            (r'[A-Za-z]\w*(?=\s*\()]', Name.Function),
+            # Imaginary Literals
+            (r'[0-9]+(\.[0-9]*)?([eE][+-]?[0-9]+)?i', Number.Float),
+            (r'\.[0-9]+([eE][+-]?[0-9]+)?i', Number.Float),
+            (r'[0-9]+i', Number.Float),
+            # Real Literals
+            (r'[0-9]+(\.[0-9]*)?([eE][+-]?[0-9]+)?', Number.Float),
+            (r'\.[0-9]+([eE][+-]?[0-9]+)?', Number.Float),
+            # Integer Literals
+            (r'[0-9]+', Number.Integer),
+            # Regular variable names
+            (r'[A-Za-z]\w*\b', Name),
+            # Assignment operators
+            (r'<-|(?:\+|-|\.?/|\.?\*|=)?=|~', Operator),
+            # Infix, prefix and postfix operators (and = )
+            (r"\+|-|\.?\*|\.?/|\\|'|\.?\^|!=?|<=?|>=?|\|\||&&|%|\?|:|%/%|!", Operator),
+            # Block delimiters
+            (r'[{}]', Punctuation),
+            # Distribution |
+            (r'\|', Punctuation)
+        ]
+    }
+
+    def analyse_text(text):
+        if re.search(r'^\s*parameters\s*\{', text, re.M):
+            return 1.0
+        else:
+            return 0.0
diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/lexers/modula2.py b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/modula2.py
new file mode 100644
index 00000000..713f4722
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/modula2.py
@@ -0,0 +1,1579 @@
+"""
+    pygments.lexers.modula2
+    ~~~~~~~~~~~~~~~~~~~~~~~
+
+    Multi-Dialect Lexer for Modula-2.
+
+    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include
+from pygments.util import get_bool_opt, get_list_opt
+from pygments.token import Text, Comment, Operator, Keyword, Name, \
+    String, Number, Punctuation, Error
+
+__all__ = ['Modula2Lexer']
+
+
+# Multi-Dialect Modula-2 Lexer
+class Modula2Lexer(RegexLexer):
+    """
+    For Modula-2 source code.
+
+    The Modula-2 lexer supports several dialects.  By default, it operates in
+    fallback mode, recognising the *combined* literals, punctuation symbols
+    and operators of all supported dialects, and the *combined* reserved words
+    and builtins of PIM Modula-2, ISO Modula-2 and Modula-2 R10, while not
+    differentiating between library defined identifiers.
+
+    To select a specific dialect, a dialect option may be passed
+    or a dialect tag may be embedded into a source file.
+
+    Dialect Options:
+
+    `m2pim`
+        Select PIM Modula-2 dialect.
+    `m2iso`
+        Select ISO Modula-2 dialect.
+    `m2r10`
+        Select Modula-2 R10 dialect.
+    `objm2`
+        Select Objective Modula-2 dialect.
+
+    The PIM and ISO dialect options may be qualified with a language extension.
+
+    Language Extensions:
+
+    `+aglet`
+        Select Aglet Modula-2 extensions, available with m2iso.
+    `+gm2`
+        Select GNU Modula-2 extensions, available with m2pim.
+    `+p1`
+        Select p1 Modula-2 extensions, available with m2iso.
+    `+xds`
+        Select XDS Modula-2 extensions, available with m2iso.
+
+
+    Passing a Dialect Option via Unix Commandline Interface
+
+    Dialect options may be passed to the lexer using the `dialect` key.
+    Only one such option should be passed. If multiple dialect options are
+    passed, the first valid option is used, any subsequent options are ignored.
+
+    Examples:
+
+    `$ pygmentize -O full,dialect=m2iso -f html -o /path/to/output /path/to/input`
+        Use ISO dialect to render input to HTML output
+    `$ pygmentize -O full,dialect=m2iso+p1 -f rtf -o /path/to/output /path/to/input`
+        Use ISO dialect with p1 extensions to render input to RTF output
+
+
+    Embedding a Dialect Option within a source file
+
+    A dialect option may be embedded in a source file in form of a dialect
+    tag, a specially formatted comment that specifies a dialect option.
+
+    Dialect Tag EBNF::
+
+       dialectTag :
+           OpeningCommentDelim Prefix dialectOption ClosingCommentDelim ;
+
+       dialectOption :
+           'm2pim' | 'm2iso' | 'm2r10' | 'objm2' |
+           'm2iso+aglet' | 'm2pim+gm2' | 'm2iso+p1' | 'm2iso+xds' ;
+
+       Prefix : '!' ;
+
+       OpeningCommentDelim : '(*' ;
+
+       ClosingCommentDelim : '*)' ;
+
+    No whitespace is permitted between the tokens of a dialect tag.
+
+    In the event that a source file contains multiple dialect tags, the first
+    tag that contains a valid dialect option will be used and any subsequent
+    dialect tags will be ignored.  Ideally, a dialect tag should be placed
+    at the beginning of a source file.
+
+    An embedded dialect tag overrides a dialect option set via command line.
+
+    Examples:
+
+    ``(*!m2r10*) DEFINITION MODULE Foobar; ...``
+        Use Modula2 R10 dialect to render this source file.
+    ``(*!m2pim+gm2*) DEFINITION MODULE Bazbam; ...``
+        Use PIM dialect with GNU extensions to render this source file.
+
+
+    Algol Publication Mode:
+
+    In Algol publication mode, source text is rendered for publication of
+    algorithms in scientific papers and academic texts, following the format
+    of the Revised Algol-60 Language Report.  It is activated by passing
+    one of two corresponding styles as an option:
+
+    `algol`
+        render reserved words lowercase underline boldface
+        and builtins lowercase boldface italic
+    `algol_nu`
+        render reserved words lowercase boldface (no underlining)
+        and builtins lowercase boldface italic
+
+    The lexer automatically performs the required lowercase conversion when
+    this mode is activated.
+
+    Example:
+
+    ``$ pygmentize -O full,style=algol -f latex -o /path/to/output /path/to/input``
+        Render input file in Algol publication mode to LaTeX output.
+
+
+    Rendering Mode of First Class ADT Identifiers:
+
+    The rendering of standard library first class ADT identifiers is controlled
+    by option flag "treat_stdlib_adts_as_builtins".
+
+    When this option is turned on, standard library ADT identifiers are rendered
+    as builtins.  When it is turned off, they are rendered as ordinary library
+    identifiers.
+
+    `treat_stdlib_adts_as_builtins` (default: On)
+
+    The option is useful for dialects that support ADTs as first class objects
+    and provide ADTs in the standard library that would otherwise be built-in.
+
+    At present, only Modula-2 R10 supports library ADTs as first class objects
+    and therefore, no ADT identifiers are defined for any other dialects.
+
+    Example:
+
+    ``$ pygmentize -O full,dialect=m2r10,treat_stdlib_adts_as_builtins=Off ...``
+        Render standard library ADTs as ordinary library types.
+
+    .. versionchanged:: 2.1
+       Added multi-dialect support.
+    """
+    name = 'Modula-2'
+    url = 'http://www.modula2.org/'
+    aliases = ['modula2', 'm2']
+    filenames = ['*.def', '*.mod']
+    mimetypes = ['text/x-modula2']
+    version_added = '1.3'
+
+    flags = re.MULTILINE | re.DOTALL
+
+    tokens = {
+        'whitespace': [
+            (r'\n+', Text),  # blank lines
+            (r'\s+', Text),  # whitespace
+        ],
+        'dialecttags': [
+            # PIM Dialect Tag
+            (r'\(\*!m2pim\*\)', Comment.Special),
+            # ISO Dialect Tag
+            (r'\(\*!m2iso\*\)', Comment.Special),
+            # M2R10 Dialect Tag
+            (r'\(\*!m2r10\*\)', Comment.Special),
+            # ObjM2 Dialect Tag
+            (r'\(\*!objm2\*\)', Comment.Special),
+            # Aglet Extensions Dialect Tag
+            (r'\(\*!m2iso\+aglet\*\)', Comment.Special),
+            # GNU Extensions Dialect Tag
+            (r'\(\*!m2pim\+gm2\*\)', Comment.Special),
+            # p1 Extensions Dialect Tag
+            (r'\(\*!m2iso\+p1\*\)', Comment.Special),
+            # XDS Extensions Dialect Tag
+            (r'\(\*!m2iso\+xds\*\)', Comment.Special),
+        ],
+        'identifiers': [
+            (r'([a-zA-Z_$][\w$]*)', Name),
+        ],
+        'prefixed_number_literals': [
+            #
+            # Base-2, whole number
+            (r'0b[01]+(\'[01]+)*', Number.Bin),
+            #
+            # Base-16, whole number
+            (r'0[ux][0-9A-F]+(\'[0-9A-F]+)*', Number.Hex),
+        ],
+        'plain_number_literals': [
+            #
+            # Base-10, real number with exponent
+            (r'[0-9]+(\'[0-9]+)*'  # integral part
+             r'\.[0-9]+(\'[0-9]+)*'  # fractional part
+             r'[eE][+-]?[0-9]+(\'[0-9]+)*',  # exponent
+             Number.Float),
+            #
+            # Base-10, real number without exponent
+            (r'[0-9]+(\'[0-9]+)*'  # integral part
+             r'\.[0-9]+(\'[0-9]+)*',  # fractional part
+             Number.Float),
+            #
+            # Base-10, whole number
+            (r'[0-9]+(\'[0-9]+)*', Number.Integer),
+        ],
+        'suffixed_number_literals': [
+            #
+            # Base-8, whole number
+            (r'[0-7]+B', Number.Oct),
+            #
+            # Base-8, character code
+            (r'[0-7]+C', Number.Oct),
+            #
+            # Base-16, number
+            (r'[0-9A-F]+H', Number.Hex),
+        ],
+        'string_literals': [
+            (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
+            (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
+        ],
+        'digraph_operators': [
+            # Dot Product Operator
+            (r'\*\.', Operator),
+            # Array Concatenation Operator
+            (r'\+>', Operator),  # M2R10 + ObjM2
+            # Inequality Operator
+            (r'<>', Operator),  # ISO + PIM
+            # Less-Or-Equal, Subset
+            (r'<=', Operator),
+            # Greater-Or-Equal, Superset
+            (r'>=', Operator),
+            # Identity Operator
+            (r'==', Operator),  # M2R10 + ObjM2
+            # Type Conversion Operator
+            (r'::', Operator),  # M2R10 + ObjM2
+            # Assignment Symbol
+            (r':=', Operator),
+            # Postfix Increment Mutator
+            (r'\+\+', Operator),  # M2R10 + ObjM2
+            # Postfix Decrement Mutator
+            (r'--', Operator),  # M2R10 + ObjM2
+        ],
+        'unigraph_operators': [
+            # Arithmetic Operators
+            (r'[+-]', Operator),
+            (r'[*/]', Operator),
+            # ISO 80000-2 compliant Set Difference Operator
+            (r'\\', Operator),  # M2R10 + ObjM2
+            # Relational Operators
+            (r'[=#<>]', Operator),
+            # Dereferencing Operator
+            (r'\^', Operator),
+            # Dereferencing Operator Synonym
+            (r'@', Operator),  # ISO
+            # Logical AND Operator Synonym
+            (r'&', Operator),  # PIM + ISO
+            # Logical NOT Operator Synonym
+            (r'~', Operator),  # PIM + ISO
+            # Smalltalk Message Prefix
+            (r'`', Operator),  # ObjM2
+        ],
+        'digraph_punctuation': [
+            # Range Constructor
+            (r'\.\.', Punctuation),
+            # Opening Chevron Bracket
+            (r'<<', Punctuation),  # M2R10 + ISO
+            # Closing Chevron Bracket
+            (r'>>', Punctuation),  # M2R10 + ISO
+            # Blueprint Punctuation
+            (r'->', Punctuation),  # M2R10 + ISO
+            # Distinguish |# and # in M2 R10
+            (r'\|#', Punctuation),
+            # Distinguish ## and # in M2 R10
+            (r'##', Punctuation),
+            # Distinguish |* and * in M2 R10
+            (r'\|\*', Punctuation),
+        ],
+        'unigraph_punctuation': [
+            # Common Punctuation
+            (r'[()\[\]{},.:;|]', Punctuation),
+            # Case Label Separator Synonym
+            (r'!', Punctuation),  # ISO
+            # Blueprint Punctuation
+            (r'\?', Punctuation),  # M2R10 + ObjM2
+        ],
+        'comments': [
+            # Single Line Comment
+            (r'^//.*?\n', Comment.Single),  # M2R10 + ObjM2
+            # Block Comment
+            (r'\(\*([^$].*?)\*\)', Comment.Multiline),
+            # Template Block Comment
+            (r'/\*(.*?)\*/', Comment.Multiline),  # M2R10 + ObjM2
+        ],
+        'pragmas': [
+            # ISO Style Pragmas
+            (r'<\*.*?\*>', Comment.Preproc),  # ISO, M2R10 + ObjM2
+            # Pascal Style Pragmas
+            (r'\(\*\$.*?\*\)', Comment.Preproc),  # PIM
+        ],
+        'root': [
+            include('whitespace'),
+            include('dialecttags'),
+            include('pragmas'),
+            include('comments'),
+            include('identifiers'),
+            include('suffixed_number_literals'),  # PIM + ISO
+            include('prefixed_number_literals'),  # M2R10 + ObjM2
+            include('plain_number_literals'),
+            include('string_literals'),
+            include('digraph_punctuation'),
+            include('digraph_operators'),
+            include('unigraph_punctuation'),
+            include('unigraph_operators'),
+        ]
+    }
+
+#  C o m m o n   D a t a s e t s
+
+    # Common Reserved Words Dataset
+    common_reserved_words = (
+        # 37 common reserved words
+        'AND', 'ARRAY', 'BEGIN', 'BY', 'CASE', 'CONST', 'DEFINITION', 'DIV',
+        'DO', 'ELSE', 'ELSIF', 'END', 'EXIT', 'FOR', 'FROM', 'IF',
+        'IMPLEMENTATION', 'IMPORT', 'IN', 'LOOP', 'MOD', 'MODULE', 'NOT',
+        'OF', 'OR', 'POINTER', 'PROCEDURE', 'RECORD', 'REPEAT', 'RETURN',
+        'SET', 'THEN', 'TO', 'TYPE', 'UNTIL', 'VAR', 'WHILE',
+    )
+
+    # Common Builtins Dataset
+    common_builtins = (
+        # 16 common builtins
+        'ABS', 'BOOLEAN', 'CARDINAL', 'CHAR', 'CHR', 'FALSE', 'INTEGER',
+        'LONGINT', 'LONGREAL', 'MAX', 'MIN', 'NIL', 'ODD', 'ORD', 'REAL',
+        'TRUE',
+    )
+
+    # Common Pseudo-Module Builtins Dataset
+    common_pseudo_builtins = (
+        # 4 common pseudo builtins
+        'ADDRESS', 'BYTE', 'WORD', 'ADR'
+    )
+
+#  P I M   M o d u l a - 2   D a t a s e t s
+
+    # Lexemes to Mark as Error Tokens for PIM Modula-2
+    pim_lexemes_to_reject = (
+        '!', '`', '@', '$', '%', '?', '\\', '==', '++', '--', '::', '*.',
+        '+>', '->', '<<', '>>', '|#', '##',
+    )
+
+    # PIM Modula-2 Additional Reserved Words Dataset
+    pim_additional_reserved_words = (
+        # 3 additional reserved words
+        'EXPORT', 'QUALIFIED', 'WITH',
+    )
+
+    # PIM Modula-2 Additional Builtins Dataset
+    pim_additional_builtins = (
+        # 16 additional builtins
+        'BITSET', 'CAP', 'DEC', 'DISPOSE', 'EXCL', 'FLOAT', 'HALT', 'HIGH',
+        'INC', 'INCL', 'NEW', 'NIL', 'PROC', 'SIZE', 'TRUNC', 'VAL',
+    )
+
+    # PIM Modula-2 Additional Pseudo-Module Builtins Dataset
+    pim_additional_pseudo_builtins = (
+        # 5 additional pseudo builtins
+        'SYSTEM', 'PROCESS', 'TSIZE', 'NEWPROCESS', 'TRANSFER',
+    )
+
+#  I S O   M o d u l a - 2   D a t a s e t s
+
+    # Lexemes to Mark as Error Tokens for ISO Modula-2
+    iso_lexemes_to_reject = (
+        '`', '$', '%', '?', '\\', '==', '++', '--', '::', '*.', '+>', '->',
+        '<<', '>>', '|#', '##',
+    )
+
+    # ISO Modula-2 Additional Reserved Words Dataset
+    iso_additional_reserved_words = (
+        # 9 additional reserved words (ISO 10514-1)
+        'EXCEPT', 'EXPORT', 'FINALLY', 'FORWARD', 'PACKEDSET', 'QUALIFIED',
+        'REM', 'RETRY', 'WITH',
+        # 10 additional reserved words (ISO 10514-2 & ISO 10514-3)
+        'ABSTRACT', 'AS', 'CLASS', 'GUARD', 'INHERIT', 'OVERRIDE', 'READONLY',
+        'REVEAL', 'TRACED', 'UNSAFEGUARDED',
+    )
+
+    # ISO Modula-2 Additional Builtins Dataset
+    iso_additional_builtins = (
+        # 26 additional builtins (ISO 10514-1)
+        'BITSET', 'CAP', 'CMPLX', 'COMPLEX', 'DEC', 'DISPOSE', 'EXCL', 'FLOAT',
+        'HALT', 'HIGH', 'IM', 'INC', 'INCL', 'INT', 'INTERRUPTIBLE',  'LENGTH',
+        'LFLOAT', 'LONGCOMPLEX', 'NEW', 'PROC', 'PROTECTION', 'RE', 'SIZE',
+        'TRUNC', 'UNINTERRUBTIBLE', 'VAL',
+        # 5 additional builtins (ISO 10514-2 & ISO 10514-3)
+        'CREATE', 'DESTROY', 'EMPTY', 'ISMEMBER', 'SELF',
+    )
+
+    # ISO Modula-2 Additional Pseudo-Module Builtins Dataset
+    iso_additional_pseudo_builtins = (
+        # 14 additional builtins (SYSTEM)
+        'SYSTEM', 'BITSPERLOC', 'LOCSPERBYTE', 'LOCSPERWORD', 'LOC',
+        'ADDADR', 'SUBADR', 'DIFADR', 'MAKEADR', 'ADR',
+        'ROTATE', 'SHIFT', 'CAST', 'TSIZE',
+        # 13 additional builtins (COROUTINES)
+        'COROUTINES', 'ATTACH', 'COROUTINE', 'CURRENT', 'DETACH', 'HANDLER',
+        'INTERRUPTSOURCE', 'IOTRANSFER', 'IsATTACHED', 'LISTEN',
+        'NEWCOROUTINE', 'PROT', 'TRANSFER',
+        # 9 additional builtins (EXCEPTIONS)
+        'EXCEPTIONS', 'AllocateSource', 'CurrentNumber', 'ExceptionNumber',
+        'ExceptionSource', 'GetMessage', 'IsCurrentSource',
+        'IsExceptionalExecution', 'RAISE',
+        # 3 additional builtins (TERMINATION)
+        'TERMINATION', 'IsTerminating', 'HasHalted',
+        # 4 additional builtins (M2EXCEPTION)
+        'M2EXCEPTION', 'M2Exceptions', 'M2Exception', 'IsM2Exception',
+        'indexException', 'rangeException', 'caseSelectException',
+        'invalidLocation', 'functionException', 'wholeValueException',
+        'wholeDivException', 'realValueException', 'realDivException',
+        'complexValueException', 'complexDivException', 'protException',
+        'sysException', 'coException', 'exException',
+    )
+
+#  M o d u l a - 2   R 1 0   D a t a s e t s
+
+    # Lexemes to Mark as Error Tokens for Modula-2 R10
+    m2r10_lexemes_to_reject = (
+        '!', '`', '@', '$', '%', '&', '<>',
+    )
+
+    # Modula-2 R10 reserved words in addition to the common set
+    m2r10_additional_reserved_words = (
+        # 12 additional reserved words
+        'ALIAS', 'ARGLIST', 'BLUEPRINT', 'COPY', 'GENLIB', 'INDETERMINATE',
+        'NEW', 'NONE', 'OPAQUE', 'REFERENTIAL', 'RELEASE', 'RETAIN',
+        # 2 additional reserved words with symbolic assembly option
+        'ASM', 'REG',
+    )
+
+    # Modula-2 R10 builtins in addition to the common set
+    m2r10_additional_builtins = (
+        # 26 additional builtins
+        'CARDINAL', 'COUNT', 'EMPTY', 'EXISTS', 'INSERT', 'LENGTH', 'LONGCARD',
+        'OCTET', 'PTR', 'PRED', 'READ', 'READNEW', 'REMOVE', 'RETRIEVE', 'SORT',
+        'STORE', 'SUBSET', 'SUCC', 'TLIMIT', 'TMAX', 'TMIN', 'TRUE', 'TSIZE',
+        'UNICHAR', 'WRITE', 'WRITEF',
+    )
+
+    # Modula-2 R10 Additional Pseudo-Module Builtins Dataset
+    m2r10_additional_pseudo_builtins = (
+        # 13 additional builtins (TPROPERTIES)
+        'TPROPERTIES', 'PROPERTY', 'LITERAL', 'TPROPERTY', 'TLITERAL',
+        'TBUILTIN', 'TDYN', 'TREFC', 'TNIL', 'TBASE', 'TPRECISION',
+        'TMAXEXP', 'TMINEXP',
+        # 4 additional builtins (CONVERSION)
+        'CONVERSION', 'TSXFSIZE', 'SXF', 'VAL',
+        # 35 additional builtins (UNSAFE)
+        'UNSAFE', 'CAST', 'INTRINSIC', 'AVAIL', 'ADD', 'SUB', 'ADDC', 'SUBC',
+        'FETCHADD', 'FETCHSUB', 'SHL', 'SHR', 'ASHR', 'ROTL', 'ROTR', 'ROTLC',
+        'ROTRC', 'BWNOT', 'BWAND', 'BWOR', 'BWXOR', 'BWNAND', 'BWNOR',
+        'SETBIT', 'TESTBIT', 'LSBIT', 'MSBIT', 'CSBITS', 'BAIL', 'HALT',
+        'TODO', 'FFI', 'ADDR', 'VARGLIST', 'VARGC',
+        # 11 additional builtins (ATOMIC)
+        'ATOMIC', 'INTRINSIC', 'AVAIL', 'SWAP', 'CAS', 'INC', 'DEC', 'BWAND',
+        'BWNAND', 'BWOR', 'BWXOR',
+        # 7 additional builtins (COMPILER)
+        'COMPILER', 'DEBUG', 'MODNAME', 'PROCNAME', 'LINENUM', 'DEFAULT',
+        'HASH',
+        # 5 additional builtins (ASSEMBLER)
+        'ASSEMBLER', 'REGISTER', 'SETREG', 'GETREG', 'CODE',
+    )
+
+#  O b j e c t i v e   M o d u l a - 2   D a t a s e t s
+
+    # Lexemes to Mark as Error Tokens for Objective Modula-2
+    objm2_lexemes_to_reject = (
+        '!', '$', '%', '&', '<>',
+    )
+
+    # Objective Modula-2 Extensions
+    # reserved words in addition to Modula-2 R10
+    objm2_additional_reserved_words = (
+        # 16 additional reserved words
+        'BYCOPY', 'BYREF', 'CLASS', 'CONTINUE', 'CRITICAL', 'INOUT', 'METHOD',
+        'ON', 'OPTIONAL', 'OUT', 'PRIVATE', 'PROTECTED', 'PROTOCOL', 'PUBLIC',
+        'SUPER', 'TRY',
+    )
+
+    # Objective Modula-2 Extensions
+    # builtins in addition to Modula-2 R10
+    objm2_additional_builtins = (
+        # 3 additional builtins
+        'OBJECT', 'NO', 'YES',
+    )
+
+    # Objective Modula-2 Extensions
+    # pseudo-module builtins in addition to Modula-2 R10
+    objm2_additional_pseudo_builtins = (
+        # None
+    )
+
+#  A g l e t   M o d u l a - 2   D a t a s e t s
+
+    # Aglet Extensions
+    # reserved words in addition to ISO Modula-2
+    aglet_additional_reserved_words = (
+        # None
+    )
+
+    # Aglet Extensions
+    # builtins in addition to ISO Modula-2
+    aglet_additional_builtins = (
+        # 9 additional builtins
+        'BITSET8', 'BITSET16', 'BITSET32', 'CARDINAL8', 'CARDINAL16',
+        'CARDINAL32', 'INTEGER8', 'INTEGER16', 'INTEGER32',
+    )
+
+    # Aglet Modula-2 Extensions
+    # pseudo-module builtins in addition to ISO Modula-2
+    aglet_additional_pseudo_builtins = (
+        # None
+    )
+
+#  G N U   M o d u l a - 2   D a t a s e t s
+
+    # GNU Extensions
+    # reserved words in addition to PIM Modula-2
+    gm2_additional_reserved_words = (
+        # 10 additional reserved words
+        'ASM', '__ATTRIBUTE__', '__BUILTIN__', '__COLUMN__', '__DATE__',
+        '__FILE__', '__FUNCTION__', '__LINE__', '__MODULE__', 'VOLATILE',
+    )
+
+    # GNU Extensions
+    # builtins in addition to PIM Modula-2
+    gm2_additional_builtins = (
+        # 21 additional builtins
+        'BITSET8', 'BITSET16', 'BITSET32', 'CARDINAL8', 'CARDINAL16',
+        'CARDINAL32', 'CARDINAL64', 'COMPLEX32', 'COMPLEX64', 'COMPLEX96',
+        'COMPLEX128', 'INTEGER8', 'INTEGER16', 'INTEGER32', 'INTEGER64',
+        'REAL8', 'REAL16', 'REAL32', 'REAL96', 'REAL128', 'THROW',
+    )
+
+    # GNU Extensions
+    # pseudo-module builtins in addition to PIM Modula-2
+    gm2_additional_pseudo_builtins = (
+        # None
+    )
+
+#  p 1   M o d u l a - 2   D a t a s e t s
+
+    # p1 Extensions
+    # reserved words in addition to ISO Modula-2
+    p1_additional_reserved_words = (
+        # None
+    )
+
+    # p1 Extensions
+    # builtins in addition to ISO Modula-2
+    p1_additional_builtins = (
+        # None
+    )
+
+    # p1 Modula-2 Extensions
+    # pseudo-module builtins in addition to ISO Modula-2
+    p1_additional_pseudo_builtins = (
+        # 1 additional builtin
+        'BCD',
+    )
+
+#  X D S   M o d u l a - 2   D a t a s e t s
+
+    # XDS Extensions
+    # reserved words in addition to ISO Modula-2
+    xds_additional_reserved_words = (
+        # 1 additional reserved word
+        'SEQ',
+    )
+
+    # XDS Extensions
+    # builtins in addition to ISO Modula-2
+    xds_additional_builtins = (
+        # 9 additional builtins
+        'ASH', 'ASSERT', 'DIFFADR_TYPE', 'ENTIER', 'INDEX', 'LEN',
+        'LONGCARD', 'SHORTCARD', 'SHORTINT',
+    )
+
+    # XDS Modula-2 Extensions
+    # pseudo-module builtins in addition to ISO Modula-2
+    xds_additional_pseudo_builtins = (
+        # 22 additional builtins (SYSTEM)
+        'PROCESS', 'NEWPROCESS', 'BOOL8', 'BOOL16', 'BOOL32', 'CARD8',
+        'CARD16', 'CARD32', 'INT8', 'INT16', 'INT32', 'REF', 'MOVE',
+        'FILL', 'GET', 'PUT', 'CC', 'int', 'unsigned', 'size_t', 'void'
+        # 3 additional builtins (COMPILER)
+        'COMPILER', 'OPTION', 'EQUATION'
+    )
+
+#  P I M   S t a n d a r d   L i b r a r y   D a t a s e t s
+
+    # PIM Modula-2 Standard Library Modules Dataset
+    pim_stdlib_module_identifiers = (
+        'Terminal', 'FileSystem', 'InOut', 'RealInOut', 'MathLib0', 'Storage',
+    )
+
+    # PIM Modula-2 Standard Library Types Dataset
+    pim_stdlib_type_identifiers = (
+        'Flag', 'FlagSet', 'Response', 'Command', 'Lock', 'Permission',
+        'MediumType', 'File', 'FileProc', 'DirectoryProc', 'FileCommand',
+        'DirectoryCommand',
+    )
+
+    # PIM Modula-2 Standard Library Procedures Dataset
+    pim_stdlib_proc_identifiers = (
+        'Read', 'BusyRead', 'ReadAgain', 'Write', 'WriteString', 'WriteLn',
+        'Create', 'Lookup', 'Close', 'Delete', 'Rename', 'SetRead', 'SetWrite',
+        'SetModify', 'SetOpen', 'Doio', 'SetPos', 'GetPos', 'Length', 'Reset',
+        'Again', 'ReadWord', 'WriteWord', 'ReadChar', 'WriteChar',
+        'CreateMedium', 'DeleteMedium', 'AssignName', 'DeassignName',
+        'ReadMedium', 'LookupMedium', 'OpenInput', 'OpenOutput', 'CloseInput',
+        'CloseOutput', 'ReadString', 'ReadInt', 'ReadCard', 'ReadWrd',
+        'WriteInt', 'WriteCard', 'WriteOct', 'WriteHex', 'WriteWrd',
+        'ReadReal', 'WriteReal', 'WriteFixPt', 'WriteRealOct', 'sqrt', 'exp',
+        'ln', 'sin', 'cos', 'arctan', 'entier', 'ALLOCATE', 'DEALLOCATE',
+    )
+
+    # PIM Modula-2 Standard Library Variables Dataset
+    pim_stdlib_var_identifiers = (
+        'Done', 'termCH', 'in', 'out'
+    )
+
+    # PIM Modula-2 Standard Library Constants Dataset
+    pim_stdlib_const_identifiers = (
+        'EOL',
+    )
+
+#  I S O   S t a n d a r d   L i b r a r y   D a t a s e t s
+
+    # ISO Modula-2 Standard Library Modules Dataset
+    iso_stdlib_module_identifiers = (
+        # TO DO
+    )
+
+    # ISO Modula-2 Standard Library Types Dataset
+    iso_stdlib_type_identifiers = (
+        # TO DO
+    )
+
+    # ISO Modula-2 Standard Library Procedures Dataset
+    iso_stdlib_proc_identifiers = (
+        # TO DO
+    )
+
+    # ISO Modula-2 Standard Library Variables Dataset
+    iso_stdlib_var_identifiers = (
+        # TO DO
+    )
+
+    # ISO Modula-2 Standard Library Constants Dataset
+    iso_stdlib_const_identifiers = (
+        # TO DO
+    )
+
+#  M 2   R 1 0   S t a n d a r d   L i b r a r y   D a t a s e t s
+
+    # Modula-2 R10 Standard Library ADTs Dataset
+    m2r10_stdlib_adt_identifiers = (
+        'BCD', 'LONGBCD', 'BITSET', 'SHORTBITSET', 'LONGBITSET',
+        'LONGLONGBITSET', 'COMPLEX', 'LONGCOMPLEX', 'SHORTCARD', 'LONGLONGCARD',
+        'SHORTINT', 'LONGLONGINT', 'POSINT', 'SHORTPOSINT', 'LONGPOSINT',
+        'LONGLONGPOSINT', 'BITSET8', 'BITSET16', 'BITSET32', 'BITSET64',
+        'BITSET128', 'BS8', 'BS16', 'BS32', 'BS64', 'BS128', 'CARDINAL8',
+        'CARDINAL16', 'CARDINAL32', 'CARDINAL64', 'CARDINAL128', 'CARD8',
+        'CARD16', 'CARD32', 'CARD64', 'CARD128', 'INTEGER8', 'INTEGER16',
+        'INTEGER32', 'INTEGER64', 'INTEGER128', 'INT8', 'INT16', 'INT32',
+        'INT64', 'INT128', 'STRING', 'UNISTRING',
+    )
+
+    # Modula-2 R10 Standard Library Blueprints Dataset
+    m2r10_stdlib_blueprint_identifiers = (
+        'ProtoRoot', 'ProtoComputational', 'ProtoNumeric', 'ProtoScalar',
+        'ProtoNonScalar', 'ProtoCardinal', 'ProtoInteger', 'ProtoReal',
+        'ProtoComplex', 'ProtoVector', 'ProtoTuple', 'ProtoCompArray',
+        'ProtoCollection', 'ProtoStaticArray', 'ProtoStaticSet',
+        'ProtoStaticString', 'ProtoArray', 'ProtoString', 'ProtoSet',
+        'ProtoMultiSet', 'ProtoDictionary', 'ProtoMultiDict', 'ProtoExtension',
+        'ProtoIO', 'ProtoCardMath', 'ProtoIntMath', 'ProtoRealMath',
+    )
+
+    # Modula-2 R10 Standard Library Modules Dataset
+    m2r10_stdlib_module_identifiers = (
+        'ASCII', 'BooleanIO', 'CharIO', 'UnicharIO', 'OctetIO',
+        'CardinalIO', 'LongCardIO', 'IntegerIO', 'LongIntIO', 'RealIO',
+        'LongRealIO', 'BCDIO', 'LongBCDIO', 'CardMath', 'LongCardMath',
+        'IntMath', 'LongIntMath', 'RealMath', 'LongRealMath', 'BCDMath',
+        'LongBCDMath', 'FileIO', 'FileSystem', 'Storage', 'IOSupport',
+    )
+
+    # Modula-2 R10 Standard Library Types Dataset
+    m2r10_stdlib_type_identifiers = (
+        'File', 'Status',
+        # TO BE COMPLETED
+    )
+
+    # Modula-2 R10 Standard Library Procedures Dataset
+    m2r10_stdlib_proc_identifiers = (
+        'ALLOCATE', 'DEALLOCATE', 'SIZE',
+        # TO BE COMPLETED
+    )
+
+    # Modula-2 R10 Standard Library Variables Dataset
+    m2r10_stdlib_var_identifiers = (
+        'stdIn', 'stdOut', 'stdErr',
+    )
+
+    # Modula-2 R10 Standard Library Constants Dataset
+    m2r10_stdlib_const_identifiers = (
+        'pi', 'tau',
+    )
+
+#  D i a l e c t s
+
+    # Dialect modes
+    dialects = (
+        'unknown',
+        'm2pim', 'm2iso', 'm2r10', 'objm2',
+        'm2iso+aglet', 'm2pim+gm2', 'm2iso+p1', 'm2iso+xds',
+    )
+
+#   D a t a b a s e s
+
+    # Lexemes to Mark as Errors Database
+    lexemes_to_reject_db = {
+        # Lexemes to reject for unknown dialect
+        'unknown': (
+            # LEAVE THIS EMPTY
+        ),
+        # Lexemes to reject for PIM Modula-2
+        'm2pim': (
+            pim_lexemes_to_reject,
+        ),
+        # Lexemes to reject for ISO Modula-2
+        'm2iso': (
+            iso_lexemes_to_reject,
+        ),
+        # Lexemes to reject for Modula-2 R10
+        'm2r10': (
+            m2r10_lexemes_to_reject,
+        ),
+        # Lexemes to reject for Objective Modula-2
+        'objm2': (
+            objm2_lexemes_to_reject,
+        ),
+        # Lexemes to reject for Aglet Modula-2
+        'm2iso+aglet': (
+            iso_lexemes_to_reject,
+        ),
+        # Lexemes to reject for GNU Modula-2
+        'm2pim+gm2': (
+            pim_lexemes_to_reject,
+        ),
+        # Lexemes to reject for p1 Modula-2
+        'm2iso+p1': (
+            iso_lexemes_to_reject,
+        ),
+        # Lexemes to reject for XDS Modula-2
+        'm2iso+xds': (
+            iso_lexemes_to_reject,
+        ),
+    }
+
+    # Reserved Words Database
+    reserved_words_db = {
+        # Reserved words for unknown dialect
+        'unknown': (
+            common_reserved_words,
+            pim_additional_reserved_words,
+            iso_additional_reserved_words,
+            m2r10_additional_reserved_words,
+        ),
+
+        # Reserved words for PIM Modula-2
+        'm2pim': (
+            common_reserved_words,
+            pim_additional_reserved_words,
+        ),
+
+        # Reserved words for Modula-2 R10
+        'm2iso': (
+            common_reserved_words,
+            iso_additional_reserved_words,
+        ),
+
+        # Reserved words for ISO Modula-2
+        'm2r10': (
+            common_reserved_words,
+            m2r10_additional_reserved_words,
+        ),
+
+        # Reserved words for Objective Modula-2
+        'objm2': (
+            common_reserved_words,
+            m2r10_additional_reserved_words,
+            objm2_additional_reserved_words,
+        ),
+
+        # Reserved words for Aglet Modula-2 Extensions
+        'm2iso+aglet': (
+            common_reserved_words,
+            iso_additional_reserved_words,
+            aglet_additional_reserved_words,
+        ),
+
+        # Reserved words for GNU Modula-2 Extensions
+        'm2pim+gm2': (
+            common_reserved_words,
+            pim_additional_reserved_words,
+            gm2_additional_reserved_words,
+        ),
+
+        # Reserved words for p1 Modula-2 Extensions
+        'm2iso+p1': (
+            common_reserved_words,
+            iso_additional_reserved_words,
+            p1_additional_reserved_words,
+        ),
+
+        # Reserved words for XDS Modula-2 Extensions
+        'm2iso+xds': (
+            common_reserved_words,
+            iso_additional_reserved_words,
+            xds_additional_reserved_words,
+        ),
+    }
+
+    # Builtins Database
+    builtins_db = {
+        # Builtins for unknown dialect
+        'unknown': (
+            common_builtins,
+            pim_additional_builtins,
+            iso_additional_builtins,
+            m2r10_additional_builtins,
+        ),
+
+        # Builtins for PIM Modula-2
+        'm2pim': (
+            common_builtins,
+            pim_additional_builtins,
+        ),
+
+        # Builtins for ISO Modula-2
+        'm2iso': (
+            common_builtins,
+            iso_additional_builtins,
+        ),
+
+        # Builtins for ISO Modula-2
+        'm2r10': (
+            common_builtins,
+            m2r10_additional_builtins,
+        ),
+
+        # Builtins for Objective Modula-2
+        'objm2': (
+            common_builtins,
+            m2r10_additional_builtins,
+            objm2_additional_builtins,
+        ),
+
+        # Builtins for Aglet Modula-2 Extensions
+        'm2iso+aglet': (
+            common_builtins,
+            iso_additional_builtins,
+            aglet_additional_builtins,
+        ),
+
+        # Builtins for GNU Modula-2 Extensions
+        'm2pim+gm2': (
+            common_builtins,
+            pim_additional_builtins,
+            gm2_additional_builtins,
+        ),
+
+        # Builtins for p1 Modula-2 Extensions
+        'm2iso+p1': (
+            common_builtins,
+            iso_additional_builtins,
+            p1_additional_builtins,
+        ),
+
+        # Builtins for XDS Modula-2 Extensions
+        'm2iso+xds': (
+            common_builtins,
+            iso_additional_builtins,
+            xds_additional_builtins,
+        ),
+    }
+
+    # Pseudo-Module Builtins Database
+    pseudo_builtins_db = {
+        # Builtins for unknown dialect
+        'unknown': (
+            common_pseudo_builtins,
+            pim_additional_pseudo_builtins,
+            iso_additional_pseudo_builtins,
+            m2r10_additional_pseudo_builtins,
+        ),
+
+        # Builtins for PIM Modula-2
+        'm2pim': (
+            common_pseudo_builtins,
+            pim_additional_pseudo_builtins,
+        ),
+
+        # Builtins for ISO Modula-2
+        'm2iso': (
+            common_pseudo_builtins,
+            iso_additional_pseudo_builtins,
+        ),
+
+        # Builtins for ISO Modula-2
+        'm2r10': (
+            common_pseudo_builtins,
+            m2r10_additional_pseudo_builtins,
+        ),
+
+        # Builtins for Objective Modula-2
+        'objm2': (
+            common_pseudo_builtins,
+            m2r10_additional_pseudo_builtins,
+            objm2_additional_pseudo_builtins,
+        ),
+
+        # Builtins for Aglet Modula-2 Extensions
+        'm2iso+aglet': (
+            common_pseudo_builtins,
+            iso_additional_pseudo_builtins,
+            aglet_additional_pseudo_builtins,
+        ),
+
+        # Builtins for GNU Modula-2 Extensions
+        'm2pim+gm2': (
+            common_pseudo_builtins,
+            pim_additional_pseudo_builtins,
+            gm2_additional_pseudo_builtins,
+        ),
+
+        # Builtins for p1 Modula-2 Extensions
+        'm2iso+p1': (
+            common_pseudo_builtins,
+            iso_additional_pseudo_builtins,
+            p1_additional_pseudo_builtins,
+        ),
+
+        # Builtins for XDS Modula-2 Extensions
+        'm2iso+xds': (
+            common_pseudo_builtins,
+            iso_additional_pseudo_builtins,
+            xds_additional_pseudo_builtins,
+        ),
+    }
+
+    # Standard Library ADTs Database
+    stdlib_adts_db = {
+        # Empty entry for unknown dialect
+        'unknown': (
+            # LEAVE THIS EMPTY
+        ),
+        # Standard Library ADTs for PIM Modula-2
+        'm2pim': (
+            # No first class library types
+        ),
+
+        # Standard Library ADTs for ISO Modula-2
+        'm2iso': (
+            # No first class library types
+        ),
+
+        # Standard Library ADTs for Modula-2 R10
+        'm2r10': (
+            m2r10_stdlib_adt_identifiers,
+        ),
+
+        # Standard Library ADTs for Objective Modula-2
+        'objm2': (
+            m2r10_stdlib_adt_identifiers,
+        ),
+
+        # Standard Library ADTs for Aglet Modula-2
+        'm2iso+aglet': (
+            # No first class library types
+        ),
+
+        # Standard Library ADTs for GNU Modula-2
+        'm2pim+gm2': (
+            # No first class library types
+        ),
+
+        # Standard Library ADTs for p1 Modula-2
+        'm2iso+p1': (
+            # No first class library types
+        ),
+
+        # Standard Library ADTs for XDS Modula-2
+        'm2iso+xds': (
+            # No first class library types
+        ),
+    }
+
+    # Standard Library Modules Database
+    stdlib_modules_db = {
+        # Empty entry for unknown dialect
+        'unknown': (
+            # LEAVE THIS EMPTY
+        ),
+        # Standard Library Modules for PIM Modula-2
+        'm2pim': (
+            pim_stdlib_module_identifiers,
+        ),
+
+        # Standard Library Modules for ISO Modula-2
+        'm2iso': (
+            iso_stdlib_module_identifiers,
+        ),
+
+        # Standard Library Modules for Modula-2 R10
+        'm2r10': (
+            m2r10_stdlib_blueprint_identifiers,
+            m2r10_stdlib_module_identifiers,
+            m2r10_stdlib_adt_identifiers,
+        ),
+
+        # Standard Library Modules for Objective Modula-2
+        'objm2': (
+            m2r10_stdlib_blueprint_identifiers,
+            m2r10_stdlib_module_identifiers,
+        ),
+
+        # Standard Library Modules for Aglet Modula-2
+        'm2iso+aglet': (
+            iso_stdlib_module_identifiers,
+        ),
+
+        # Standard Library Modules for GNU Modula-2
+        'm2pim+gm2': (
+            pim_stdlib_module_identifiers,
+        ),
+
+        # Standard Library Modules for p1 Modula-2
+        'm2iso+p1': (
+            iso_stdlib_module_identifiers,
+        ),
+
+        # Standard Library Modules for XDS Modula-2
+        'm2iso+xds': (
+            iso_stdlib_module_identifiers,
+        ),
+    }
+
+    # Standard Library Types Database
+    stdlib_types_db = {
+        # Empty entry for unknown dialect
+        'unknown': (
+            # LEAVE THIS EMPTY
+        ),
+        # Standard Library Types for PIM Modula-2
+        'm2pim': (
+            pim_stdlib_type_identifiers,
+        ),
+
+        # Standard Library Types for ISO Modula-2
+        'm2iso': (
+            iso_stdlib_type_identifiers,
+        ),
+
+        # Standard Library Types for Modula-2 R10
+        'm2r10': (
+            m2r10_stdlib_type_identifiers,
+        ),
+
+        # Standard Library Types for Objective Modula-2
+        'objm2': (
+            m2r10_stdlib_type_identifiers,
+        ),
+
+        # Standard Library Types for Aglet Modula-2
+        'm2iso+aglet': (
+            iso_stdlib_type_identifiers,
+        ),
+
+        # Standard Library Types for GNU Modula-2
+        'm2pim+gm2': (
+            pim_stdlib_type_identifiers,
+        ),
+
+        # Standard Library Types for p1 Modula-2
+        'm2iso+p1': (
+            iso_stdlib_type_identifiers,
+        ),
+
+        # Standard Library Types for XDS Modula-2
+        'm2iso+xds': (
+            iso_stdlib_type_identifiers,
+        ),
+    }
+
+    # Standard Library Procedures Database
+    stdlib_procedures_db = {
+        # Empty entry for unknown dialect
+        'unknown': (
+            # LEAVE THIS EMPTY
+        ),
+        # Standard Library Procedures for PIM Modula-2
+        'm2pim': (
+            pim_stdlib_proc_identifiers,
+        ),
+
+        # Standard Library Procedures for ISO Modula-2
+        'm2iso': (
+            iso_stdlib_proc_identifiers,
+        ),
+
+        # Standard Library Procedures for Modula-2 R10
+        'm2r10': (
+            m2r10_stdlib_proc_identifiers,
+        ),
+
+        # Standard Library Procedures for Objective Modula-2
+        'objm2': (
+            m2r10_stdlib_proc_identifiers,
+        ),
+
+        # Standard Library Procedures for Aglet Modula-2
+        'm2iso+aglet': (
+            iso_stdlib_proc_identifiers,
+        ),
+
+        # Standard Library Procedures for GNU Modula-2
+        'm2pim+gm2': (
+            pim_stdlib_proc_identifiers,
+        ),
+
+        # Standard Library Procedures for p1 Modula-2
+        'm2iso+p1': (
+            iso_stdlib_proc_identifiers,
+        ),
+
+        # Standard Library Procedures for XDS Modula-2
+        'm2iso+xds': (
+            iso_stdlib_proc_identifiers,
+        ),
+    }
+
+    # Standard Library Variables Database
+    stdlib_variables_db = {
+        # Empty entry for unknown dialect
+        'unknown': (
+            # LEAVE THIS EMPTY
+        ),
+        # Standard Library Variables for PIM Modula-2
+        'm2pim': (
+            pim_stdlib_var_identifiers,
+        ),
+
+        # Standard Library Variables for ISO Modula-2
+        'm2iso': (
+            iso_stdlib_var_identifiers,
+        ),
+
+        # Standard Library Variables for Modula-2 R10
+        'm2r10': (
+            m2r10_stdlib_var_identifiers,
+        ),
+
+        # Standard Library Variables for Objective Modula-2
+        'objm2': (
+            m2r10_stdlib_var_identifiers,
+        ),
+
+        # Standard Library Variables for Aglet Modula-2
+        'm2iso+aglet': (
+            iso_stdlib_var_identifiers,
+        ),
+
+        # Standard Library Variables for GNU Modula-2
+        'm2pim+gm2': (
+            pim_stdlib_var_identifiers,
+        ),
+
+        # Standard Library Variables for p1 Modula-2
+        'm2iso+p1': (
+            iso_stdlib_var_identifiers,
+        ),
+
+        # Standard Library Variables for XDS Modula-2
+        'm2iso+xds': (
+            iso_stdlib_var_identifiers,
+        ),
+    }
+
+    # Standard Library Constants Database
+    stdlib_constants_db = {
+        # Empty entry for unknown dialect
+        'unknown': (
+            # LEAVE THIS EMPTY
+        ),
+        # Standard Library Constants for PIM Modula-2
+        'm2pim': (
+            pim_stdlib_const_identifiers,
+        ),
+
+        # Standard Library Constants for ISO Modula-2
+        'm2iso': (
+            iso_stdlib_const_identifiers,
+        ),
+
+        # Standard Library Constants for Modula-2 R10
+        'm2r10': (
+            m2r10_stdlib_const_identifiers,
+        ),
+
+        # Standard Library Constants for Objective Modula-2
+        'objm2': (
+            m2r10_stdlib_const_identifiers,
+        ),
+
+        # Standard Library Constants for Aglet Modula-2
+        'm2iso+aglet': (
+            iso_stdlib_const_identifiers,
+        ),
+
+        # Standard Library Constants for GNU Modula-2
+        'm2pim+gm2': (
+            pim_stdlib_const_identifiers,
+        ),
+
+        # Standard Library Constants for p1 Modula-2
+        'm2iso+p1': (
+            iso_stdlib_const_identifiers,
+        ),
+
+        # Standard Library Constants for XDS Modula-2
+        'm2iso+xds': (
+            iso_stdlib_const_identifiers,
+        ),
+    }
+
+#   M e t h o d s
+
+    # initialise a lexer instance
+    def __init__(self, **options):
+        #
+        # check dialect options
+        #
+        dialects = get_list_opt(options, 'dialect', [])
+        #
+        for dialect_option in dialects:
+            if dialect_option in self.dialects[1:-1]:
+                # valid dialect option found
+                self.set_dialect(dialect_option)
+                break
+        #
+        # Fallback Mode (DEFAULT)
+        else:
+            # no valid dialect option
+            self.set_dialect('unknown')
+        #
+        self.dialect_set_by_tag = False
+        #
+        # check style options
+        #
+        styles = get_list_opt(options, 'style', [])
+        #
+        # use lowercase mode for Algol style
+        if 'algol' in styles or 'algol_nu' in styles:
+            self.algol_publication_mode = True
+        else:
+            self.algol_publication_mode = False
+        #
+        # Check option flags
+        #
+        self.treat_stdlib_adts_as_builtins = get_bool_opt(
+            options, 'treat_stdlib_adts_as_builtins', True)
+        #
+        # call superclass initialiser
+        RegexLexer.__init__(self, **options)
+
+    # Set lexer to a specified dialect
+    def set_dialect(self, dialect_id):
+        #
+        # if __debug__:
+        #    print 'entered set_dialect with arg: ', dialect_id
+        #
+        # check dialect name against known dialects
+        if dialect_id not in self.dialects:
+            dialect = 'unknown'  # default
+        else:
+            dialect = dialect_id
+        #
+        # compose lexemes to reject set
+        lexemes_to_reject_set = set()
+        # add each list of reject lexemes for this dialect
+        for list in self.lexemes_to_reject_db[dialect]:
+            lexemes_to_reject_set.update(set(list))
+        #
+        # compose reserved words set
+        reswords_set = set()
+        # add each list of reserved words for this dialect
+        for list in self.reserved_words_db[dialect]:
+            reswords_set.update(set(list))
+        #
+        # compose builtins set
+        builtins_set = set()
+        # add each list of builtins for this dialect excluding reserved words
+        for list in self.builtins_db[dialect]:
+            builtins_set.update(set(list).difference(reswords_set))
+        #
+        # compose pseudo-builtins set
+        pseudo_builtins_set = set()
+        # add each list of builtins for this dialect excluding reserved words
+        for list in self.pseudo_builtins_db[dialect]:
+            pseudo_builtins_set.update(set(list).difference(reswords_set))
+        #
+        # compose ADTs set
+        adts_set = set()
+        # add each list of ADTs for this dialect excluding reserved words
+        for list in self.stdlib_adts_db[dialect]:
+            adts_set.update(set(list).difference(reswords_set))
+        #
+        # compose modules set
+        modules_set = set()
+        # add each list of builtins for this dialect excluding builtins
+        for list in self.stdlib_modules_db[dialect]:
+            modules_set.update(set(list).difference(builtins_set))
+        #
+        # compose types set
+        types_set = set()
+        # add each list of types for this dialect excluding builtins
+        for list in self.stdlib_types_db[dialect]:
+            types_set.update(set(list).difference(builtins_set))
+        #
+        # compose procedures set
+        procedures_set = set()
+        # add each list of procedures for this dialect excluding builtins
+        for list in self.stdlib_procedures_db[dialect]:
+            procedures_set.update(set(list).difference(builtins_set))
+        #
+        # compose variables set
+        variables_set = set()
+        # add each list of variables for this dialect excluding builtins
+        for list in self.stdlib_variables_db[dialect]:
+            variables_set.update(set(list).difference(builtins_set))
+        #
+        # compose constants set
+        constants_set = set()
+        # add each list of constants for this dialect excluding builtins
+        for list in self.stdlib_constants_db[dialect]:
+            constants_set.update(set(list).difference(builtins_set))
+        #
+        # update lexer state
+        self.dialect = dialect
+        self.lexemes_to_reject = lexemes_to_reject_set
+        self.reserved_words = reswords_set
+        self.builtins = builtins_set
+        self.pseudo_builtins = pseudo_builtins_set
+        self.adts = adts_set
+        self.modules = modules_set
+        self.types = types_set
+        self.procedures = procedures_set
+        self.variables = variables_set
+        self.constants = constants_set
+        #
+        # if __debug__:
+        #    print 'exiting set_dialect'
+        #    print ' self.dialect: ', self.dialect
+        #    print ' self.lexemes_to_reject: ', self.lexemes_to_reject
+        #    print ' self.reserved_words: ', self.reserved_words
+        #    print ' self.builtins: ', self.builtins
+        #    print ' self.pseudo_builtins: ', self.pseudo_builtins
+        #    print ' self.adts: ', self.adts
+        #    print ' self.modules: ', self.modules
+        #    print ' self.types: ', self.types
+        #    print ' self.procedures: ', self.procedures
+        #    print ' self.variables: ', self.variables
+        #    print ' self.types: ', self.types
+        #    print ' self.constants: ', self.constants
+
+    # Extracts a dialect name from a dialect tag comment string  and checks
+    # the extracted name against known dialects.  If a match is found,  the
+    # matching name is returned, otherwise dialect id 'unknown' is returned
+    def get_dialect_from_dialect_tag(self, dialect_tag):
+        #
+        # if __debug__:
+        #    print 'entered get_dialect_from_dialect_tag with arg: ', dialect_tag
+        #
+        # constants
+        left_tag_delim = '(*!'
+        right_tag_delim = '*)'
+        left_tag_delim_len = len(left_tag_delim)
+        right_tag_delim_len = len(right_tag_delim)
+        indicator_start = left_tag_delim_len
+        indicator_end = -(right_tag_delim_len)
+        #
+        # check comment string for dialect indicator
+        if len(dialect_tag) > (left_tag_delim_len + right_tag_delim_len) \
+           and dialect_tag.startswith(left_tag_delim) \
+           and dialect_tag.endswith(right_tag_delim):
+            #
+            # if __debug__:
+            #    print 'dialect tag found'
+            #
+            # extract dialect indicator
+            indicator = dialect_tag[indicator_start:indicator_end]
+            #
+            # if __debug__:
+            #    print 'extracted: ', indicator
+            #
+            # check against known dialects
+            for index in range(1, len(self.dialects)):
+                #
+                # if __debug__:
+                #    print 'dialects[', index, ']: ', self.dialects[index]
+                #
+                if indicator == self.dialects[index]:
+                    #
+                    # if __debug__:
+                    #    print 'matching dialect found'
+                    #
+                    # indicator matches known dialect
+                    return indicator
+            else:
+                # indicator does not match any dialect
+                return 'unknown'  # default
+        else:
+            # invalid indicator string
+            return 'unknown'  # default
+
+    # intercept the token stream, modify token attributes and return them
+    def get_tokens_unprocessed(self, text):
+        for index, token, value in RegexLexer.get_tokens_unprocessed(self, text):
+            #
+            # check for dialect tag if dialect has not been set by tag
+            if not self.dialect_set_by_tag and token == Comment.Special:
+                indicated_dialect = self.get_dialect_from_dialect_tag(value)
+                if indicated_dialect != 'unknown':
+                    # token is a dialect indicator
+                    # reset reserved words and builtins
+                    self.set_dialect(indicated_dialect)
+                    self.dialect_set_by_tag = True
+            #
+            # check for reserved words, predefined and stdlib identifiers
+            if token is Name:
+                if value in self.reserved_words:
+                    token = Keyword.Reserved
+                    if self.algol_publication_mode:
+                        value = value.lower()
+                #
+                elif value in self.builtins:
+                    token = Name.Builtin
+                    if self.algol_publication_mode:
+                        value = value.lower()
+                #
+                elif value in self.pseudo_builtins:
+                    token = Name.Builtin.Pseudo
+                    if self.algol_publication_mode:
+                        value = value.lower()
+                #
+                elif value in self.adts:
+                    if not self.treat_stdlib_adts_as_builtins:
+                        token = Name.Namespace
+                    else:
+                        token = Name.Builtin.Pseudo
+                        if self.algol_publication_mode:
+                            value = value.lower()
+                #
+                elif value in self.modules:
+                    token = Name.Namespace
+                #
+                elif value in self.types:
+                    token = Name.Class
+                #
+                elif value in self.procedures:
+                    token = Name.Function
+                #
+                elif value in self.variables:
+                    token = Name.Variable
+                #
+                elif value in self.constants:
+                    token = Name.Constant
+            #
+            elif token in Number:
+                #
+                # mark prefix number literals as error for PIM and ISO dialects
+                if self.dialect not in ('unknown', 'm2r10', 'objm2'):
+                    if "'" in value or value[0:2] in ('0b', '0x', '0u'):
+                        token = Error
+                #
+                elif self.dialect in ('m2r10', 'objm2'):
+                    # mark base-8 number literals as errors for M2 R10 and ObjM2
+                    if token is Number.Oct:
+                        token = Error
+                    # mark suffix base-16 literals as errors for M2 R10 and ObjM2
+                    elif token is Number.Hex and 'H' in value:
+                        token = Error
+                    # mark real numbers with E as errors for M2 R10 and ObjM2
+                    elif token is Number.Float and 'E' in value:
+                        token = Error
+            #
+            elif token in Comment:
+                #
+                # mark single line comment as error for PIM and ISO dialects
+                if token is Comment.Single:
+                    if self.dialect not in ('unknown', 'm2r10', 'objm2'):
+                        token = Error
+                #
+                if token is Comment.Preproc:
+                    # mark ISO pragma as error for PIM dialects
+                    if value.startswith('<*') and \
+                       self.dialect.startswith('m2pim'):
+                        token = Error
+                    # mark PIM pragma as comment for other dialects
+                    elif value.startswith('(*$') and \
+                            self.dialect != 'unknown' and \
+                            not self.dialect.startswith('m2pim'):
+                        token = Comment.Multiline
+            #
+            else:  # token is neither Name nor Comment
+                #
+                # mark lexemes matching the dialect's error token set as errors
+                if value in self.lexemes_to_reject:
+                    token = Error
+                #
+                # substitute lexemes when in Algol mode
+                if self.algol_publication_mode:
+                    if value == '#':
+                        value = '≠'
+                    elif value == '<=':
+                        value = '≤'
+                    elif value == '>=':
+                        value = '≥'
+                    elif value == '==':
+                        value = '≡'
+                    elif value == '*.':
+                        value = '•'
+
+            # return result
+            yield index, token, value
+
+    def analyse_text(text):
+        """It's Pascal-like, but does not use FUNCTION -- uses PROCEDURE
+        instead."""
+
+        # Check if this looks like Pascal, if not, bail out early
+        if not ('(*' in text and '*)' in text and ':=' in text):
+            return
+
+        result = 0
+        # Procedure is in Modula2
+        if re.search(r'\bPROCEDURE\b', text):
+            result += 0.6
+
+        # FUNCTION is only valid in Pascal, but not in Modula2
+        if re.search(r'\bFUNCTION\b', text):
+            result = 0.0
+
+        return result
diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/lexers/mojo.py b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/mojo.py
new file mode 100644
index 00000000..4df18c4f
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/mojo.py
@@ -0,0 +1,707 @@
+"""
+    pygments.lexers.mojo
+    ~~~~~~~~~~~~~~~~~~~~
+
+    Lexers for Mojo and related languages.
+
+    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+import keyword
+
+from pygments import unistring as uni
+from pygments.lexer import (
+    RegexLexer,
+    bygroups,
+    combined,
+    default,
+    include,
+    this,
+    using,
+    words,
+)
+from pygments.token import (
+    Comment,
+    # Error,
+    Keyword,
+    Name,
+    Number,
+    Operator,
+    Punctuation,
+    String,
+    Text,
+    Whitespace,
+)
+from pygments.util import shebang_matches
+
+__all__ = ["MojoLexer"]
+
+
+class MojoLexer(RegexLexer):
+    """
+    For Mojo source code (version 24.2.1).
+    """
+
+    name = "Mojo"
+    url = "https://docs.modular.com/mojo/"
+    aliases = ["mojo", "🔥"]
+    filenames = [
+        "*.mojo",
+        "*.🔥",
+    ]
+    mimetypes = [
+        "text/x-mojo",
+        "application/x-mojo",
+    ]
+    version_added = "2.18"
+
+    uni_name = f"[{uni.xid_start}][{uni.xid_continue}]*"
+
+    def innerstring_rules(ttype):
+        return [
+            # the old style '%s' % (...) string formatting (still valid in Py3)
+            (
+                r"%(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?"
+                "[hlL]?[E-GXc-giorsaux%]",
+                String.Interpol,
+            ),
+            # the new style '{}'.format(...) string formatting
+            (
+                r"\{"
+                r"((\w+)((\.\w+)|(\[[^\]]+\]))*)?"  # field name
+                r"(\![sra])?"  # conversion
+                r"(\:(.?[<>=\^])?[-+ ]?#?0?(\d+)?,?(\.\d+)?[E-GXb-gnosx%]?)?"
+                r"\}",
+                String.Interpol,
+            ),
+            # backslashes, quotes and formatting signs must be parsed one at a time
+            (r'[^\\\'"%{\n]+', ttype),
+            (r'[\'"\\]', ttype),
+            # unhandled string formatting sign
+            (r"%|(\{{1,2})", ttype),
+            # newlines are an error (use "nl" state)
+        ]
+
+    def fstring_rules(ttype):
+        return [
+            # Assuming that a '}' is the closing brace after format specifier.
+            # Sadly, this means that we won't detect syntax error. But it's
+            # more important to parse correct syntax correctly, than to
+            # highlight invalid syntax.
+            (r"\}", String.Interpol),
+            (r"\{", String.Interpol, "expr-inside-fstring"),
+            # backslashes, quotes and formatting signs must be parsed one at a time
+            (r'[^\\\'"{}\n]+', ttype),
+            (r'[\'"\\]', ttype),
+            # newlines are an error (use "nl" state)
+        ]
+
+    tokens = {
+        "root": [
+            (r"\s+", Whitespace),
+            (
+                r'^(\s*)([rRuUbB]{,2})("""(?:.|\n)*?""")',
+                bygroups(Whitespace, String.Affix, String.Doc),
+            ),
+            (
+                r"^(\s*)([rRuUbB]{,2})('''(?:.|\n)*?''')",
+                bygroups(Whitespace, String.Affix, String.Doc),
+            ),
+            (r"\A#!.+$", Comment.Hashbang),
+            (r"#.*$", Comment.Single),
+            (r"\\\n", Whitespace),
+            (r"\\", Whitespace),
+            include("keywords"),
+            include("soft-keywords"),
+            # In the original PR, all the below here used ((?:\s|\\\s)+) to
+            # designate whitespace, but I can't find any example of this being
+            # needed in the example file, so we're replacing it with `\s+`.
+            (
+                r"(alias)(\s+)",
+                bygroups(Keyword, Whitespace),
+                "varname",  # TODO varname the right fit?
+            ),
+            (r"(var)(\s+)", bygroups(Keyword, Whitespace), "varname"),
+            (r"(def)(\s+)", bygroups(Keyword, Whitespace), "funcname"),
+            (r"(fn)(\s+)", bygroups(Keyword, Whitespace), "funcname"),
+            (
+                r"(class)(\s+)",
+                bygroups(Keyword, Whitespace),
+                "classname",
+            ),  # not implemented yet
+            (r"(struct)(\s+)", bygroups(Keyword, Whitespace), "structname"),
+            (r"(trait)(\s+)", bygroups(Keyword, Whitespace), "structname"),
+            (r"(from)(\s+)", bygroups(Keyword.Namespace, Whitespace), "fromimport"),
+            (r"(import)(\s+)", bygroups(Keyword.Namespace, Whitespace), "import"),
+            include("expr"),
+        ],
+        "expr": [
+            # raw f-strings
+            (
+                '(?i)(rf|fr)(""")',
+                bygroups(String.Affix, String.Double),
+                combined("rfstringescape", "tdqf"),
+            ),
+            (
+                "(?i)(rf|fr)(''')",
+                bygroups(String.Affix, String.Single),
+                combined("rfstringescape", "tsqf"),
+            ),
+            (
+                '(?i)(rf|fr)(")',
+                bygroups(String.Affix, String.Double),
+                combined("rfstringescape", "dqf"),
+            ),
+            (
+                "(?i)(rf|fr)(')",
+                bygroups(String.Affix, String.Single),
+                combined("rfstringescape", "sqf"),
+            ),
+            # non-raw f-strings
+            (
+                '([fF])(""")',
+                bygroups(String.Affix, String.Double),
+                combined("fstringescape", "tdqf"),
+            ),
+            (
+                "([fF])(''')",
+                bygroups(String.Affix, String.Single),
+                combined("fstringescape", "tsqf"),
+            ),
+            (
+                '([fF])(")',
+                bygroups(String.Affix, String.Double),
+                combined("fstringescape", "dqf"),
+            ),
+            (
+                "([fF])(')",
+                bygroups(String.Affix, String.Single),
+                combined("fstringescape", "sqf"),
+            ),
+            # raw bytes and strings
+            ('(?i)(rb|br|r)(""")', bygroups(String.Affix, String.Double), "tdqs"),
+            ("(?i)(rb|br|r)(''')", bygroups(String.Affix, String.Single), "tsqs"),
+            ('(?i)(rb|br|r)(")', bygroups(String.Affix, String.Double), "dqs"),
+            ("(?i)(rb|br|r)(')", bygroups(String.Affix, String.Single), "sqs"),
+            # non-raw strings
+            (
+                '([uU]?)(""")',
+                bygroups(String.Affix, String.Double),
+                combined("stringescape", "tdqs"),
+            ),
+            (
+                "([uU]?)(''')",
+                bygroups(String.Affix, String.Single),
+                combined("stringescape", "tsqs"),
+            ),
+            (
+                '([uU]?)(")',
+                bygroups(String.Affix, String.Double),
+                combined("stringescape", "dqs"),
+            ),
+            (
+                "([uU]?)(')",
+                bygroups(String.Affix, String.Single),
+                combined("stringescape", "sqs"),
+            ),
+            # non-raw bytes
+            (
+                '([bB])(""")',
+                bygroups(String.Affix, String.Double),
+                combined("bytesescape", "tdqs"),
+            ),
+            (
+                "([bB])(''')",
+                bygroups(String.Affix, String.Single),
+                combined("bytesescape", "tsqs"),
+            ),
+            (
+                '([bB])(")',
+                bygroups(String.Affix, String.Double),
+                combined("bytesescape", "dqs"),
+            ),
+            (
+                "([bB])(')",
+                bygroups(String.Affix, String.Single),
+                combined("bytesescape", "sqs"),
+            ),
+            (r"[^\S\n]+", Text),
+            include("numbers"),
+            (r"!=|==|<<|>>|:=|[-~+/*%=<>&^|.]", Operator),
+            (r"([]{}:\(\),;[])+", Punctuation),
+            (r"(in|is|and|or|not)\b", Operator.Word),
+            include("expr-keywords"),
+            include("builtins"),
+            include("magicfuncs"),
+            include("magicvars"),
+            include("name"),
+        ],
+        "expr-inside-fstring": [
+            (r"[{([]", Punctuation, "expr-inside-fstring-inner"),
+            # without format specifier
+            (
+                r"(=\s*)?"  # debug (https://bugs.python.org/issue36817)
+                r"(\![sraf])?"  # conversion
+                r"\}",
+                String.Interpol,
+                "#pop",
+            ),
+            # with format specifier
+            # we'll catch the remaining '}' in the outer scope
+            (
+                r"(=\s*)?"  # debug (https://bugs.python.org/issue36817)
+                r"(\![sraf])?"  # conversion
+                r":",
+                String.Interpol,
+                "#pop",
+            ),
+            (r"\s+", Whitespace),  # allow new lines
+            include("expr"),
+        ],
+        "expr-inside-fstring-inner": [
+            (r"[{([]", Punctuation, "expr-inside-fstring-inner"),
+            (r"[])}]", Punctuation, "#pop"),
+            (r"\s+", Whitespace),  # allow new lines
+            include("expr"),
+        ],
+        "expr-keywords": [
+            # Based on https://docs.python.org/3/reference/expressions.html
+            (
+                words(
+                    (
+                        "async for",  # TODO https://docs.modular.com/mojo/roadmap#no-async-for-or-async-with
+                        "async with",  # TODO https://docs.modular.com/mojo/roadmap#no-async-for-or-async-with
+                        "await",
+                        "else",
+                        "for",
+                        "if",
+                        "lambda",
+                        "yield",
+                        "yield from",
+                    ),
+                    suffix=r"\b",
+                ),
+                Keyword,
+            ),
+            (words(("True", "False", "None"), suffix=r"\b"), Keyword.Constant),
+        ],
+        "keywords": [
+            (
+                words(
+                    (
+                        "assert",
+                        "async",
+                        "await",
+                        "borrowed",
+                        "break",
+                        "continue",
+                        "del",
+                        "elif",
+                        "else",
+                        "except",
+                        "finally",
+                        "for",
+                        "global",
+                        "if",
+                        "lambda",
+                        "pass",
+                        "raise",
+                        "nonlocal",
+                        "return",
+                        "try",
+                        "while",
+                        "yield",
+                        "yield from",
+                        "as",
+                        "with",
+                    ),
+                    suffix=r"\b",
+                ),
+                Keyword,
+            ),
+            (words(("True", "False", "None"), suffix=r"\b"), Keyword.Constant),
+        ],
+        "soft-keywords": [
+            # `match`, `case` and `_` soft keywords
+            (
+                r"(^[ \t]*)"  # at beginning of line + possible indentation
+                r"(match|case)\b"  # a possible keyword
+                r"(?![ \t]*(?:"  # not followed by...
+                r"[:,;=^&|@~)\]}]|(?:" +  # characters and keywords that mean this isn't
+                # pattern matching (but None/True/False is ok)
+                r"|".join(k for k in keyword.kwlist if k[0].islower())
+                + r")\b))",
+                bygroups(Whitespace, Keyword),
+                "soft-keywords-inner",
+            ),
+        ],
+        "soft-keywords-inner": [
+            # optional `_` keyword
+            (r"(\s+)([^\n_]*)(_\b)", bygroups(Whitespace, using(this), Keyword)),
+            default("#pop"),
+        ],
+        "builtins": [
+            (
+                words(
+                    (
+                        "__import__",
+                        "abs",
+                        "aiter",
+                        "all",
+                        "any",
+                        "bin",
+                        "bool",
+                        "bytearray",
+                        "breakpoint",
+                        "bytes",
+                        "callable",
+                        "chr",
+                        "classmethod",
+                        "compile",
+                        "complex",
+                        "delattr",
+                        "dict",
+                        "dir",
+                        "divmod",
+                        "enumerate",
+                        "eval",
+                        "filter",
+                        "float",
+                        "format",
+                        "frozenset",
+                        "getattr",
+                        "globals",
+                        "hasattr",
+                        "hash",
+                        "hex",
+                        "id",
+                        "input",
+                        "int",
+                        "isinstance",
+                        "issubclass",
+                        "iter",
+                        "len",
+                        "list",
+                        "locals",
+                        "map",
+                        "max",
+                        "memoryview",
+                        "min",
+                        "next",
+                        "object",
+                        "oct",
+                        "open",
+                        "ord",
+                        "pow",
+                        "print",
+                        "property",
+                        "range",
+                        "repr",
+                        "reversed",
+                        "round",
+                        "set",
+                        "setattr",
+                        "slice",
+                        "sorted",
+                        "staticmethod",
+                        "str",
+                        "sum",
+                        "super",
+                        "tuple",
+                        "type",
+                        "vars",
+                        "zip",
+                        # Mojo builtin types: https://docs.modular.com/mojo/stdlib/builtin/
+                        "AnyType",
+                        "Coroutine",
+                        "DType",
+                        "Error",
+                        "Int",
+                        "List",
+                        "ListLiteral",
+                        "Scalar",
+                        "Int8",
+                        "UInt8",
+                        "Int16",
+                        "UInt16",
+                        "Int32",
+                        "UInt32",
+                        "Int64",
+                        "UInt64",
+                        "BFloat16",
+                        "Float16",
+                        "Float32",
+                        "Float64",
+                        "SIMD",
+                        "String",
+                        "Tensor",
+                        "Tuple",
+                        "Movable",
+                        "Copyable",
+                        "CollectionElement",
+                    ),
+                    prefix=r"(?>',
+    # Binary augmented
+    '+=', '-=', '*=', '/=', '%=', '**=', '&=', '|=', '^=', '<<=', '>>=',
+    # Comparison
+    '==', '!=', '<', '<=', '>', '>=', '<=>',
+    # Patterns and assignment
+    ':=', '?', '=~', '!~', '=>',
+    # Calls and sends
+    '.', '<-', '->',
+]
+_escape_pattern = (
+    r'(?:\\x[0-9a-fA-F]{2}|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|'
+    r'\\["\'\\bftnr])')
+# _char = _escape_chars + [('.', String.Char)]
+_identifier = r'[_a-zA-Z]\w*'
+
+_constants = [
+    # Void constants
+    'null',
+    # Bool constants
+    'false', 'true',
+    # Double constants
+    'Infinity', 'NaN',
+    # Special objects
+    'M', 'Ref', 'throw', 'traceln',
+]
+
+_guards = [
+    'Any', 'Binding', 'Bool', 'Bytes', 'Char', 'DeepFrozen', 'Double',
+    'Empty', 'Int', 'List', 'Map', 'Near', 'NullOk', 'Same', 'Selfless',
+    'Set', 'Str', 'SubrangeGuard', 'Transparent', 'Void',
+]
+
+_safeScope = [
+    '_accumulateList', '_accumulateMap', '_auditedBy', '_bind',
+    '_booleanFlow', '_comparer', '_equalizer', '_iterForever', '_loop',
+    '_makeBytes', '_makeDouble', '_makeFinalSlot', '_makeInt', '_makeList',
+    '_makeMap', '_makeMessageDesc', '_makeOrderedSpace', '_makeParamDesc',
+    '_makeProtocolDesc', '_makeSourceSpan', '_makeString', '_makeVarSlot',
+    '_makeVerbFacet', '_mapExtract', '_matchSame', '_quasiMatcher',
+    '_slotToBinding', '_splitList', '_suchThat', '_switchFailed',
+    '_validateFor', 'b__quasiParser', 'eval', 'import', 'm__quasiParser',
+    'makeBrandPair', 'makeLazySlot', 'safeScope', 'simple__quasiParser',
+]
+
+
+class MonteLexer(RegexLexer):
+    """
+    Lexer for the Monte programming language.
+    """
+    name = 'Monte'
+    url = 'https://monte.readthedocs.io/'
+    aliases = ['monte']
+    filenames = ['*.mt']
+    version_added = '2.2'
+
+    tokens = {
+        'root': [
+            # Comments
+            (r'#[^\n]*\n', Comment),
+
+            # Docstrings
+            # Apologies for the non-greedy matcher here.
+            (r'/\*\*.*?\*/', String.Doc),
+
+            # `var` declarations
+            (r'\bvar\b', Keyword.Declaration, 'var'),
+
+            # `interface` declarations
+            (r'\binterface\b', Keyword.Declaration, 'interface'),
+
+            # method declarations
+            (words(_methods, prefix='\\b', suffix='\\b'),
+             Keyword, 'method'),
+
+            # All other declarations
+            (words(_declarations, prefix='\\b', suffix='\\b'),
+             Keyword.Declaration),
+
+            # Keywords
+            (words(_keywords, prefix='\\b', suffix='\\b'), Keyword),
+
+            # Literals
+            ('[+-]?0x[_0-9a-fA-F]+', Number.Hex),
+            (r'[+-]?[_0-9]+\.[_0-9]*([eE][+-]?[_0-9]+)?', Number.Float),
+            ('[+-]?[_0-9]+', Number.Integer),
+            ("'", String.Double, 'char'),
+            ('"', String.Double, 'string'),
+
+            # Quasiliterals
+            ('`', String.Backtick, 'ql'),
+
+            # Operators
+            (words(_operators), Operator),
+
+            # Verb operators
+            (_identifier + '=', Operator.Word),
+
+            # Safe scope constants
+            (words(_constants, prefix='\\b', suffix='\\b'),
+             Keyword.Pseudo),
+
+            # Safe scope guards
+            (words(_guards, prefix='\\b', suffix='\\b'), Keyword.Type),
+
+            # All other safe scope names
+            (words(_safeScope, prefix='\\b', suffix='\\b'),
+             Name.Builtin),
+
+            # Identifiers
+            (_identifier, Name),
+
+            # Punctuation
+            (r'\(|\)|\{|\}|\[|\]|:|,', Punctuation),
+
+            # Whitespace
+            (' +', Whitespace),
+
+            # Definite lexer errors
+            ('=', Error),
+        ],
+        'char': [
+            # It is definitely an error to have a char of width == 0.
+            ("'", Error, 'root'),
+            (_escape_pattern, String.Escape, 'charEnd'),
+            ('.', String.Char, 'charEnd'),
+        ],
+        'charEnd': [
+            ("'", String.Char, '#pop:2'),
+            # It is definitely an error to have a char of width > 1.
+            ('.', Error),
+        ],
+        # The state of things coming into an interface.
+        'interface': [
+            (' +', Whitespace),
+            (_identifier, Name.Class, '#pop'),
+            include('root'),
+        ],
+        # The state of things coming into a method.
+        'method': [
+            (' +', Whitespace),
+            (_identifier, Name.Function, '#pop'),
+            include('root'),
+        ],
+        'string': [
+            ('"', String.Double, 'root'),
+            (_escape_pattern, String.Escape),
+            (r'\n', String.Double),
+            ('.', String.Double),
+        ],
+        'ql': [
+            ('`', String.Backtick, 'root'),
+            (r'\$' + _escape_pattern, String.Escape),
+            (r'\$\$', String.Escape),
+            (r'@@', String.Escape),
+            (r'\$\{', String.Interpol, 'qlNest'),
+            (r'@\{', String.Interpol, 'qlNest'),
+            (r'\$' + _identifier, Name),
+            ('@' + _identifier, Name),
+            ('.', String.Backtick),
+        ],
+        'qlNest': [
+            (r'\}', String.Interpol, '#pop'),
+            include('root'),
+        ],
+        # The state of things immediately following `var`.
+        'var': [
+            (' +', Whitespace),
+            (_identifier, Name.Variable, '#pop'),
+            include('root'),
+        ],
+    }
diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/lexers/mosel.py b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/mosel.py
new file mode 100644
index 00000000..426c9a14
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/mosel.py
@@ -0,0 +1,447 @@
+"""
+    pygments.lexers.mosel
+    ~~~~~~~~~~~~~~~~~~~~~
+
+    Lexers for the mosel language.
+    http://www.fico.com/en/products/fico-xpress-optimization
+
+    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+    Number, Punctuation
+
+__all__ = ['MoselLexer']
+
+FUNCTIONS = (
+    # core functions
+    '_',
+    'abs',
+    'arctan',
+    'asproc',
+    'assert',
+    'bitflip',
+    'bitneg',
+    'bitset',
+    'bitshift',
+    'bittest',
+    'bitval',
+    'ceil',
+    'cos',
+    'create',
+    'currentdate',
+    'currenttime',
+    'cutelt',
+    'cutfirst',
+    'cuthead',
+    'cutlast',
+    'cuttail',
+    'datablock',
+    'delcell',
+    'exists',
+    'exit',
+    'exp',
+    'exportprob',
+    'fclose',
+    'fflush',
+    'finalize',
+    'findfirst',
+    'findlast',
+    'floor',
+    'fopen',
+    'fselect',
+    'fskipline',
+    'fwrite',
+    'fwrite_',
+    'fwriteln',
+    'fwriteln_',
+    'getact',
+    'getcoeff',
+    'getcoeffs',
+    'getdual',
+    'getelt',
+    'getfid',
+    'getfirst',
+    'getfname',
+    'gethead',
+    'getlast',
+    'getobjval',
+    'getparam',
+    'getrcost',
+    'getreadcnt',
+    'getreverse',
+    'getsize',
+    'getslack',
+    'getsol',
+    'gettail',
+    'gettype',
+    'getvars',
+    'isdynamic',
+    'iseof',
+    'isfinite',
+    'ishidden',
+    'isinf',
+    'isnan',
+    'isodd',
+    'ln',
+    'localsetparam',
+    'log',
+    'makesos1',
+    'makesos2',
+    'maxlist',
+    'memoryuse',
+    'minlist',
+    'newmuid',
+    'publish',
+    'random',
+    'read',
+    'readln',
+    'reset',
+    'restoreparam',
+    'reverse',
+    'round',
+    'setcoeff',
+    'sethidden',
+    'setioerr',
+    'setmatherr',
+    'setname',
+    'setparam',
+    'setrandseed',
+    'setrange',
+    'settype',
+    'sin',
+    'splithead',
+    'splittail',
+    'sqrt',
+    'strfmt',
+    'substr',
+    'timestamp',
+    'unpublish',
+    'versionnum',
+    'versionstr',
+    'write',
+    'write_',
+    'writeln',
+    'writeln_',
+
+    # mosel exam mmxprs | sed -n -e "s/ [pf][a-z]* \([a-zA-Z0-9_]*\).*/'\1',/p" | sort -u
+    'addcut',
+    'addcuts',
+    'addmipsol',
+    'basisstability',
+    'calcsolinfo',
+    'clearmipdir',
+    'clearmodcut',
+    'command',
+    'copysoltoinit',
+    'crossoverlpsol',
+    'defdelayedrows',
+    'defsecurevecs',
+    'delcuts',
+    'dropcuts',
+    'estimatemarginals',
+    'fixglobal',
+    'flushmsgq',
+    'getbstat',
+    'getcnlist',
+    'getcplist',
+    'getdualray',
+    'getiis',
+    'getiissense',
+    'getiistype',
+    'getinfcause',
+    'getinfeas',
+    'getlb',
+    'getlct',
+    'getleft',
+    'getloadedlinctrs',
+    'getloadedmpvars',
+    'getname',
+    'getprimalray',
+    'getprobstat',
+    'getrange',
+    'getright',
+    'getsensrng',
+    'getsize',
+    'getsol',
+    'gettype',
+    'getub',
+    'getvars',
+    'gety',
+    'hasfeature',
+    'implies',
+    'indicator',
+    'initglobal',
+    'ishidden',
+    'isiisvalid',
+    'isintegral',
+    'loadbasis',
+    'loadcuts',
+    'loadlpsol',
+    'loadmipsol',
+    'loadprob',
+    'maximise',
+    'maximize',
+    'minimise',
+    'minimize',
+    'postsolve',
+    'readbasis',
+    'readdirs',
+    'readsol',
+    'refinemipsol',
+    'rejectintsol',
+    'repairinfeas',
+    'repairinfeas_deprec',
+    'resetbasis',
+    'resetiis',
+    'resetsol',
+    'savebasis',
+    'savemipsol',
+    'savesol',
+    'savestate',
+    'selectsol',
+    'setarchconsistency',
+    'setbstat',
+    'setcallback',
+    'setcbcutoff',
+    'setgndata',
+    'sethidden',
+    'setlb',
+    'setmipdir',
+    'setmodcut',
+    'setsol',
+    'setub',
+    'setucbdata',
+    'stopoptimise',
+    'stopoptimize',
+    'storecut',
+    'storecuts',
+    'unloadprob',
+    'uselastbarsol',
+    'writebasis',
+    'writedirs',
+    'writeprob',
+    'writesol',
+    'xor',
+    'xprs_addctr',
+    'xprs_addindic',
+
+    # mosel exam mmsystem | sed -n -e "s/ [pf][a-z]* \([a-zA-Z0-9_]*\).*/'\1',/p" | sort -u
+    'addmonths',
+    'copytext',
+    'cuttext',
+    'deltext',
+    'endswith',
+    'erase',
+    'expandpath',
+    'fcopy',
+    'fdelete',
+    'findfiles',
+    'findtext',
+    'fmove',
+    'formattext',
+    'getasnumber',
+    'getchar',
+    'getcwd',
+    'getdate',
+    'getday',
+    'getdaynum',
+    'getdays',
+    'getdirsep',
+    'getdsoparam',
+    'getendparse',
+    'getenv',
+    'getfsize',
+    'getfstat',
+    'getftime',
+    'gethour',
+    'getminute',
+    'getmonth',
+    'getmsec',
+    'getoserrmsg',
+    'getoserror',
+    'getpathsep',
+    'getqtype',
+    'getsecond',
+    'getsepchar',
+    'getsize',
+    'getstart',
+    'getsucc',
+    'getsysinfo',
+    'getsysstat',
+    'gettime',
+    'gettmpdir',
+    'gettrim',
+    'getweekday',
+    'getyear',
+    'inserttext',
+    'isvalid',
+    'jointext',
+    'makedir',
+    'makepath',
+    'newtar',
+    'newzip',
+    'nextfield',
+    'openpipe',
+    'parseextn',
+    'parseint',
+    'parsereal',
+    'parsetext',
+    'pastetext',
+    'pathmatch',
+    'pathsplit',
+    'qsort',
+    'quote',
+    'readtextline',
+    'regmatch',
+    'regreplace',
+    'removedir',
+    'removefiles',
+    'setchar',
+    'setdate',
+    'setday',
+    'setdsoparam',
+    'setendparse',
+    'setenv',
+    'sethour',
+    'setminute',
+    'setmonth',
+    'setmsec',
+    'setoserror',
+    'setqtype',
+    'setsecond',
+    'setsepchar',
+    'setstart',
+    'setsucc',
+    'settime',
+    'settrim',
+    'setyear',
+    'sleep',
+    'splittext',
+    'startswith',
+    'system',
+    'tarlist',
+    'textfmt',
+    'tolower',
+    'toupper',
+    'trim',
+    'untar',
+    'unzip',
+    'ziplist',
+
+    # mosel exam mmjobs | sed -n -e "s/ [pf][a-z]* \([a-zA-Z0-9_]*\).*/'\1',/p" | sort -u
+    'canceltimer',
+    'clearaliases',
+    'compile',
+    'connect',
+    'detach',
+    'disconnect',
+    'dropnextevent',
+    'findxsrvs',
+    'getaliases',
+    'getannidents',
+    'getannotations',
+    'getbanner',
+    'getclass',
+    'getdsoprop',
+    'getdsopropnum',
+    'getexitcode',
+    'getfromgid',
+    'getfromid',
+    'getfromuid',
+    'getgid',
+    'gethostalias',
+    'getid',
+    'getmodprop',
+    'getmodpropnum',
+    'getnextevent',
+    'getnode',
+    'getrmtid',
+    'getstatus',
+    'getsysinfo',
+    'gettimer',
+    'getuid',
+    'getvalue',
+    'isqueueempty',
+    'load',
+    'nullevent',
+    'peeknextevent',
+    'resetmodpar',
+    'run',
+    'send',
+    'setcontrol',
+    'setdefstream',
+    'setgid',
+    'sethostalias',
+    'setmodpar',
+    'settimer',
+    'setuid',
+    'setworkdir',
+    'stop',
+    'unload',
+    'wait',
+    'waitexpired',
+    'waitfor',
+    'waitforend',
+)
+
+
+class MoselLexer(RegexLexer):
+    """
+    For the Mosel optimization language.
+    """
+    name = 'Mosel'
+    aliases = ['mosel']
+    filenames = ['*.mos']
+    url = 'https://www.fico.com/fico-xpress-optimization/docs/latest/mosel/mosel_lang/dhtml/moselreflang.html'
+    version_added = '2.6'
+
+    tokens = {
+        'root': [
+            (r'\n', Text),
+            (r'\s+', Text.Whitespace),
+            (r'!.*?\n', Comment.Single),
+            (r'\(!(.|\n)*?!\)', Comment.Multiline),
+            (words((
+                'and', 'as', 'break', 'case', 'count', 'declarations', 'do',
+                'dynamic', 'elif', 'else', 'end-', 'end', 'evaluation', 'false',
+                'forall', 'forward', 'from', 'function', 'hashmap', 'if',
+                'imports', 'include', 'initialisations', 'initializations', 'inter',
+                'max', 'min', 'model', 'namespace', 'next', 'not', 'nsgroup',
+                'nssearch', 'of', 'options', 'or', 'package', 'parameters',
+                'procedure', 'public', 'prod', 'record', 'repeat', 'requirements',
+                'return', 'sum', 'then', 'to', 'true', 'union', 'until', 'uses',
+                'version', 'while', 'with'), prefix=r'\b', suffix=r'\b'),
+             Keyword.Builtin),
+            (words((
+                'range', 'array', 'set', 'list', 'mpvar', 'mpproblem', 'linctr',
+                'nlctr', 'integer', 'string', 'real', 'boolean', 'text', 'time',
+                'date', 'datetime', 'returned', 'Model', 'Mosel', 'counter',
+                'xmldoc', 'is_sos1', 'is_sos2', 'is_integer', 'is_binary',
+                'is_continuous', 'is_free', 'is_semcont', 'is_semint',
+                'is_partint'), prefix=r'\b', suffix=r'\b'),
+             Keyword.Type),
+            (r'(\+|\-|\*|/|=|<=|>=|\||\^|<|>|<>|\.\.|\.|:=|::|:|in|mod|div)',
+             Operator),
+            (r'[()\[\]{},;]+', Punctuation),
+            (words(FUNCTIONS,  prefix=r'\b', suffix=r'\b'), Name.Function),
+            (r'(\d+\.(?!\.)\d*|\.(?!.)\d+)([eE][+-]?\d+)?', Number.Float),
+            (r'\d+([eE][+-]?\d+)?', Number.Integer),
+            (r'[+-]?Infinity', Number.Integer),
+            (r'0[xX][0-9a-fA-F]+', Number),
+            (r'"', String.Double, 'double_quote'),
+            (r'\'', String.Single, 'single_quote'),
+            (r'(\w+|(\.(?!\.)))', Text),
+        ],
+        'single_quote': [
+            (r'\'', String.Single, '#pop'),
+            (r'[^\']+', String.Single),
+        ],
+        'double_quote': [
+            (r'(\\"|\\[0-7]{1,3}\D|\\[abfnrtv]|\\\\)', String.Escape),
+            (r'\"', String.Double, '#pop'),
+            (r'[^"\\]+', String.Double),
+        ],
+    }
diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/lexers/ncl.py b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/ncl.py
new file mode 100644
index 00000000..d2f47608
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/ncl.py
@@ -0,0 +1,894 @@
+"""
+    pygments.lexers.ncl
+    ~~~~~~~~~~~~~~~~~~~
+
+    Lexers for NCAR Command Language.
+
+    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+    Number, Punctuation
+
+__all__ = ['NCLLexer']
+
+
+class NCLLexer(RegexLexer):
+    """
+    Lexer for NCL code.
+    """
+    name = 'NCL'
+    aliases = ['ncl']
+    filenames = ['*.ncl']
+    mimetypes = ['text/ncl']
+    url = 'https://www.ncl.ucar.edu'
+    version_added = '2.2'
+
+    flags = re.MULTILINE
+
+    tokens = {
+        'root': [
+            (r';.*\n', Comment),
+            include('strings'),
+            include('core'),
+            (r'[a-zA-Z_]\w*', Name),
+            include('nums'),
+            (r'[\s]+', Text),
+        ],
+        'core': [
+            # Statements
+            (words((
+                'begin', 'break', 'continue', 'create', 'defaultapp', 'do',
+                'else', 'end', 'external', 'exit', 'True', 'False', 'file', 'function',
+                'getvalues', 'graphic', 'group', 'if', 'list', 'load', 'local',
+                'new', '_Missing', 'Missing', 'noparent', 'procedure',
+                'quit', 'QUIT', 'Quit', 'record', 'return', 'setvalues', 'stop',
+                'then', 'while'), prefix=r'\b', suffix=r'\s*\b'),
+             Keyword),
+
+            # Data Types
+            (words((
+                'ubyte', 'uint', 'uint64', 'ulong', 'string', 'byte',
+                'character', 'double', 'float', 'integer', 'int64', 'logical',
+                'long', 'short', 'ushort', 'enumeric', 'numeric', 'snumeric'),
+                prefix=r'\b', suffix=r'\s*\b'),
+             Keyword.Type),
+
+            # Operators
+            (r'[\%^*+\-/<>]', Operator),
+
+            # punctuation:
+            (r'[\[\]():@$!&|.,\\{}]', Punctuation),
+            (r'[=:]', Punctuation),
+
+            # Intrinsics
+            (words((
+                'abs', 'acos', 'addfile', 'addfiles', 'all', 'angmom_atm', 'any',
+                'area_conserve_remap', 'area_hi2lores', 'area_poly_sphere',
+                'asciiread', 'asciiwrite', 'asin', 'atan', 'atan2', 'attsetvalues',
+                'avg', 'betainc', 'bin_avg', 'bin_sum', 'bw_bandpass_filter',
+                'cancor', 'cbinread', 'cbinwrite', 'cd_calendar', 'cd_inv_calendar',
+                'cdfbin_p', 'cdfbin_pr', 'cdfbin_s', 'cdfbin_xn', 'cdfchi_p',
+                'cdfchi_x', 'cdfgam_p', 'cdfgam_x', 'cdfnor_p', 'cdfnor_x',
+                'cdft_p', 'cdft_t', 'ceil', 'center_finite_diff',
+                'center_finite_diff_n', 'cfftb', 'cfftf', 'cfftf_frq_reorder',
+                'charactertodouble', 'charactertofloat', 'charactertointeger',
+                'charactertolong', 'charactertoshort', 'charactertostring',
+                'chartodouble', 'chartofloat', 'chartoint', 'chartointeger',
+                'chartolong', 'chartoshort', 'chartostring', 'chiinv', 'clear',
+                'color_index_to_rgba', 'conform', 'conform_dims', 'cos', 'cosh',
+                'count_unique_values', 'covcorm', 'covcorm_xy', 'craybinnumrec',
+                'craybinrecread', 'create_graphic', 'csa1', 'csa1d', 'csa1s',
+                'csa1x', 'csa1xd', 'csa1xs', 'csa2', 'csa2d', 'csa2l', 'csa2ld',
+                'csa2ls', 'csa2lx', 'csa2lxd', 'csa2lxs', 'csa2s', 'csa2x',
+                'csa2xd', 'csa2xs', 'csa3', 'csa3d', 'csa3l', 'csa3ld', 'csa3ls',
+                'csa3lx', 'csa3lxd', 'csa3lxs', 'csa3s', 'csa3x', 'csa3xd',
+                'csa3xs', 'csc2s', 'csgetp', 'css2c', 'cssetp', 'cssgrid', 'csstri',
+                'csvoro', 'cumsum', 'cz2ccm', 'datatondc', 'day_of_week',
+                'day_of_year', 'days_in_month', 'default_fillvalue', 'delete',
+                'depth_to_pres', 'destroy', 'determinant', 'dewtemp_trh',
+                'dgeevx_lapack', 'dim_acumrun_n', 'dim_avg', 'dim_avg_n',
+                'dim_avg_wgt', 'dim_avg_wgt_n', 'dim_cumsum', 'dim_cumsum_n',
+                'dim_gamfit_n', 'dim_gbits', 'dim_max', 'dim_max_n', 'dim_median',
+                'dim_median_n', 'dim_min', 'dim_min_n', 'dim_num', 'dim_num_n',
+                'dim_numrun_n', 'dim_pqsort', 'dim_pqsort_n', 'dim_product',
+                'dim_product_n', 'dim_rmsd', 'dim_rmsd_n', 'dim_rmvmean',
+                'dim_rmvmean_n', 'dim_rmvmed', 'dim_rmvmed_n', 'dim_spi_n',
+                'dim_standardize', 'dim_standardize_n', 'dim_stat4', 'dim_stat4_n',
+                'dim_stddev', 'dim_stddev_n', 'dim_sum', 'dim_sum_n', 'dim_sum_wgt',
+                'dim_sum_wgt_n', 'dim_variance', 'dim_variance_n', 'dimsizes',
+                'doubletobyte', 'doubletochar', 'doubletocharacter',
+                'doubletofloat', 'doubletoint', 'doubletointeger', 'doubletolong',
+                'doubletoshort', 'dpres_hybrid_ccm', 'dpres_plevel', 'draw',
+                'draw_color_palette', 'dsgetp', 'dsgrid2', 'dsgrid2d', 'dsgrid2s',
+                'dsgrid3', 'dsgrid3d', 'dsgrid3s', 'dspnt2', 'dspnt2d', 'dspnt2s',
+                'dspnt3', 'dspnt3d', 'dspnt3s', 'dssetp', 'dtrend', 'dtrend_msg',
+                'dtrend_msg_n', 'dtrend_n', 'dtrend_quadratic',
+                'dtrend_quadratic_msg_n', 'dv2uvf', 'dv2uvg', 'dz_height',
+                'echo_off', 'echo_on', 'eof2data', 'eof_varimax', 'eofcor',
+                'eofcor_pcmsg', 'eofcor_ts', 'eofcov', 'eofcov_pcmsg', 'eofcov_ts',
+                'eofunc', 'eofunc_ts', 'eofunc_varimax', 'equiv_sample_size', 'erf',
+                'erfc', 'esacr', 'esacv', 'esccr', 'esccv', 'escorc', 'escorc_n',
+                'escovc', 'exit', 'exp', 'exp_tapersh', 'exp_tapersh_wgts',
+                'exp_tapershC', 'ezfftb', 'ezfftb_n', 'ezfftf', 'ezfftf_n',
+                'f2fosh', 'f2foshv', 'f2fsh', 'f2fshv', 'f2gsh', 'f2gshv', 'fabs',
+                'fbindirread', 'fbindirwrite', 'fbinnumrec', 'fbinread',
+                'fbinrecread', 'fbinrecwrite', 'fbinwrite', 'fft2db', 'fft2df',
+                'fftshift', 'fileattdef', 'filechunkdimdef', 'filedimdef',
+                'fileexists', 'filegrpdef', 'filevarattdef', 'filevarchunkdef',
+                'filevarcompressleveldef', 'filevardef', 'filevardimsizes',
+                'filwgts_lancos', 'filwgts_lanczos', 'filwgts_normal',
+                'floattobyte', 'floattochar', 'floattocharacter', 'floattoint',
+                'floattointeger', 'floattolong', 'floattoshort', 'floor',
+                'fluxEddy', 'fo2fsh', 'fo2fshv', 'fourier_info', 'frame', 'fspan',
+                'ftcurv', 'ftcurvd', 'ftcurvi', 'ftcurvp', 'ftcurvpi', 'ftcurvps',
+                'ftcurvs', 'ftest', 'ftgetp', 'ftkurv', 'ftkurvd', 'ftkurvp',
+                'ftkurvpd', 'ftsetp', 'ftsurf', 'g2fsh', 'g2fshv', 'g2gsh',
+                'g2gshv', 'gamma', 'gammainc', 'gaus', 'gaus_lobat',
+                'gaus_lobat_wgt', 'gc_aangle', 'gc_clkwise', 'gc_dangle',
+                'gc_inout', 'gc_latlon', 'gc_onarc', 'gc_pnt2gc', 'gc_qarea',
+                'gc_tarea', 'generate_2d_array', 'get_color_index',
+                'get_color_rgba', 'get_cpu_time', 'get_isolines', 'get_ncl_version',
+                'get_script_name', 'get_script_prefix_name', 'get_sphere_radius',
+                'get_unique_values', 'getbitsone', 'getenv', 'getfiledimsizes',
+                'getfilegrpnames', 'getfilepath', 'getfilevaratts',
+                'getfilevarchunkdimsizes', 'getfilevardims', 'getfilevardimsizes',
+                'getfilevarnames', 'getfilevartypes', 'getvaratts', 'getvardims',
+                'gradsf', 'gradsg', 'greg2jul', 'grid2triple', 'hlsrgb', 'hsvrgb',
+                'hydro', 'hyi2hyo', 'idsfft', 'igradsf', 'igradsg', 'ilapsf',
+                'ilapsg', 'ilapvf', 'ilapvg', 'ind', 'ind_resolve', 'int2p',
+                'int2p_n', 'integertobyte', 'integertochar', 'integertocharacter',
+                'integertoshort', 'inttobyte', 'inttochar', 'inttoshort',
+                'inverse_matrix', 'isatt', 'isbigendian', 'isbyte', 'ischar',
+                'iscoord', 'isdefined', 'isdim', 'isdimnamed', 'isdouble',
+                'isenumeric', 'isfile', 'isfilepresent', 'isfilevar',
+                'isfilevaratt', 'isfilevarcoord', 'isfilevardim', 'isfloat',
+                'isfunc', 'isgraphic', 'isint', 'isint64', 'isinteger',
+                'isleapyear', 'islogical', 'islong', 'ismissing', 'isnan_ieee',
+                'isnumeric', 'ispan', 'isproc', 'isshort', 'issnumeric', 'isstring',
+                'isubyte', 'isuint', 'isuint64', 'isulong', 'isunlimited',
+                'isunsigned', 'isushort', 'isvar', 'jul2greg', 'kmeans_as136',
+                'kolsm2_n', 'kron_product', 'lapsf', 'lapsg', 'lapvf', 'lapvg',
+                'latlon2utm', 'lclvl', 'lderuvf', 'lderuvg', 'linint1', 'linint1_n',
+                'linint2', 'linint2_points', 'linmsg', 'linmsg_n', 'linrood_latwgt',
+                'linrood_wgt', 'list_files', 'list_filevars', 'list_hlus',
+                'list_procfuncs', 'list_vars', 'ListAppend', 'ListCount',
+                'ListGetType', 'ListIndex', 'ListIndexFromName', 'ListPop',
+                'ListPush', 'ListSetType', 'loadscript', 'local_max', 'local_min',
+                'log', 'log10', 'longtobyte', 'longtochar', 'longtocharacter',
+                'longtoint', 'longtointeger', 'longtoshort', 'lspoly', 'lspoly_n',
+                'mask', 'max', 'maxind', 'min', 'minind', 'mixed_layer_depth',
+                'mixhum_ptd', 'mixhum_ptrh', 'mjo_cross_coh2pha',
+                'mjo_cross_segment', 'moc_globe_atl', 'monthday', 'natgrid',
+                'natgridd', 'natgrids', 'ncargpath', 'ncargversion', 'ndctodata',
+                'ndtooned', 'new', 'NewList', 'ngezlogo', 'nggcog', 'nggetp',
+                'nglogo', 'ngsetp', 'NhlAddAnnotation', 'NhlAddData',
+                'NhlAddOverlay', 'NhlAddPrimitive', 'NhlAppGetDefaultParentId',
+                'NhlChangeWorkstation', 'NhlClassName', 'NhlClearWorkstation',
+                'NhlDataPolygon', 'NhlDataPolyline', 'NhlDataPolymarker',
+                'NhlDataToNDC', 'NhlDestroy', 'NhlDraw', 'NhlFrame', 'NhlFreeColor',
+                'NhlGetBB', 'NhlGetClassResources', 'NhlGetErrorObjectId',
+                'NhlGetNamedColorIndex', 'NhlGetParentId',
+                'NhlGetParentWorkstation', 'NhlGetWorkspaceObjectId',
+                'NhlIsAllocatedColor', 'NhlIsApp', 'NhlIsDataComm', 'NhlIsDataItem',
+                'NhlIsDataSpec', 'NhlIsTransform', 'NhlIsView', 'NhlIsWorkstation',
+                'NhlName', 'NhlNDCPolygon', 'NhlNDCPolyline', 'NhlNDCPolymarker',
+                'NhlNDCToData', 'NhlNewColor', 'NhlNewDashPattern', 'NhlNewMarker',
+                'NhlPalGetDefined', 'NhlRemoveAnnotation', 'NhlRemoveData',
+                'NhlRemoveOverlay', 'NhlRemovePrimitive', 'NhlSetColor',
+                'NhlSetDashPattern', 'NhlSetMarker', 'NhlUpdateData',
+                'NhlUpdateWorkstation', 'nice_mnmxintvl', 'nngetaspectd',
+                'nngetaspects', 'nngetp', 'nngetsloped', 'nngetslopes', 'nngetwts',
+                'nngetwtsd', 'nnpnt', 'nnpntd', 'nnpntend', 'nnpntendd',
+                'nnpntinit', 'nnpntinitd', 'nnpntinits', 'nnpnts', 'nnsetp', 'num',
+                'obj_anal_ic', 'omega_ccm', 'onedtond', 'overlay', 'paleo_outline',
+                'pdfxy_bin', 'poisson_grid_fill', 'pop_remap', 'potmp_insitu_ocn',
+                'prcwater_dp', 'pres2hybrid', 'pres_hybrid_ccm', 'pres_sigma',
+                'print', 'print_table', 'printFileVarSummary', 'printVarSummary',
+                'product', 'pslec', 'pslhor', 'pslhyp', 'qsort', 'rand',
+                'random_chi', 'random_gamma', 'random_normal', 'random_setallseed',
+                'random_uniform', 'rcm2points', 'rcm2rgrid', 'rdsstoi',
+                'read_colormap_file', 'reg_multlin', 'regcoef', 'regCoef_n',
+                'regline', 'relhum', 'replace_ieeenan', 'reshape', 'reshape_ind',
+                'rgba_to_color_index', 'rgbhls', 'rgbhsv', 'rgbyiq', 'rgrid2rcm',
+                'rhomb_trunc', 'rip_cape_2d', 'rip_cape_3d', 'round', 'rtest',
+                'runave', 'runave_n', 'set_default_fillvalue', 'set_sphere_radius',
+                'setfileoption', 'sfvp2uvf', 'sfvp2uvg', 'shaec', 'shagc',
+                'shgetnp', 'shgetp', 'shgrid', 'shorttobyte', 'shorttochar',
+                'shorttocharacter', 'show_ascii', 'shsec', 'shsetp', 'shsgc',
+                'shsgc_R42', 'sigma2hybrid', 'simpeq', 'simpne', 'sin',
+                'sindex_yrmo', 'sinh', 'sizeof', 'sleep', 'smth9', 'snindex_yrmo',
+                'solve_linsys', 'span_color_indexes', 'span_color_rgba',
+                'sparse_matrix_mult', 'spcorr', 'spcorr_n', 'specx_anal',
+                'specxy_anal', 'spei', 'sprintf', 'sprinti', 'sqrt', 'sqsort',
+                'srand', 'stat2', 'stat4', 'stat_medrng', 'stat_trim',
+                'status_exit', 'stdatmus_p2tdz', 'stdatmus_z2tdp', 'stddev',
+                'str_capital', 'str_concat', 'str_fields_count', 'str_get_cols',
+                'str_get_dq', 'str_get_field', 'str_get_nl', 'str_get_sq',
+                'str_get_tab', 'str_index_of_substr', 'str_insert', 'str_is_blank',
+                'str_join', 'str_left_strip', 'str_lower', 'str_match',
+                'str_match_ic', 'str_match_ic_regex', 'str_match_ind',
+                'str_match_ind_ic', 'str_match_ind_ic_regex', 'str_match_ind_regex',
+                'str_match_regex', 'str_right_strip', 'str_split',
+                'str_split_by_length', 'str_split_csv', 'str_squeeze', 'str_strip',
+                'str_sub_str', 'str_switch', 'str_upper', 'stringtochar',
+                'stringtocharacter', 'stringtodouble', 'stringtofloat',
+                'stringtoint', 'stringtointeger', 'stringtolong', 'stringtoshort',
+                'strlen', 'student_t', 'sum', 'svd_lapack', 'svdcov', 'svdcov_sv',
+                'svdstd', 'svdstd_sv', 'system', 'systemfunc', 'tan', 'tanh',
+                'taper', 'taper_n', 'tdclrs', 'tdctri', 'tdcudp', 'tdcurv',
+                'tddtri', 'tdez2d', 'tdez3d', 'tdgetp', 'tdgrds', 'tdgrid',
+                'tdgtrs', 'tdinit', 'tditri', 'tdlbla', 'tdlblp', 'tdlbls',
+                'tdline', 'tdlndp', 'tdlnpa', 'tdlpdp', 'tdmtri', 'tdotri',
+                'tdpara', 'tdplch', 'tdprpa', 'tdprpi', 'tdprpt', 'tdsetp',
+                'tdsort', 'tdstri', 'tdstrs', 'tdttri', 'thornthwaite', 'tobyte',
+                'tochar', 'todouble', 'tofloat', 'toint', 'toint64', 'tointeger',
+                'tolong', 'toshort', 'tosigned', 'tostring', 'tostring_with_format',
+                'totype', 'toubyte', 'touint', 'touint64', 'toulong', 'tounsigned',
+                'toushort', 'trend_manken', 'tri_trunc', 'triple2grid',
+                'triple2grid2d', 'trop_wmo', 'ttest', 'typeof', 'undef',
+                'unique_string', 'update', 'ushorttoint', 'ut_calendar',
+                'ut_inv_calendar', 'utm2latlon', 'uv2dv_cfd', 'uv2dvf', 'uv2dvg',
+                'uv2sfvpf', 'uv2sfvpg', 'uv2vr_cfd', 'uv2vrdvf', 'uv2vrdvg',
+                'uv2vrf', 'uv2vrg', 'v5d_close', 'v5d_create', 'v5d_setLowLev',
+                'v5d_setUnits', 'v5d_write', 'v5d_write_var', 'variance', 'vhaec',
+                'vhagc', 'vhsec', 'vhsgc', 'vibeta', 'vinth2p', 'vinth2p_ecmwf',
+                'vinth2p_ecmwf_nodes', 'vinth2p_nodes', 'vintp2p_ecmwf', 'vr2uvf',
+                'vr2uvg', 'vrdv2uvf', 'vrdv2uvg', 'wavelet', 'wavelet_default',
+                'weibull', 'wgt_area_smooth', 'wgt_areaave', 'wgt_areaave2',
+                'wgt_arearmse', 'wgt_arearmse2', 'wgt_areasum2', 'wgt_runave',
+                'wgt_runave_n', 'wgt_vert_avg_beta', 'wgt_volave', 'wgt_volave_ccm',
+                'wgt_volrmse', 'wgt_volrmse_ccm', 'where', 'wk_smooth121', 'wmbarb',
+                'wmbarbmap', 'wmdrft', 'wmgetp', 'wmlabs', 'wmsetp', 'wmstnm',
+                'wmvect', 'wmvectmap', 'wmvlbl', 'wrf_avo', 'wrf_cape_2d',
+                'wrf_cape_3d', 'wrf_dbz', 'wrf_eth', 'wrf_helicity', 'wrf_ij_to_ll',
+                'wrf_interp_1d', 'wrf_interp_2d_xy', 'wrf_interp_3d_z',
+                'wrf_latlon_to_ij', 'wrf_ll_to_ij', 'wrf_omega', 'wrf_pvo',
+                'wrf_rh', 'wrf_slp', 'wrf_smooth_2d', 'wrf_td', 'wrf_tk',
+                'wrf_updraft_helicity', 'wrf_uvmet', 'wrf_virtual_temp',
+                'wrf_wetbulb', 'wrf_wps_close_int', 'wrf_wps_open_int',
+                'wrf_wps_rddata_int', 'wrf_wps_rdhead_int', 'wrf_wps_read_int',
+                'wrf_wps_write_int', 'write_matrix', 'write_table', 'yiqrgb',
+                'z2geouv', 'zonal_mpsi', 'addfiles_GetVar', 'advect_variable',
+                'area_conserve_remap_Wrap', 'area_hi2lores_Wrap',
+                'array_append_record', 'assignFillValue', 'byte2flt',
+                'byte2flt_hdf', 'calcDayAnomTLL', 'calcMonAnomLLLT',
+                'calcMonAnomLLT', 'calcMonAnomTLL', 'calcMonAnomTLLL',
+                'calculate_monthly_values', 'cd_convert', 'changeCase',
+                'changeCaseChar', 'clmDayTLL', 'clmDayTLLL', 'clmMon2clmDay',
+                'clmMonLLLT', 'clmMonLLT', 'clmMonTLL', 'clmMonTLLL', 'closest_val',
+                'copy_VarAtts', 'copy_VarCoords', 'copy_VarCoords_1',
+                'copy_VarCoords_2', 'copy_VarMeta', 'copyatt', 'crossp3',
+                'cshstringtolist', 'cssgrid_Wrap', 'dble2flt', 'decimalPlaces',
+                'delete_VarAtts', 'dim_avg_n_Wrap', 'dim_avg_wgt_n_Wrap',
+                'dim_avg_wgt_Wrap', 'dim_avg_Wrap', 'dim_cumsum_n_Wrap',
+                'dim_cumsum_Wrap', 'dim_max_n_Wrap', 'dim_min_n_Wrap',
+                'dim_rmsd_n_Wrap', 'dim_rmsd_Wrap', 'dim_rmvmean_n_Wrap',
+                'dim_rmvmean_Wrap', 'dim_rmvmed_n_Wrap', 'dim_rmvmed_Wrap',
+                'dim_standardize_n_Wrap', 'dim_standardize_Wrap',
+                'dim_stddev_n_Wrap', 'dim_stddev_Wrap', 'dim_sum_n_Wrap',
+                'dim_sum_wgt_n_Wrap', 'dim_sum_wgt_Wrap', 'dim_sum_Wrap',
+                'dim_variance_n_Wrap', 'dim_variance_Wrap', 'dpres_plevel_Wrap',
+                'dtrend_leftdim', 'dv2uvF_Wrap', 'dv2uvG_Wrap', 'eof_north',
+                'eofcor_Wrap', 'eofcov_Wrap', 'eofunc_north', 'eofunc_ts_Wrap',
+                'eofunc_varimax_reorder', 'eofunc_varimax_Wrap', 'eofunc_Wrap',
+                'epsZero', 'f2fosh_Wrap', 'f2foshv_Wrap', 'f2fsh_Wrap',
+                'f2fshv_Wrap', 'f2gsh_Wrap', 'f2gshv_Wrap', 'fbindirSwap',
+                'fbinseqSwap1', 'fbinseqSwap2', 'flt2dble', 'flt2string',
+                'fo2fsh_Wrap', 'fo2fshv_Wrap', 'g2fsh_Wrap', 'g2fshv_Wrap',
+                'g2gsh_Wrap', 'g2gshv_Wrap', 'generate_resample_indices',
+                'generate_sample_indices', 'generate_unique_indices',
+                'genNormalDist', 'get1Dindex', 'get1Dindex_Collapse',
+                'get1Dindex_Exclude', 'get_file_suffix', 'GetFillColor',
+                'GetFillColorIndex', 'getFillValue', 'getind_latlon2d',
+                'getVarDimNames', 'getVarFillValue', 'grib_stime2itime',
+                'hyi2hyo_Wrap', 'ilapsF_Wrap', 'ilapsG_Wrap', 'ind_nearest_coord',
+                'indStrSubset', 'int2dble', 'int2flt', 'int2p_n_Wrap', 'int2p_Wrap',
+                'isMonotonic', 'isStrSubset', 'latGau', 'latGauWgt', 'latGlobeF',
+                'latGlobeFo', 'latRegWgt', 'linint1_n_Wrap', 'linint1_Wrap',
+                'linint2_points_Wrap', 'linint2_Wrap', 'local_max_1d',
+                'local_min_1d', 'lonFlip', 'lonGlobeF', 'lonGlobeFo', 'lonPivot',
+                'merge_levels_sfc', 'mod', 'month_to_annual',
+                'month_to_annual_weighted', 'month_to_season', 'month_to_season12',
+                'month_to_seasonN', 'monthly_total_to_daily_mean', 'nameDim',
+                'natgrid_Wrap', 'NewCosWeight', 'niceLatLon2D', 'NormCosWgtGlobe',
+                'numAsciiCol', 'numAsciiRow', 'numeric2int',
+                'obj_anal_ic_deprecated', 'obj_anal_ic_Wrap', 'omega_ccm_driver',
+                'omega_to_w', 'oneDtostring', 'pack_values', 'pattern_cor', 'pdfx',
+                'pdfxy', 'pdfxy_conform', 'pot_temp', 'pot_vort_hybrid',
+                'pot_vort_isobaric', 'pres2hybrid_Wrap', 'print_clock',
+                'printMinMax', 'quadroots', 'rcm2points_Wrap', 'rcm2rgrid_Wrap',
+                'readAsciiHead', 'readAsciiTable', 'reg_multlin_stats',
+                'region_ind', 'regline_stats', 'relhum_ttd', 'replaceSingleChar',
+                'RGBtoCmap', 'rgrid2rcm_Wrap', 'rho_mwjf', 'rm_single_dims',
+                'rmAnnCycle1D', 'rmInsufData', 'rmMonAnnCycLLLT', 'rmMonAnnCycLLT',
+                'rmMonAnnCycTLL', 'runave_n_Wrap', 'runave_Wrap', 'short2flt',
+                'short2flt_hdf', 'shsgc_R42_Wrap', 'sign_f90', 'sign_matlab',
+                'smth9_Wrap', 'smthClmDayTLL', 'smthClmDayTLLL', 'SqrtCosWeight',
+                'stat_dispersion', 'static_stability', 'stdMonLLLT', 'stdMonLLT',
+                'stdMonTLL', 'stdMonTLLL', 'symMinMaxPlt', 'table_attach_columns',
+                'table_attach_rows', 'time_to_newtime', 'transpose',
+                'triple2grid_Wrap', 'ut_convert', 'uv2dvF_Wrap', 'uv2dvG_Wrap',
+                'uv2vrF_Wrap', 'uv2vrG_Wrap', 'vr2uvF_Wrap', 'vr2uvG_Wrap',
+                'w_to_omega', 'wallClockElapseTime', 'wave_number_spc',
+                'wgt_areaave_Wrap', 'wgt_runave_leftdim', 'wgt_runave_n_Wrap',
+                'wgt_runave_Wrap', 'wgt_vertical_n', 'wind_component',
+                'wind_direction', 'yyyyddd_to_yyyymmdd', 'yyyymm_time',
+                'yyyymm_to_yyyyfrac', 'yyyymmdd_time', 'yyyymmdd_to_yyyyddd',
+                'yyyymmdd_to_yyyyfrac', 'yyyymmddhh_time', 'yyyymmddhh_to_yyyyfrac',
+                'zonal_mpsi_Wrap', 'zonalAve', 'calendar_decode2', 'cd_string',
+                'kf_filter', 'run_cor', 'time_axis_labels', 'ut_string',
+                'wrf_contour', 'wrf_map', 'wrf_map_overlay', 'wrf_map_overlays',
+                'wrf_map_resources', 'wrf_map_zoom', 'wrf_overlay', 'wrf_overlays',
+                'wrf_user_getvar', 'wrf_user_ij_to_ll', 'wrf_user_intrp2d',
+                'wrf_user_intrp3d', 'wrf_user_latlon_to_ij', 'wrf_user_list_times',
+                'wrf_user_ll_to_ij', 'wrf_user_unstagger', 'wrf_user_vert_interp',
+                'wrf_vector', 'gsn_add_annotation', 'gsn_add_polygon',
+                'gsn_add_polyline', 'gsn_add_polymarker',
+                'gsn_add_shapefile_polygons', 'gsn_add_shapefile_polylines',
+                'gsn_add_shapefile_polymarkers', 'gsn_add_text', 'gsn_attach_plots',
+                'gsn_blank_plot', 'gsn_contour', 'gsn_contour_map',
+                'gsn_contour_shade', 'gsn_coordinates', 'gsn_create_labelbar',
+                'gsn_create_legend', 'gsn_create_text',
+                'gsn_csm_attach_zonal_means', 'gsn_csm_blank_plot',
+                'gsn_csm_contour', 'gsn_csm_contour_map', 'gsn_csm_contour_map_ce',
+                'gsn_csm_contour_map_overlay', 'gsn_csm_contour_map_polar',
+                'gsn_csm_hov', 'gsn_csm_lat_time', 'gsn_csm_map', 'gsn_csm_map_ce',
+                'gsn_csm_map_polar', 'gsn_csm_pres_hgt',
+                'gsn_csm_pres_hgt_streamline', 'gsn_csm_pres_hgt_vector',
+                'gsn_csm_streamline', 'gsn_csm_streamline_contour_map',
+                'gsn_csm_streamline_contour_map_ce',
+                'gsn_csm_streamline_contour_map_polar', 'gsn_csm_streamline_map',
+                'gsn_csm_streamline_map_ce', 'gsn_csm_streamline_map_polar',
+                'gsn_csm_streamline_scalar', 'gsn_csm_streamline_scalar_map',
+                'gsn_csm_streamline_scalar_map_ce',
+                'gsn_csm_streamline_scalar_map_polar', 'gsn_csm_time_lat',
+                'gsn_csm_vector', 'gsn_csm_vector_map', 'gsn_csm_vector_map_ce',
+                'gsn_csm_vector_map_polar', 'gsn_csm_vector_scalar',
+                'gsn_csm_vector_scalar_map', 'gsn_csm_vector_scalar_map_ce',
+                'gsn_csm_vector_scalar_map_polar', 'gsn_csm_x2y', 'gsn_csm_x2y2',
+                'gsn_csm_xy', 'gsn_csm_xy2', 'gsn_csm_xy3', 'gsn_csm_y',
+                'gsn_define_colormap', 'gsn_draw_colormap', 'gsn_draw_named_colors',
+                'gsn_histogram', 'gsn_labelbar_ndc', 'gsn_legend_ndc', 'gsn_map',
+                'gsn_merge_colormaps', 'gsn_open_wks', 'gsn_panel', 'gsn_polygon',
+                'gsn_polygon_ndc', 'gsn_polyline', 'gsn_polyline_ndc',
+                'gsn_polymarker', 'gsn_polymarker_ndc', 'gsn_retrieve_colormap',
+                'gsn_reverse_colormap', 'gsn_streamline', 'gsn_streamline_map',
+                'gsn_streamline_scalar', 'gsn_streamline_scalar_map', 'gsn_table',
+                'gsn_text', 'gsn_text_ndc', 'gsn_vector', 'gsn_vector_map',
+                'gsn_vector_scalar', 'gsn_vector_scalar_map', 'gsn_xy', 'gsn_y',
+                'hsv2rgb', 'maximize_output', 'namedcolor2rgb', 'namedcolor2rgba',
+                'reset_device_coordinates', 'span_named_colors'), prefix=r'\b'),
+             Name.Builtin),
+
+            # Resources
+            (words((
+                'amDataXF', 'amDataYF', 'amJust', 'amOn', 'amOrthogonalPosF',
+                'amParallelPosF', 'amResizeNotify', 'amSide', 'amTrackData',
+                'amViewId', 'amZone', 'appDefaultParent', 'appFileSuffix',
+                'appResources', 'appSysDir', 'appUsrDir', 'caCopyArrays',
+                'caXArray', 'caXCast', 'caXMaxV', 'caXMinV', 'caXMissingV',
+                'caYArray', 'caYCast', 'caYMaxV', 'caYMinV', 'caYMissingV',
+                'cnCellFillEdgeColor', 'cnCellFillMissingValEdgeColor',
+                'cnConpackParams', 'cnConstFEnableFill', 'cnConstFLabelAngleF',
+                'cnConstFLabelBackgroundColor', 'cnConstFLabelConstantSpacingF',
+                'cnConstFLabelFont', 'cnConstFLabelFontAspectF',
+                'cnConstFLabelFontColor', 'cnConstFLabelFontHeightF',
+                'cnConstFLabelFontQuality', 'cnConstFLabelFontThicknessF',
+                'cnConstFLabelFormat', 'cnConstFLabelFuncCode', 'cnConstFLabelJust',
+                'cnConstFLabelOn', 'cnConstFLabelOrthogonalPosF',
+                'cnConstFLabelParallelPosF', 'cnConstFLabelPerimColor',
+                'cnConstFLabelPerimOn', 'cnConstFLabelPerimSpaceF',
+                'cnConstFLabelPerimThicknessF', 'cnConstFLabelSide',
+                'cnConstFLabelString', 'cnConstFLabelTextDirection',
+                'cnConstFLabelZone', 'cnConstFUseInfoLabelRes',
+                'cnExplicitLabelBarLabelsOn', 'cnExplicitLegendLabelsOn',
+                'cnExplicitLineLabelsOn', 'cnFillBackgroundColor', 'cnFillColor',
+                'cnFillColors', 'cnFillDotSizeF', 'cnFillDrawOrder', 'cnFillMode',
+                'cnFillOn', 'cnFillOpacityF', 'cnFillPalette', 'cnFillPattern',
+                'cnFillPatterns', 'cnFillScaleF', 'cnFillScales', 'cnFixFillBleed',
+                'cnGridBoundFillColor', 'cnGridBoundFillPattern',
+                'cnGridBoundFillScaleF', 'cnGridBoundPerimColor',
+                'cnGridBoundPerimDashPattern', 'cnGridBoundPerimOn',
+                'cnGridBoundPerimThicknessF', 'cnHighLabelAngleF',
+                'cnHighLabelBackgroundColor', 'cnHighLabelConstantSpacingF',
+                'cnHighLabelCount', 'cnHighLabelFont', 'cnHighLabelFontAspectF',
+                'cnHighLabelFontColor', 'cnHighLabelFontHeightF',
+                'cnHighLabelFontQuality', 'cnHighLabelFontThicknessF',
+                'cnHighLabelFormat', 'cnHighLabelFuncCode', 'cnHighLabelPerimColor',
+                'cnHighLabelPerimOn', 'cnHighLabelPerimSpaceF',
+                'cnHighLabelPerimThicknessF', 'cnHighLabelString', 'cnHighLabelsOn',
+                'cnHighLowLabelOverlapMode', 'cnHighUseLineLabelRes',
+                'cnInfoLabelAngleF', 'cnInfoLabelBackgroundColor',
+                'cnInfoLabelConstantSpacingF', 'cnInfoLabelFont',
+                'cnInfoLabelFontAspectF', 'cnInfoLabelFontColor',
+                'cnInfoLabelFontHeightF', 'cnInfoLabelFontQuality',
+                'cnInfoLabelFontThicknessF', 'cnInfoLabelFormat',
+                'cnInfoLabelFuncCode', 'cnInfoLabelJust', 'cnInfoLabelOn',
+                'cnInfoLabelOrthogonalPosF', 'cnInfoLabelParallelPosF',
+                'cnInfoLabelPerimColor', 'cnInfoLabelPerimOn',
+                'cnInfoLabelPerimSpaceF', 'cnInfoLabelPerimThicknessF',
+                'cnInfoLabelSide', 'cnInfoLabelString', 'cnInfoLabelTextDirection',
+                'cnInfoLabelZone', 'cnLabelBarEndLabelsOn', 'cnLabelBarEndStyle',
+                'cnLabelDrawOrder', 'cnLabelMasking', 'cnLabelScaleFactorF',
+                'cnLabelScaleValueF', 'cnLabelScalingMode', 'cnLegendLevelFlags',
+                'cnLevelCount', 'cnLevelFlag', 'cnLevelFlags', 'cnLevelSelectionMode',
+                'cnLevelSpacingF', 'cnLevels', 'cnLineColor', 'cnLineColors',
+                'cnLineDashPattern', 'cnLineDashPatterns', 'cnLineDashSegLenF',
+                'cnLineDrawOrder', 'cnLineLabelAngleF', 'cnLineLabelBackgroundColor',
+                'cnLineLabelConstantSpacingF', 'cnLineLabelCount',
+                'cnLineLabelDensityF', 'cnLineLabelFont', 'cnLineLabelFontAspectF',
+                'cnLineLabelFontColor', 'cnLineLabelFontColors',
+                'cnLineLabelFontHeightF', 'cnLineLabelFontQuality',
+                'cnLineLabelFontThicknessF', 'cnLineLabelFormat',
+                'cnLineLabelFuncCode', 'cnLineLabelInterval', 'cnLineLabelPerimColor',
+                'cnLineLabelPerimOn', 'cnLineLabelPerimSpaceF',
+                'cnLineLabelPerimThicknessF', 'cnLineLabelPlacementMode',
+                'cnLineLabelStrings', 'cnLineLabelsOn', 'cnLinePalette',
+                'cnLineThicknessF', 'cnLineThicknesses', 'cnLinesOn',
+                'cnLowLabelAngleF', 'cnLowLabelBackgroundColor',
+                'cnLowLabelConstantSpacingF', 'cnLowLabelCount', 'cnLowLabelFont',
+                'cnLowLabelFontAspectF', 'cnLowLabelFontColor',
+                'cnLowLabelFontHeightF', 'cnLowLabelFontQuality',
+                'cnLowLabelFontThicknessF', 'cnLowLabelFormat', 'cnLowLabelFuncCode',
+                'cnLowLabelPerimColor', 'cnLowLabelPerimOn', 'cnLowLabelPerimSpaceF',
+                'cnLowLabelPerimThicknessF', 'cnLowLabelString', 'cnLowLabelsOn',
+                'cnLowUseHighLabelRes', 'cnMaxDataValueFormat', 'cnMaxLevelCount',
+                'cnMaxLevelValF', 'cnMaxPointDistanceF', 'cnMinLevelValF',
+                'cnMissingValFillColor', 'cnMissingValFillPattern',
+                'cnMissingValFillScaleF', 'cnMissingValPerimColor',
+                'cnMissingValPerimDashPattern', 'cnMissingValPerimGridBoundOn',
+                'cnMissingValPerimOn', 'cnMissingValPerimThicknessF',
+                'cnMonoFillColor', 'cnMonoFillPattern', 'cnMonoFillScale',
+                'cnMonoLevelFlag', 'cnMonoLineColor', 'cnMonoLineDashPattern',
+                'cnMonoLineLabelFontColor', 'cnMonoLineThickness', 'cnNoDataLabelOn',
+                'cnNoDataLabelString', 'cnOutOfRangeFillColor',
+                'cnOutOfRangeFillPattern', 'cnOutOfRangeFillScaleF',
+                'cnOutOfRangePerimColor', 'cnOutOfRangePerimDashPattern',
+                'cnOutOfRangePerimOn', 'cnOutOfRangePerimThicknessF',
+                'cnRasterCellSizeF', 'cnRasterMinCellSizeF', 'cnRasterModeOn',
+                'cnRasterSampleFactorF', 'cnRasterSmoothingOn', 'cnScalarFieldData',
+                'cnSmoothingDistanceF', 'cnSmoothingOn', 'cnSmoothingTensionF',
+                'cnSpanFillPalette', 'cnSpanLinePalette', 'ctCopyTables',
+                'ctXElementSize', 'ctXMaxV', 'ctXMinV', 'ctXMissingV', 'ctXTable',
+                'ctXTableLengths', 'ctXTableType', 'ctYElementSize', 'ctYMaxV',
+                'ctYMinV', 'ctYMissingV', 'ctYTable', 'ctYTableLengths',
+                'ctYTableType', 'dcDelayCompute', 'errBuffer',
+                'errFileName', 'errFilePtr', 'errLevel', 'errPrint', 'errUnitNumber',
+                'gsClipOn', 'gsColors', 'gsEdgeColor', 'gsEdgeDashPattern',
+                'gsEdgeDashSegLenF', 'gsEdgeThicknessF', 'gsEdgesOn',
+                'gsFillBackgroundColor', 'gsFillColor', 'gsFillDotSizeF',
+                'gsFillIndex', 'gsFillLineThicknessF', 'gsFillOpacityF',
+                'gsFillScaleF', 'gsFont', 'gsFontAspectF', 'gsFontColor',
+                'gsFontHeightF', 'gsFontOpacityF', 'gsFontQuality',
+                'gsFontThicknessF', 'gsLineColor', 'gsLineDashPattern',
+                'gsLineDashSegLenF', 'gsLineLabelConstantSpacingF', 'gsLineLabelFont',
+                'gsLineLabelFontAspectF', 'gsLineLabelFontColor',
+                'gsLineLabelFontHeightF', 'gsLineLabelFontQuality',
+                'gsLineLabelFontThicknessF', 'gsLineLabelFuncCode',
+                'gsLineLabelString', 'gsLineOpacityF', 'gsLineThicknessF',
+                'gsMarkerColor', 'gsMarkerIndex', 'gsMarkerOpacityF', 'gsMarkerSizeF',
+                'gsMarkerThicknessF', 'gsSegments', 'gsTextAngleF',
+                'gsTextConstantSpacingF', 'gsTextDirection', 'gsTextFuncCode',
+                'gsTextJustification', 'gsnAboveYRefLineBarColors',
+                'gsnAboveYRefLineBarFillScales', 'gsnAboveYRefLineBarPatterns',
+                'gsnAboveYRefLineColor', 'gsnAddCyclic', 'gsnAttachBorderOn',
+                'gsnAttachPlotsXAxis', 'gsnBelowYRefLineBarColors',
+                'gsnBelowYRefLineBarFillScales', 'gsnBelowYRefLineBarPatterns',
+                'gsnBelowYRefLineColor', 'gsnBoxMargin', 'gsnCenterString',
+                'gsnCenterStringFontColor', 'gsnCenterStringFontHeightF',
+                'gsnCenterStringFuncCode', 'gsnCenterStringOrthogonalPosF',
+                'gsnCenterStringParallelPosF', 'gsnContourLineThicknessesScale',
+                'gsnContourNegLineDashPattern', 'gsnContourPosLineDashPattern',
+                'gsnContourZeroLineThicknessF', 'gsnDebugWriteFileName', 'gsnDraw',
+                'gsnFrame', 'gsnHistogramBarWidthPercent', 'gsnHistogramBinIntervals',
+                'gsnHistogramBinMissing', 'gsnHistogramBinWidth',
+                'gsnHistogramClassIntervals', 'gsnHistogramCompare',
+                'gsnHistogramComputePercentages',
+                'gsnHistogramComputePercentagesNoMissing',
+                'gsnHistogramDiscreteBinValues', 'gsnHistogramDiscreteClassValues',
+                'gsnHistogramHorizontal', 'gsnHistogramMinMaxBinsOn',
+                'gsnHistogramNumberOfBins', 'gsnHistogramPercentSign',
+                'gsnHistogramSelectNiceIntervals', 'gsnLeftString',
+                'gsnLeftStringFontColor', 'gsnLeftStringFontHeightF',
+                'gsnLeftStringFuncCode', 'gsnLeftStringOrthogonalPosF',
+                'gsnLeftStringParallelPosF', 'gsnMajorLatSpacing',
+                'gsnMajorLonSpacing', 'gsnMaskLambertConformal',
+                'gsnMaskLambertConformalOutlineOn', 'gsnMaximize',
+                'gsnMinorLatSpacing', 'gsnMinorLonSpacing', 'gsnPanelBottom',
+                'gsnPanelCenter', 'gsnPanelDebug', 'gsnPanelFigureStrings',
+                'gsnPanelFigureStringsBackgroundFillColor',
+                'gsnPanelFigureStringsFontHeightF', 'gsnPanelFigureStringsJust',
+                'gsnPanelFigureStringsPerimOn', 'gsnPanelLabelBar', 'gsnPanelLeft',
+                'gsnPanelMainFont', 'gsnPanelMainFontColor',
+                'gsnPanelMainFontHeightF', 'gsnPanelMainString', 'gsnPanelRight',
+                'gsnPanelRowSpec', 'gsnPanelScalePlotIndex', 'gsnPanelTop',
+                'gsnPanelXF', 'gsnPanelXWhiteSpacePercent', 'gsnPanelYF',
+                'gsnPanelYWhiteSpacePercent', 'gsnPaperHeight', 'gsnPaperMargin',
+                'gsnPaperOrientation', 'gsnPaperWidth', 'gsnPolar',
+                'gsnPolarLabelDistance', 'gsnPolarLabelFont',
+                'gsnPolarLabelFontHeightF', 'gsnPolarLabelSpacing', 'gsnPolarTime',
+                'gsnPolarUT', 'gsnRightString', 'gsnRightStringFontColor',
+                'gsnRightStringFontHeightF', 'gsnRightStringFuncCode',
+                'gsnRightStringOrthogonalPosF', 'gsnRightStringParallelPosF',
+                'gsnScalarContour', 'gsnScale', 'gsnShape', 'gsnSpreadColorEnd',
+                'gsnSpreadColorStart', 'gsnSpreadColors', 'gsnStringFont',
+                'gsnStringFontColor', 'gsnStringFontHeightF', 'gsnStringFuncCode',
+                'gsnTickMarksOn', 'gsnXAxisIrregular2Linear', 'gsnXAxisIrregular2Log',
+                'gsnXRefLine', 'gsnXRefLineColor', 'gsnXRefLineDashPattern',
+                'gsnXRefLineThicknessF', 'gsnXYAboveFillColors', 'gsnXYBarChart',
+                'gsnXYBarChartBarWidth', 'gsnXYBarChartColors',
+                'gsnXYBarChartColors2', 'gsnXYBarChartFillDotSizeF',
+                'gsnXYBarChartFillLineThicknessF', 'gsnXYBarChartFillOpacityF',
+                'gsnXYBarChartFillScaleF', 'gsnXYBarChartOutlineOnly',
+                'gsnXYBarChartOutlineThicknessF', 'gsnXYBarChartPatterns',
+                'gsnXYBarChartPatterns2', 'gsnXYBelowFillColors', 'gsnXYFillColors',
+                'gsnXYFillOpacities', 'gsnXYLeftFillColors', 'gsnXYRightFillColors',
+                'gsnYAxisIrregular2Linear', 'gsnYAxisIrregular2Log', 'gsnYRefLine',
+                'gsnYRefLineColor', 'gsnYRefLineColors', 'gsnYRefLineDashPattern',
+                'gsnYRefLineDashPatterns', 'gsnYRefLineThicknessF',
+                'gsnYRefLineThicknesses', 'gsnZonalMean', 'gsnZonalMeanXMaxF',
+                'gsnZonalMeanXMinF', 'gsnZonalMeanYRefLine', 'lbAutoManage',
+                'lbBottomMarginF', 'lbBoxCount', 'lbBoxEndCapStyle', 'lbBoxFractions',
+                'lbBoxLineColor', 'lbBoxLineDashPattern', 'lbBoxLineDashSegLenF',
+                'lbBoxLineThicknessF', 'lbBoxLinesOn', 'lbBoxMajorExtentF',
+                'lbBoxMinorExtentF', 'lbBoxSeparatorLinesOn', 'lbBoxSizing',
+                'lbFillBackground', 'lbFillColor', 'lbFillColors', 'lbFillDotSizeF',
+                'lbFillLineThicknessF', 'lbFillPattern', 'lbFillPatterns',
+                'lbFillScaleF', 'lbFillScales', 'lbJustification', 'lbLabelAlignment',
+                'lbLabelAngleF', 'lbLabelAutoStride', 'lbLabelBarOn',
+                'lbLabelConstantSpacingF', 'lbLabelDirection', 'lbLabelFont',
+                'lbLabelFontAspectF', 'lbLabelFontColor', 'lbLabelFontHeightF',
+                'lbLabelFontQuality', 'lbLabelFontThicknessF', 'lbLabelFuncCode',
+                'lbLabelJust', 'lbLabelOffsetF', 'lbLabelPosition', 'lbLabelStride',
+                'lbLabelStrings', 'lbLabelsOn', 'lbLeftMarginF', 'lbMaxLabelLenF',
+                'lbMinLabelSpacingF', 'lbMonoFillColor', 'lbMonoFillPattern',
+                'lbMonoFillScale', 'lbOrientation', 'lbPerimColor',
+                'lbPerimDashPattern', 'lbPerimDashSegLenF', 'lbPerimFill',
+                'lbPerimFillColor', 'lbPerimOn', 'lbPerimThicknessF',
+                'lbRasterFillOn', 'lbRightMarginF', 'lbTitleAngleF',
+                'lbTitleConstantSpacingF', 'lbTitleDirection', 'lbTitleExtentF',
+                'lbTitleFont', 'lbTitleFontAspectF', 'lbTitleFontColor',
+                'lbTitleFontHeightF', 'lbTitleFontQuality', 'lbTitleFontThicknessF',
+                'lbTitleFuncCode', 'lbTitleJust', 'lbTitleOffsetF', 'lbTitleOn',
+                'lbTitlePosition', 'lbTitleString', 'lbTopMarginF', 'lgAutoManage',
+                'lgBottomMarginF', 'lgBoxBackground', 'lgBoxLineColor',
+                'lgBoxLineDashPattern', 'lgBoxLineDashSegLenF', 'lgBoxLineThicknessF',
+                'lgBoxLinesOn', 'lgBoxMajorExtentF', 'lgBoxMinorExtentF',
+                'lgDashIndex', 'lgDashIndexes', 'lgItemCount', 'lgItemOrder',
+                'lgItemPlacement', 'lgItemPositions', 'lgItemType', 'lgItemTypes',
+                'lgJustification', 'lgLabelAlignment', 'lgLabelAngleF',
+                'lgLabelAutoStride', 'lgLabelConstantSpacingF', 'lgLabelDirection',
+                'lgLabelFont', 'lgLabelFontAspectF', 'lgLabelFontColor',
+                'lgLabelFontHeightF', 'lgLabelFontQuality', 'lgLabelFontThicknessF',
+                'lgLabelFuncCode', 'lgLabelJust', 'lgLabelOffsetF', 'lgLabelPosition',
+                'lgLabelStride', 'lgLabelStrings', 'lgLabelsOn', 'lgLeftMarginF',
+                'lgLegendOn', 'lgLineColor', 'lgLineColors', 'lgLineDashSegLenF',
+                'lgLineDashSegLens', 'lgLineLabelConstantSpacingF', 'lgLineLabelFont',
+                'lgLineLabelFontAspectF', 'lgLineLabelFontColor',
+                'lgLineLabelFontColors', 'lgLineLabelFontHeightF',
+                'lgLineLabelFontHeights', 'lgLineLabelFontQuality',
+                'lgLineLabelFontThicknessF', 'lgLineLabelFuncCode',
+                'lgLineLabelStrings', 'lgLineLabelsOn', 'lgLineThicknessF',
+                'lgLineThicknesses', 'lgMarkerColor', 'lgMarkerColors',
+                'lgMarkerIndex', 'lgMarkerIndexes', 'lgMarkerSizeF', 'lgMarkerSizes',
+                'lgMarkerThicknessF', 'lgMarkerThicknesses', 'lgMonoDashIndex',
+                'lgMonoItemType', 'lgMonoLineColor', 'lgMonoLineDashSegLen',
+                'lgMonoLineLabelFontColor', 'lgMonoLineLabelFontHeight',
+                'lgMonoLineThickness', 'lgMonoMarkerColor', 'lgMonoMarkerIndex',
+                'lgMonoMarkerSize', 'lgMonoMarkerThickness', 'lgOrientation',
+                'lgPerimColor', 'lgPerimDashPattern', 'lgPerimDashSegLenF',
+                'lgPerimFill', 'lgPerimFillColor', 'lgPerimOn', 'lgPerimThicknessF',
+                'lgRightMarginF', 'lgTitleAngleF', 'lgTitleConstantSpacingF',
+                'lgTitleDirection', 'lgTitleExtentF', 'lgTitleFont',
+                'lgTitleFontAspectF', 'lgTitleFontColor', 'lgTitleFontHeightF',
+                'lgTitleFontQuality', 'lgTitleFontThicknessF', 'lgTitleFuncCode',
+                'lgTitleJust', 'lgTitleOffsetF', 'lgTitleOn', 'lgTitlePosition',
+                'lgTitleString', 'lgTopMarginF', 'mpAreaGroupCount',
+                'mpAreaMaskingOn', 'mpAreaNames', 'mpAreaTypes', 'mpBottomAngleF',
+                'mpBottomMapPosF', 'mpBottomNDCF', 'mpBottomNPCF',
+                'mpBottomPointLatF', 'mpBottomPointLonF', 'mpBottomWindowF',
+                'mpCenterLatF', 'mpCenterLonF', 'mpCenterRotF', 'mpCountyLineColor',
+                'mpCountyLineDashPattern', 'mpCountyLineDashSegLenF',
+                'mpCountyLineThicknessF', 'mpDataBaseVersion', 'mpDataResolution',
+                'mpDataSetName', 'mpDefaultFillColor', 'mpDefaultFillPattern',
+                'mpDefaultFillScaleF', 'mpDynamicAreaGroups', 'mpEllipticalBoundary',
+                'mpFillAreaSpecifiers', 'mpFillBoundarySets', 'mpFillColor',
+                'mpFillColors', 'mpFillColors-default', 'mpFillDotSizeF',
+                'mpFillDrawOrder', 'mpFillOn', 'mpFillPatternBackground',
+                'mpFillPattern', 'mpFillPatterns', 'mpFillPatterns-default',
+                'mpFillScaleF', 'mpFillScales', 'mpFillScales-default',
+                'mpFixedAreaGroups', 'mpGeophysicalLineColor',
+                'mpGeophysicalLineDashPattern', 'mpGeophysicalLineDashSegLenF',
+                'mpGeophysicalLineThicknessF', 'mpGreatCircleLinesOn',
+                'mpGridAndLimbDrawOrder', 'mpGridAndLimbOn', 'mpGridLatSpacingF',
+                'mpGridLineColor', 'mpGridLineDashPattern', 'mpGridLineDashSegLenF',
+                'mpGridLineThicknessF', 'mpGridLonSpacingF', 'mpGridMaskMode',
+                'mpGridMaxLatF', 'mpGridPolarLonSpacingF', 'mpGridSpacingF',
+                'mpInlandWaterFillColor', 'mpInlandWaterFillPattern',
+                'mpInlandWaterFillScaleF', 'mpLabelDrawOrder', 'mpLabelFontColor',
+                'mpLabelFontHeightF', 'mpLabelsOn', 'mpLambertMeridianF',
+                'mpLambertParallel1F', 'mpLambertParallel2F', 'mpLandFillColor',
+                'mpLandFillPattern', 'mpLandFillScaleF', 'mpLeftAngleF',
+                'mpLeftCornerLatF', 'mpLeftCornerLonF', 'mpLeftMapPosF',
+                'mpLeftNDCF', 'mpLeftNPCF', 'mpLeftPointLatF',
+                'mpLeftPointLonF', 'mpLeftWindowF', 'mpLimbLineColor',
+                'mpLimbLineDashPattern', 'mpLimbLineDashSegLenF',
+                'mpLimbLineThicknessF', 'mpLimitMode', 'mpMaskAreaSpecifiers',
+                'mpMaskOutlineSpecifiers', 'mpMaxLatF', 'mpMaxLonF',
+                'mpMinLatF', 'mpMinLonF', 'mpMonoFillColor', 'mpMonoFillPattern',
+                'mpMonoFillScale', 'mpNationalLineColor', 'mpNationalLineDashPattern',
+                'mpNationalLineThicknessF', 'mpOceanFillColor', 'mpOceanFillPattern',
+                'mpOceanFillScaleF', 'mpOutlineBoundarySets', 'mpOutlineDrawOrder',
+                'mpOutlineMaskingOn', 'mpOutlineOn', 'mpOutlineSpecifiers',
+                'mpPerimDrawOrder', 'mpPerimLineColor', 'mpPerimLineDashPattern',
+                'mpPerimLineDashSegLenF', 'mpPerimLineThicknessF', 'mpPerimOn',
+                'mpPolyMode', 'mpProjection', 'mpProvincialLineColor',
+                'mpProvincialLineDashPattern', 'mpProvincialLineDashSegLenF',
+                'mpProvincialLineThicknessF', 'mpRelativeCenterLat',
+                'mpRelativeCenterLon', 'mpRightAngleF', 'mpRightCornerLatF',
+                'mpRightCornerLonF', 'mpRightMapPosF', 'mpRightNDCF',
+                'mpRightNPCF', 'mpRightPointLatF', 'mpRightPointLonF',
+                'mpRightWindowF', 'mpSatelliteAngle1F', 'mpSatelliteAngle2F',
+                'mpSatelliteDistF', 'mpShapeMode', 'mpSpecifiedFillColors',
+                'mpSpecifiedFillDirectIndexing', 'mpSpecifiedFillPatterns',
+                'mpSpecifiedFillPriority', 'mpSpecifiedFillScales',
+                'mpTopAngleF', 'mpTopMapPosF', 'mpTopNDCF', 'mpTopNPCF',
+                'mpTopPointLatF', 'mpTopPointLonF', 'mpTopWindowF',
+                'mpUSStateLineColor', 'mpUSStateLineDashPattern',
+                'mpUSStateLineDashSegLenF', 'mpUSStateLineThicknessF',
+                'pmAnnoManagers', 'pmAnnoViews', 'pmLabelBarDisplayMode',
+                'pmLabelBarHeightF', 'pmLabelBarKeepAspect', 'pmLabelBarOrthogonalPosF',
+                'pmLabelBarParallelPosF', 'pmLabelBarSide', 'pmLabelBarWidthF',
+                'pmLabelBarZone', 'pmLegendDisplayMode', 'pmLegendHeightF',
+                'pmLegendKeepAspect', 'pmLegendOrthogonalPosF',
+                'pmLegendParallelPosF', 'pmLegendSide', 'pmLegendWidthF',
+                'pmLegendZone', 'pmOverlaySequenceIds', 'pmTickMarkDisplayMode',
+                'pmTickMarkZone', 'pmTitleDisplayMode', 'pmTitleZone',
+                'prGraphicStyle', 'prPolyType', 'prXArray', 'prYArray',
+                'sfCopyData', 'sfDataArray', 'sfDataMaxV', 'sfDataMinV',
+                'sfElementNodes', 'sfExchangeDimensions', 'sfFirstNodeIndex',
+                'sfMissingValueV', 'sfXArray', 'sfXCActualEndF', 'sfXCActualStartF',
+                'sfXCEndIndex', 'sfXCEndSubsetV', 'sfXCEndV', 'sfXCStartIndex',
+                'sfXCStartSubsetV', 'sfXCStartV', 'sfXCStride', 'sfXCellBounds',
+                'sfYArray', 'sfYCActualEndF', 'sfYCActualStartF', 'sfYCEndIndex',
+                'sfYCEndSubsetV', 'sfYCEndV', 'sfYCStartIndex', 'sfYCStartSubsetV',
+                'sfYCStartV', 'sfYCStride', 'sfYCellBounds', 'stArrowLengthF',
+                'stArrowStride', 'stCrossoverCheckCount',
+                'stExplicitLabelBarLabelsOn', 'stLabelBarEndLabelsOn',
+                'stLabelFormat', 'stLengthCheckCount', 'stLevelColors',
+                'stLevelCount', 'stLevelPalette', 'stLevelSelectionMode',
+                'stLevelSpacingF', 'stLevels', 'stLineColor', 'stLineOpacityF',
+                'stLineStartStride', 'stLineThicknessF', 'stMapDirection',
+                'stMaxLevelCount', 'stMaxLevelValF', 'stMinArrowSpacingF',
+                'stMinDistanceF', 'stMinLevelValF', 'stMinLineSpacingF',
+                'stMinStepFactorF', 'stMonoLineColor', 'stNoDataLabelOn',
+                'stNoDataLabelString', 'stScalarFieldData', 'stScalarMissingValColor',
+                'stSpanLevelPalette', 'stStepSizeF', 'stStreamlineDrawOrder',
+                'stUseScalarArray', 'stVectorFieldData', 'stZeroFLabelAngleF',
+                'stZeroFLabelBackgroundColor', 'stZeroFLabelConstantSpacingF',
+                'stZeroFLabelFont', 'stZeroFLabelFontAspectF',
+                'stZeroFLabelFontColor', 'stZeroFLabelFontHeightF',
+                'stZeroFLabelFontQuality', 'stZeroFLabelFontThicknessF',
+                'stZeroFLabelFuncCode', 'stZeroFLabelJust', 'stZeroFLabelOn',
+                'stZeroFLabelOrthogonalPosF', 'stZeroFLabelParallelPosF',
+                'stZeroFLabelPerimColor', 'stZeroFLabelPerimOn',
+                'stZeroFLabelPerimSpaceF', 'stZeroFLabelPerimThicknessF',
+                'stZeroFLabelSide', 'stZeroFLabelString', 'stZeroFLabelTextDirection',
+                'stZeroFLabelZone', 'tfDoNDCOverlay', 'tfPlotManagerOn',
+                'tfPolyDrawList', 'tfPolyDrawOrder', 'tiDeltaF', 'tiMainAngleF',
+                'tiMainConstantSpacingF', 'tiMainDirection', 'tiMainFont',
+                'tiMainFontAspectF', 'tiMainFontColor', 'tiMainFontHeightF',
+                'tiMainFontQuality', 'tiMainFontThicknessF', 'tiMainFuncCode',
+                'tiMainJust', 'tiMainOffsetXF', 'tiMainOffsetYF', 'tiMainOn',
+                'tiMainPosition', 'tiMainSide', 'tiMainString', 'tiUseMainAttributes',
+                'tiXAxisAngleF', 'tiXAxisConstantSpacingF', 'tiXAxisDirection',
+                'tiXAxisFont', 'tiXAxisFontAspectF', 'tiXAxisFontColor',
+                'tiXAxisFontHeightF', 'tiXAxisFontQuality', 'tiXAxisFontThicknessF',
+                'tiXAxisFuncCode', 'tiXAxisJust', 'tiXAxisOffsetXF',
+                'tiXAxisOffsetYF', 'tiXAxisOn', 'tiXAxisPosition', 'tiXAxisSide',
+                'tiXAxisString', 'tiYAxisAngleF', 'tiYAxisConstantSpacingF',
+                'tiYAxisDirection', 'tiYAxisFont', 'tiYAxisFontAspectF',
+                'tiYAxisFontColor', 'tiYAxisFontHeightF', 'tiYAxisFontQuality',
+                'tiYAxisFontThicknessF', 'tiYAxisFuncCode', 'tiYAxisJust',
+                'tiYAxisOffsetXF', 'tiYAxisOffsetYF', 'tiYAxisOn', 'tiYAxisPosition',
+                'tiYAxisSide', 'tiYAxisString', 'tmBorderLineColor',
+                'tmBorderThicknessF', 'tmEqualizeXYSizes', 'tmLabelAutoStride',
+                'tmSciNoteCutoff', 'tmXBAutoPrecision', 'tmXBBorderOn',
+                'tmXBDataLeftF', 'tmXBDataRightF', 'tmXBFormat', 'tmXBIrrTensionF',
+                'tmXBIrregularPoints', 'tmXBLabelAngleF', 'tmXBLabelConstantSpacingF',
+                'tmXBLabelDeltaF', 'tmXBLabelDirection', 'tmXBLabelFont',
+                'tmXBLabelFontAspectF', 'tmXBLabelFontColor', 'tmXBLabelFontHeightF',
+                'tmXBLabelFontQuality', 'tmXBLabelFontThicknessF',
+                'tmXBLabelFuncCode', 'tmXBLabelJust', 'tmXBLabelStride', 'tmXBLabels',
+                'tmXBLabelsOn', 'tmXBMajorLengthF', 'tmXBMajorLineColor',
+                'tmXBMajorOutwardLengthF', 'tmXBMajorThicknessF', 'tmXBMaxLabelLenF',
+                'tmXBMaxTicks', 'tmXBMinLabelSpacingF', 'tmXBMinorLengthF',
+                'tmXBMinorLineColor', 'tmXBMinorOn', 'tmXBMinorOutwardLengthF',
+                'tmXBMinorPerMajor', 'tmXBMinorThicknessF', 'tmXBMinorValues',
+                'tmXBMode', 'tmXBOn', 'tmXBPrecision', 'tmXBStyle', 'tmXBTickEndF',
+                'tmXBTickSpacingF', 'tmXBTickStartF', 'tmXBValues', 'tmXMajorGrid',
+                'tmXMajorGridLineColor', 'tmXMajorGridLineDashPattern',
+                'tmXMajorGridThicknessF', 'tmXMinorGrid', 'tmXMinorGridLineColor',
+                'tmXMinorGridLineDashPattern', 'tmXMinorGridThicknessF',
+                'tmXTAutoPrecision', 'tmXTBorderOn', 'tmXTDataLeftF',
+                'tmXTDataRightF', 'tmXTFormat', 'tmXTIrrTensionF',
+                'tmXTIrregularPoints', 'tmXTLabelAngleF', 'tmXTLabelConstantSpacingF',
+                'tmXTLabelDeltaF', 'tmXTLabelDirection', 'tmXTLabelFont',
+                'tmXTLabelFontAspectF', 'tmXTLabelFontColor', 'tmXTLabelFontHeightF',
+                'tmXTLabelFontQuality', 'tmXTLabelFontThicknessF',
+                'tmXTLabelFuncCode', 'tmXTLabelJust', 'tmXTLabelStride', 'tmXTLabels',
+                'tmXTLabelsOn', 'tmXTMajorLengthF', 'tmXTMajorLineColor',
+                'tmXTMajorOutwardLengthF', 'tmXTMajorThicknessF', 'tmXTMaxLabelLenF',
+                'tmXTMaxTicks', 'tmXTMinLabelSpacingF', 'tmXTMinorLengthF',
+                'tmXTMinorLineColor', 'tmXTMinorOn', 'tmXTMinorOutwardLengthF',
+                'tmXTMinorPerMajor', 'tmXTMinorThicknessF', 'tmXTMinorValues',
+                'tmXTMode', 'tmXTOn', 'tmXTPrecision', 'tmXTStyle', 'tmXTTickEndF',
+                'tmXTTickSpacingF', 'tmXTTickStartF', 'tmXTValues', 'tmXUseBottom',
+                'tmYLAutoPrecision', 'tmYLBorderOn', 'tmYLDataBottomF',
+                'tmYLDataTopF', 'tmYLFormat', 'tmYLIrrTensionF',
+                'tmYLIrregularPoints', 'tmYLLabelAngleF', 'tmYLLabelConstantSpacingF',
+                'tmYLLabelDeltaF', 'tmYLLabelDirection', 'tmYLLabelFont',
+                'tmYLLabelFontAspectF', 'tmYLLabelFontColor', 'tmYLLabelFontHeightF',
+                'tmYLLabelFontQuality', 'tmYLLabelFontThicknessF',
+                'tmYLLabelFuncCode', 'tmYLLabelJust', 'tmYLLabelStride', 'tmYLLabels',
+                'tmYLLabelsOn', 'tmYLMajorLengthF', 'tmYLMajorLineColor',
+                'tmYLMajorOutwardLengthF', 'tmYLMajorThicknessF', 'tmYLMaxLabelLenF',
+                'tmYLMaxTicks', 'tmYLMinLabelSpacingF', 'tmYLMinorLengthF',
+                'tmYLMinorLineColor', 'tmYLMinorOn', 'tmYLMinorOutwardLengthF',
+                'tmYLMinorPerMajor', 'tmYLMinorThicknessF', 'tmYLMinorValues',
+                'tmYLMode', 'tmYLOn', 'tmYLPrecision', 'tmYLStyle', 'tmYLTickEndF',
+                'tmYLTickSpacingF', 'tmYLTickStartF', 'tmYLValues', 'tmYMajorGrid',
+                'tmYMajorGridLineColor', 'tmYMajorGridLineDashPattern',
+                'tmYMajorGridThicknessF', 'tmYMinorGrid', 'tmYMinorGridLineColor',
+                'tmYMinorGridLineDashPattern', 'tmYMinorGridThicknessF',
+                'tmYRAutoPrecision', 'tmYRBorderOn', 'tmYRDataBottomF',
+                'tmYRDataTopF', 'tmYRFormat', 'tmYRIrrTensionF',
+                'tmYRIrregularPoints', 'tmYRLabelAngleF', 'tmYRLabelConstantSpacingF',
+                'tmYRLabelDeltaF', 'tmYRLabelDirection', 'tmYRLabelFont',
+                'tmYRLabelFontAspectF', 'tmYRLabelFontColor', 'tmYRLabelFontHeightF',
+                'tmYRLabelFontQuality', 'tmYRLabelFontThicknessF',
+                'tmYRLabelFuncCode', 'tmYRLabelJust', 'tmYRLabelStride', 'tmYRLabels',
+                'tmYRLabelsOn', 'tmYRMajorLengthF', 'tmYRMajorLineColor',
+                'tmYRMajorOutwardLengthF', 'tmYRMajorThicknessF', 'tmYRMaxLabelLenF',
+                'tmYRMaxTicks', 'tmYRMinLabelSpacingF', 'tmYRMinorLengthF',
+                'tmYRMinorLineColor', 'tmYRMinorOn', 'tmYRMinorOutwardLengthF',
+                'tmYRMinorPerMajor', 'tmYRMinorThicknessF', 'tmYRMinorValues',
+                'tmYRMode', 'tmYROn', 'tmYRPrecision', 'tmYRStyle', 'tmYRTickEndF',
+                'tmYRTickSpacingF', 'tmYRTickStartF', 'tmYRValues', 'tmYUseLeft',
+                'trGridType', 'trLineInterpolationOn',
+                'trXAxisType', 'trXCoordPoints', 'trXInterPoints', 'trXLog',
+                'trXMaxF', 'trXMinF', 'trXReverse', 'trXSamples', 'trXTensionF',
+                'trYAxisType', 'trYCoordPoints', 'trYInterPoints', 'trYLog',
+                'trYMaxF', 'trYMinF', 'trYReverse', 'trYSamples', 'trYTensionF',
+                'txAngleF', 'txBackgroundFillColor', 'txConstantSpacingF', 'txDirection',
+                'txFont', 'HLU-Fonts', 'txFontAspectF', 'txFontColor',
+                'txFontHeightF', 'txFontOpacityF', 'txFontQuality',
+                'txFontThicknessF', 'txFuncCode', 'txJust', 'txPerimColor',
+                'txPerimDashLengthF', 'txPerimDashPattern', 'txPerimOn',
+                'txPerimSpaceF', 'txPerimThicknessF', 'txPosXF', 'txPosYF',
+                'txString', 'vcExplicitLabelBarLabelsOn', 'vcFillArrowEdgeColor',
+                'vcFillArrowEdgeThicknessF', 'vcFillArrowFillColor',
+                'vcFillArrowHeadInteriorXF', 'vcFillArrowHeadMinFracXF',
+                'vcFillArrowHeadMinFracYF', 'vcFillArrowHeadXF', 'vcFillArrowHeadYF',
+                'vcFillArrowMinFracWidthF', 'vcFillArrowWidthF', 'vcFillArrowsOn',
+                'vcFillOverEdge', 'vcGlyphOpacityF', 'vcGlyphStyle',
+                'vcLabelBarEndLabelsOn', 'vcLabelFontColor', 'vcLabelFontHeightF',
+                'vcLabelsOn', 'vcLabelsUseVectorColor', 'vcLevelColors',
+                'vcLevelCount', 'vcLevelPalette', 'vcLevelSelectionMode',
+                'vcLevelSpacingF', 'vcLevels', 'vcLineArrowColor',
+                'vcLineArrowHeadMaxSizeF', 'vcLineArrowHeadMinSizeF',
+                'vcLineArrowThicknessF', 'vcMagnitudeFormat',
+                'vcMagnitudeScaleFactorF', 'vcMagnitudeScaleValueF',
+                'vcMagnitudeScalingMode', 'vcMapDirection', 'vcMaxLevelCount',
+                'vcMaxLevelValF', 'vcMaxMagnitudeF', 'vcMinAnnoAngleF',
+                'vcMinAnnoArrowAngleF', 'vcMinAnnoArrowEdgeColor',
+                'vcMinAnnoArrowFillColor', 'vcMinAnnoArrowLineColor',
+                'vcMinAnnoArrowMinOffsetF', 'vcMinAnnoArrowSpaceF',
+                'vcMinAnnoArrowUseVecColor', 'vcMinAnnoBackgroundColor',
+                'vcMinAnnoConstantSpacingF', 'vcMinAnnoExplicitMagnitudeF',
+                'vcMinAnnoFont', 'vcMinAnnoFontAspectF', 'vcMinAnnoFontColor',
+                'vcMinAnnoFontHeightF', 'vcMinAnnoFontQuality',
+                'vcMinAnnoFontThicknessF', 'vcMinAnnoFuncCode', 'vcMinAnnoJust',
+                'vcMinAnnoOn', 'vcMinAnnoOrientation', 'vcMinAnnoOrthogonalPosF',
+                'vcMinAnnoParallelPosF', 'vcMinAnnoPerimColor', 'vcMinAnnoPerimOn',
+                'vcMinAnnoPerimSpaceF', 'vcMinAnnoPerimThicknessF', 'vcMinAnnoSide',
+                'vcMinAnnoString1', 'vcMinAnnoString1On', 'vcMinAnnoString2',
+                'vcMinAnnoString2On', 'vcMinAnnoTextDirection', 'vcMinAnnoZone',
+                'vcMinDistanceF', 'vcMinFracLengthF', 'vcMinLevelValF',
+                'vcMinMagnitudeF', 'vcMonoFillArrowEdgeColor',
+                'vcMonoFillArrowFillColor', 'vcMonoLineArrowColor',
+                'vcMonoWindBarbColor', 'vcNoDataLabelOn', 'vcNoDataLabelString',
+                'vcPositionMode', 'vcRefAnnoAngleF', 'vcRefAnnoArrowAngleF',
+                'vcRefAnnoArrowEdgeColor', 'vcRefAnnoArrowFillColor',
+                'vcRefAnnoArrowLineColor', 'vcRefAnnoArrowMinOffsetF',
+                'vcRefAnnoArrowSpaceF', 'vcRefAnnoArrowUseVecColor',
+                'vcRefAnnoBackgroundColor', 'vcRefAnnoConstantSpacingF',
+                'vcRefAnnoExplicitMagnitudeF', 'vcRefAnnoFont',
+                'vcRefAnnoFontAspectF', 'vcRefAnnoFontColor', 'vcRefAnnoFontHeightF',
+                'vcRefAnnoFontQuality', 'vcRefAnnoFontThicknessF',
+                'vcRefAnnoFuncCode', 'vcRefAnnoJust', 'vcRefAnnoOn',
+                'vcRefAnnoOrientation', 'vcRefAnnoOrthogonalPosF',
+                'vcRefAnnoParallelPosF', 'vcRefAnnoPerimColor', 'vcRefAnnoPerimOn',
+                'vcRefAnnoPerimSpaceF', 'vcRefAnnoPerimThicknessF', 'vcRefAnnoSide',
+                'vcRefAnnoString1', 'vcRefAnnoString1On', 'vcRefAnnoString2',
+                'vcRefAnnoString2On', 'vcRefAnnoTextDirection', 'vcRefAnnoZone',
+                'vcRefLengthF', 'vcRefMagnitudeF', 'vcScalarFieldData',
+                'vcScalarMissingValColor', 'vcScalarValueFormat',
+                'vcScalarValueScaleFactorF', 'vcScalarValueScaleValueF',
+                'vcScalarValueScalingMode', 'vcSpanLevelPalette', 'vcUseRefAnnoRes',
+                'vcUseScalarArray', 'vcVectorDrawOrder', 'vcVectorFieldData',
+                'vcWindBarbCalmCircleSizeF', 'vcWindBarbColor',
+                'vcWindBarbLineThicknessF', 'vcWindBarbScaleFactorF',
+                'vcWindBarbTickAngleF', 'vcWindBarbTickLengthF',
+                'vcWindBarbTickSpacingF', 'vcZeroFLabelAngleF',
+                'vcZeroFLabelBackgroundColor', 'vcZeroFLabelConstantSpacingF',
+                'vcZeroFLabelFont', 'vcZeroFLabelFontAspectF',
+                'vcZeroFLabelFontColor', 'vcZeroFLabelFontHeightF',
+                'vcZeroFLabelFontQuality', 'vcZeroFLabelFontThicknessF',
+                'vcZeroFLabelFuncCode', 'vcZeroFLabelJust', 'vcZeroFLabelOn',
+                'vcZeroFLabelOrthogonalPosF', 'vcZeroFLabelParallelPosF',
+                'vcZeroFLabelPerimColor', 'vcZeroFLabelPerimOn',
+                'vcZeroFLabelPerimSpaceF', 'vcZeroFLabelPerimThicknessF',
+                'vcZeroFLabelSide', 'vcZeroFLabelString', 'vcZeroFLabelTextDirection',
+                'vcZeroFLabelZone', 'vfCopyData', 'vfDataArray',
+                'vfExchangeDimensions', 'vfExchangeUVData', 'vfMagMaxV', 'vfMagMinV',
+                'vfMissingUValueV', 'vfMissingVValueV', 'vfPolarData',
+                'vfSingleMissingValue', 'vfUDataArray', 'vfUMaxV', 'vfUMinV',
+                'vfVDataArray', 'vfVMaxV', 'vfVMinV', 'vfXArray', 'vfXCActualEndF',
+                'vfXCActualStartF', 'vfXCEndIndex', 'vfXCEndSubsetV', 'vfXCEndV',
+                'vfXCStartIndex', 'vfXCStartSubsetV', 'vfXCStartV', 'vfXCStride',
+                'vfYArray', 'vfYCActualEndF', 'vfYCActualStartF', 'vfYCEndIndex',
+                'vfYCEndSubsetV', 'vfYCEndV', 'vfYCStartIndex', 'vfYCStartSubsetV',
+                'vfYCStartV', 'vfYCStride', 'vpAnnoManagerId', 'vpClipOn',
+                'vpHeightF', 'vpKeepAspect', 'vpOn', 'vpUseSegments', 'vpWidthF',
+                'vpXF', 'vpYF', 'wkAntiAlias', 'wkBackgroundColor', 'wkBackgroundOpacityF',
+                'wkColorMapLen', 'wkColorMap', 'wkColorModel', 'wkDashTableLength',
+                'wkDefGraphicStyleId', 'wkDeviceLowerX', 'wkDeviceLowerY',
+                'wkDeviceUpperX', 'wkDeviceUpperY', 'wkFileName', 'wkFillTableLength',
+                'wkForegroundColor', 'wkFormat', 'wkFullBackground', 'wkGksWorkId',
+                'wkHeight', 'wkMarkerTableLength', 'wkMetaName', 'wkOrientation',
+                'wkPDFFileName', 'wkPDFFormat', 'wkPDFResolution', 'wkPSFileName',
+                'wkPSFormat', 'wkPSResolution', 'wkPaperHeightF', 'wkPaperSize',
+                'wkPaperWidthF', 'wkPause', 'wkTopLevelViews', 'wkViews',
+                'wkVisualType', 'wkWidth', 'wkWindowId', 'wkXColorMode', 'wsCurrentSize',
+                'wsMaximumSize', 'wsThresholdSize', 'xyComputeXMax',
+                'xyComputeXMin', 'xyComputeYMax', 'xyComputeYMin', 'xyCoordData',
+                'xyCoordDataSpec', 'xyCurveDrawOrder', 'xyDashPattern',
+                'xyDashPatterns', 'xyExplicitLabels', 'xyExplicitLegendLabels',
+                'xyLabelMode', 'xyLineColor', 'xyLineColors', 'xyLineDashSegLenF',
+                'xyLineLabelConstantSpacingF', 'xyLineLabelFont',
+                'xyLineLabelFontAspectF', 'xyLineLabelFontColor',
+                'xyLineLabelFontColors', 'xyLineLabelFontHeightF',
+                'xyLineLabelFontQuality', 'xyLineLabelFontThicknessF',
+                'xyLineLabelFuncCode', 'xyLineThicknessF', 'xyLineThicknesses',
+                'xyMarkLineMode', 'xyMarkLineModes', 'xyMarker', 'xyMarkerColor',
+                'xyMarkerColors', 'xyMarkerSizeF', 'xyMarkerSizes',
+                'xyMarkerThicknessF', 'xyMarkerThicknesses', 'xyMarkers',
+                'xyMonoDashPattern', 'xyMonoLineColor', 'xyMonoLineLabelFontColor',
+                'xyMonoLineThickness', 'xyMonoMarkLineMode', 'xyMonoMarker',
+                'xyMonoMarkerColor', 'xyMonoMarkerSize', 'xyMonoMarkerThickness',
+                'xyXIrrTensionF', 'xyXIrregularPoints', 'xyXStyle', 'xyYIrrTensionF',
+                'xyYIrregularPoints', 'xyYStyle'), prefix=r'\b'),
+             Name.Builtin),
+
+            # Booleans
+            (r'\.(True|False)\.', Name.Builtin),
+            # Comparing Operators
+            (r'\.(eq|ne|lt|le|gt|ge|not|and|or|xor)\.', Operator.Word),
+        ],
+
+        'strings': [
+            (r'(?s)"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
+        ],
+
+        'nums': [
+            (r'\d+(?![.e])(_[a-z]\w+)?', Number.Integer),
+            (r'[+-]?\d*\.\d+(e[-+]?\d+)?(_[a-z]\w+)?', Number.Float),
+            (r'[+-]?\d+\.\d*(e[-+]?\d+)?(_[a-z]\w+)?', Number.Float),
+        ],
+    }
diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/lexers/nimrod.py b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/nimrod.py
new file mode 100644
index 00000000..365a8dcc
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/nimrod.py
@@ -0,0 +1,199 @@
+"""
+    pygments.lexers.nimrod
+    ~~~~~~~~~~~~~~~~~~~~~~
+
+    Lexer for the Nim language (formerly known as Nimrod).
+
+    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, default, bygroups
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+    Number, Punctuation, Error
+
+__all__ = ['NimrodLexer']
+
+
+class NimrodLexer(RegexLexer):
+    """
+    For Nim source code.
+    """
+
+    name = 'Nimrod'
+    url = 'http://nim-lang.org/'
+    aliases = ['nimrod', 'nim']
+    filenames = ['*.nim', '*.nimrod']
+    mimetypes = ['text/x-nim']
+    version_added = '1.5'
+
+    flags = re.MULTILINE | re.IGNORECASE
+
+    def underscorize(words):
+        newWords = []
+        new = []
+        for word in words:
+            for ch in word:
+                new.append(ch)
+                new.append("_?")
+            newWords.append(''.join(new))
+            new = []
+        return "|".join(newWords)
+
+    keywords = [
+        'addr', 'and', 'as', 'asm', 'bind', 'block', 'break', 'case',
+        'cast', 'concept', 'const', 'continue', 'converter', 'defer', 'discard',
+        'distinct', 'div', 'do', 'elif', 'else', 'end', 'enum', 'except',
+        'export', 'finally', 'for', 'if', 'in', 'yield', 'interface',
+        'is', 'isnot', 'iterator', 'let', 'mixin', 'mod',
+        'not', 'notin', 'object', 'of', 'or', 'out', 'ptr', 'raise',
+        'ref', 'return', 'shl', 'shr', 'static', 'try',
+        'tuple', 'type', 'using', 'when', 'while', 'xor'
+    ]
+
+    keywordsPseudo = [
+        'nil', 'true', 'false'
+    ]
+
+    opWords = [
+        'and', 'or', 'not', 'xor', 'shl', 'shr', 'div', 'mod', 'in',
+        'notin', 'is', 'isnot'
+    ]
+
+    types = [
+        'int', 'int8', 'int16', 'int32', 'int64', 'float', 'float32', 'float64',
+        'bool', 'char', 'range', 'array', 'seq', 'set', 'string'
+    ]
+
+    tokens = {
+        'root': [
+            # Comments
+            (r'##\[', String.Doc, 'doccomment'),
+            (r'##.*$', String.Doc),
+            (r'#\[', Comment.Multiline, 'comment'),
+            (r'#.*$', Comment),
+
+            # Pragmas
+            (r'\{\.', String.Other, 'pragma'),
+
+            # Operators
+            (r'[*=><+\-/@$~&%!?|\\\[\]]', Operator),
+            (r'\.\.|\.|,|\[\.|\.\]|\{\.|\.\}|\(\.|\.\)|\{|\}|\(|\)|:|\^|`|;',
+             Punctuation),
+
+            # Case statement branch
+            (r'(\n\s*)(of)(\s)', bygroups(Text.Whitespace, Keyword,
+                                          Text.Whitespace), 'casebranch'),
+
+            # Strings
+            (r'(?:[\w]+)"', String, 'rdqs'),
+            (r'"""', String.Double, 'tdqs'),
+            ('"', String, 'dqs'),
+
+            # Char
+            ("'", String.Char, 'chars'),
+
+            # Keywords
+            (rf'({underscorize(opWords)})\b', Operator.Word),
+            (r'(proc|func|method|macro|template)(\s)(?![(\[\]])',
+             bygroups(Keyword, Text.Whitespace), 'funcname'),
+            (rf'({underscorize(keywords)})\b', Keyword),
+            (r'({})\b'.format(underscorize(['from', 'import', 'include', 'export'])),
+             Keyword.Namespace),
+            (r'(v_?a_?r)\b', Keyword.Declaration),
+            (rf'({underscorize(types)})\b', Name.Builtin),
+            (rf'({underscorize(keywordsPseudo)})\b', Keyword.Pseudo),
+
+            # Identifiers
+            (r'\b((?![_\d])\w)(((?!_)\w)|(_(?!_)\w))*', Name),
+
+            # Numbers
+            (r'[0-9][0-9_]*(?=([e.]|\'f(32|64)))',
+             Number.Float, ('float-suffix', 'float-number')),
+            (r'0x[a-f0-9][a-f0-9_]*', Number.Hex, 'int-suffix'),
+            (r'0b[01][01_]*', Number.Bin, 'int-suffix'),
+            (r'0o[0-7][0-7_]*', Number.Oct, 'int-suffix'),
+            (r'[0-9][0-9_]*', Number.Integer, 'int-suffix'),
+
+            # Whitespace
+            (r'\s+', Text.Whitespace),
+            (r'.+$', Error),
+        ],
+        'chars': [
+            (r'\\([\\abcefnrtvl"\']|x[a-f0-9]{2}|[0-9]{1,3})', String.Escape),
+            (r"'", String.Char, '#pop'),
+            (r".", String.Char)
+        ],
+        'strings': [
+            (r'(?|>=|>>|>|<=|<<|<|\+|-|=|/|\*|%|\+=|-=|!|@', Operator),
+            (r'\(|\)|\[|\]|,|\.\.\.|\.\.|\.|::|:', Punctuation),
+            (r'`\{[^`]*`\}', Text),  # Extern blocks won't be Lexed by Nit
+            (r'[\r\n\t ]+', Text),
+        ],
+    }
diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/lexers/nix.py b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/nix.py
new file mode 100644
index 00000000..3fa88c65
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/nix.py
@@ -0,0 +1,144 @@
+"""
+    pygments.lexers.nix
+    ~~~~~~~~~~~~~~~~~~~
+
+    Lexers for the NixOS Nix language.
+
+    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+    Number, Punctuation, Literal
+
+__all__ = ['NixLexer']
+
+
+class NixLexer(RegexLexer):
+    """
+    For the Nix language.
+    """
+
+    name = 'Nix'
+    url = 'http://nixos.org/nix/'
+    aliases = ['nixos', 'nix']
+    filenames = ['*.nix']
+    mimetypes = ['text/x-nix']
+    version_added = '2.0'
+
+    keywords = ['rec', 'with', 'let', 'in', 'inherit', 'assert', 'if',
+                'else', 'then', '...']
+    builtins = ['import', 'abort', 'baseNameOf', 'dirOf', 'isNull', 'builtins',
+                'map', 'removeAttrs', 'throw', 'toString', 'derivation']
+    operators = ['++', '+', '?', '.', '!', '//', '==', '/',
+                 '!=', '&&', '||', '->', '=', '<', '>', '*', '-']
+
+    punctuations = ["(", ")", "[", "]", ";", "{", "}", ":", ",", "@"]
+
+    tokens = {
+        'root': [
+            # comments starting with #
+            (r'#.*$', Comment.Single),
+
+            # multiline comments
+            (r'/\*', Comment.Multiline, 'comment'),
+
+            # whitespace
+            (r'\s+', Text),
+
+            # keywords
+            ('({})'.format('|'.join(re.escape(entry) + '\\b' for entry in keywords)), Keyword),
+
+            # highlight the builtins
+            ('({})'.format('|'.join(re.escape(entry) + '\\b' for entry in builtins)),
+             Name.Builtin),
+
+            (r'\b(true|false|null)\b', Name.Constant),
+
+            # floats
+            (r'-?(\d+\.\d*|\.\d+)([eE][-+]?\d+)?', Number.Float),
+
+            # integers
+            (r'-?[0-9]+', Number.Integer),
+
+            # paths
+            (r'[\w.+-]*(\/[\w.+-]+)+', Literal),
+            (r'~(\/[\w.+-]+)+', Literal),
+            (r'\<[\w.+-]+(\/[\w.+-]+)*\>', Literal),
+
+            # operators
+            ('({})'.format('|'.join(re.escape(entry) for entry in operators)),
+             Operator),
+
+            # word operators
+            (r'\b(or|and)\b', Operator.Word),
+
+            (r'\{', Punctuation, 'block'),
+
+            # punctuations
+            ('({})'.format('|'.join(re.escape(entry) for entry in punctuations)), Punctuation),
+
+            # strings
+            (r'"', String.Double, 'doublequote'),
+            (r"''", String.Multiline, 'multiline'),
+
+            # urls
+            (r'[a-zA-Z][a-zA-Z0-9\+\-\.]*\:[\w%/?:@&=+$,\\.!~*\'-]+', Literal),
+
+            # names of variables
+            (r'[\w-]+(?=\s*=)', String.Symbol),
+            (r'[a-zA-Z_][\w\'-]*', Text),
+
+            (r"\$\{", String.Interpol, 'antiquote'),
+        ],
+        'comment': [
+            (r'[^/*]+', Comment.Multiline),
+            (r'/\*', Comment.Multiline, '#push'),
+            (r'\*/', Comment.Multiline, '#pop'),
+            (r'[*/]', Comment.Multiline),
+        ],
+        'multiline': [
+            (r"''(\$|'|\\n|\\r|\\t|\\)", String.Escape),
+            (r"''", String.Multiline, '#pop'),
+            (r'\$\{', String.Interpol, 'antiquote'),
+            (r"[^'\$]+", String.Multiline),
+            (r"\$[^\{']", String.Multiline),
+            (r"'[^']", String.Multiline),
+            (r"\$(?=')", String.Multiline),
+        ],
+        'doublequote': [
+            (r'\\(\\|"|\$|n)', String.Escape),
+            (r'"', String.Double, '#pop'),
+            (r'\$\{', String.Interpol, 'antiquote'),
+            (r'[^"\\\$]+', String.Double),
+            (r'\$[^\{"]', String.Double),
+            (r'\$(?=")', String.Double),
+            (r'\\', String.Double),
+        ],
+        'antiquote': [
+            (r"\}", String.Interpol, '#pop'),
+            # TODO: we should probably escape also here ''${ \${
+            (r"\$\{", String.Interpol, '#push'),
+            include('root'),
+        ],
+        'block': [
+            (r"\}", Punctuation, '#pop'),
+            include('root'),
+        ],
+    }
+
+    def analyse_text(text):
+        rv = 0.0
+        # TODO: let/in
+        if re.search(r'import.+?<[^>]+>', text):
+            rv += 0.4
+        if re.search(r'mkDerivation\s+(\(|\{|rec)', text):
+            rv += 0.4
+        if re.search(r'=\s+mkIf\s+', text):
+            rv += 0.4
+        if re.search(r'\{[a-zA-Z,\s]+\}:', text):
+            rv += 0.1
+        return rv
diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/lexers/numbair.py b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/numbair.py
new file mode 100644
index 00000000..435863e1
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/numbair.py
@@ -0,0 +1,63 @@
+"""
+    pygments.lexers.numbair
+    ~~~~~~~~~~~~~~~~~~~~~~~
+
+    Lexer for other Numba Intermediate Representation.
+
+    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, include, bygroups, words
+from pygments.token import Whitespace, Name, String,  Punctuation, Keyword, \
+    Operator, Number
+
+__all__ = ["NumbaIRLexer"]
+
+class NumbaIRLexer(RegexLexer):
+    """
+    Lexer for Numba IR
+    """
+    name = 'Numba_IR'
+    url = "https://numba.readthedocs.io/en/stable/developer/architecture.html#stage-2-generate-the-numba-ir"
+    aliases = ['numba_ir', 'numbair']
+    filenames = ['*.numba_ir']
+    mimetypes = ['text/x-numba_ir', 'text/x-numbair']
+    version_added = '2.19'
+
+    identifier = r'\$[a-zA-Z0-9._]+'
+    fun_or_var = r'([a-zA-Z_]+[a-zA-Z0-9]*)'
+
+    tokens = {
+        'root' : [
+            (r'(label)(\ [0-9]+)(:)$',
+                bygroups(Keyword, Name.Label, Punctuation)),
+
+            (r'=', Operator),
+            include('whitespace'),
+            include('keyword'),
+
+            (identifier, Name.Variable),
+            (fun_or_var + r'(\()',
+                bygroups(Name.Function, Punctuation)),
+            (fun_or_var + r'(\=)',
+                bygroups(Name.Attribute, Punctuation)),
+            (fun_or_var, Name.Constant),
+            (r'[0-9]+', Number),
+
+            # 
+            (r'<[^>\n]*>', String),
+
+            (r'[=<>{}\[\]()*.,!\':]|x\b', Punctuation)
+        ],
+
+        'keyword':[
+            (words((
+                'del', 'jump', 'call', 'branch',
+            ), suffix=' '), Keyword),
+        ],
+
+        'whitespace': [
+            (r'(\n|\s)+', Whitespace),
+        ],
+    }
diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/lexers/oberon.py b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/oberon.py
new file mode 100644
index 00000000..61f3c2d2
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/oberon.py
@@ -0,0 +1,120 @@
+"""
+    pygments.lexers.oberon
+    ~~~~~~~~~~~~~~~~~~~~~~
+
+    Lexers for Oberon family languages.
+
+    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+    Number, Punctuation
+
+__all__ = ['ComponentPascalLexer']
+
+
+class ComponentPascalLexer(RegexLexer):
+    """
+    For Component Pascal source code.
+    """
+    name = 'Component Pascal'
+    aliases = ['componentpascal', 'cp']
+    filenames = ['*.cp', '*.cps']
+    mimetypes = ['text/x-component-pascal']
+    url = 'https://blackboxframework.org'
+    version_added = '2.1'
+
+    flags = re.MULTILINE | re.DOTALL
+
+    tokens = {
+        'root': [
+            include('whitespace'),
+            include('comments'),
+            include('punctuation'),
+            include('numliterals'),
+            include('strings'),
+            include('operators'),
+            include('builtins'),
+            include('identifiers'),
+        ],
+        'whitespace': [
+            (r'\n+', Text),  # blank lines
+            (r'\s+', Text),  # whitespace
+        ],
+        'comments': [
+            (r'\(\*([^$].*?)\*\)', Comment.Multiline),
+            # TODO: nested comments (* (* ... *) ... (* ... *) *) not supported!
+        ],
+        'punctuation': [
+            (r'[()\[\]{},.:;|]', Punctuation),
+        ],
+        'numliterals': [
+            (r'[0-9A-F]+X\b', Number.Hex),                 # char code
+            (r'[0-9A-F]+[HL]\b', Number.Hex),              # hexadecimal number
+            (r'[0-9]+\.[0-9]+E[+-][0-9]+', Number.Float),  # real number
+            (r'[0-9]+\.[0-9]+', Number.Float),             # real number
+            (r'[0-9]+', Number.Integer),                   # decimal whole number
+        ],
+        'strings': [
+            (r"'[^\n']*'", String),  # single quoted string
+            (r'"[^\n"]*"', String),  # double quoted string
+        ],
+        'operators': [
+            # Arithmetic Operators
+            (r'[+-]', Operator),
+            (r'[*/]', Operator),
+            # Relational Operators
+            (r'[=#<>]', Operator),
+            # Dereferencing Operator
+            (r'\^', Operator),
+            # Logical AND Operator
+            (r'&', Operator),
+            # Logical NOT Operator
+            (r'~', Operator),
+            # Assignment Symbol
+            (r':=', Operator),
+            # Range Constructor
+            (r'\.\.', Operator),
+            (r'\$', Operator),
+        ],
+        'identifiers': [
+            (r'([a-zA-Z_$][\w$]*)', Name),
+        ],
+        'builtins': [
+            (words((
+                'ANYPTR', 'ANYREC', 'BOOLEAN', 'BYTE', 'CHAR', 'INTEGER', 'LONGINT',
+                'REAL', 'SET', 'SHORTCHAR', 'SHORTINT', 'SHORTREAL'
+                ), suffix=r'\b'), Keyword.Type),
+            (words((
+                'ABS', 'ABSTRACT', 'ARRAY', 'ASH', 'ASSERT', 'BEGIN', 'BITS', 'BY',
+                'CAP', 'CASE', 'CHR', 'CLOSE', 'CONST', 'DEC', 'DIV', 'DO', 'ELSE',
+                'ELSIF', 'EMPTY', 'END', 'ENTIER', 'EXCL', 'EXIT', 'EXTENSIBLE', 'FOR',
+                'HALT', 'IF', 'IMPORT', 'IN', 'INC', 'INCL', 'IS', 'LEN', 'LIMITED',
+                'LONG', 'LOOP', 'MAX', 'MIN', 'MOD', 'MODULE', 'NEW', 'ODD', 'OF',
+                'OR', 'ORD', 'OUT', 'POINTER', 'PROCEDURE', 'RECORD', 'REPEAT', 'RETURN',
+                'SHORT', 'SHORTCHAR', 'SHORTINT', 'SIZE', 'THEN', 'TYPE', 'TO', 'UNTIL',
+                'VAR', 'WHILE', 'WITH'
+                ), suffix=r'\b'), Keyword.Reserved),
+            (r'(TRUE|FALSE|NIL|INF)\b', Keyword.Constant),
+        ]
+    }
+
+    def analyse_text(text):
+        """The only other lexer using .cp is the C++ one, so we check if for
+        a few common Pascal keywords here. Those are unfortunately quite
+        common across various business languages as well."""
+        result = 0
+        if 'BEGIN' in text:
+            result += 0.01
+        if 'END' in text:
+            result += 0.01
+        if 'PROCEDURE' in text:
+            result += 0.01
+        if 'END' in text:
+            result += 0.01
+
+        return result
diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/lexers/objective.py b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/objective.py
new file mode 100644
index 00000000..899c2c44
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/objective.py
@@ -0,0 +1,513 @@
+"""
+    pygments.lexers.objective
+    ~~~~~~~~~~~~~~~~~~~~~~~~~
+
+    Lexers for Objective-C family languages.
+
+    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups, using, this, words, \
+    inherit, default
+from pygments.token import Text, Keyword, Name, String, Operator, \
+    Number, Punctuation, Literal, Comment, Whitespace
+
+from pygments.lexers.c_cpp import CLexer, CppLexer
+
+__all__ = ['ObjectiveCLexer', 'ObjectiveCppLexer', 'LogosLexer', 'SwiftLexer']
+
+
+def objective(baselexer):
+    """
+    Generate a subclass of baselexer that accepts the Objective-C syntax
+    extensions.
+    """
+
+    # Have to be careful not to accidentally match JavaDoc/Doxygen syntax here,
+    # since that's quite common in ordinary C/C++ files.  It's OK to match
+    # JavaDoc/Doxygen keywords that only apply to Objective-C, mind.
+    #
+    # The upshot of this is that we CANNOT match @class or @interface
+    _oc_keywords = re.compile(r'@(?:end|implementation|protocol)')
+
+    # Matches [ ? identifier  ( identifier ? ] |  identifier? : )
+    # (note the identifier is *optional* when there is a ':'!)
+    _oc_message = re.compile(r'\[\s*[a-zA-Z_]\w*\s+'
+                             r'(?:[a-zA-Z_]\w*\s*\]|'
+                             r'(?:[a-zA-Z_]\w*)?:)')
+
+    class GeneratedObjectiveCVariant(baselexer):
+        """
+        Implements Objective-C syntax on top of an existing C family lexer.
+        """
+
+        tokens = {
+            'statements': [
+                (r'@"', String, 'string'),
+                (r'@(YES|NO)', Number),
+                (r"@'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
+                (r'@(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
+                (r'@(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
+                (r'@0x[0-9a-fA-F]+[Ll]?', Number.Hex),
+                (r'@0[0-7]+[Ll]?', Number.Oct),
+                (r'@\d+[Ll]?', Number.Integer),
+                (r'@\(', Literal, 'literal_number'),
+                (r'@\[', Literal, 'literal_array'),
+                (r'@\{', Literal, 'literal_dictionary'),
+                (words((
+                    '@selector', '@private', '@protected', '@public', '@encode',
+                    '@synchronized', '@try', '@throw', '@catch', '@finally',
+                    '@end', '@property', '@synthesize', '__bridge', '__bridge_transfer',
+                    '__autoreleasing', '__block', '__weak', '__strong', 'weak', 'strong',
+                    'copy', 'retain', 'assign', 'unsafe_unretained', 'atomic', 'nonatomic',
+                    'readonly', 'readwrite', 'setter', 'getter', 'typeof', 'in',
+                    'out', 'inout', 'release', 'class', '@dynamic', '@optional',
+                    '@required', '@autoreleasepool', '@import'), suffix=r'\b'),
+                 Keyword),
+                (words(('id', 'instancetype', 'Class', 'IMP', 'SEL', 'BOOL',
+                        'IBOutlet', 'IBAction', 'unichar'), suffix=r'\b'),
+                 Keyword.Type),
+                (r'@(true|false|YES|NO)\n', Name.Builtin),
+                (r'(YES|NO|nil|self|super)\b', Name.Builtin),
+                # Carbon types
+                (r'(Boolean|UInt8|SInt8|UInt16|SInt16|UInt32|SInt32)\b', Keyword.Type),
+                # Carbon built-ins
+                (r'(TRUE|FALSE)\b', Name.Builtin),
+                (r'(@interface|@implementation)(\s+)', bygroups(Keyword, Text),
+                 ('#pop', 'oc_classname')),
+                (r'(@class|@protocol)(\s+)', bygroups(Keyword, Text),
+                 ('#pop', 'oc_forward_classname')),
+                # @ can also prefix other expressions like @{...} or @(...)
+                (r'@', Punctuation),
+                inherit,
+            ],
+            'oc_classname': [
+                # interface definition that inherits
+                (r'([a-zA-Z$_][\w$]*)(\s*:\s*)([a-zA-Z$_][\w$]*)?(\s*)(\{)',
+                 bygroups(Name.Class, Text, Name.Class, Text, Punctuation),
+                 ('#pop', 'oc_ivars')),
+                (r'([a-zA-Z$_][\w$]*)(\s*:\s*)([a-zA-Z$_][\w$]*)?',
+                 bygroups(Name.Class, Text, Name.Class), '#pop'),
+                # interface definition for a category
+                (r'([a-zA-Z$_][\w$]*)(\s*)(\([a-zA-Z$_][\w$]*\))(\s*)(\{)',
+                 bygroups(Name.Class, Text, Name.Label, Text, Punctuation),
+                 ('#pop', 'oc_ivars')),
+                (r'([a-zA-Z$_][\w$]*)(\s*)(\([a-zA-Z$_][\w$]*\))',
+                 bygroups(Name.Class, Text, Name.Label), '#pop'),
+                # simple interface / implementation
+                (r'([a-zA-Z$_][\w$]*)(\s*)(\{)',
+                 bygroups(Name.Class, Text, Punctuation), ('#pop', 'oc_ivars')),
+                (r'([a-zA-Z$_][\w$]*)', Name.Class, '#pop')
+            ],
+            'oc_forward_classname': [
+                (r'([a-zA-Z$_][\w$]*)(\s*,\s*)',
+                 bygroups(Name.Class, Text), 'oc_forward_classname'),
+                (r'([a-zA-Z$_][\w$]*)(\s*;?)',
+                 bygroups(Name.Class, Text), '#pop')
+            ],
+            'oc_ivars': [
+                include('whitespace'),
+                include('statements'),
+                (';', Punctuation),
+                (r'\{', Punctuation, '#push'),
+                (r'\}', Punctuation, '#pop'),
+            ],
+            'root': [
+                # methods
+                (r'^([-+])(\s*)'                         # method marker
+                 r'(\(.*?\))?(\s*)'                      # return type
+                 r'([a-zA-Z$_][\w$]*:?)',        # begin of method name
+                 bygroups(Punctuation, Text, using(this),
+                          Text, Name.Function),
+                 'method'),
+                inherit,
+            ],
+            'method': [
+                include('whitespace'),
+                # TODO unsure if ellipses are allowed elsewhere, see
+                # discussion in Issue 789
+                (r',', Punctuation),
+                (r'\.\.\.', Punctuation),
+                (r'(\(.*?\))(\s*)([a-zA-Z$_][\w$]*)',
+                 bygroups(using(this), Text, Name.Variable)),
+                (r'[a-zA-Z$_][\w$]*:', Name.Function),
+                (';', Punctuation, '#pop'),
+                (r'\{', Punctuation, 'function'),
+                default('#pop'),
+            ],
+            'literal_number': [
+                (r'\(', Punctuation, 'literal_number_inner'),
+                (r'\)', Literal, '#pop'),
+                include('statement'),
+            ],
+            'literal_number_inner': [
+                (r'\(', Punctuation, '#push'),
+                (r'\)', Punctuation, '#pop'),
+                include('statement'),
+            ],
+            'literal_array': [
+                (r'\[', Punctuation, 'literal_array_inner'),
+                (r'\]', Literal, '#pop'),
+                include('statement'),
+            ],
+            'literal_array_inner': [
+                (r'\[', Punctuation, '#push'),
+                (r'\]', Punctuation, '#pop'),
+                include('statement'),
+            ],
+            'literal_dictionary': [
+                (r'\}', Literal, '#pop'),
+                include('statement'),
+            ],
+        }
+
+        def analyse_text(text):
+            if _oc_keywords.search(text):
+                return 1.0
+            elif '@"' in text:  # strings
+                return 0.8
+            elif re.search('@[0-9]+', text):
+                return 0.7
+            elif _oc_message.search(text):
+                return 0.8
+            return 0
+
+        def get_tokens_unprocessed(self, text, stack=('root',)):
+            from pygments.lexers._cocoa_builtins import COCOA_INTERFACES, \
+                COCOA_PROTOCOLS, COCOA_PRIMITIVES
+
+            for index, token, value in \
+                    baselexer.get_tokens_unprocessed(self, text, stack):
+                if token is Name or token is Name.Class:
+                    if value in COCOA_INTERFACES or value in COCOA_PROTOCOLS \
+                       or value in COCOA_PRIMITIVES:
+                        token = Name.Builtin.Pseudo
+
+                yield index, token, value
+
+    return GeneratedObjectiveCVariant
+
+
+class ObjectiveCLexer(objective(CLexer)):
+    """
+    For Objective-C source code with preprocessor directives.
+    """
+
+    name = 'Objective-C'
+    url = 'https://developer.apple.com/library/archive/documentation/Cocoa/Conceptual/ProgrammingWithObjectiveC/Introduction/Introduction.html'
+    aliases = ['objective-c', 'objectivec', 'obj-c', 'objc']
+    filenames = ['*.m', '*.h']
+    mimetypes = ['text/x-objective-c']
+    version_added = ''
+    priority = 0.05    # Lower than C
+
+
+class ObjectiveCppLexer(objective(CppLexer)):
+    """
+    For Objective-C++ source code with preprocessor directives.
+    """
+
+    name = 'Objective-C++'
+    aliases = ['objective-c++', 'objectivec++', 'obj-c++', 'objc++']
+    filenames = ['*.mm', '*.hh']
+    mimetypes = ['text/x-objective-c++']
+    version_added = ''
+    priority = 0.05    # Lower than C++
+
+
+class LogosLexer(ObjectiveCppLexer):
+    """
+    For Logos + Objective-C source code with preprocessor directives.
+    """
+
+    name = 'Logos'
+    aliases = ['logos']
+    filenames = ['*.x', '*.xi', '*.xm', '*.xmi']
+    mimetypes = ['text/x-logos']
+    version_added = '1.6'
+    priority = 0.25
+
+    tokens = {
+        'statements': [
+            (r'(%orig|%log)\b', Keyword),
+            (r'(%c)\b(\()(\s*)([a-zA-Z$_][\w$]*)(\s*)(\))',
+             bygroups(Keyword, Punctuation, Text, Name.Class, Text, Punctuation)),
+            (r'(%init)\b(\()',
+             bygroups(Keyword, Punctuation), 'logos_init_directive'),
+            (r'(%init)(?=\s*;)', bygroups(Keyword)),
+            (r'(%hook|%group)(\s+)([a-zA-Z$_][\w$]+)',
+             bygroups(Keyword, Text, Name.Class), '#pop'),
+            (r'(%subclass)(\s+)', bygroups(Keyword, Text),
+             ('#pop', 'logos_classname')),
+            inherit,
+        ],
+        'logos_init_directive': [
+            (r'\s+', Text),
+            (',', Punctuation, ('logos_init_directive', '#pop')),
+            (r'([a-zA-Z$_][\w$]*)(\s*)(=)(\s*)([^);]*)',
+             bygroups(Name.Class, Text, Punctuation, Text, Text)),
+            (r'([a-zA-Z$_][\w$]*)', Name.Class),
+            (r'\)', Punctuation, '#pop'),
+        ],
+        'logos_classname': [
+            (r'([a-zA-Z$_][\w$]*)(\s*:\s*)([a-zA-Z$_][\w$]*)?',
+             bygroups(Name.Class, Text, Name.Class), '#pop'),
+            (r'([a-zA-Z$_][\w$]*)', Name.Class, '#pop')
+        ],
+        'root': [
+            (r'(%subclass)(\s+)', bygroups(Keyword, Text),
+             'logos_classname'),
+            (r'(%hook|%group)(\s+)([a-zA-Z$_][\w$]+)',
+             bygroups(Keyword, Text, Name.Class)),
+            (r'(%config)(\s*\(\s*)(\w+)(\s*=)(.*?)(\)\s*)',
+             bygroups(Keyword, Text, Name.Variable, Text, String, Text)),
+            (r'(%ctor)(\s*)(\{)', bygroups(Keyword, Text, Punctuation),
+             'function'),
+            (r'(%new)(\s*)(\()(.*?)(\))',
+             bygroups(Keyword, Text, Keyword, String, Keyword)),
+            (r'(\s*)(%end)(\s*)', bygroups(Text, Keyword, Text)),
+            inherit,
+        ],
+    }
+
+    _logos_keywords = re.compile(r'%(?:hook|ctor|init|c\()')
+
+    def analyse_text(text):
+        if LogosLexer._logos_keywords.search(text):
+            return 1.0
+        return 0
+
+
+class SwiftLexer(RegexLexer):
+    """
+    For Swift source.
+    """
+    name = 'Swift'
+    url = 'https://www.swift.org/'
+    filenames = ['*.swift']
+    aliases = ['swift']
+    mimetypes = ['text/x-swift']
+    version_added = '2.0'
+
+    tokens = {
+        'root': [
+            # Whitespace and Comments
+            (r'\n', Text),
+            (r'\s+', Whitespace),
+            (r'//', Comment.Single, 'comment-single'),
+            (r'/\*', Comment.Multiline, 'comment-multi'),
+            (r'#(if|elseif|else|endif|available)\b', Comment.Preproc, 'preproc'),
+
+            # Keywords
+            include('keywords'),
+
+            # Global Types
+            (words((
+                'Array', 'AutoreleasingUnsafeMutablePointer', 'BidirectionalReverseView',
+                'Bit', 'Bool', 'CFunctionPointer', 'COpaquePointer', 'CVaListPointer',
+                'Character', 'ClosedInterval', 'CollectionOfOne', 'ContiguousArray',
+                'Dictionary', 'DictionaryGenerator', 'DictionaryIndex', 'Double',
+                'EmptyCollection', 'EmptyGenerator', 'EnumerateGenerator',
+                'EnumerateSequence', 'FilterCollectionView',
+                'FilterCollectionViewIndex', 'FilterGenerator', 'FilterSequenceView',
+                'Float', 'Float80', 'FloatingPointClassification', 'GeneratorOf',
+                'GeneratorOfOne', 'GeneratorSequence', 'HalfOpenInterval', 'HeapBuffer',
+                'HeapBufferStorage', 'ImplicitlyUnwrappedOptional', 'IndexingGenerator',
+                'Int', 'Int16', 'Int32', 'Int64', 'Int8', 'LazyBidirectionalCollection',
+                'LazyForwardCollection', 'LazyRandomAccessCollection',
+                'LazySequence', 'MapCollectionView', 'MapSequenceGenerator',
+                'MapSequenceView', 'MirrorDisposition', 'ObjectIdentifier', 'OnHeap',
+                'Optional', 'PermutationGenerator', 'QuickLookObject',
+                'RandomAccessReverseView', 'Range', 'RangeGenerator', 'RawByte', 'Repeat',
+                'ReverseBidirectionalIndex', 'ReverseRandomAccessIndex', 'SequenceOf',
+                'SinkOf', 'Slice', 'StaticString', 'StrideThrough', 'StrideThroughGenerator',
+                'StrideTo', 'StrideToGenerator', 'String', 'UInt', 'UInt16', 'UInt32',
+                'UInt64', 'UInt8', 'UTF16', 'UTF32', 'UTF8', 'UnicodeDecodingResult',
+                'UnicodeScalar', 'Unmanaged', 'UnsafeBufferPointer',
+                'UnsafeBufferPointerGenerator', 'UnsafeMutableBufferPointer',
+                'UnsafeMutablePointer', 'UnsafePointer', 'Zip2', 'ZipGenerator2',
+                # Protocols
+                'AbsoluteValuable', 'AnyObject', 'ArrayLiteralConvertible',
+                'BidirectionalIndexType', 'BitwiseOperationsType',
+                'BooleanLiteralConvertible', 'BooleanType', 'CVarArgType',
+                'CollectionType', 'Comparable', 'DebugPrintable',
+                'DictionaryLiteralConvertible', 'Equatable',
+                'ExtendedGraphemeClusterLiteralConvertible',
+                'ExtensibleCollectionType', 'FloatLiteralConvertible',
+                'FloatingPointType', 'ForwardIndexType', 'GeneratorType', 'Hashable',
+                'IntegerArithmeticType', 'IntegerLiteralConvertible', 'IntegerType',
+                'IntervalType', 'MirrorType', 'MutableCollectionType', 'MutableSliceable',
+                'NilLiteralConvertible', 'OutputStreamType', 'Printable',
+                'RandomAccessIndexType', 'RangeReplaceableCollectionType',
+                'RawOptionSetType', 'RawRepresentable', 'Reflectable', 'SequenceType',
+                'SignedIntegerType', 'SignedNumberType', 'SinkType', 'Sliceable',
+                'Streamable', 'Strideable', 'StringInterpolationConvertible',
+                'StringLiteralConvertible', 'UnicodeCodecType',
+                'UnicodeScalarLiteralConvertible', 'UnsignedIntegerType',
+                '_ArrayBufferType', '_BidirectionalIndexType', '_CocoaStringType',
+                '_CollectionType', '_Comparable', '_ExtensibleCollectionType',
+                '_ForwardIndexType', '_Incrementable', '_IntegerArithmeticType',
+                '_IntegerType', '_ObjectiveCBridgeable', '_RandomAccessIndexType',
+                '_RawOptionSetType', '_SequenceType', '_Sequence_Type',
+                '_SignedIntegerType', '_SignedNumberType', '_Sliceable', '_Strideable',
+                '_SwiftNSArrayRequiredOverridesType', '_SwiftNSArrayType',
+                '_SwiftNSCopyingType', '_SwiftNSDictionaryRequiredOverridesType',
+                '_SwiftNSDictionaryType', '_SwiftNSEnumeratorType',
+                '_SwiftNSFastEnumerationType', '_SwiftNSStringRequiredOverridesType',
+                '_SwiftNSStringType', '_UnsignedIntegerType',
+                # Variables
+                'C_ARGC', 'C_ARGV', 'Process',
+                # Typealiases
+                'Any', 'AnyClass', 'BooleanLiteralType', 'CBool', 'CChar', 'CChar16',
+                'CChar32', 'CDouble', 'CFloat', 'CInt', 'CLong', 'CLongLong', 'CShort',
+                'CSignedChar', 'CUnsignedInt', 'CUnsignedLong', 'CUnsignedShort',
+                'CWideChar', 'ExtendedGraphemeClusterType', 'Float32', 'Float64',
+                'FloatLiteralType', 'IntMax', 'IntegerLiteralType', 'StringLiteralType',
+                'UIntMax', 'UWord', 'UnicodeScalarType', 'Void', 'Word',
+                # Foundation/Cocoa
+                'NSErrorPointer', 'NSObjectProtocol', 'Selector'), suffix=r'\b'),
+             Name.Builtin),
+            # Functions
+            (words((
+                'abs', 'advance', 'alignof', 'alignofValue', 'assert', 'assertionFailure',
+                'contains', 'count', 'countElements', 'debugPrint', 'debugPrintln',
+                'distance', 'dropFirst', 'dropLast', 'dump', 'enumerate', 'equal',
+                'extend', 'fatalError', 'filter', 'find', 'first', 'getVaList', 'indices',
+                'insert', 'isEmpty', 'join', 'last', 'lazy', 'lexicographicalCompare',
+                'map', 'max', 'maxElement', 'min', 'minElement', 'numericCast', 'overlaps',
+                'partition', 'precondition', 'preconditionFailure', 'prefix', 'print',
+                'println', 'reduce', 'reflect', 'removeAll', 'removeAtIndex', 'removeLast',
+                'removeRange', 'reverse', 'sizeof', 'sizeofValue', 'sort', 'sorted',
+                'splice', 'split', 'startsWith', 'stride', 'strideof', 'strideofValue',
+                'suffix', 'swap', 'toDebugString', 'toString', 'transcode',
+                'underestimateCount', 'unsafeAddressOf', 'unsafeBitCast', 'unsafeDowncast',
+                'withExtendedLifetime', 'withUnsafeMutablePointer',
+                'withUnsafeMutablePointers', 'withUnsafePointer', 'withUnsafePointers',
+                'withVaList'), suffix=r'\b'),
+             Name.Builtin.Pseudo),
+
+            # Implicit Block Variables
+            (r'\$\d+', Name.Variable),
+
+            # Binary Literal
+            (r'0b[01_]+', Number.Bin),
+            # Octal Literal
+            (r'0o[0-7_]+', Number.Oct),
+            # Hexadecimal Literal
+            (r'0x[0-9a-fA-F_]+', Number.Hex),
+            # Decimal Literal
+            (r'[0-9][0-9_]*(\.[0-9_]+[eE][+\-]?[0-9_]+|'
+             r'\.[0-9_]*|[eE][+\-]?[0-9_]+)', Number.Float),
+            (r'[0-9][0-9_]*', Number.Integer),
+            # String Literal
+            (r'"""', String, 'string-multi'),
+            (r'"', String, 'string'),
+
+            # Operators and Punctuation
+            (r'[(){}\[\].,:;=@#`?]|->|[<&?](?=\w)|(?<=\w)[>!?]', Punctuation),
+            (r'[/=\-+!*%<>&|^?~]+', Operator),
+
+            # Identifier
+            (r'[a-zA-Z_]\w*', Name)
+        ],
+        'keywords': [
+            (words((
+                'as', 'async', 'await', 'break', 'case', 'catch', 'continue', 'default', 'defer',
+                'do', 'else', 'fallthrough', 'for', 'guard', 'if', 'in', 'is',
+                'repeat', 'return', '#selector', 'switch', 'throw', 'try',
+                'where', 'while'), suffix=r'\b'),
+             Keyword),
+            (r'@availability\([^)]+\)', Keyword.Reserved),
+            (words((
+                'associativity', 'convenience', 'dynamic', 'didSet', 'final',
+                'get', 'indirect', 'infix', 'inout', 'lazy', 'left', 'mutating',
+                'none', 'nonmutating', 'optional', 'override', 'postfix',
+                'precedence', 'prefix', 'Protocol', 'required', 'rethrows',
+                'right', 'set', 'throws', 'Type', 'unowned', 'weak', 'willSet',
+                '@availability', '@autoclosure', '@noreturn',
+                '@NSApplicationMain', '@NSCopying', '@NSManaged', '@objc',
+                '@UIApplicationMain', '@IBAction', '@IBDesignable',
+                '@IBInspectable', '@IBOutlet'), suffix=r'\b'),
+             Keyword.Reserved),
+            (r'(as|dynamicType|false|is|nil|self|Self|super|true|__COLUMN__'
+             r'|__FILE__|__FUNCTION__|__LINE__|_'
+             r'|#(?:file|line|column|function))\b', Keyword.Constant),
+            (r'import\b', Keyword.Declaration, 'module'),
+            (r'(class|enum|extension|struct|protocol)(\s+)([a-zA-Z_]\w*)',
+             bygroups(Keyword.Declaration, Whitespace, Name.Class)),
+            (r'(func)(\s+)([a-zA-Z_]\w*)',
+             bygroups(Keyword.Declaration, Whitespace, Name.Function)),
+            (r'(var|let)(\s+)([a-zA-Z_]\w*)', bygroups(Keyword.Declaration,
+             Whitespace, Name.Variable)),
+            (words((
+                'actor', 'associatedtype', 'class', 'deinit', 'enum', 'extension', 'func', 'import',
+                'init', 'internal', 'let', 'operator', 'private', 'protocol', 'public',
+                'static', 'struct', 'subscript', 'typealias', 'var'), suffix=r'\b'),
+             Keyword.Declaration)
+        ],
+        'comment': [
+            (r':param: [a-zA-Z_]\w*|:returns?:|(FIXME|MARK|TODO):',
+             Comment.Special)
+        ],
+
+        # Nested
+        'comment-single': [
+            (r'\n', Whitespace, '#pop'),
+            include('comment'),
+            (r'[^\n]+', Comment.Single)
+        ],
+        'comment-multi': [
+            include('comment'),
+            (r'[^*/]+', Comment.Multiline),
+            (r'/\*', Comment.Multiline, '#push'),
+            (r'\*/', Comment.Multiline, '#pop'),
+            (r'[*/]+', Comment.Multiline)
+        ],
+        'module': [
+            (r'\n', Whitespace, '#pop'),
+            (r'[a-zA-Z_]\w*', Name.Class),
+            include('root')
+        ],
+        'preproc': [
+            (r'\n', Whitespace, '#pop'),
+            include('keywords'),
+            (r'[A-Za-z]\w*', Comment.Preproc),
+            include('root')
+        ],
+        'string': [
+            (r'"', String, '#pop'),
+            include("string-common"),
+        ],
+        'string-multi': [
+            (r'"""', String, '#pop'),
+            include("string-common"),
+        ],
+        'string-common': [
+            (r'\\\(', String.Interpol, 'string-intp'),
+            (r"""\\['"\\nrt]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}"""
+             r"""|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}""", String.Escape),
+            (r'[^\\"]+', String),
+            (r'\\', String)
+        ],
+        'string-intp': [
+            (r'\(', String.Interpol, '#push'),
+            (r'\)', String.Interpol, '#pop'),
+            include('root')
+        ]
+    }
+
+    def get_tokens_unprocessed(self, text):
+        from pygments.lexers._cocoa_builtins import COCOA_INTERFACES, \
+            COCOA_PROTOCOLS, COCOA_PRIMITIVES
+
+        for index, token, value in \
+                RegexLexer.get_tokens_unprocessed(self, text):
+            if token is Name or token is Name.Class:
+                if value in COCOA_INTERFACES or value in COCOA_PROTOCOLS \
+                   or value in COCOA_PRIMITIVES:
+                    token = Name.Builtin.Pseudo
+
+            yield index, token, value
diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/lexers/ooc.py b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/ooc.py
new file mode 100644
index 00000000..8a990801
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/ooc.py
@@ -0,0 +1,84 @@
+"""
+    pygments.lexers.ooc
+    ~~~~~~~~~~~~~~~~~~~
+
+    Lexers for the Ooc language.
+
+    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, bygroups, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+    Number, Punctuation
+
+__all__ = ['OocLexer']
+
+
+class OocLexer(RegexLexer):
+    """
+    For Ooc source code
+    """
+    name = 'Ooc'
+    url = 'https://ooc-lang.github.io/'
+    aliases = ['ooc']
+    filenames = ['*.ooc']
+    mimetypes = ['text/x-ooc']
+    version_added = '1.2'
+
+    tokens = {
+        'root': [
+            (words((
+                'class', 'interface', 'implement', 'abstract', 'extends', 'from',
+                'this', 'super', 'new', 'const', 'final', 'static', 'import',
+                'use', 'extern', 'inline', 'proto', 'break', 'continue',
+                'fallthrough', 'operator', 'if', 'else', 'for', 'while', 'do',
+                'switch', 'case', 'as', 'in', 'version', 'return', 'true',
+                'false', 'null'), prefix=r'\b', suffix=r'\b'),
+             Keyword),
+            (r'include\b', Keyword, 'include'),
+            (r'(cover)([ \t]+)(from)([ \t]+)(\w+[*@]?)',
+             bygroups(Keyword, Text, Keyword, Text, Name.Class)),
+            (r'(func)((?:[ \t]|\\\n)+)(~[a-z_]\w*)',
+             bygroups(Keyword, Text, Name.Function)),
+            (r'\bfunc\b', Keyword),
+            # Note: %= not listed on https://ooc-lang.github.io/docs/lang/operators/
+            (r'//.*', Comment),
+            (r'(?s)/\*.*?\*/', Comment.Multiline),
+            (r'(==?|\+=?|-[=>]?|\*=?|/=?|:=|!=?|%=?|\?|>{1,3}=?|<{1,3}=?|\.\.|'
+             r'&&?|\|\|?|\^=?)', Operator),
+            (r'(\.)([ \t]*)([a-z]\w*)', bygroups(Operator, Text,
+                                                 Name.Function)),
+            (r'[A-Z][A-Z0-9_]+', Name.Constant),
+            (r'[A-Z]\w*([@*]|\[[ \t]*\])?', Name.Class),
+
+            (r'([a-z]\w*(?:~[a-z]\w*)?)((?:[ \t]|\\\n)*)(?=\()',
+             bygroups(Name.Function, Text)),
+            (r'[a-z]\w*', Name.Variable),
+
+            # : introduces types
+            (r'[:(){}\[\];,]', Punctuation),
+
+            (r'0x[0-9a-fA-F]+', Number.Hex),
+            (r'0c[0-9]+', Number.Oct),
+            (r'0b[01]+', Number.Bin),
+            (r'[0-9_]\.[0-9_]*(?!\.)', Number.Float),
+            (r'[0-9_]+', Number.Decimal),
+
+            (r'"(?:\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\"])*"',
+             String.Double),
+            (r"'(?:\\.|\\[0-9]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'",
+             String.Char),
+            (r'@', Punctuation),  # pointer dereference
+            (r'\.', Punctuation),  # imports or chain operator
+
+            (r'\\[ \t\n]', Text),
+            (r'[ \t]+', Text),
+        ],
+        'include': [
+            (r'[\w/]+', Name),
+            (r',', Punctuation),
+            (r'[ \t]', Text),
+            (r'[;\n]', Text, '#pop'),
+        ],
+    }
diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/lexers/openscad.py b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/openscad.py
new file mode 100644
index 00000000..b06de227
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/openscad.py
@@ -0,0 +1,96 @@
+"""
+    pygments.lexers.openscad
+    ~~~~~~~~~~~~~~~~~~~~~~~~
+
+    Lexers for the OpenSCAD languages.
+
+    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, bygroups, words, include
+from pygments.token import Text, Comment, Punctuation, Operator, Keyword, Name, Number, Whitespace, Literal, String
+
+__all__ = ['OpenScadLexer']
+
+
+class OpenScadLexer(RegexLexer):
+    """For openSCAD code.
+    """
+    name = "OpenSCAD"
+    url = "https://openscad.org/"
+    aliases = ["openscad"]
+    filenames = ["*.scad"]
+    mimetypes = ["application/x-openscad"]
+    version_added = '2.16'
+
+    tokens = {
+        "root": [
+            (r"[^\S\n]+", Whitespace),
+            (r'//', Comment.Single, 'comment-single'),
+            (r'/\*', Comment.Multiline, 'comment-multi'),
+            (r"[{}\[\]\(\),;:]", Punctuation),
+            (r"[*!#%\-+=?/]", Operator),
+            (r"<=|<|==|!=|>=|>|&&|\|\|", Operator),
+            (r"\$(f[asn]|t|vp[rtd]|children)", Operator),
+            (r"(undef|PI)\b", Keyword.Constant),
+            (
+                r"(use|include)((?:\s|\\\\s)+)",
+                bygroups(Keyword.Namespace, Text),
+                "includes",
+            ),
+            (r"(module)(\s*)([^\s\(]+)",
+             bygroups(Keyword.Namespace, Whitespace, Name.Namespace)),
+            (r"(function)(\s*)([^\s\(]+)",
+             bygroups(Keyword.Declaration, Whitespace, Name.Function)),
+            (words(("true", "false"), prefix=r"\b", suffix=r"\b"), Literal),
+            (words((
+                "function", "module", "include", "use", "for",
+                "intersection_for", "if", "else", "return"
+                ), prefix=r"\b", suffix=r"\b"), Keyword
+            ),
+            (words((
+                "circle", "square", "polygon", "text", "sphere", "cube",
+                "cylinder", "polyhedron", "translate", "rotate", "scale",
+                "resize", "mirror", "multmatrix", "color", "offset", "hull",
+                "minkowski", "union", "difference", "intersection", "abs",
+                "sign", "sin", "cos", "tan", "acos", "asin", "atan", "atan2",
+                "floor", "round", "ceil", "ln", "log", "pow", "sqrt", "exp",
+                "rands", "min", "max", "concat", "lookup", "str", "chr",
+                "search", "version", "version_num", "norm", "cross",
+                "parent_module", "echo", "import", "import_dxf",
+                "dxf_linear_extrude", "linear_extrude", "rotate_extrude",
+                "surface", "projection", "render", "dxf_cross",
+                "dxf_dim", "let", "assign", "len"
+                ), prefix=r"\b", suffix=r"\b"),
+                Name.Builtin
+            ),
+            (r"\bchildren\b", Name.Builtin.Pseudo),
+            (r'""".*?"""', String.Double),
+            (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
+            (r"-?\d+(\.\d+)?(e[+-]?\d+)?", Number),
+            (r"\w+", Name),
+        ],
+        "includes": [
+            (
+                r"(<)([^>]*)(>)",
+                bygroups(Punctuation, Comment.PreprocFile, Punctuation),
+            ),
+        ],
+        'comment': [
+            (r':param: [a-zA-Z_]\w*|:returns?:|(FIXME|MARK|TODO):',
+             Comment.Special)
+        ],
+        'comment-single': [
+            (r'\n', Text, '#pop'),
+            include('comment'),
+            (r'[^\n]+', Comment.Single)
+        ],
+        'comment-multi': [
+            include('comment'),
+            (r'[^*/]+', Comment.Multiline),
+            (r'/\*', Comment.Multiline, '#push'),
+            (r'\*/', Comment.Multiline, '#pop'),
+            (r'[*/]', Comment.Multiline)
+        ],
+    }
diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/lexers/other.py b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/other.py
new file mode 100644
index 00000000..2b7dfb4a
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/other.py
@@ -0,0 +1,41 @@
+"""
+    pygments.lexers.other
+    ~~~~~~~~~~~~~~~~~~~~~
+
+    Just export lexer classes previously contained in this module.
+
+    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+# ruff: noqa: F401
+from pygments.lexers.sql import SqlLexer, MySqlLexer, SqliteConsoleLexer
+from pygments.lexers.shell import BashLexer, BashSessionLexer, BatchLexer, \
+    TcshLexer
+from pygments.lexers.robotframework import RobotFrameworkLexer
+from pygments.lexers.testing import GherkinLexer
+from pygments.lexers.esoteric import BrainfuckLexer, BefungeLexer, RedcodeLexer
+from pygments.lexers.prolog import LogtalkLexer
+from pygments.lexers.snobol import SnobolLexer
+from pygments.lexers.rebol import RebolLexer
+from pygments.lexers.configs import KconfigLexer, Cfengine3Lexer
+from pygments.lexers.modeling import ModelicaLexer
+from pygments.lexers.scripting import AppleScriptLexer, MOOCodeLexer, \
+    HybrisLexer
+from pygments.lexers.graphics import PostScriptLexer, GnuplotLexer, \
+    AsymptoteLexer, PovrayLexer
+from pygments.lexers.business import ABAPLexer, OpenEdgeLexer, \
+    GoodDataCLLexer, MaqlLexer
+from pygments.lexers.automation import AutoItLexer, AutohotkeyLexer
+from pygments.lexers.dsls import ProtoBufLexer, BroLexer, PuppetLexer, \
+    MscgenLexer, VGLLexer
+from pygments.lexers.basic import CbmBasicV2Lexer
+from pygments.lexers.pawn import SourcePawnLexer, PawnLexer
+from pygments.lexers.ecl import ECLLexer
+from pygments.lexers.urbi import UrbiscriptLexer
+from pygments.lexers.smalltalk import SmalltalkLexer, NewspeakLexer
+from pygments.lexers.installers import NSISLexer, RPMSpecLexer
+from pygments.lexers.textedit import AwkLexer
+from pygments.lexers.smv import NuSMVLexer
+
+__all__ = []
diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/lexers/parasail.py b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/parasail.py
new file mode 100644
index 00000000..150d6a9c
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/parasail.py
@@ -0,0 +1,78 @@
+"""
+    pygments.lexers.parasail
+    ~~~~~~~~~~~~~~~~~~~~~~~~
+
+    Lexer for ParaSail.
+
+    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+    Number, Punctuation, Literal
+
+__all__ = ['ParaSailLexer']
+
+
+class ParaSailLexer(RegexLexer):
+    """
+    For ParaSail source code.
+    """
+
+    name = 'ParaSail'
+    url = 'http://www.parasail-lang.org'
+    aliases = ['parasail']
+    filenames = ['*.psi', '*.psl']
+    mimetypes = ['text/x-parasail']
+    version_added = '2.1'
+
+    flags = re.MULTILINE
+
+    tokens = {
+        'root': [
+            (r'[^\S\n]+', Text),
+            (r'//.*?\n', Comment.Single),
+            (r'\b(and|or|xor)=', Operator.Word),
+            (r'\b(and(\s+then)?|or(\s+else)?|xor|rem|mod|'
+             r'(is|not)\s+null)\b',
+             Operator.Word),
+            # Keywords
+            (r'\b(abs|abstract|all|block|class|concurrent|const|continue|'
+             r'each|end|exit|extends|exports|forward|func|global|implements|'
+             r'import|in|interface|is|lambda|locked|new|not|null|of|op|'
+             r'optional|private|queued|ref|return|reverse|separate|some|'
+             r'type|until|var|with|'
+             # Control flow
+             r'if|then|else|elsif|case|for|while|loop)\b',
+             Keyword.Reserved),
+            (r'(abstract\s+)?(interface|class|op|func|type)',
+             Keyword.Declaration),
+            # Literals
+            (r'"[^"]*"', String),
+            (r'\\[\'ntrf"0]', String.Escape),
+            (r'#[a-zA-Z]\w*', Literal),       # Enumeration
+            include('numbers'),
+            (r"'[^']'", String.Char),
+            (r'[a-zA-Z]\w*', Name),
+            # Operators and Punctuation
+            (r'(<==|==>|<=>|\*\*=|<\|=|<<=|>>=|==|!=|=\?|<=|>=|'
+             r'\*\*|<<|>>|=>|:=|\+=|-=|\*=|\|=|\||/=|\+|-|\*|/|'
+             r'\.\.|<\.\.|\.\.<|<\.\.<)',
+             Operator),
+            (r'(<|>|\[|\]|\(|\)|\||:|;|,|.|\{|\}|->)',
+             Punctuation),
+            (r'\n+', Text),
+        ],
+        'numbers': [
+            (r'\d[0-9_]*#[0-9a-fA-F][0-9a-fA-F_]*#', Number.Hex),  # any base
+            (r'0[xX][0-9a-fA-F][0-9a-fA-F_]*', Number.Hex),        # C-like hex
+            (r'0[bB][01][01_]*', Number.Bin),                      # C-like bin
+            (r'\d[0-9_]*\.\d[0-9_]*[eE][+-]\d[0-9_]*',             # float exp
+             Number.Float),
+            (r'\d[0-9_]*\.\d[0-9_]*', Number.Float),               # float
+            (r'\d[0-9_]*', Number.Integer),                        # integer
+        ],
+    }
diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/lexers/parsers.py b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/parsers.py
new file mode 100644
index 00000000..7a4ed9d1
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/parsers.py
@@ -0,0 +1,798 @@
+"""
+    pygments.lexers.parsers
+    ~~~~~~~~~~~~~~~~~~~~~~~
+
+    Lexers for parser generators.
+
+    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, DelegatingLexer, \
+    include, bygroups, using
+from pygments.token import Punctuation, Other, Text, Comment, Operator, \
+    Keyword, Name, String, Number, Whitespace
+from pygments.lexers.jvm import JavaLexer
+from pygments.lexers.c_cpp import CLexer, CppLexer
+from pygments.lexers.objective import ObjectiveCLexer
+from pygments.lexers.d import DLexer
+from pygments.lexers.dotnet import CSharpLexer
+from pygments.lexers.ruby import RubyLexer
+from pygments.lexers.python import PythonLexer
+from pygments.lexers.perl import PerlLexer
+
+__all__ = ['RagelLexer', 'RagelEmbeddedLexer', 'RagelCLexer', 'RagelDLexer',
+           'RagelCppLexer', 'RagelObjectiveCLexer', 'RagelRubyLexer',
+           'RagelJavaLexer', 'AntlrLexer', 'AntlrPythonLexer',
+           'AntlrPerlLexer', 'AntlrRubyLexer', 'AntlrCppLexer',
+           'AntlrCSharpLexer', 'AntlrObjectiveCLexer',
+           'AntlrJavaLexer', 'AntlrActionScriptLexer',
+           'TreetopLexer', 'EbnfLexer']
+
+
+class RagelLexer(RegexLexer):
+    """A pure `Ragel `_ lexer.  Use this
+    for fragments of Ragel.  For ``.rl`` files, use
+    :class:`RagelEmbeddedLexer` instead (or one of the
+    language-specific subclasses).
+
+    """
+
+    name = 'Ragel'
+    url = 'http://www.colm.net/open-source/ragel/'
+    aliases = ['ragel']
+    filenames = []
+    version_added = '1.1'
+
+    tokens = {
+        'whitespace': [
+            (r'\s+', Whitespace)
+        ],
+        'comments': [
+            (r'\#.*$', Comment),
+        ],
+        'keywords': [
+            (r'(access|action|alphtype)\b', Keyword),
+            (r'(getkey|write|machine|include)\b', Keyword),
+            (r'(any|ascii|extend|alpha|digit|alnum|lower|upper)\b', Keyword),
+            (r'(xdigit|cntrl|graph|print|punct|space|zlen|empty)\b', Keyword)
+        ],
+        'numbers': [
+            (r'0x[0-9A-Fa-f]+', Number.Hex),
+            (r'[+-]?[0-9]+', Number.Integer),
+        ],
+        'literals': [
+            (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
+            (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
+            (r'\[(\\\\|\\[^\\]|[^\\\]])*\]', String),          # square bracket literals
+            (r'/(?!\*)(\\\\|\\[^\\]|[^/\\])*/', String.Regex),  # regular expressions
+        ],
+        'identifiers': [
+            (r'[a-zA-Z_]\w*', Name.Variable),
+        ],
+        'operators': [
+            (r',', Operator),                           # Join
+            (r'\||&|--?', Operator),                    # Union, Intersection and Subtraction
+            (r'\.|<:|:>>?', Operator),                  # Concatention
+            (r':', Operator),                           # Label
+            (r'->', Operator),                          # Epsilon Transition
+            (r'(>|\$|%|<|@|<>)(/|eof\b)', Operator),    # EOF Actions
+            (r'(>|\$|%|<|@|<>)(!|err\b)', Operator),    # Global Error Actions
+            (r'(>|\$|%|<|@|<>)(\^|lerr\b)', Operator),  # Local Error Actions
+            (r'(>|\$|%|<|@|<>)(~|to\b)', Operator),     # To-State Actions
+            (r'(>|\$|%|<|@|<>)(\*|from\b)', Operator),  # From-State Actions
+            (r'>|@|\$|%', Operator),                    # Transition Actions and Priorities
+            (r'\*|\?|\+|\{[0-9]*,[0-9]*\}', Operator),  # Repetition
+            (r'!|\^', Operator),                        # Negation
+            (r'\(|\)', Operator),                       # Grouping
+        ],
+        'root': [
+            include('literals'),
+            include('whitespace'),
+            include('comments'),
+            include('keywords'),
+            include('numbers'),
+            include('identifiers'),
+            include('operators'),
+            (r'\{', Punctuation, 'host'),
+            (r'=', Operator),
+            (r';', Punctuation),
+        ],
+        'host': [
+            (r'(' + r'|'.join((  # keep host code in largest possible chunks
+                r'[^{}\'"/#]+',  # exclude unsafe characters
+                r'[^\\]\\[{}]',  # allow escaped { or }
+
+                # strings and comments may safely contain unsafe characters
+                r'"(\\\\|\\[^\\]|[^"\\])*"',
+                r"'(\\\\|\\[^\\]|[^'\\])*'",
+                r'//.*$\n?',            # single line comment
+                r'/\*(.|\n)*?\*/',      # multi-line javadoc-style comment
+                r'\#.*$\n?',            # ruby comment
+
+                # regular expression: There's no reason for it to start
+                # with a * and this stops confusion with comments.
+                r'/(?!\*)(\\\\|\\[^\\]|[^/\\])*/',
+
+                # / is safe now that we've handled regex and javadoc comments
+                r'/',
+            )) + r')+', Other),
+
+            (r'\{', Punctuation, '#push'),
+            (r'\}', Punctuation, '#pop'),
+        ],
+    }
+
+
+class RagelEmbeddedLexer(RegexLexer):
+    """
+    A lexer for Ragel embedded in a host language file.
+
+    This will only highlight Ragel statements. If you want host language
+    highlighting then call the language-specific Ragel lexer.
+    """
+
+    name = 'Embedded Ragel'
+    aliases = ['ragel-em']
+    filenames = ['*.rl']
+    url = 'http://www.colm.net/open-source/ragel/'
+    version_added = '1.1'
+
+    tokens = {
+        'root': [
+            (r'(' + r'|'.join((   # keep host code in largest possible chunks
+                r'[^%\'"/#]+',    # exclude unsafe characters
+                r'%(?=[^%]|$)',   # a single % sign is okay, just not 2 of them
+
+                # strings and comments may safely contain unsafe characters
+                r'"(\\\\|\\[^\\]|[^"\\])*"',
+                r"'(\\\\|\\[^\\]|[^'\\])*'",
+                r'/\*(.|\n)*?\*/',      # multi-line javadoc-style comment
+                r'//.*$\n?',  # single line comment
+                r'\#.*$\n?',  # ruby/ragel comment
+                r'/(?!\*)(\\\\|\\[^\\]|[^/\\])*/',  # regular expression
+
+                # / is safe now that we've handled regex and javadoc comments
+                r'/',
+            )) + r')+', Other),
+
+            # Single Line FSM.
+            # Please don't put a quoted newline in a single line FSM.
+            # That's just mean. It will break this.
+            (r'(%%)(?![{%])(.*)($|;)(\n?)', bygroups(Punctuation,
+                                                     using(RagelLexer),
+                                                     Punctuation, Text)),
+
+            # Multi Line FSM.
+            (r'(%%%%|%%)\{', Punctuation, 'multi-line-fsm'),
+        ],
+        'multi-line-fsm': [
+            (r'(' + r'|'.join((  # keep ragel code in largest possible chunks.
+                r'(' + r'|'.join((
+                    r'[^}\'"\[/#]',   # exclude unsafe characters
+                    r'\}(?=[^%]|$)',   # } is okay as long as it's not followed by %
+                    r'\}%(?=[^%]|$)',  # ...well, one %'s okay, just not two...
+                    r'[^\\]\\[{}]',   # ...and } is okay if it's escaped
+
+                    # allow / if it's preceded with one of these symbols
+                    # (ragel EOF actions)
+                    r'(>|\$|%|<|@|<>)/',
+
+                    # specifically allow regex followed immediately by *
+                    # so it doesn't get mistaken for a comment
+                    r'/(?!\*)(\\\\|\\[^\\]|[^/\\])*/\*',
+
+                    # allow / as long as it's not followed by another / or by a *
+                    r'/(?=[^/*]|$)',
+
+                    # We want to match as many of these as we can in one block.
+                    # Not sure if we need the + sign here,
+                    # does it help performance?
+                )) + r')+',
+
+                # strings and comments may safely contain unsafe characters
+                r'"(\\\\|\\[^\\]|[^"\\])*"',
+                r"'(\\\\|\\[^\\]|[^'\\])*'",
+                r"\[(\\\\|\\[^\\]|[^\]\\])*\]",  # square bracket literal
+                r'/\*(.|\n)*?\*/',          # multi-line javadoc-style comment
+                r'//.*$\n?',                # single line comment
+                r'\#.*$\n?',                # ruby/ragel comment
+            )) + r')+', using(RagelLexer)),
+
+            (r'\}%%', Punctuation, '#pop'),
+        ]
+    }
+
+    def analyse_text(text):
+        return '@LANG: indep' in text
+
+
+class RagelRubyLexer(DelegatingLexer):
+    """
+    A lexer for Ragel in a Ruby host file.
+    """
+
+    name = 'Ragel in Ruby Host'
+    aliases = ['ragel-ruby', 'ragel-rb']
+    filenames = ['*.rl']
+    url = 'http://www.colm.net/open-source/ragel/'
+    version_added = '1.1'
+
+    def __init__(self, **options):
+        super().__init__(RubyLexer, RagelEmbeddedLexer, **options)
+
+    def analyse_text(text):
+        return '@LANG: ruby' in text
+
+
+class RagelCLexer(DelegatingLexer):
+    """
+    A lexer for Ragel in a C host file.
+    """
+
+    name = 'Ragel in C Host'
+    aliases = ['ragel-c']
+    filenames = ['*.rl']
+    url = 'http://www.colm.net/open-source/ragel/'
+    version_added = '1.1'
+
+    def __init__(self, **options):
+        super().__init__(CLexer, RagelEmbeddedLexer, **options)
+
+    def analyse_text(text):
+        return '@LANG: c' in text
+
+
+class RagelDLexer(DelegatingLexer):
+    """
+    A lexer for Ragel in a D host file.
+    """
+
+    name = 'Ragel in D Host'
+    aliases = ['ragel-d']
+    filenames = ['*.rl']
+    url = 'http://www.colm.net/open-source/ragel/'
+    version_added = '1.1'
+
+    def __init__(self, **options):
+        super().__init__(DLexer, RagelEmbeddedLexer, **options)
+
+    def analyse_text(text):
+        return '@LANG: d' in text
+
+
+class RagelCppLexer(DelegatingLexer):
+    """
+    A lexer for Ragel in a C++ host file.
+    """
+
+    name = 'Ragel in CPP Host'
+    aliases = ['ragel-cpp']
+    filenames = ['*.rl']
+    url = 'http://www.colm.net/open-source/ragel/'
+    version_added = '1.1'
+
+    def __init__(self, **options):
+        super().__init__(CppLexer, RagelEmbeddedLexer, **options)
+
+    def analyse_text(text):
+        return '@LANG: c++' in text
+
+
+class RagelObjectiveCLexer(DelegatingLexer):
+    """
+    A lexer for Ragel in an Objective C host file.
+    """
+
+    name = 'Ragel in Objective C Host'
+    aliases = ['ragel-objc']
+    filenames = ['*.rl']
+    url = 'http://www.colm.net/open-source/ragel/'
+    version_added = '1.1'
+
+    def __init__(self, **options):
+        super().__init__(ObjectiveCLexer, RagelEmbeddedLexer, **options)
+
+    def analyse_text(text):
+        return '@LANG: objc' in text
+
+
+class RagelJavaLexer(DelegatingLexer):
+    """
+    A lexer for Ragel in a Java host file.
+    """
+
+    name = 'Ragel in Java Host'
+    aliases = ['ragel-java']
+    filenames = ['*.rl']
+    url = 'http://www.colm.net/open-source/ragel/'
+    version_added = '1.1'
+
+    def __init__(self, **options):
+        super().__init__(JavaLexer, RagelEmbeddedLexer, **options)
+
+    def analyse_text(text):
+        return '@LANG: java' in text
+
+
+class AntlrLexer(RegexLexer):
+    """
+    Generic ANTLR Lexer.
+    Should not be called directly, instead
+    use DelegatingLexer for your target language.
+    """
+
+    name = 'ANTLR'
+    aliases = ['antlr']
+    filenames = []
+    url = 'https://www.antlr.org'
+    version_added = '1.1'
+
+    _id = r'[A-Za-z]\w*'
+    _TOKEN_REF = r'[A-Z]\w*'
+    _RULE_REF = r'[a-z]\w*'
+    _STRING_LITERAL = r'\'(?:\\\\|\\\'|[^\']*)\''
+    _INT = r'[0-9]+'
+
+    tokens = {
+        'whitespace': [
+            (r'\s+', Whitespace),
+        ],
+        'comments': [
+            (r'//.*$', Comment),
+            (r'/\*(.|\n)*?\*/', Comment),
+        ],
+        'root': [
+            include('whitespace'),
+            include('comments'),
+
+            (r'(lexer|parser|tree)?(\s*)(grammar\b)(\s*)(' + _id + ')(;)',
+             bygroups(Keyword, Whitespace, Keyword, Whitespace, Name.Class,
+                      Punctuation)),
+            # optionsSpec
+            (r'options\b', Keyword, 'options'),
+            # tokensSpec
+            (r'tokens\b', Keyword, 'tokens'),
+            # attrScope
+            (r'(scope)(\s*)(' + _id + r')(\s*)(\{)',
+             bygroups(Keyword, Whitespace, Name.Variable, Whitespace,
+                      Punctuation), 'action'),
+            # exception
+            (r'(catch|finally)\b', Keyword, 'exception'),
+            # action
+            (r'(@' + _id + r')(\s*)(::)?(\s*)(' + _id + r')(\s*)(\{)',
+             bygroups(Name.Label, Whitespace, Punctuation, Whitespace,
+                      Name.Label, Whitespace, Punctuation), 'action'),
+            # rule
+            (r'((?:protected|private|public|fragment)\b)?(\s*)(' + _id + ')(!)?',
+             bygroups(Keyword, Whitespace, Name.Label, Punctuation),
+             ('rule-alts', 'rule-prelims')),
+        ],
+        'exception': [
+            (r'\n', Whitespace, '#pop'),
+            (r'\s', Whitespace),
+            include('comments'),
+
+            (r'\[', Punctuation, 'nested-arg-action'),
+            (r'\{', Punctuation, 'action'),
+        ],
+        'rule-prelims': [
+            include('whitespace'),
+            include('comments'),
+
+            (r'returns\b', Keyword),
+            (r'\[', Punctuation, 'nested-arg-action'),
+            (r'\{', Punctuation, 'action'),
+            # throwsSpec
+            (r'(throws)(\s+)(' + _id + ')',
+             bygroups(Keyword, Whitespace, Name.Label)),
+            (r'(,)(\s*)(' + _id + ')',
+             bygroups(Punctuation, Whitespace, Name.Label)),  # Additional throws
+            # optionsSpec
+            (r'options\b', Keyword, 'options'),
+            # ruleScopeSpec - scope followed by target language code or name of action
+            # TODO finish implementing other possibilities for scope
+            # L173 ANTLRv3.g from ANTLR book
+            (r'(scope)(\s+)(\{)', bygroups(Keyword, Whitespace, Punctuation),
+             'action'),
+            (r'(scope)(\s+)(' + _id + r')(\s*)(;)',
+             bygroups(Keyword, Whitespace, Name.Label, Whitespace, Punctuation)),
+            # ruleAction
+            (r'(@' + _id + r')(\s*)(\{)',
+             bygroups(Name.Label, Whitespace, Punctuation), 'action'),
+            # finished prelims, go to rule alts!
+            (r':', Punctuation, '#pop')
+        ],
+        'rule-alts': [
+            include('whitespace'),
+            include('comments'),
+
+            # These might need to go in a separate 'block' state triggered by (
+            (r'options\b', Keyword, 'options'),
+            (r':', Punctuation),
+
+            # literals
+            (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
+            (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
+            (r'<<([^>]|>[^>])>>', String),
+            # identifiers
+            # Tokens start with capital letter.
+            (r'\$?[A-Z_]\w*', Name.Constant),
+            # Rules start with small letter.
+            (r'\$?[a-z_]\w*', Name.Variable),
+            # operators
+            (r'(\+|\||->|=>|=|\(|\)|\.\.|\.|\?|\*|\^|!|\#|~)', Operator),
+            (r',', Punctuation),
+            (r'\[', Punctuation, 'nested-arg-action'),
+            (r'\{', Punctuation, 'action'),
+            (r';', Punctuation, '#pop')
+        ],
+        'tokens': [
+            include('whitespace'),
+            include('comments'),
+            (r'\{', Punctuation),
+            (r'(' + _TOKEN_REF + r')(\s*)(=)?(\s*)(' + _STRING_LITERAL
+             + r')?(\s*)(;)',
+             bygroups(Name.Label, Whitespace, Punctuation, Whitespace,
+                      String, Whitespace, Punctuation)),
+            (r'\}', Punctuation, '#pop'),
+        ],
+        'options': [
+            include('whitespace'),
+            include('comments'),
+            (r'\{', Punctuation),
+            (r'(' + _id + r')(\s*)(=)(\s*)(' +
+             '|'.join((_id, _STRING_LITERAL, _INT, r'\*')) + r')(\s*)(;)',
+             bygroups(Name.Variable, Whitespace, Punctuation, Whitespace,
+                      Text, Whitespace, Punctuation)),
+            (r'\}', Punctuation, '#pop'),
+        ],
+        'action': [
+            (r'(' + r'|'.join((    # keep host code in largest possible chunks
+                r'[^${}\'"/\\]+',  # exclude unsafe characters
+
+                # strings and comments may safely contain unsafe characters
+                r'"(\\\\|\\[^\\]|[^"\\])*"',
+                r"'(\\\\|\\[^\\]|[^'\\])*'",
+                r'//.*$\n?',            # single line comment
+                r'/\*(.|\n)*?\*/',      # multi-line javadoc-style comment
+
+                # regular expression: There's no reason for it to start
+                # with a * and this stops confusion with comments.
+                r'/(?!\*)(\\\\|\\[^\\]|[^/\\])*/',
+
+                # backslashes are okay, as long as we are not backslashing a %
+                r'\\(?!%)',
+
+                # Now that we've handled regex and javadoc comments
+                # it's safe to let / through.
+                r'/',
+            )) + r')+', Other),
+            (r'(\\)(%)', bygroups(Punctuation, Other)),
+            (r'(\$[a-zA-Z]+)(\.?)(text|value)?',
+             bygroups(Name.Variable, Punctuation, Name.Property)),
+            (r'\{', Punctuation, '#push'),
+            (r'\}', Punctuation, '#pop'),
+        ],
+        'nested-arg-action': [
+            (r'(' + r'|'.join((    # keep host code in largest possible chunks.
+                r'[^$\[\]\'"/]+',  # exclude unsafe characters
+
+                # strings and comments may safely contain unsafe characters
+                r'"(\\\\|\\[^\\]|[^"\\])*"',
+                r"'(\\\\|\\[^\\]|[^'\\])*'",
+                r'//.*$\n?',            # single line comment
+                r'/\*(.|\n)*?\*/',      # multi-line javadoc-style comment
+
+                # regular expression: There's no reason for it to start
+                # with a * and this stops confusion with comments.
+                r'/(?!\*)(\\\\|\\[^\\]|[^/\\])*/',
+
+                # Now that we've handled regex and javadoc comments
+                # it's safe to let / through.
+                r'/',
+            )) + r')+', Other),
+
+
+            (r'\[', Punctuation, '#push'),
+            (r'\]', Punctuation, '#pop'),
+            (r'(\$[a-zA-Z]+)(\.?)(text|value)?',
+             bygroups(Name.Variable, Punctuation, Name.Property)),
+            (r'(\\\\|\\\]|\\\[|[^\[\]])+', Other),
+        ]
+    }
+
+    def analyse_text(text):
+        return re.search(r'^\s*grammar\s+[a-zA-Z0-9]+\s*;', text, re.M)
+
+
+# http://www.antlr.org/wiki/display/ANTLR3/Code+Generation+Targets
+
+class AntlrCppLexer(DelegatingLexer):
+    """
+    ANTLR with C++ Target
+    """
+
+    name = 'ANTLR With CPP Target'
+    aliases = ['antlr-cpp']
+    filenames = ['*.G', '*.g']
+    url = 'https://www.antlr.org'
+    version_added = '1.1'
+
+    def __init__(self, **options):
+        super().__init__(CppLexer, AntlrLexer, **options)
+
+    def analyse_text(text):
+        return AntlrLexer.analyse_text(text) and \
+            re.search(r'^\s*language\s*=\s*C\s*;', text, re.M)
+
+
+class AntlrObjectiveCLexer(DelegatingLexer):
+    """
+    ANTLR with Objective-C Target
+    """
+
+    name = 'ANTLR With ObjectiveC Target'
+    aliases = ['antlr-objc']
+    filenames = ['*.G', '*.g']
+    url = 'https://www.antlr.org'
+    version_added = '1.1'
+
+    def __init__(self, **options):
+        super().__init__(ObjectiveCLexer, AntlrLexer, **options)
+
+    def analyse_text(text):
+        return AntlrLexer.analyse_text(text) and \
+            re.search(r'^\s*language\s*=\s*ObjC\s*;', text)
+
+
+class AntlrCSharpLexer(DelegatingLexer):
+    """
+    ANTLR with C# Target
+    """
+
+    name = 'ANTLR With C# Target'
+    aliases = ['antlr-csharp', 'antlr-c#']
+    filenames = ['*.G', '*.g']
+    url = 'https://www.antlr.org'
+    version_added = '1.1'
+
+    def __init__(self, **options):
+        super().__init__(CSharpLexer, AntlrLexer, **options)
+
+    def analyse_text(text):
+        return AntlrLexer.analyse_text(text) and \
+            re.search(r'^\s*language\s*=\s*CSharp2\s*;', text, re.M)
+
+
+class AntlrPythonLexer(DelegatingLexer):
+    """
+    ANTLR with Python Target
+    """
+
+    name = 'ANTLR With Python Target'
+    aliases = ['antlr-python']
+    filenames = ['*.G', '*.g']
+    url = 'https://www.antlr.org'
+    version_added = '1.1'
+
+    def __init__(self, **options):
+        super().__init__(PythonLexer, AntlrLexer, **options)
+
+    def analyse_text(text):
+        return AntlrLexer.analyse_text(text) and \
+            re.search(r'^\s*language\s*=\s*Python\s*;', text, re.M)
+
+
+class AntlrJavaLexer(DelegatingLexer):
+    """
+    ANTLR with Java Target
+    """
+
+    name = 'ANTLR With Java Target'
+    aliases = ['antlr-java']
+    filenames = ['*.G', '*.g']
+    url = 'https://www.antlr.org'
+    version_added = '1.1'
+
+    def __init__(self, **options):
+        super().__init__(JavaLexer, AntlrLexer, **options)
+
+    def analyse_text(text):
+        # Antlr language is Java by default
+        return AntlrLexer.analyse_text(text) and 0.9
+
+
+class AntlrRubyLexer(DelegatingLexer):
+    """
+    ANTLR with Ruby Target
+    """
+
+    name = 'ANTLR With Ruby Target'
+    aliases = ['antlr-ruby', 'antlr-rb']
+    filenames = ['*.G', '*.g']
+    url = 'https://www.antlr.org'
+    version_added = '1.1'
+
+    def __init__(self, **options):
+        super().__init__(RubyLexer, AntlrLexer, **options)
+
+    def analyse_text(text):
+        return AntlrLexer.analyse_text(text) and \
+            re.search(r'^\s*language\s*=\s*Ruby\s*;', text, re.M)
+
+
+class AntlrPerlLexer(DelegatingLexer):
+    """
+    ANTLR with Perl Target
+    """
+
+    name = 'ANTLR With Perl Target'
+    aliases = ['antlr-perl']
+    filenames = ['*.G', '*.g']
+    url = 'https://www.antlr.org'
+    version_added = '1.1'
+
+    def __init__(self, **options):
+        super().__init__(PerlLexer, AntlrLexer, **options)
+
+    def analyse_text(text):
+        return AntlrLexer.analyse_text(text) and \
+            re.search(r'^\s*language\s*=\s*Perl5\s*;', text, re.M)
+
+
+class AntlrActionScriptLexer(DelegatingLexer):
+    """
+    ANTLR with ActionScript Target
+    """
+
+    name = 'ANTLR With ActionScript Target'
+    aliases = ['antlr-actionscript', 'antlr-as']
+    filenames = ['*.G', '*.g']
+    url = 'https://www.antlr.org'
+    version_added = '1.1'
+
+    def __init__(self, **options):
+        from pygments.lexers.actionscript import ActionScriptLexer
+        super().__init__(ActionScriptLexer, AntlrLexer, **options)
+
+    def analyse_text(text):
+        return AntlrLexer.analyse_text(text) and \
+            re.search(r'^\s*language\s*=\s*ActionScript\s*;', text, re.M)
+
+
+class TreetopBaseLexer(RegexLexer):
+    """
+    A base lexer for `Treetop `_ grammars.
+    Not for direct use; use :class:`TreetopLexer` instead.
+
+    .. versionadded:: 1.6
+    """
+
+    tokens = {
+        'root': [
+            include('space'),
+            (r'require[ \t]+[^\n\r]+[\n\r]', Other),
+            (r'module\b', Keyword.Namespace, 'module'),
+            (r'grammar\b', Keyword, 'grammar'),
+        ],
+        'module': [
+            include('space'),
+            include('end'),
+            (r'module\b', Keyword, '#push'),
+            (r'grammar\b', Keyword, 'grammar'),
+            (r'[A-Z]\w*(?:::[A-Z]\w*)*', Name.Namespace),
+        ],
+        'grammar': [
+            include('space'),
+            include('end'),
+            (r'rule\b', Keyword, 'rule'),
+            (r'include\b', Keyword, 'include'),
+            (r'[A-Z]\w*', Name),
+        ],
+        'include': [
+            include('space'),
+            (r'[A-Z]\w*(?:::[A-Z]\w*)*', Name.Class, '#pop'),
+        ],
+        'rule': [
+            include('space'),
+            include('end'),
+            (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
+            (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
+            (r'([A-Za-z_]\w*)(:)', bygroups(Name.Label, Punctuation)),
+            (r'[A-Za-z_]\w*', Name),
+            (r'[()]', Punctuation),
+            (r'[?+*/&!~]', Operator),
+            (r'\[(?:\\.|\[:\^?[a-z]+:\]|[^\\\]])+\]', String.Regex),
+            (r'([0-9]*)(\.\.)([0-9]*)',
+             bygroups(Number.Integer, Operator, Number.Integer)),
+            (r'(<)([^>]+)(>)', bygroups(Punctuation, Name.Class, Punctuation)),
+            (r'\{', Punctuation, 'inline_module'),
+            (r'\.', String.Regex),
+        ],
+        'inline_module': [
+            (r'\{', Other, 'ruby'),
+            (r'\}', Punctuation, '#pop'),
+            (r'[^{}]+', Other),
+        ],
+        'ruby': [
+            (r'\{', Other, '#push'),
+            (r'\}', Other, '#pop'),
+            (r'[^{}]+', Other),
+        ],
+        'space': [
+            (r'[ \t\n\r]+', Whitespace),
+            (r'#[^\n]*', Comment.Single),
+        ],
+        'end': [
+            (r'end\b', Keyword, '#pop'),
+        ],
+    }
+
+
+class TreetopLexer(DelegatingLexer):
+    """
+    A lexer for Treetop grammars.
+    """
+
+    name = 'Treetop'
+    aliases = ['treetop']
+    filenames = ['*.treetop', '*.tt']
+    url = 'https://cjheath.github.io/treetop'
+    version_added = '1.6'
+
+    def __init__(self, **options):
+        super().__init__(RubyLexer, TreetopBaseLexer, **options)
+
+
+class EbnfLexer(RegexLexer):
+    """
+    Lexer for `ISO/IEC 14977 EBNF
+    `_
+    grammars.
+    """
+
+    name = 'EBNF'
+    aliases = ['ebnf']
+    filenames = ['*.ebnf']
+    mimetypes = ['text/x-ebnf']
+    url = 'https://en.wikipedia.org/wiki/Extended_Backus%E2%80%93Naur_Form'
+    version_added = '2.0'
+
+    tokens = {
+        'root': [
+            include('whitespace'),
+            include('comment_start'),
+            include('identifier'),
+            (r'=', Operator, 'production'),
+        ],
+        'production': [
+            include('whitespace'),
+            include('comment_start'),
+            include('identifier'),
+            (r'"[^"]*"', String.Double),
+            (r"'[^']*'", String.Single),
+            (r'(\?[^?]*\?)', Name.Entity),
+            (r'[\[\]{}(),|]', Punctuation),
+            (r'-', Operator),
+            (r';', Punctuation, '#pop'),
+            (r'\.', Punctuation, '#pop'),
+        ],
+        'whitespace': [
+            (r'\s+', Text),
+        ],
+        'comment_start': [
+            (r'\(\*', Comment.Multiline, 'comment'),
+        ],
+        'comment': [
+            (r'[^*)]', Comment.Multiline),
+            include('comment_start'),
+            (r'\*\)', Comment.Multiline, '#pop'),
+            (r'[*)]', Comment.Multiline),
+        ],
+        'identifier': [
+            (r'([a-zA-Z][\w \-]*)', Keyword),
+        ],
+    }
diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/lexers/pascal.py b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/pascal.py
new file mode 100644
index 00000000..5f40dcc8
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/pascal.py
@@ -0,0 +1,644 @@
+"""
+    pygments.lexers.pascal
+    ~~~~~~~~~~~~~~~~~~~~~~
+
+    Lexers for Pascal family languages.
+
+    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import Lexer
+from pygments.util import get_bool_opt, get_list_opt
+from pygments.token import Comment, Operator, Keyword, Name, String, \
+    Number, Punctuation, Error, Whitespace
+from pygments.scanner import Scanner
+
+# compatibility import
+from pygments.lexers.modula2 import Modula2Lexer # noqa: F401
+
+__all__ = ['DelphiLexer', 'PortugolLexer']
+
+
+class PortugolLexer(Lexer):
+    """For Portugol, a Pascal dialect with keywords in Portuguese."""
+    name = 'Portugol'
+    aliases = ['portugol']
+    filenames = ['*.alg', '*.portugol']
+    mimetypes = []
+    url = "https://www.apoioinformatica.inf.br/produtos/visualg/linguagem"
+    version_added = ''
+
+    def __init__(self, **options):
+        Lexer.__init__(self, **options)
+        self.lexer = DelphiLexer(**options, portugol=True)
+
+    def get_tokens_unprocessed(self, text):
+        return self.lexer.get_tokens_unprocessed(text)
+
+
+class DelphiLexer(Lexer):
+    """
+    For Delphi (Borland Object Pascal),
+    Turbo Pascal and Free Pascal source code.
+
+    Additional options accepted:
+
+    `turbopascal`
+        Highlight Turbo Pascal specific keywords (default: ``True``).
+    `delphi`
+        Highlight Borland Delphi specific keywords (default: ``True``).
+    `freepascal`
+        Highlight Free Pascal specific keywords (default: ``True``).
+    `units`
+        A list of units that should be considered builtin, supported are
+        ``System``, ``SysUtils``, ``Classes`` and ``Math``.
+        Default is to consider all of them builtin.
+    """
+    name = 'Delphi'
+    aliases = ['delphi', 'pas', 'pascal', 'objectpascal']
+    filenames = ['*.pas', '*.dpr']
+    mimetypes = ['text/x-pascal']
+    url = 'https://www.embarcadero.com/products/delphi'
+    version_added = ''
+
+    TURBO_PASCAL_KEYWORDS = (
+        'absolute', 'and', 'array', 'asm', 'begin', 'break', 'case',
+        'const', 'constructor', 'continue', 'destructor', 'div', 'do',
+        'downto', 'else', 'end', 'file', 'for', 'function', 'goto',
+        'if', 'implementation', 'in', 'inherited', 'inline', 'interface',
+        'label', 'mod', 'nil', 'not', 'object', 'of', 'on', 'operator',
+        'or', 'packed', 'procedure', 'program', 'record', 'reintroduce',
+        'repeat', 'self', 'set', 'shl', 'shr', 'string', 'then', 'to',
+        'type', 'unit', 'until', 'uses', 'var', 'while', 'with', 'xor'
+    )
+
+    DELPHI_KEYWORDS = (
+        'as', 'class', 'except', 'exports', 'finalization', 'finally',
+        'initialization', 'is', 'library', 'on', 'property', 'raise',
+        'threadvar', 'try'
+    )
+
+    FREE_PASCAL_KEYWORDS = (
+        'dispose', 'exit', 'false', 'new', 'true'
+    )
+
+    BLOCK_KEYWORDS = {
+        'begin', 'class', 'const', 'constructor', 'destructor', 'end',
+        'finalization', 'function', 'implementation', 'initialization',
+        'label', 'library', 'operator', 'procedure', 'program', 'property',
+        'record', 'threadvar', 'type', 'unit', 'uses', 'var'
+    }
+
+    FUNCTION_MODIFIERS = {
+        'alias', 'cdecl', 'export', 'inline', 'interrupt', 'nostackframe',
+        'pascal', 'register', 'safecall', 'softfloat', 'stdcall',
+        'varargs', 'name', 'dynamic', 'near', 'virtual', 'external',
+        'override', 'assembler'
+    }
+
+    # XXX: those aren't global. but currently we know no way for defining
+    #      them just for the type context.
+    DIRECTIVES = {
+        'absolute', 'abstract', 'assembler', 'cppdecl', 'default', 'far',
+        'far16', 'forward', 'index', 'oldfpccall', 'private', 'protected',
+        'published', 'public'
+    }
+
+    BUILTIN_TYPES = {
+        'ansichar', 'ansistring', 'bool', 'boolean', 'byte', 'bytebool',
+        'cardinal', 'char', 'comp', 'currency', 'double', 'dword',
+        'extended', 'int64', 'integer', 'iunknown', 'longbool', 'longint',
+        'longword', 'pansichar', 'pansistring', 'pbool', 'pboolean',
+        'pbyte', 'pbytearray', 'pcardinal', 'pchar', 'pcomp', 'pcurrency',
+        'pdate', 'pdatetime', 'pdouble', 'pdword', 'pextended', 'phandle',
+        'pint64', 'pinteger', 'plongint', 'plongword', 'pointer',
+        'ppointer', 'pshortint', 'pshortstring', 'psingle', 'psmallint',
+        'pstring', 'pvariant', 'pwidechar', 'pwidestring', 'pword',
+        'pwordarray', 'pwordbool', 'real', 'real48', 'shortint',
+        'shortstring', 'single', 'smallint', 'string', 'tclass', 'tdate',
+        'tdatetime', 'textfile', 'thandle', 'tobject', 'ttime', 'variant',
+        'widechar', 'widestring', 'word', 'wordbool'
+    }
+
+    BUILTIN_UNITS = {
+        'System': (
+            'abs', 'acquireexceptionobject', 'addr', 'ansitoutf8',
+            'append', 'arctan', 'assert', 'assigned', 'assignfile',
+            'beginthread', 'blockread', 'blockwrite', 'break', 'chdir',
+            'chr', 'close', 'closefile', 'comptocurrency', 'comptodouble',
+            'concat', 'continue', 'copy', 'cos', 'dec', 'delete',
+            'dispose', 'doubletocomp', 'endthread', 'enummodules',
+            'enumresourcemodules', 'eof', 'eoln', 'erase', 'exceptaddr',
+            'exceptobject', 'exclude', 'exit', 'exp', 'filepos', 'filesize',
+            'fillchar', 'finalize', 'findclasshinstance', 'findhinstance',
+            'findresourcehinstance', 'flush', 'frac', 'freemem',
+            'get8087cw', 'getdir', 'getlasterror', 'getmem',
+            'getmemorymanager', 'getmodulefilename', 'getvariantmanager',
+            'halt', 'hi', 'high', 'inc', 'include', 'initialize', 'insert',
+            'int', 'ioresult', 'ismemorymanagerset', 'isvariantmanagerset',
+            'length', 'ln', 'lo', 'low', 'mkdir', 'move', 'new', 'odd',
+            'olestrtostring', 'olestrtostrvar', 'ord', 'paramcount',
+            'paramstr', 'pi', 'pos', 'pred', 'ptr', 'pucs4chars', 'random',
+            'randomize', 'read', 'readln', 'reallocmem',
+            'releaseexceptionobject', 'rename', 'reset', 'rewrite', 'rmdir',
+            'round', 'runerror', 'seek', 'seekeof', 'seekeoln',
+            'set8087cw', 'setlength', 'setlinebreakstyle',
+            'setmemorymanager', 'setstring', 'settextbuf',
+            'setvariantmanager', 'sin', 'sizeof', 'slice', 'sqr', 'sqrt',
+            'str', 'stringofchar', 'stringtoolestr', 'stringtowidechar',
+            'succ', 'swap', 'trunc', 'truncate', 'typeinfo',
+            'ucs4stringtowidestring', 'unicodetoutf8', 'uniquestring',
+            'upcase', 'utf8decode', 'utf8encode', 'utf8toansi',
+            'utf8tounicode', 'val', 'vararrayredim', 'varclear',
+            'widecharlentostring', 'widecharlentostrvar',
+            'widechartostring', 'widechartostrvar',
+            'widestringtoucs4string', 'write', 'writeln'
+        ),
+        'SysUtils': (
+            'abort', 'addexitproc', 'addterminateproc', 'adjustlinebreaks',
+            'allocmem', 'ansicomparefilename', 'ansicomparestr',
+            'ansicomparetext', 'ansidequotedstr', 'ansiextractquotedstr',
+            'ansilastchar', 'ansilowercase', 'ansilowercasefilename',
+            'ansipos', 'ansiquotedstr', 'ansisamestr', 'ansisametext',
+            'ansistrcomp', 'ansistricomp', 'ansistrlastchar', 'ansistrlcomp',
+            'ansistrlicomp', 'ansistrlower', 'ansistrpos', 'ansistrrscan',
+            'ansistrscan', 'ansistrupper', 'ansiuppercase',
+            'ansiuppercasefilename', 'appendstr', 'assignstr', 'beep',
+            'booltostr', 'bytetocharindex', 'bytetocharlen', 'bytetype',
+            'callterminateprocs', 'changefileext', 'charlength',
+            'chartobyteindex', 'chartobytelen', 'comparemem', 'comparestr',
+            'comparetext', 'createdir', 'createguid', 'currentyear',
+            'currtostr', 'currtostrf', 'date', 'datetimetofiledate',
+            'datetimetostr', 'datetimetostring', 'datetimetosystemtime',
+            'datetimetotimestamp', 'datetostr', 'dayofweek', 'decodedate',
+            'decodedatefully', 'decodetime', 'deletefile', 'directoryexists',
+            'diskfree', 'disksize', 'disposestr', 'encodedate', 'encodetime',
+            'exceptionerrormessage', 'excludetrailingbackslash',
+            'excludetrailingpathdelimiter', 'expandfilename',
+            'expandfilenamecase', 'expanduncfilename', 'extractfiledir',
+            'extractfiledrive', 'extractfileext', 'extractfilename',
+            'extractfilepath', 'extractrelativepath', 'extractshortpathname',
+            'fileage', 'fileclose', 'filecreate', 'filedatetodatetime',
+            'fileexists', 'filegetattr', 'filegetdate', 'fileisreadonly',
+            'fileopen', 'fileread', 'filesearch', 'fileseek', 'filesetattr',
+            'filesetdate', 'filesetreadonly', 'filewrite', 'finalizepackage',
+            'findclose', 'findcmdlineswitch', 'findfirst', 'findnext',
+            'floattocurr', 'floattodatetime', 'floattodecimal', 'floattostr',
+            'floattostrf', 'floattotext', 'floattotextfmt', 'fmtloadstr',
+            'fmtstr', 'forcedirectories', 'format', 'formatbuf', 'formatcurr',
+            'formatdatetime', 'formatfloat', 'freeandnil', 'getcurrentdir',
+            'getenvironmentvariable', 'getfileversion', 'getformatsettings',
+            'getlocaleformatsettings', 'getmodulename', 'getpackagedescription',
+            'getpackageinfo', 'gettime', 'guidtostring', 'incamonth',
+            'includetrailingbackslash', 'includetrailingpathdelimiter',
+            'incmonth', 'initializepackage', 'interlockeddecrement',
+            'interlockedexchange', 'interlockedexchangeadd',
+            'interlockedincrement', 'inttohex', 'inttostr', 'isdelimiter',
+            'isequalguid', 'isleapyear', 'ispathdelimiter', 'isvalidident',
+            'languages', 'lastdelimiter', 'loadpackage', 'loadstr',
+            'lowercase', 'msecstotimestamp', 'newstr', 'nextcharindex', 'now',
+            'outofmemoryerror', 'quotedstr', 'raiselastoserror',
+            'raiselastwin32error', 'removedir', 'renamefile', 'replacedate',
+            'replacetime', 'safeloadlibrary', 'samefilename', 'sametext',
+            'setcurrentdir', 'showexception', 'sleep', 'stralloc', 'strbufsize',
+            'strbytetype', 'strcat', 'strcharlength', 'strcomp', 'strcopy',
+            'strdispose', 'strecopy', 'strend', 'strfmt', 'stricomp',
+            'stringreplace', 'stringtoguid', 'strlcat', 'strlcomp', 'strlcopy',
+            'strlen', 'strlfmt', 'strlicomp', 'strlower', 'strmove', 'strnew',
+            'strnextchar', 'strpas', 'strpcopy', 'strplcopy', 'strpos',
+            'strrscan', 'strscan', 'strtobool', 'strtobooldef', 'strtocurr',
+            'strtocurrdef', 'strtodate', 'strtodatedef', 'strtodatetime',
+            'strtodatetimedef', 'strtofloat', 'strtofloatdef', 'strtoint',
+            'strtoint64', 'strtoint64def', 'strtointdef', 'strtotime',
+            'strtotimedef', 'strupper', 'supports', 'syserrormessage',
+            'systemtimetodatetime', 'texttofloat', 'time', 'timestamptodatetime',
+            'timestamptomsecs', 'timetostr', 'trim', 'trimleft', 'trimright',
+            'tryencodedate', 'tryencodetime', 'tryfloattocurr', 'tryfloattodatetime',
+            'trystrtobool', 'trystrtocurr', 'trystrtodate', 'trystrtodatetime',
+            'trystrtofloat', 'trystrtoint', 'trystrtoint64', 'trystrtotime',
+            'unloadpackage', 'uppercase', 'widecomparestr', 'widecomparetext',
+            'widefmtstr', 'wideformat', 'wideformatbuf', 'widelowercase',
+            'widesamestr', 'widesametext', 'wideuppercase', 'win32check',
+            'wraptext'
+        ),
+        'Classes': (
+            'activateclassgroup', 'allocatehwnd', 'bintohex', 'checksynchronize',
+            'collectionsequal', 'countgenerations', 'deallocatehwnd', 'equalrect',
+            'extractstrings', 'findclass', 'findglobalcomponent', 'getclass',
+            'groupdescendantswith', 'hextobin', 'identtoint',
+            'initinheritedcomponent', 'inttoident', 'invalidpoint',
+            'isuniqueglobalcomponentname', 'linestart', 'objectbinarytotext',
+            'objectresourcetotext', 'objecttexttobinary', 'objecttexttoresource',
+            'pointsequal', 'readcomponentres', 'readcomponentresex',
+            'readcomponentresfile', 'rect', 'registerclass', 'registerclassalias',
+            'registerclasses', 'registercomponents', 'registerintegerconsts',
+            'registernoicon', 'registernonactivex', 'smallpoint', 'startclassgroup',
+            'teststreamformat', 'unregisterclass', 'unregisterclasses',
+            'unregisterintegerconsts', 'unregistermoduleclasses',
+            'writecomponentresfile'
+        ),
+        'Math': (
+            'arccos', 'arccosh', 'arccot', 'arccoth', 'arccsc', 'arccsch', 'arcsec',
+            'arcsech', 'arcsin', 'arcsinh', 'arctan2', 'arctanh', 'ceil',
+            'comparevalue', 'cosecant', 'cosh', 'cot', 'cotan', 'coth', 'csc',
+            'csch', 'cycletodeg', 'cycletograd', 'cycletorad', 'degtocycle',
+            'degtograd', 'degtorad', 'divmod', 'doubledecliningbalance',
+            'ensurerange', 'floor', 'frexp', 'futurevalue', 'getexceptionmask',
+            'getprecisionmode', 'getroundmode', 'gradtocycle', 'gradtodeg',
+            'gradtorad', 'hypot', 'inrange', 'interestpayment', 'interestrate',
+            'internalrateofreturn', 'intpower', 'isinfinite', 'isnan', 'iszero',
+            'ldexp', 'lnxp1', 'log10', 'log2', 'logn', 'max', 'maxintvalue',
+            'maxvalue', 'mean', 'meanandstddev', 'min', 'minintvalue', 'minvalue',
+            'momentskewkurtosis', 'netpresentvalue', 'norm', 'numberofperiods',
+            'payment', 'periodpayment', 'poly', 'popnstddev', 'popnvariance',
+            'power', 'presentvalue', 'radtocycle', 'radtodeg', 'radtograd',
+            'randg', 'randomrange', 'roundto', 'samevalue', 'sec', 'secant',
+            'sech', 'setexceptionmask', 'setprecisionmode', 'setroundmode',
+            'sign', 'simpleroundto', 'sincos', 'sinh', 'slndepreciation', 'stddev',
+            'sum', 'sumint', 'sumofsquares', 'sumsandsquares', 'syddepreciation',
+            'tan', 'tanh', 'totalvariance', 'variance'
+        )
+    }
+
+    ASM_REGISTERS = {
+        'ah', 'al', 'ax', 'bh', 'bl', 'bp', 'bx', 'ch', 'cl', 'cr0',
+        'cr1', 'cr2', 'cr3', 'cr4', 'cs', 'cx', 'dh', 'di', 'dl', 'dr0',
+        'dr1', 'dr2', 'dr3', 'dr4', 'dr5', 'dr6', 'dr7', 'ds', 'dx',
+        'eax', 'ebp', 'ebx', 'ecx', 'edi', 'edx', 'es', 'esi', 'esp',
+        'fs', 'gs', 'mm0', 'mm1', 'mm2', 'mm3', 'mm4', 'mm5', 'mm6',
+        'mm7', 'si', 'sp', 'ss', 'st0', 'st1', 'st2', 'st3', 'st4', 'st5',
+        'st6', 'st7', 'xmm0', 'xmm1', 'xmm2', 'xmm3', 'xmm4', 'xmm5',
+        'xmm6', 'xmm7'
+    }
+
+    ASM_INSTRUCTIONS = {
+        'aaa', 'aad', 'aam', 'aas', 'adc', 'add', 'and', 'arpl', 'bound',
+        'bsf', 'bsr', 'bswap', 'bt', 'btc', 'btr', 'bts', 'call', 'cbw',
+        'cdq', 'clc', 'cld', 'cli', 'clts', 'cmc', 'cmova', 'cmovae',
+        'cmovb', 'cmovbe', 'cmovc', 'cmovcxz', 'cmove', 'cmovg',
+        'cmovge', 'cmovl', 'cmovle', 'cmovna', 'cmovnae', 'cmovnb',
+        'cmovnbe', 'cmovnc', 'cmovne', 'cmovng', 'cmovnge', 'cmovnl',
+        'cmovnle', 'cmovno', 'cmovnp', 'cmovns', 'cmovnz', 'cmovo',
+        'cmovp', 'cmovpe', 'cmovpo', 'cmovs', 'cmovz', 'cmp', 'cmpsb',
+        'cmpsd', 'cmpsw', 'cmpxchg', 'cmpxchg486', 'cmpxchg8b', 'cpuid',
+        'cwd', 'cwde', 'daa', 'das', 'dec', 'div', 'emms', 'enter', 'hlt',
+        'ibts', 'icebp', 'idiv', 'imul', 'in', 'inc', 'insb', 'insd',
+        'insw', 'int', 'int01', 'int03', 'int1', 'int3', 'into', 'invd',
+        'invlpg', 'iret', 'iretd', 'iretw', 'ja', 'jae', 'jb', 'jbe',
+        'jc', 'jcxz', 'jcxz', 'je', 'jecxz', 'jg', 'jge', 'jl', 'jle',
+        'jmp', 'jna', 'jnae', 'jnb', 'jnbe', 'jnc', 'jne', 'jng', 'jnge',
+        'jnl', 'jnle', 'jno', 'jnp', 'jns', 'jnz', 'jo', 'jp', 'jpe',
+        'jpo', 'js', 'jz', 'lahf', 'lar', 'lcall', 'lds', 'lea', 'leave',
+        'les', 'lfs', 'lgdt', 'lgs', 'lidt', 'ljmp', 'lldt', 'lmsw',
+        'loadall', 'loadall286', 'lock', 'lodsb', 'lodsd', 'lodsw',
+        'loop', 'loope', 'loopne', 'loopnz', 'loopz', 'lsl', 'lss', 'ltr',
+        'mov', 'movd', 'movq', 'movsb', 'movsd', 'movsw', 'movsx',
+        'movzx', 'mul', 'neg', 'nop', 'not', 'or', 'out', 'outsb', 'outsd',
+        'outsw', 'pop', 'popa', 'popad', 'popaw', 'popf', 'popfd', 'popfw',
+        'push', 'pusha', 'pushad', 'pushaw', 'pushf', 'pushfd', 'pushfw',
+        'rcl', 'rcr', 'rdmsr', 'rdpmc', 'rdshr', 'rdtsc', 'rep', 'repe',
+        'repne', 'repnz', 'repz', 'ret', 'retf', 'retn', 'rol', 'ror',
+        'rsdc', 'rsldt', 'rsm', 'sahf', 'sal', 'salc', 'sar', 'sbb',
+        'scasb', 'scasd', 'scasw', 'seta', 'setae', 'setb', 'setbe',
+        'setc', 'setcxz', 'sete', 'setg', 'setge', 'setl', 'setle',
+        'setna', 'setnae', 'setnb', 'setnbe', 'setnc', 'setne', 'setng',
+        'setnge', 'setnl', 'setnle', 'setno', 'setnp', 'setns', 'setnz',
+        'seto', 'setp', 'setpe', 'setpo', 'sets', 'setz', 'sgdt', 'shl',
+        'shld', 'shr', 'shrd', 'sidt', 'sldt', 'smi', 'smint', 'smintold',
+        'smsw', 'stc', 'std', 'sti', 'stosb', 'stosd', 'stosw', 'str',
+        'sub', 'svdc', 'svldt', 'svts', 'syscall', 'sysenter', 'sysexit',
+        'sysret', 'test', 'ud1', 'ud2', 'umov', 'verr', 'verw', 'wait',
+        'wbinvd', 'wrmsr', 'wrshr', 'xadd', 'xbts', 'xchg', 'xlat',
+        'xlatb', 'xor'
+    }
+
+    PORTUGOL_KEYWORDS = (
+        'aleatorio',
+        'algoritmo',
+        'arquivo',
+        'ate',
+        'caso',
+        'cronometro',
+        'debug',
+        'e',
+        'eco',
+        'enquanto',
+        'entao',
+        'escolha',
+        'escreva',
+        'escreval',
+        'faca',
+        'falso',
+        'fimalgoritmo',
+        'fimenquanto',
+        'fimescolha',
+        'fimfuncao',
+        'fimpara',
+        'fimprocedimento',
+        'fimrepita',
+        'fimse',
+        'funcao',
+        'inicio',
+        'int',
+        'interrompa',
+        'leia',
+        'limpatela',
+        'mod',
+        'nao',
+        'ou',
+        'outrocaso',
+        'para',
+        'passo',
+        'pausa',
+        'procedimento',
+        'repita',
+        'retorne',
+        'se',
+        'senao',
+        'timer',
+        'var',
+        'vetor',
+        'verdadeiro',
+        'xou',
+        'div',
+        'mod',
+        'abs',
+        'arccos',
+        'arcsen',
+        'arctan',
+        'cos',
+        'cotan',
+        'Exp',
+        'grauprad',
+        'int',
+        'log',
+        'logn',
+        'pi',
+        'quad',
+        'radpgrau',
+        'raizq',
+        'rand',
+        'randi',
+        'sen',
+        'Tan',
+        'asc',
+        'carac',
+        'caracpnum',
+        'compr',
+        'copia',
+        'maiusc',
+        'minusc',
+        'numpcarac',
+        'pos',
+    )
+
+    PORTUGOL_BUILTIN_TYPES = {
+        'inteiro', 'real', 'caractere', 'logico'
+    }
+
+    def __init__(self, **options):
+        Lexer.__init__(self, **options)
+        self.keywords = set()
+        self.builtins = set()
+        if get_bool_opt(options, 'portugol', False):
+            self.keywords.update(self.PORTUGOL_KEYWORDS)
+            self.builtins.update(self.PORTUGOL_BUILTIN_TYPES)
+            self.is_portugol = True
+        else:
+            self.is_portugol = False
+
+            if get_bool_opt(options, 'turbopascal', True):
+                self.keywords.update(self.TURBO_PASCAL_KEYWORDS)
+            if get_bool_opt(options, 'delphi', True):
+                self.keywords.update(self.DELPHI_KEYWORDS)
+            if get_bool_opt(options, 'freepascal', True):
+                self.keywords.update(self.FREE_PASCAL_KEYWORDS)
+            for unit in get_list_opt(options, 'units', list(self.BUILTIN_UNITS)):
+                self.builtins.update(self.BUILTIN_UNITS[unit])
+
+    def get_tokens_unprocessed(self, text):
+        scanner = Scanner(text, re.DOTALL | re.MULTILINE | re.IGNORECASE)
+        stack = ['initial']
+        in_function_block = False
+        in_property_block = False
+        was_dot = False
+        next_token_is_function = False
+        next_token_is_property = False
+        collect_labels = False
+        block_labels = set()
+        brace_balance = [0, 0]
+
+        while not scanner.eos:
+            token = Error
+
+            if stack[-1] == 'initial':
+                if scanner.scan(r'\s+'):
+                    token = Whitespace
+                elif not self.is_portugol and scanner.scan(r'\{.*?\}|\(\*.*?\*\)'):
+                    if scanner.match.startswith('$'):
+                        token = Comment.Preproc
+                    else:
+                        token = Comment.Multiline
+                elif scanner.scan(r'//.*?$'):
+                    token = Comment.Single
+                elif self.is_portugol and scanner.scan(r'(<\-)|(>=)|(<=)|%|<|>|-|\+|\*|\=|(<>)|\/|\.|:|,'):
+                    token = Operator
+                elif not self.is_portugol and scanner.scan(r'[-+*\/=<>:;,.@\^]'):
+                    token = Operator
+                    # stop label highlighting on next ";"
+                    if collect_labels and scanner.match == ';':
+                        collect_labels = False
+                elif scanner.scan(r'[\(\)\[\]]+'):
+                    token = Punctuation
+                    # abort function naming ``foo = Function(...)``
+                    next_token_is_function = False
+                    # if we are in a function block we count the open
+                    # braces because ootherwise it's impossible to
+                    # determine the end of the modifier context
+                    if in_function_block or in_property_block:
+                        if scanner.match == '(':
+                            brace_balance[0] += 1
+                        elif scanner.match == ')':
+                            brace_balance[0] -= 1
+                        elif scanner.match == '[':
+                            brace_balance[1] += 1
+                        elif scanner.match == ']':
+                            brace_balance[1] -= 1
+                elif scanner.scan(r'[A-Za-z_][A-Za-z_0-9]*'):
+                    lowercase_name = scanner.match.lower()
+                    if lowercase_name == 'result':
+                        token = Name.Builtin.Pseudo
+                    elif lowercase_name in self.keywords:
+                        token = Keyword
+                        # if we are in a special block and a
+                        # block ending keyword occurs (and the parenthesis
+                        # is balanced) we end the current block context
+                        if self.is_portugol:
+                            if lowercase_name in ('funcao', 'procedimento'):
+                                in_function_block = True
+                                next_token_is_function = True
+                        else:
+                            if (in_function_block or in_property_block) and \
+                                    lowercase_name in self.BLOCK_KEYWORDS and \
+                                    brace_balance[0] <= 0 and \
+                                    brace_balance[1] <= 0:
+                                in_function_block = False
+                                in_property_block = False
+                                brace_balance = [0, 0]
+                                block_labels = set()
+                            if lowercase_name in ('label', 'goto'):
+                                collect_labels = True
+                            elif lowercase_name == 'asm':
+                                stack.append('asm')
+                            elif lowercase_name == 'property':
+                                in_property_block = True
+                                next_token_is_property = True
+                            elif lowercase_name in ('procedure', 'operator',
+                                                    'function', 'constructor',
+                                                    'destructor'):
+                                in_function_block = True
+                                next_token_is_function = True
+                    # we are in a function block and the current name
+                    # is in the set of registered modifiers. highlight
+                    # it as pseudo keyword
+                    elif not self.is_portugol and in_function_block and \
+                            lowercase_name in self.FUNCTION_MODIFIERS:
+                        token = Keyword.Pseudo
+                    # if we are in a property highlight some more
+                    # modifiers
+                    elif not self.is_portugol and in_property_block and \
+                            lowercase_name in ('read', 'write'):
+                        token = Keyword.Pseudo
+                        next_token_is_function = True
+                    # if the last iteration set next_token_is_function
+                    # to true we now want this name highlighted as
+                    # function. so do that and reset the state
+                    elif next_token_is_function:
+                        # Look if the next token is a dot. If yes it's
+                        # not a function, but a class name and the
+                        # part after the dot a function name
+                        if not self.is_portugol and scanner.test(r'\s*\.\s*'):
+                            token = Name.Class
+                        # it's not a dot, our job is done
+                        else:
+                            token = Name.Function
+                            next_token_is_function = False
+
+                            if self.is_portugol:
+                                block_labels.add(scanner.match.lower())
+
+                    # same for properties
+                    elif not self.is_portugol and next_token_is_property:
+                        token = Name.Property
+                        next_token_is_property = False
+                    # Highlight this token as label and add it
+                    # to the list of known labels
+                    elif not self.is_portugol and collect_labels:
+                        token = Name.Label
+                        block_labels.add(scanner.match.lower())
+                    # name is in list of known labels
+                    elif lowercase_name in block_labels:
+                        token = Name.Label
+                    elif self.is_portugol and lowercase_name in self.PORTUGOL_BUILTIN_TYPES:
+                        token = Keyword.Type
+                    elif not self.is_portugol and lowercase_name in self.BUILTIN_TYPES:
+                        token = Keyword.Type
+                    elif not self.is_portugol and lowercase_name in self.DIRECTIVES:
+                        token = Keyword.Pseudo
+                    # builtins are just builtins if the token
+                    # before isn't a dot
+                    elif not self.is_portugol and not was_dot and lowercase_name in self.builtins:
+                        token = Name.Builtin
+                    else:
+                        token = Name
+                elif self.is_portugol and scanner.scan(r"\""):
+                    token = String
+                    stack.append('string')
+                elif not self.is_portugol and scanner.scan(r"'"):
+                    token = String
+                    stack.append('string')
+                elif not self.is_portugol and scanner.scan(r'\#(\d+|\$[0-9A-Fa-f]+)'):
+                    token = String.Char
+                elif not self.is_portugol and scanner.scan(r'\$[0-9A-Fa-f]+'):
+                    token = Number.Hex
+                elif scanner.scan(r'\d+(?![eE]|\.[^.])'):
+                    token = Number.Integer
+                elif scanner.scan(r'\d+(\.\d+([eE][+-]?\d+)?|[eE][+-]?\d+)'):
+                    token = Number.Float
+                else:
+                    # if the stack depth is deeper than once, pop
+                    if len(stack) > 1:
+                        stack.pop()
+                    scanner.get_char()
+
+            elif stack[-1] == 'string':
+                if self.is_portugol:
+                    if scanner.scan(r"''"):
+                        token = String.Escape
+                    elif scanner.scan(r"\""):
+                        token = String
+                        stack.pop()
+                    elif scanner.scan(r"[^\"]*"):
+                        token = String
+                    else:
+                        scanner.get_char()
+                        stack.pop()
+                else:
+                    if scanner.scan(r"''"):
+                        token = String.Escape
+                    elif scanner.scan(r"'"):
+                        token = String
+                        stack.pop()
+                    elif scanner.scan(r"[^']*"):
+                        token = String
+                    else:
+                        scanner.get_char()
+                        stack.pop()
+            elif not self.is_portugol and stack[-1] == 'asm':
+                if scanner.scan(r'\s+'):
+                    token = Whitespace
+                elif scanner.scan(r'end'):
+                    token = Keyword
+                    stack.pop()
+                elif scanner.scan(r'\{.*?\}|\(\*.*?\*\)'):
+                    if scanner.match.startswith('$'):
+                        token = Comment.Preproc
+                    else:
+                        token = Comment.Multiline
+                elif scanner.scan(r'//.*?$'):
+                    token = Comment.Single
+                elif scanner.scan(r"'"):
+                    token = String
+                    stack.append('string')
+                elif scanner.scan(r'@@[A-Za-z_][A-Za-z_0-9]*'):
+                    token = Name.Label
+                elif scanner.scan(r'[A-Za-z_][A-Za-z_0-9]*'):
+                    lowercase_name = scanner.match.lower()
+                    if lowercase_name in self.ASM_INSTRUCTIONS:
+                        token = Keyword
+                    elif lowercase_name in self.ASM_REGISTERS:
+                        token = Name.Builtin
+                    else:
+                        token = Name
+                elif scanner.scan(r'[-+*\/=<>:;,.@\^]+'):
+                    token = Operator
+                elif scanner.scan(r'[\(\)\[\]]+'):
+                    token = Punctuation
+                elif scanner.scan(r'\$[0-9A-Fa-f]+'):
+                    token = Number.Hex
+                elif scanner.scan(r'\d+(?![eE]|\.[^.])'):
+                    token = Number.Integer
+                elif scanner.scan(r'\d+(\.\d+([eE][+-]?\d+)?|[eE][+-]?\d+)'):
+                    token = Number.Float
+                else:
+                    scanner.get_char()
+                    stack.pop()
+
+            # save the dot!!!11
+            if not self.is_portugol and scanner.match.strip():
+                was_dot = scanner.match == '.'
+
+            yield scanner.start_pos, token, scanner.match or ''
diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/lexers/pawn.py b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/pawn.py
new file mode 100644
index 00000000..99d9c963
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/pawn.py
@@ -0,0 +1,202 @@
+"""
+    pygments.lexers.pawn
+    ~~~~~~~~~~~~~~~~~~~~
+
+    Lexers for the Pawn languages.
+
+    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+    Number, Punctuation
+from pygments.util import get_bool_opt
+
+__all__ = ['SourcePawnLexer', 'PawnLexer']
+
+
+class SourcePawnLexer(RegexLexer):
+    """
+    For SourcePawn source code with preprocessor directives.
+    """
+    name = 'SourcePawn'
+    aliases = ['sp']
+    filenames = ['*.sp']
+    mimetypes = ['text/x-sourcepawn']
+    url = 'https://github.com/alliedmodders/sourcepawn'
+    version_added = '1.6'
+
+    #: optional Comment or Whitespace
+    _ws = r'(?:\s|//.*?\n|/\*.*?\*/)+'
+    #: only one /* */ style comment
+    _ws1 = r'\s*(?:/[*].*?[*]/\s*)*'
+
+    tokens = {
+        'root': [
+            # preprocessor directives: without whitespace
+            (r'^#if\s+0', Comment.Preproc, 'if0'),
+            ('^#', Comment.Preproc, 'macro'),
+            # or with whitespace
+            ('^' + _ws1 + r'#if\s+0', Comment.Preproc, 'if0'),
+            ('^' + _ws1 + '#', Comment.Preproc, 'macro'),
+            (r'\n', Text),
+            (r'\s+', Text),
+            (r'\\\n', Text),  # line continuation
+            (r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single),
+            (r'/(\\\n)?\*(.|\n)*?\*(\\\n)?/', Comment.Multiline),
+            (r'[{}]', Punctuation),
+            (r'L?"', String, 'string'),
+            (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
+            (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*', Number.Float),
+            (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
+            (r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex),
+            (r'0[0-7]+[LlUu]*', Number.Oct),
+            (r'\d+[LlUu]*', Number.Integer),
+            (r'[~!%^&*+=|?:<>/-]', Operator),
+            (r'[()\[\],.;]', Punctuation),
+            (r'(case|const|continue|native|'
+             r'default|else|enum|for|if|new|operator|'
+             r'public|return|sizeof|static|decl|struct|switch)\b', Keyword),
+            (r'(bool|Float)\b', Keyword.Type),
+            (r'(true|false)\b', Keyword.Constant),
+            (r'[a-zA-Z_]\w*', Name),
+        ],
+        'string': [
+            (r'"', String, '#pop'),
+            (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
+            (r'[^\\"\n]+', String),  # all other characters
+            (r'\\\n', String),       # line continuation
+            (r'\\', String),         # stray backslash
+        ],
+        'macro': [
+            (r'[^/\n]+', Comment.Preproc),
+            (r'/\*(.|\n)*?\*/', Comment.Multiline),
+            (r'//.*?\n', Comment.Single, '#pop'),
+            (r'/', Comment.Preproc),
+            (r'(?<=\\)\n', Comment.Preproc),
+            (r'\n', Comment.Preproc, '#pop'),
+        ],
+        'if0': [
+            (r'^\s*#if.*?(?/-]', Operator),
+            (r'[()\[\],.;]', Punctuation),
+            (r'(switch|case|default|const|new|static|char|continue|break|'
+             r'if|else|for|while|do|operator|enum|'
+             r'public|return|sizeof|tagof|state|goto)\b', Keyword),
+            (r'(bool|Float)\b', Keyword.Type),
+            (r'(true|false)\b', Keyword.Constant),
+            (r'[a-zA-Z_]\w*', Name),
+        ],
+        'string': [
+            (r'"', String, '#pop'),
+            (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
+            (r'[^\\"\n]+', String),  # all other characters
+            (r'\\\n', String),       # line continuation
+            (r'\\', String),         # stray backslash
+        ],
+        'macro': [
+            (r'[^/\n]+', Comment.Preproc),
+            (r'/\*(.|\n)*?\*/', Comment.Multiline),
+            (r'//.*?\n', Comment.Single, '#pop'),
+            (r'/', Comment.Preproc),
+            (r'(?<=\\)\n', Comment.Preproc),
+            (r'\n', Comment.Preproc, '#pop'),
+        ],
+        'if0': [
+            (r'^\s*#if.*?(?<-]', Operator),
+            (r'[a-zA-Z][a-zA-Z0-9_-]*', Name),
+            (r'\?[a-zA-Z][a-zA-Z0-9_-]*', Name.Variable),
+            (r'[0-9]+\.[0-9]+', Number.Float),
+            (r'[0-9]+', Number.Integer),
+        ],
+        'keywords': [
+            (words((
+                ':requirements', ':types', ':constants',
+                ':predicates', ':functions', ':action', ':agent',
+                ':parameters', ':precondition', ':effect',
+                ':durative-action', ':duration', ':condition',
+                ':derived', ':domain', ':objects', ':init',
+                ':goal', ':metric', ':length', ':serial', ':parallel',
+                # the following are requirements
+                ':strips', ':typing', ':negative-preconditions',
+                ':disjunctive-preconditions', ':equality',
+                ':existential-preconditions', ':universal-preconditions',
+                ':conditional-effects', ':fluents', ':numeric-fluents',
+                ':object-fluents', ':adl', ':durative-actions',
+                ':continuous-effects', ':derived-predicates',
+                ':time-intial-literals', ':preferences',
+                ':constraints', ':action-costs', ':multi-agent',
+                ':unfactored-privacy', ':factored-privacy',
+                ':non-deterministic'
+                ), suffix=r'\b'), Keyword)
+        ],
+        'builtins': [
+            (words((
+                'define', 'domain', 'object', 'either', 'and',
+                'forall', 'preference', 'imply', 'or', 'exists',
+                'not', 'when', 'assign', 'scale-up', 'scale-down',
+                'increase', 'decrease', 'at', 'over', 'start',
+                'end', 'all', 'problem', 'always', 'sometime',
+                'within', 'at-most-once', 'sometime-after',
+                'sometime-before', 'always-within', 'hold-during',
+                'hold-after', 'minimize', 'maximize',
+                'total-time', 'is-violated'), suffix=r'\b'),
+                Name.Builtin)
+        ]
+    }
+
diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/lexers/perl.py b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/perl.py
new file mode 100644
index 00000000..33f91f58
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/perl.py
@@ -0,0 +1,733 @@
+"""
+    pygments.lexers.perl
+    ~~~~~~~~~~~~~~~~~~~~
+
+    Lexers for Perl, Raku and related languages.
+
+    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, ExtendedRegexLexer, include, bygroups, \
+    using, this, default, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+    Number, Punctuation, Whitespace
+from pygments.util import shebang_matches
+
+__all__ = ['PerlLexer', 'Perl6Lexer']
+
+
+class PerlLexer(RegexLexer):
+    """
+    For Perl source code.
+    """
+
+    name = 'Perl'
+    url = 'https://www.perl.org'
+    aliases = ['perl', 'pl']
+    filenames = ['*.pl', '*.pm', '*.t', '*.perl']
+    mimetypes = ['text/x-perl', 'application/x-perl']
+    version_added = ''
+
+    flags = re.DOTALL | re.MULTILINE
+    # TODO: give this to a perl guy who knows how to parse perl...
+    tokens = {
+        'balanced-regex': [
+            (r'/(\\\\|\\[^\\]|[^\\/])*/[egimosx]*', String.Regex, '#pop'),
+            (r'!(\\\\|\\[^\\]|[^\\!])*![egimosx]*', String.Regex, '#pop'),
+            (r'\\(\\\\|[^\\])*\\[egimosx]*', String.Regex, '#pop'),
+            (r'\{(\\\\|\\[^\\]|[^\\}])*\}[egimosx]*', String.Regex, '#pop'),
+            (r'<(\\\\|\\[^\\]|[^\\>])*>[egimosx]*', String.Regex, '#pop'),
+            (r'\[(\\\\|\\[^\\]|[^\\\]])*\][egimosx]*', String.Regex, '#pop'),
+            (r'\((\\\\|\\[^\\]|[^\\)])*\)[egimosx]*', String.Regex, '#pop'),
+            (r'@(\\\\|\\[^\\]|[^\\@])*@[egimosx]*', String.Regex, '#pop'),
+            (r'%(\\\\|\\[^\\]|[^\\%])*%[egimosx]*', String.Regex, '#pop'),
+            (r'\$(\\\\|\\[^\\]|[^\\$])*\$[egimosx]*', String.Regex, '#pop'),
+        ],
+        'root': [
+            (r'\A\#!.+?$', Comment.Hashbang),
+            (r'\#.*?$', Comment.Single),
+            (r'^=[a-zA-Z0-9]+\s+.*?\n=cut', Comment.Multiline),
+            (words((
+                'case', 'continue', 'do', 'else', 'elsif', 'for', 'foreach',
+                'if', 'last', 'my', 'next', 'our', 'redo', 'reset', 'then',
+                'unless', 'until', 'while', 'print', 'new', 'BEGIN',
+                'CHECK', 'INIT', 'END', 'return'), suffix=r'\b'),
+             Keyword),
+            (r'(format)(\s+)(\w+)(\s*)(=)(\s*\n)',
+             bygroups(Keyword, Whitespace, Name, Whitespace, Punctuation, Whitespace), 'format'),
+            (r'(eq|lt|gt|le|ge|ne|not|and|or|cmp)\b', Operator.Word),
+            # common delimiters
+            (r's/(\\\\|\\[^\\]|[^\\/])*/(\\\\|\\[^\\]|[^\\/])*/[egimosx]*',
+                String.Regex),
+            (r's!(\\\\|\\!|[^!])*!(\\\\|\\!|[^!])*![egimosx]*', String.Regex),
+            (r's\\(\\\\|[^\\])*\\(\\\\|[^\\])*\\[egimosx]*', String.Regex),
+            (r's@(\\\\|\\[^\\]|[^\\@])*@(\\\\|\\[^\\]|[^\\@])*@[egimosx]*',
+                String.Regex),
+            (r's%(\\\\|\\[^\\]|[^\\%])*%(\\\\|\\[^\\]|[^\\%])*%[egimosx]*',
+                String.Regex),
+            # balanced delimiters
+            (r's\{(\\\\|\\[^\\]|[^\\}])*\}\s*', String.Regex, 'balanced-regex'),
+            (r's<(\\\\|\\[^\\]|[^\\>])*>\s*', String.Regex, 'balanced-regex'),
+            (r's\[(\\\\|\\[^\\]|[^\\\]])*\]\s*', String.Regex,
+                'balanced-regex'),
+            (r's\((\\\\|\\[^\\]|[^\\)])*\)\s*', String.Regex,
+                'balanced-regex'),
+
+            (r'm?/(\\\\|\\[^\\]|[^\\/\n])*/[gcimosx]*', String.Regex),
+            (r'm(?=[/!\\{<\[(@%$])', String.Regex, 'balanced-regex'),
+            (r'((?<==~)|(?<=\())\s*/(\\\\|\\[^\\]|[^\\/])*/[gcimosx]*',
+                String.Regex),
+            (r'\s+', Whitespace),
+            (words((
+                'abs', 'accept', 'alarm', 'atan2', 'bind', 'binmode', 'bless', 'caller', 'chdir',
+                'chmod', 'chomp', 'chop', 'chown', 'chr', 'chroot', 'close', 'closedir', 'connect',
+                'continue', 'cos', 'crypt', 'dbmclose', 'dbmopen', 'defined', 'delete', 'die',
+                'dump', 'each', 'endgrent', 'endhostent', 'endnetent', 'endprotoent',
+                'endpwent', 'endservent', 'eof', 'eval', 'exec', 'exists', 'exit', 'exp', 'fcntl',
+                'fileno', 'flock', 'fork', 'format', 'formline', 'getc', 'getgrent', 'getgrgid',
+                'getgrnam', 'gethostbyaddr', 'gethostbyname', 'gethostent', 'getlogin',
+                'getnetbyaddr', 'getnetbyname', 'getnetent', 'getpeername', 'getpgrp',
+                'getppid', 'getpriority', 'getprotobyname', 'getprotobynumber',
+                'getprotoent', 'getpwent', 'getpwnam', 'getpwuid', 'getservbyname',
+                'getservbyport', 'getservent', 'getsockname', 'getsockopt', 'glob', 'gmtime',
+                'goto', 'grep', 'hex', 'import', 'index', 'int', 'ioctl', 'join', 'keys', 'kill', 'last',
+                'lc', 'lcfirst', 'length', 'link', 'listen', 'local', 'localtime', 'log', 'lstat',
+                'map', 'mkdir', 'msgctl', 'msgget', 'msgrcv', 'msgsnd', 'my', 'next', 'oct', 'open',
+                'opendir', 'ord', 'our', 'pack', 'pipe', 'pop', 'pos', 'printf',
+                'prototype', 'push', 'quotemeta', 'rand', 'read', 'readdir',
+                'readline', 'readlink', 'readpipe', 'recv', 'redo', 'ref', 'rename',
+                'reverse', 'rewinddir', 'rindex', 'rmdir', 'scalar', 'seek', 'seekdir',
+                'select', 'semctl', 'semget', 'semop', 'send', 'setgrent', 'sethostent', 'setnetent',
+                'setpgrp', 'setpriority', 'setprotoent', 'setpwent', 'setservent',
+                'setsockopt', 'shift', 'shmctl', 'shmget', 'shmread', 'shmwrite', 'shutdown',
+                'sin', 'sleep', 'socket', 'socketpair', 'sort', 'splice', 'split', 'sprintf', 'sqrt',
+                'srand', 'stat', 'study', 'substr', 'symlink', 'syscall', 'sysopen', 'sysread',
+                'sysseek', 'system', 'syswrite', 'tell', 'telldir', 'tie', 'tied', 'time', 'times', 'tr',
+                'truncate', 'uc', 'ucfirst', 'umask', 'undef', 'unlink', 'unpack', 'unshift', 'untie',
+                'utime', 'values', 'vec', 'wait', 'waitpid', 'wantarray', 'warn', 'write'), suffix=r'\b'),
+             Name.Builtin),
+            (r'((__(DATA|DIE|WARN)__)|(STD(IN|OUT|ERR)))\b', Name.Builtin.Pseudo),
+            (r'(<<)([\'"]?)([a-zA-Z_]\w*)(\2;?\n.*?\n)(\3)(\n)',
+             bygroups(String, String, String.Delimiter, String, String.Delimiter, Whitespace)),
+            (r'__END__', Comment.Preproc, 'end-part'),
+            (r'\$\^[ADEFHILMOPSTWX]', Name.Variable.Global),
+            (r"\$[\\\"\[\]'&`+*.,;=%~?@$!<>(^|/-](?!\w)", Name.Variable.Global),
+            (r'[$@%#]+', Name.Variable, 'varname'),
+            (r'0_?[0-7]+(_[0-7]+)*', Number.Oct),
+            (r'0x[0-9A-Fa-f]+(_[0-9A-Fa-f]+)*', Number.Hex),
+            (r'0b[01]+(_[01]+)*', Number.Bin),
+            (r'(?i)(\d*(_\d*)*\.\d+(_\d*)*|\d+(_\d*)*\.\d+(_\d*)*)(e[+-]?\d+)?',
+             Number.Float),
+            (r'(?i)\d+(_\d*)*e[+-]?\d+(_\d*)*', Number.Float),
+            (r'\d+(_\d+)*', Number.Integer),
+            (r"'(\\\\|\\[^\\]|[^'\\])*'", String),
+            (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
+            (r'`(\\\\|\\[^\\]|[^`\\])*`', String.Backtick),
+            (r'<([^\s>]+)>', String.Regex),
+            (r'(q|qq|qw|qr|qx)\{', String.Other, 'cb-string'),
+            (r'(q|qq|qw|qr|qx)\(', String.Other, 'rb-string'),
+            (r'(q|qq|qw|qr|qx)\[', String.Other, 'sb-string'),
+            (r'(q|qq|qw|qr|qx)\<', String.Other, 'lt-string'),
+            (r'(q|qq|qw|qr|qx)([\W_])(.|\n)*?\2', String.Other),
+            (r'(package)(\s+)([a-zA-Z_]\w*(?:::[a-zA-Z_]\w*)*)',
+             bygroups(Keyword, Whitespace, Name.Namespace)),
+            (r'(use|require|no)(\s+)([a-zA-Z_]\w*(?:::[a-zA-Z_]\w*)*)',
+             bygroups(Keyword, Whitespace, Name.Namespace)),
+            (r'(sub)(\s+)', bygroups(Keyword, Whitespace), 'funcname'),
+            (words((
+                'no', 'package', 'require', 'use'), suffix=r'\b'),
+             Keyword),
+            (r'(\[\]|\*\*|::|<<|>>|>=|<=>|<=|={3}|!=|=~|'
+             r'!~|&&?|\|\||\.{1,3})', Operator),
+            (r'[-+/*%=<>&^|!\\~]=?', Operator),
+            (r'[()\[\]:;,<>/?{}]', Punctuation),  # yes, there's no shortage
+                                                  # of punctuation in Perl!
+            (r'(?=\w)', Name, 'name'),
+        ],
+        'format': [
+            (r'\.\n', String.Interpol, '#pop'),
+            (r'[^\n]*\n', String.Interpol),
+        ],
+        'varname': [
+            (r'\s+', Whitespace),
+            (r'\{', Punctuation, '#pop'),    # hash syntax?
+            (r'\)|,', Punctuation, '#pop'),  # argument specifier
+            (r'\w+::', Name.Namespace),
+            (r'[\w:]+', Name.Variable, '#pop'),
+        ],
+        'name': [
+            (r'[a-zA-Z_]\w*(::[a-zA-Z_]\w*)*(::)?(?=\s*->)', Name.Namespace, '#pop'),
+            (r'[a-zA-Z_]\w*(::[a-zA-Z_]\w*)*::', Name.Namespace, '#pop'),
+            (r'[\w:]+', Name, '#pop'),
+            (r'[A-Z_]+(?=\W)', Name.Constant, '#pop'),
+            (r'(?=\W)', Text, '#pop'),
+        ],
+        'funcname': [
+            (r'[a-zA-Z_]\w*[!?]?', Name.Function),
+            (r'\s+', Whitespace),
+            # argument declaration
+            (r'(\([$@%]*\))(\s*)', bygroups(Punctuation, Whitespace)),
+            (r';', Punctuation, '#pop'),
+            (r'.*?\{', Punctuation, '#pop'),
+        ],
+        'cb-string': [
+            (r'\\[{}\\]', String.Other),
+            (r'\\', String.Other),
+            (r'\{', String.Other, 'cb-string'),
+            (r'\}', String.Other, '#pop'),
+            (r'[^{}\\]+', String.Other)
+        ],
+        'rb-string': [
+            (r'\\[()\\]', String.Other),
+            (r'\\', String.Other),
+            (r'\(', String.Other, 'rb-string'),
+            (r'\)', String.Other, '#pop'),
+            (r'[^()]+', String.Other)
+        ],
+        'sb-string': [
+            (r'\\[\[\]\\]', String.Other),
+            (r'\\', String.Other),
+            (r'\[', String.Other, 'sb-string'),
+            (r'\]', String.Other, '#pop'),
+            (r'[^\[\]]+', String.Other)
+        ],
+        'lt-string': [
+            (r'\\[<>\\]', String.Other),
+            (r'\\', String.Other),
+            (r'\<', String.Other, 'lt-string'),
+            (r'\>', String.Other, '#pop'),
+            (r'[^<>]+', String.Other)
+        ],
+        'end-part': [
+            (r'.+', Comment.Preproc, '#pop')
+        ]
+    }
+
+    def analyse_text(text):
+        if shebang_matches(text, r'perl'):
+            return True
+
+        result = 0
+
+        if re.search(r'(?:my|our)\s+[$@%(]', text):
+            result += 0.9
+
+        if ':=' in text:
+            # := is not valid Perl, but it appears in unicon, so we should
+            # become less confident if we think we found Perl with :=
+            result /= 2
+
+        return result
+
+
+class Perl6Lexer(ExtendedRegexLexer):
+    """
+    For Raku (a.k.a. Perl 6) source code.
+    """
+
+    name = 'Perl6'
+    url = 'https://www.raku.org'
+    aliases = ['perl6', 'pl6', 'raku']
+    filenames = ['*.pl', '*.pm', '*.nqp', '*.p6', '*.6pl', '*.p6l', '*.pl6',
+                 '*.6pm', '*.p6m', '*.pm6', '*.t', '*.raku', '*.rakumod',
+                 '*.rakutest', '*.rakudoc']
+    mimetypes = ['text/x-perl6', 'application/x-perl6']
+    version_added = '2.0'
+    flags = re.MULTILINE | re.DOTALL
+
+    PERL6_IDENTIFIER_RANGE = r"['\w:-]"
+
+    PERL6_KEYWORDS = (
+        #Phasers
+        'BEGIN','CATCH','CHECK','CLOSE','CONTROL','DOC','END','ENTER','FIRST',
+        'INIT','KEEP','LAST','LEAVE','NEXT','POST','PRE','QUIT','UNDO',
+        #Keywords
+        'anon','augment','but','class','constant','default','does','else',
+        'elsif','enum','for','gather','given','grammar','has','if','import',
+        'is','let','loop','made','make','method','module','multi','my','need',
+        'orwith','our','proceed','proto','repeat','require','return',
+        'return-rw','returns','role','rule','state','sub','submethod','subset',
+        'succeed','supersede','token','try','unit','unless','until','use',
+        'when','while','with','without',
+        #Traits
+        'export','native','repr','required','rw','symbol',
+    )
+
+    PERL6_BUILTINS = (
+        'ACCEPTS','abs','abs2rel','absolute','accept','accessed','acos',
+        'acosec','acosech','acosh','acotan','acotanh','acquire','act','action',
+        'actions','add','add_attribute','add_enum_value','add_fallback',
+        'add_method','add_parent','add_private_method','add_role','add_trustee',
+        'adverb','after','all','allocate','allof','allowed','alternative-names',
+        'annotations','antipair','antipairs','any','anyof','app_lifetime',
+        'append','arch','archname','args','arity','Array','asec','asech','asin',
+        'asinh','ASSIGN-KEY','ASSIGN-POS','assuming','ast','at','atan','atan2',
+        'atanh','AT-KEY','atomic-assign','atomic-dec-fetch','atomic-fetch',
+        'atomic-fetch-add','atomic-fetch-dec','atomic-fetch-inc',
+        'atomic-fetch-sub','atomic-inc-fetch','AT-POS','attributes','auth',
+        'await','backtrace','Bag','BagHash','bail-out','base','basename',
+        'base-repeating','batch','BIND-KEY','BIND-POS','bind-stderr',
+        'bind-stdin','bind-stdout','bind-udp','bits','bless','block','Bool',
+        'bool-only','bounds','break','Bridge','broken','BUILD','build-date',
+        'bytes','cache','callframe','calling-package','CALL-ME','callsame',
+        'callwith','can','cancel','candidates','cando','can-ok','canonpath',
+        'caps','caption','Capture','cas','catdir','categorize','categorize-list',
+        'catfile','catpath','cause','ceiling','cglobal','changed','Channel',
+        'chars','chdir','child','child-name','child-typename','chmod','chomp',
+        'chop','chr','chrs','chunks','cis','classify','classify-list','cleanup',
+        'clone','close','closed','close-stdin','cmp-ok','code','codes','collate',
+        'column','comb','combinations','command','comment','compiler','Complex',
+        'compose','compose_type','composer','condition','config',
+        'configure_destroy','configure_type_checking','conj','connect',
+        'constraints','construct','contains','contents','copy','cos','cosec',
+        'cosech','cosh','cotan','cotanh','count','count-only','cpu-cores',
+        'cpu-usage','CREATE','create_type','cross','cue','curdir','curupdir','d',
+        'Date','DateTime','day','daycount','day-of-month','day-of-week',
+        'day-of-year','days-in-month','declaration','decode','decoder','deepmap',
+        'default','defined','DEFINITE','delayed','DELETE-KEY','DELETE-POS',
+        'denominator','desc','DESTROY','destroyers','devnull','diag',
+        'did-you-mean','die','dies-ok','dir','dirname','dir-sep','DISTROnames',
+        'do','does','does-ok','done','done-testing','duckmap','dynamic','e',
+        'eager','earlier','elems','emit','enclosing','encode','encoder',
+        'encoding','end','ends-with','enum_from_value','enum_value_list',
+        'enum_values','enums','eof','EVAL','eval-dies-ok','EVALFILE',
+        'eval-lives-ok','exception','excludes-max','excludes-min','EXISTS-KEY',
+        'EXISTS-POS','exit','exitcode','exp','expected','explicitly-manage',
+        'expmod','extension','f','fail','fails-like','fc','feature','file',
+        'filename','find_method','find_method_qualified','finish','first','flat',
+        'flatmap','flip','floor','flunk','flush','fmt','format','formatter',
+        'freeze','from','from-list','from-loop','from-posix','full',
+        'full-barrier','get','get_value','getc','gist','got','grab','grabpairs',
+        'grep','handle','handled','handles','hardware','has_accessor','Hash',
+        'head','headers','hh-mm-ss','hidden','hides','hour','how','hyper','id',
+        'illegal','im','in','indent','index','indices','indir','infinite',
+        'infix','infix:<+>','infix:<->','install_method_cache','Instant',
+        'instead','Int','int-bounds','interval','in-timezone','invalid-str',
+        'invert','invocant','IO','IO::Notification.watch-path','is_trusted',
+        'is_type','isa','is-absolute','isa-ok','is-approx','is-deeply',
+        'is-hidden','is-initial-thread','is-int','is-lazy','is-leap-year',
+        'isNaN','isnt','is-prime','is-relative','is-routine','is-setting',
+        'is-win','item','iterator','join','keep','kept','KERNELnames','key',
+        'keyof','keys','kill','kv','kxxv','l','lang','last','lastcall','later',
+        'lazy','lc','leading','level','like','line','lines','link','List',
+        'listen','live','lives-ok','local','lock','log','log10','lookup','lsb',
+        'made','MAIN','make','Map','match','max','maxpairs','merge','message',
+        'method','method_table','methods','migrate','min','minmax','minpairs',
+        'minute','misplaced','Mix','MixHash','mkdir','mode','modified','month',
+        'move','mro','msb','multi','multiness','my','name','named','named_names',
+        'narrow','nativecast','native-descriptor','nativesizeof','new','new_type',
+        'new-from-daycount','new-from-pairs','next','nextcallee','next-handle',
+        'nextsame','nextwith','NFC','NFD','NFKC','NFKD','nl-in','nl-out',
+        'nodemap','nok','none','norm','not','note','now','nude','Num',
+        'numerator','Numeric','of','offset','offset-in-hours','offset-in-minutes',
+        'ok','old','on-close','one','on-switch','open','opened','operation',
+        'optional','ord','ords','orig','os-error','osname','out-buffer','pack',
+        'package','package-kind','package-name','packages','pair','pairs',
+        'pairup','parameter','params','parent','parent-name','parents','parse',
+        'parse-base','parsefile','parse-names','parts','pass','path','path-sep',
+        'payload','peer-host','peer-port','periods','perl','permutations','phaser',
+        'pick','pickpairs','pid','placeholder','plan','plus','polar','poll',
+        'polymod','pop','pos','positional','posix','postfix','postmatch',
+        'precomp-ext','precomp-target','pred','prefix','prematch','prepend',
+        'print','printf','print-nl','print-to','private','private_method_table',
+        'proc','produce','Promise','prompt','protect','pull-one','push',
+        'push-all','push-at-least','push-exactly','push-until-lazy','put',
+        'qualifier-type','quit','r','race','radix','rand','range','Rat','raw',
+        're','read','readchars','readonly','ready','Real','reallocate','reals',
+        'reason','rebless','receive','recv','redispatcher','redo','reduce',
+        'rel2abs','relative','release','rename','repeated','replacement',
+        'report','reserved','resolve','restore','result','resume','rethrow',
+        'reverse','right','rindex','rmdir','role','roles_to_compose','rolish',
+        'roll','rootdir','roots','rotate','rotor','round','roundrobin',
+        'routine-type','run','rwx','s','samecase','samemark','samewith','say',
+        'schedule-on','scheduler','scope','sec','sech','second','seek','self',
+        'send','Set','set_hidden','set_name','set_package','set_rw','set_value',
+        'SetHash','set-instruments','setup_finalization','shape','share','shell',
+        'shift','sibling','sigil','sign','signal','signals','signature','sin',
+        'sinh','sink','sink-all','skip','skip-at-least','skip-at-least-pull-one',
+        'skip-one','skip-rest','sleep','sleep-timer','sleep-until','Slip','slurp',
+        'slurp-rest','slurpy','snap','snapper','so','socket-host','socket-port',
+        'sort','source','source-package','spawn','SPEC','splice','split',
+        'splitdir','splitpath','sprintf','spurt','sqrt','squish','srand','stable',
+        'start','started','starts-with','status','stderr','stdout','Str',
+        'sub_signature','subbuf','subbuf-rw','subname','subparse','subst',
+        'subst-mutate','substr','substr-eq','substr-rw','subtest','succ','sum',
+        'Supply','symlink','t','tail','take','take-rw','tan','tanh','tap',
+        'target','target-name','tc','tclc','tell','then','throttle','throw',
+        'throws-like','timezone','tmpdir','to','today','todo','toggle','to-posix',
+        'total','trailing','trans','tree','trim','trim-leading','trim-trailing',
+        'truncate','truncated-to','trusts','try_acquire','trying','twigil','type',
+        'type_captures','typename','uc','udp','uncaught_handler','unimatch',
+        'uniname','uninames','uniparse','uniprop','uniprops','unique','unival',
+        'univals','unlike','unlink','unlock','unpack','unpolar','unshift',
+        'unwrap','updir','USAGE','use-ok','utc','val','value','values','VAR',
+        'variable','verbose-config','version','VMnames','volume','vow','w','wait',
+        'warn','watch','watch-path','week','weekday-of-month','week-number',
+        'week-year','WHAT','when','WHERE','WHEREFORE','WHICH','WHO',
+        'whole-second','WHY','wordcase','words','workaround','wrap','write',
+        'write-to','x','yada','year','yield','yyyy-mm-dd','z','zip','zip-latest',
+
+    )
+
+    PERL6_BUILTIN_CLASSES = (
+        #Booleans
+        'False','True',
+        #Classes
+        'Any','Array','Associative','AST','atomicint','Attribute','Backtrace',
+        'Backtrace::Frame','Bag','Baggy','BagHash','Blob','Block','Bool','Buf',
+        'Callable','CallFrame','Cancellation','Capture','CArray','Channel','Code',
+        'compiler','Complex','ComplexStr','Cool','CurrentThreadScheduler',
+        'Cursor','Date','Dateish','DateTime','Distro','Duration','Encoding',
+        'Exception','Failure','FatRat','Grammar','Hash','HyperWhatever','Instant',
+        'Int','int16','int32','int64','int8','IntStr','IO','IO::ArgFiles',
+        'IO::CatHandle','IO::Handle','IO::Notification','IO::Path',
+        'IO::Path::Cygwin','IO::Path::QNX','IO::Path::Unix','IO::Path::Win32',
+        'IO::Pipe','IO::Socket','IO::Socket::Async','IO::Socket::INET','IO::Spec',
+        'IO::Spec::Cygwin','IO::Spec::QNX','IO::Spec::Unix','IO::Spec::Win32',
+        'IO::Special','Iterable','Iterator','Junction','Kernel','Label','List',
+        'Lock','Lock::Async','long','longlong','Macro','Map','Match',
+        'Metamodel::AttributeContainer','Metamodel::C3MRO','Metamodel::ClassHOW',
+        'Metamodel::EnumHOW','Metamodel::Finalization','Metamodel::MethodContainer',
+        'Metamodel::MROBasedMethodDispatch','Metamodel::MultipleInheritance',
+        'Metamodel::Naming','Metamodel::Primitives','Metamodel::PrivateMethodContainer',
+        'Metamodel::RoleContainer','Metamodel::Trusting','Method','Mix','MixHash',
+        'Mixy','Mu','NFC','NFD','NFKC','NFKD','Nil','Num','num32','num64',
+        'Numeric','NumStr','ObjAt','Order','Pair','Parameter','Perl','Pod::Block',
+        'Pod::Block::Code','Pod::Block::Comment','Pod::Block::Declarator',
+        'Pod::Block::Named','Pod::Block::Para','Pod::Block::Table','Pod::Heading',
+        'Pod::Item','Pointer','Positional','PositionalBindFailover','Proc',
+        'Proc::Async','Promise','Proxy','PseudoStash','QuantHash','Range','Rat',
+        'Rational','RatStr','Real','Regex','Routine','Scalar','Scheduler',
+        'Semaphore','Seq','Set','SetHash','Setty','Signature','size_t','Slip',
+        'Stash','Str','StrDistance','Stringy','Sub','Submethod','Supplier',
+        'Supplier::Preserving','Supply','Systemic','Tap','Telemetry',
+        'Telemetry::Instrument::Thread','Telemetry::Instrument::Usage',
+        'Telemetry::Period','Telemetry::Sampler','Thread','ThreadPoolScheduler',
+        'UInt','uint16','uint32','uint64','uint8','Uni','utf8','Variable',
+        'Version','VM','Whatever','WhateverCode','WrapHandle'
+    )
+
+    PERL6_OPERATORS = (
+        'X', 'Z', 'after', 'also', 'and', 'andthen', 'before', 'cmp', 'div',
+        'eq', 'eqv', 'extra', 'ff', 'fff', 'ge', 'gt', 'le', 'leg', 'lt', 'm',
+        'mm', 'mod', 'ne', 'or', 'orelse', 'rx', 's', 'tr', 'x', 'xor', 'xx',
+        '++', '--', '**', '!', '+', '-', '~', '?', '|', '||', '+^', '~^', '?^',
+        '^', '*', '/', '%', '%%', '+&', '+<', '+>', '~&', '~<', '~>', '?&',
+        'gcd', 'lcm', '+', '-', '+|', '+^', '~|', '~^', '?|', '?^',
+        '~', '&', '^', 'but', 'does', '<=>', '..', '..^', '^..', '^..^',
+        '!=', '==', '<', '<=', '>', '>=', '~~', '===', '!eqv',
+        '&&', '||', '^^', '//', 'min', 'max', '??', '!!', 'ff', 'fff', 'so',
+        'not', '<==', '==>', '<<==', '==>>','unicmp',
+    )
+
+    # Perl 6 has a *lot* of possible bracketing characters
+    # this list was lifted from STD.pm6 (https://github.com/perl6/std)
+    PERL6_BRACKETS = {
+        '\u0028': '\u0029', '\u003c': '\u003e', '\u005b': '\u005d',
+        '\u007b': '\u007d', '\u00ab': '\u00bb', '\u0f3a': '\u0f3b',
+        '\u0f3c': '\u0f3d', '\u169b': '\u169c', '\u2018': '\u2019',
+        '\u201a': '\u2019', '\u201b': '\u2019', '\u201c': '\u201d',
+        '\u201e': '\u201d', '\u201f': '\u201d', '\u2039': '\u203a',
+        '\u2045': '\u2046', '\u207d': '\u207e', '\u208d': '\u208e',
+        '\u2208': '\u220b', '\u2209': '\u220c', '\u220a': '\u220d',
+        '\u2215': '\u29f5', '\u223c': '\u223d', '\u2243': '\u22cd',
+        '\u2252': '\u2253', '\u2254': '\u2255', '\u2264': '\u2265',
+        '\u2266': '\u2267', '\u2268': '\u2269', '\u226a': '\u226b',
+        '\u226e': '\u226f', '\u2270': '\u2271', '\u2272': '\u2273',
+        '\u2274': '\u2275', '\u2276': '\u2277', '\u2278': '\u2279',
+        '\u227a': '\u227b', '\u227c': '\u227d', '\u227e': '\u227f',
+        '\u2280': '\u2281', '\u2282': '\u2283', '\u2284': '\u2285',
+        '\u2286': '\u2287', '\u2288': '\u2289', '\u228a': '\u228b',
+        '\u228f': '\u2290', '\u2291': '\u2292', '\u2298': '\u29b8',
+        '\u22a2': '\u22a3', '\u22a6': '\u2ade', '\u22a8': '\u2ae4',
+        '\u22a9': '\u2ae3', '\u22ab': '\u2ae5', '\u22b0': '\u22b1',
+        '\u22b2': '\u22b3', '\u22b4': '\u22b5', '\u22b6': '\u22b7',
+        '\u22c9': '\u22ca', '\u22cb': '\u22cc', '\u22d0': '\u22d1',
+        '\u22d6': '\u22d7', '\u22d8': '\u22d9', '\u22da': '\u22db',
+        '\u22dc': '\u22dd', '\u22de': '\u22df', '\u22e0': '\u22e1',
+        '\u22e2': '\u22e3', '\u22e4': '\u22e5', '\u22e6': '\u22e7',
+        '\u22e8': '\u22e9', '\u22ea': '\u22eb', '\u22ec': '\u22ed',
+        '\u22f0': '\u22f1', '\u22f2': '\u22fa', '\u22f3': '\u22fb',
+        '\u22f4': '\u22fc', '\u22f6': '\u22fd', '\u22f7': '\u22fe',
+        '\u2308': '\u2309', '\u230a': '\u230b', '\u2329': '\u232a',
+        '\u23b4': '\u23b5', '\u2768': '\u2769', '\u276a': '\u276b',
+        '\u276c': '\u276d', '\u276e': '\u276f', '\u2770': '\u2771',
+        '\u2772': '\u2773', '\u2774': '\u2775', '\u27c3': '\u27c4',
+        '\u27c5': '\u27c6', '\u27d5': '\u27d6', '\u27dd': '\u27de',
+        '\u27e2': '\u27e3', '\u27e4': '\u27e5', '\u27e6': '\u27e7',
+        '\u27e8': '\u27e9', '\u27ea': '\u27eb', '\u2983': '\u2984',
+        '\u2985': '\u2986', '\u2987': '\u2988', '\u2989': '\u298a',
+        '\u298b': '\u298c', '\u298d': '\u298e', '\u298f': '\u2990',
+        '\u2991': '\u2992', '\u2993': '\u2994', '\u2995': '\u2996',
+        '\u2997': '\u2998', '\u29c0': '\u29c1', '\u29c4': '\u29c5',
+        '\u29cf': '\u29d0', '\u29d1': '\u29d2', '\u29d4': '\u29d5',
+        '\u29d8': '\u29d9', '\u29da': '\u29db', '\u29f8': '\u29f9',
+        '\u29fc': '\u29fd', '\u2a2b': '\u2a2c', '\u2a2d': '\u2a2e',
+        '\u2a34': '\u2a35', '\u2a3c': '\u2a3d', '\u2a64': '\u2a65',
+        '\u2a79': '\u2a7a', '\u2a7d': '\u2a7e', '\u2a7f': '\u2a80',
+        '\u2a81': '\u2a82', '\u2a83': '\u2a84', '\u2a8b': '\u2a8c',
+        '\u2a91': '\u2a92', '\u2a93': '\u2a94', '\u2a95': '\u2a96',
+        '\u2a97': '\u2a98', '\u2a99': '\u2a9a', '\u2a9b': '\u2a9c',
+        '\u2aa1': '\u2aa2', '\u2aa6': '\u2aa7', '\u2aa8': '\u2aa9',
+        '\u2aaa': '\u2aab', '\u2aac': '\u2aad', '\u2aaf': '\u2ab0',
+        '\u2ab3': '\u2ab4', '\u2abb': '\u2abc', '\u2abd': '\u2abe',
+        '\u2abf': '\u2ac0', '\u2ac1': '\u2ac2', '\u2ac3': '\u2ac4',
+        '\u2ac5': '\u2ac6', '\u2acd': '\u2ace', '\u2acf': '\u2ad0',
+        '\u2ad1': '\u2ad2', '\u2ad3': '\u2ad4', '\u2ad5': '\u2ad6',
+        '\u2aec': '\u2aed', '\u2af7': '\u2af8', '\u2af9': '\u2afa',
+        '\u2e02': '\u2e03', '\u2e04': '\u2e05', '\u2e09': '\u2e0a',
+        '\u2e0c': '\u2e0d', '\u2e1c': '\u2e1d', '\u2e20': '\u2e21',
+        '\u3008': '\u3009', '\u300a': '\u300b', '\u300c': '\u300d',
+        '\u300e': '\u300f', '\u3010': '\u3011', '\u3014': '\u3015',
+        '\u3016': '\u3017', '\u3018': '\u3019', '\u301a': '\u301b',
+        '\u301d': '\u301e', '\ufd3e': '\ufd3f', '\ufe17': '\ufe18',
+        '\ufe35': '\ufe36', '\ufe37': '\ufe38', '\ufe39': '\ufe3a',
+        '\ufe3b': '\ufe3c', '\ufe3d': '\ufe3e', '\ufe3f': '\ufe40',
+        '\ufe41': '\ufe42', '\ufe43': '\ufe44', '\ufe47': '\ufe48',
+        '\ufe59': '\ufe5a', '\ufe5b': '\ufe5c', '\ufe5d': '\ufe5e',
+        '\uff08': '\uff09', '\uff1c': '\uff1e', '\uff3b': '\uff3d',
+        '\uff5b': '\uff5d', '\uff5f': '\uff60', '\uff62': '\uff63',
+    }
+
+    def _build_word_match(words, boundary_regex_fragment=None, prefix='', suffix=''):
+        if boundary_regex_fragment is None:
+            return r'\b(' + prefix + r'|'.join(re.escape(x) for x in words) + \
+                suffix + r')\b'
+        else:
+            return r'(? 0:
+                    next_open_pos = text.find(opening_chars, search_pos + n_chars)
+                    next_close_pos = text.find(closing_chars, search_pos + n_chars)
+
+                    if next_close_pos == -1:
+                        next_close_pos = len(text)
+                        nesting_level = 0
+                    elif next_open_pos != -1 and next_open_pos < next_close_pos:
+                        nesting_level += 1
+                        search_pos = next_open_pos
+                    else:  # next_close_pos < next_open_pos
+                        nesting_level -= 1
+                        search_pos = next_close_pos
+
+                end_pos = next_close_pos
+
+            if end_pos < 0:     # if we didn't find a closer, just highlight the
+                                # rest of the text in this class
+                end_pos = len(text)
+
+            if adverbs is not None and re.search(r':to\b', adverbs):
+                heredoc_terminator = text[match.start('delimiter') + n_chars:end_pos]
+                end_heredoc = re.search(r'^\s*' + re.escape(heredoc_terminator) +
+                                        r'\s*$', text[end_pos:], re.MULTILINE)
+
+                if end_heredoc:
+                    end_pos += end_heredoc.end()
+                else:
+                    end_pos = len(text)
+
+            yield match.start(), token_class, text[match.start():end_pos + n_chars]
+            context.pos = end_pos + n_chars
+
+        return callback
+
+    def opening_brace_callback(lexer, match, context):
+        stack = context.stack
+
+        yield match.start(), Text, context.text[match.start():match.end()]
+        context.pos = match.end()
+
+        # if we encounter an opening brace and we're one level
+        # below a token state, it means we need to increment
+        # the nesting level for braces so we know later when
+        # we should return to the token rules.
+        if len(stack) > 2 and stack[-2] == 'token':
+            context.perl6_token_nesting_level += 1
+
+    def closing_brace_callback(lexer, match, context):
+        stack = context.stack
+
+        yield match.start(), Text, context.text[match.start():match.end()]
+        context.pos = match.end()
+
+        # if we encounter a free closing brace and we're one level
+        # below a token state, it means we need to check the nesting
+        # level to see if we need to return to the token state.
+        if len(stack) > 2 and stack[-2] == 'token':
+            context.perl6_token_nesting_level -= 1
+            if context.perl6_token_nesting_level == 0:
+                stack.pop()
+
+    def embedded_perl6_callback(lexer, match, context):
+        context.perl6_token_nesting_level = 1
+        yield match.start(), Text, context.text[match.start():match.end()]
+        context.pos = match.end()
+        context.stack.append('root')
+
+    # If you're modifying these rules, be careful if you need to process '{' or '}'
+    # characters. We have special logic for processing these characters (due to the fact
+    # that you can nest Perl 6 code in regex blocks), so if you need to process one of
+    # them, make sure you also process the corresponding one!
+    tokens = {
+        'common': [
+            (r'#[`|=](?P(?P[' + ''.join(PERL6_BRACKETS) + r'])(?P=first_char)*)',
+             brackets_callback(Comment.Multiline)),
+            (r'#[^\n]*$', Comment.Single),
+            (r'^(\s*)=begin\s+(\w+)\b.*?^\1=end\s+\2', Comment.Multiline),
+            (r'^(\s*)=for.*?\n\s*?\n', Comment.Multiline),
+            (r'^=.*?\n\s*?\n', Comment.Multiline),
+            (r'(regex|token|rule)(\s*' + PERL6_IDENTIFIER_RANGE + '+:sym)',
+             bygroups(Keyword, Name), 'token-sym-brackets'),
+            (r'(regex|token|rule)(?!' + PERL6_IDENTIFIER_RANGE + r')(\s*' + PERL6_IDENTIFIER_RANGE + '+)?',
+             bygroups(Keyword, Name), 'pre-token'),
+            # deal with a special case in the Perl 6 grammar (role q { ... })
+            (r'(role)(\s+)(q)(\s*)', bygroups(Keyword, Whitespace, Name, Whitespace)),
+            (_build_word_match(PERL6_KEYWORDS, PERL6_IDENTIFIER_RANGE), Keyword),
+            (_build_word_match(PERL6_BUILTIN_CLASSES, PERL6_IDENTIFIER_RANGE, suffix='(?::[UD])?'),
+             Name.Builtin),
+            (_build_word_match(PERL6_BUILTINS, PERL6_IDENTIFIER_RANGE), Name.Builtin),
+            # copied from PerlLexer
+            (r'[$@%&][.^:?=!~]?' + PERL6_IDENTIFIER_RANGE + '+(?:<<.*?>>|<.*?>|«.*?»)*',
+             Name.Variable),
+            (r'\$[!/](?:<<.*?>>|<.*?>|«.*?»)*', Name.Variable.Global),
+            (r'::\?\w+', Name.Variable.Global),
+            (r'[$@%&]\*' + PERL6_IDENTIFIER_RANGE + '+(?:<<.*?>>|<.*?>|«.*?»)*',
+             Name.Variable.Global),
+            (r'\$(?:<.*?>)+', Name.Variable),
+            (r'(?:q|qq|Q)[a-zA-Z]?\s*(?P:[\w\s:]+)?\s*(?P(?P[^0-9a-zA-Z:\s])'
+             r'(?P=first_char)*)', brackets_callback(String)),
+            # copied from PerlLexer
+            (r'0_?[0-7]+(_[0-7]+)*', Number.Oct),
+            (r'0x[0-9A-Fa-f]+(_[0-9A-Fa-f]+)*', Number.Hex),
+            (r'0b[01]+(_[01]+)*', Number.Bin),
+            (r'(?i)(\d*(_\d*)*\.\d+(_\d*)*|\d+(_\d*)*\.\d+(_\d*)*)(e[+-]?\d+)?',
+             Number.Float),
+            (r'(?i)\d+(_\d*)*e[+-]?\d+(_\d*)*', Number.Float),
+            (r'\d+(_\d+)*', Number.Integer),
+            (r'(?<=~~)\s*/(?:\\\\|\\/|.)*?/', String.Regex),
+            (r'(?<=[=(,])\s*/(?:\\\\|\\/|.)*?/', String.Regex),
+            (r'm\w+(?=\()', Name),
+            (r'(?:m|ms|rx)\s*(?P:[\w\s:]+)?\s*(?P(?P[^\w:\s])'
+             r'(?P=first_char)*)', brackets_callback(String.Regex)),
+            (r'(?:s|ss|tr)\s*(?::[\w\s:]+)?\s*/(?:\\\\|\\/|.)*?/(?:\\\\|\\/|.)*?/',
+             String.Regex),
+            (r'<[^\s=].*?\S>', String),
+            (_build_word_match(PERL6_OPERATORS), Operator),
+            (r'\w' + PERL6_IDENTIFIER_RANGE + '*', Name),
+            (r"'(\\\\|\\[^\\]|[^'\\])*'", String),
+            (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
+        ],
+        'root': [
+            include('common'),
+            (r'\{', opening_brace_callback),
+            (r'\}', closing_brace_callback),
+            (r'.+?', Text),
+        ],
+        'pre-token': [
+            include('common'),
+            (r'\{', Text, ('#pop', 'token')),
+            (r'.+?', Text),
+        ],
+        'token-sym-brackets': [
+            (r'(?P(?P[' + ''.join(PERL6_BRACKETS) + '])(?P=first_char)*)',
+             brackets_callback(Name), ('#pop', 'pre-token')),
+            default(('#pop', 'pre-token')),
+        ],
+        'token': [
+            (r'\}', Text, '#pop'),
+            (r'(?<=:)(?:my|our|state|constant|temp|let).*?;', using(this)),
+            # make sure that quotes in character classes aren't treated as strings
+            (r'<(?:[-!?+.]\s*)?\[.*?\]>', String.Regex),
+            # make sure that '#' characters in quotes aren't treated as comments
+            (r"(?my|our)\s+)?(?:module|class|role|enum|grammar)', line)
+            if class_decl:
+                if saw_perl_decl or class_decl.group('scope') is not None:
+                    return True
+                rating = 0.05
+                continue
+            break
+
+        if ':=' in text:
+            # Same logic as above for PerlLexer
+            rating /= 2
+
+        return rating
+
+    def __init__(self, **options):
+        super().__init__(**options)
+        self.encoding = options.get('encoding', 'utf-8')
diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/lexers/phix.py b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/phix.py
new file mode 100644
index 00000000..f0b03775
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/phix.py
@@ -0,0 +1,363 @@
+"""
+    pygments.lexers.phix
+    ~~~~~~~~~~~~~~~~~~~~
+
+    Lexers for Phix.
+
+    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+    Whitespace
+
+__all__ = ['PhixLexer']
+
+
+class PhixLexer(RegexLexer):
+    """
+    Pygments Lexer for Phix files (.exw).
+    See http://phix.x10.mx
+    """
+
+    name = 'Phix'
+    url = 'http://phix.x10.mx'
+    aliases = ['phix']
+    filenames = ['*.exw']
+    mimetypes = ['text/x-phix']
+    version_added = '2.14'
+
+    flags = re.MULTILINE    # nb: **NOT** re.DOTALL! (totally spanners comment handling)
+
+    preproc = (
+        'ifdef', 'elsifdef', 'elsedef'
+    )
+    # Note these lists are auto-generated by pwa/p2js.exw, when pwa\src\p2js_keywords.e (etc)
+    #     change, though of course subsequent copy/commit/pull requests are all manual steps.
+    types = (
+        'string', 'nullable_string', 'atom_string', 'atom', 'bool', 'boolean',
+        'cdCanvan', 'cdCanvas', 'complex', 'CURLcode', 'dictionary', 'int',
+        'integer', 'Ihandle', 'Ihandles', 'Ihandln', 'mpfr', 'mpq', 'mpz',
+        'mpz_or_string', 'number', 'rid_string', 'seq', 'sequence', 'timedate',
+        'object'
+    )
+    keywords = (
+        'abstract', 'class', 'continue', 'export', 'extends', 'nullable',
+        'private', 'public', 'static', 'struct', 'trace',
+        'and', 'break', 'by', 'case', 'catch', 'const', 'constant', 'debug',
+        'default', 'do', 'else', 'elsif', 'end', 'enum', 'exit', 'fallthru',
+        'fallthrough', 'for', 'forward', 'function', 'global', 'if', 'in',
+        'include', 'js', 'javascript', 'javascript_semantics', 'let', 'not',
+        'or', 'procedure', 'profile', 'profile_time', 'return', 'safe_mode',
+        'switch', 'then', 'to', 'try', 'type', 'type_check', 'until', 'warning',
+        'while', 'with', 'without', 'xor'
+    )
+    routines = (
+        'abort', 'abs', 'adjust_timedate', 'and_bits', 'and_bitsu', 'apply',
+        'append', 'arccos', 'arcsin', 'arctan', 'assert', 'atan2',
+        'atom_to_float32', 'atom_to_float64', 'bankers_rounding', 'beep',
+        'begins', 'binary_search', 'bits_to_int', 'bk_color', 'bytes_to_int',
+        'call_func', 'call_proc', 'cdCanvasActivate', 'cdCanvasArc',
+        'cdCanvasBegin', 'cdCanvasBox', 'cdCanvasChord', 'cdCanvasCircle',
+        'cdCanvasClear', 'cdCanvasEnd', 'cdCanvasFlush', 'cdCanvasFont',
+        'cdCanvasGetImageRGB', 'cdCanvasGetSize', 'cdCanvasGetTextAlignment',
+        'cdCanvasGetTextSize', 'cdCanvasLine', 'cdCanvasMark',
+        'cdCanvasMarkSize', 'cdCanvasMultiLineVectorText', 'cdCanvasPixel',
+        'cdCanvasRect', 'cdCanvasRoundedBox', 'cdCanvasRoundedRect',
+        'cdCanvasSector', 'cdCanvasSetAttribute', 'cdCanvasSetBackground',
+        'cdCanvasSetFillMode', 'cdCanvasSetForeground',
+        'cdCanvasSetInteriorStyle', 'cdCanvasSetLineStyle',
+        'cdCanvasSetLineWidth', 'cdCanvasSetTextAlignment', 'cdCanvasText',
+        'cdCanvasSetTextOrientation', 'cdCanvasGetTextOrientation',
+        'cdCanvasVectorText', 'cdCanvasVectorTextDirection',
+        'cdCanvasVectorTextSize', 'cdCanvasVertex', 'cdCreateCanvas',
+        'cdDecodeAlpha', 'cdDecodeColor', 'cdDecodeColorAlpha', 'cdEncodeAlpha',
+        'cdEncodeColor', 'cdEncodeColorAlpha', 'cdKillCanvas', 'cdVersion',
+        'cdVersionDate', 'ceil', 'change_timezone', 'choose', 'clear_screen',
+        'columnize', 'command_line', 'compare', 'complex_abs', 'complex_add',
+        'complex_arg', 'complex_conjugate', 'complex_cos', 'complex_cosh',
+        'complex_div', 'complex_exp', 'complex_imag', 'complex_inv',
+        'complex_log', 'complex_mul', 'complex_neg', 'complex_new',
+        'complex_norm', 'complex_power', 'complex_rho', 'complex_real',
+        'complex_round', 'complex_sin', 'complex_sinh', 'complex_sprint',
+        'complex_sqrt', 'complex_sub', 'complex_theta', 'concat', 'cos',
+        'crash', 'custom_sort', 'date', 'day_of_week', 'day_of_year',
+        'days_in_month', 'decode_base64', 'decode_flags', 'deep_copy', 'deld',
+        'deserialize', 'destroy_dict', 'destroy_queue', 'destroy_stack',
+        'dict_name', 'dict_size', 'elapsed', 'elapsed_short', 'encode_base64',
+        'equal', 'even', 'exp', 'extract', 'factorial', 'factors',
+        'file_size_k', 'find', 'find_all', 'find_any', 'find_replace', 'filter',
+        'flatten', 'float32_to_atom', 'float64_to_atom', 'floor',
+        'format_timedate', 'free_console', 'from_polar', 'gcd', 'get_file_base',
+        'get_file_extension', 'get_file_name', 'get_file_name_and_path',
+        'get_file_path', 'get_file_path_and_name', 'get_maxprime', 'get_prime',
+        'get_primes', 'get_primes_le', 'get_proper_dir', 'get_proper_path',
+        'get_rand', 'get_routine_info', 'get_test_abort', 'get_test_logfile',
+        'get_test_pause', 'get_test_verbosity', 'get_tzid', 'getd', 'getdd',
+        'getd_all_keys', 'getd_by_index', 'getd_index', 'getd_partial_key',
+        'glAttachShader', 'glBindBuffer', 'glBindTexture', 'glBufferData',
+        'glCanvasSpecialText', 'glClear', 'glClearColor', 'glColor',
+        'glCompileShader', 'glCreateBuffer', 'glCreateProgram',
+        'glCreateShader', 'glCreateTexture', 'glDeleteProgram',
+        'glDeleteShader', 'glDrawArrays', 'glEnable',
+        'glEnableVertexAttribArray', 'glFloat32Array', 'glInt32Array',
+        'glFlush', 'glGetAttribLocation', 'glGetError', 'glGetProgramInfoLog',
+        'glGetProgramParameter', 'glGetShaderInfoLog', 'glGetShaderParameter',
+        'glGetUniformLocation', 'glLinkProgram', 'glLoadIdentity',
+        'glMatrixMode', 'glOrtho', 'glRotatef', 'glShadeModel',
+        'glShaderSource', 'glSimpleA7texcoords', 'glTexImage2Dc',
+        'glTexParameteri', 'glTranslate', 'glUniform1f', 'glUniform1i',
+        'glUniformMatrix4fv', 'glUseProgram', 'glVertex',
+        'glVertexAttribPointer', 'glViewport', 'head', 'hsv_to_rgb', 'iff',
+        'iif', 'include_file', 'incl0de_file', 'insert', 'instance',
+        'int_to_bits', 'int_to_bytes', 'is_dict', 'is_integer', 's_leap_year',
+        'is_prime', 'is_prime2', 'islower', 'isupper', 'Icallback',
+        'iup_isdouble', 'iup_isprint', 'iup_XkeyBase', 'IupAppend', 'IupAlarm',
+        'IupBackgroundBox', 'IupButton', 'IupCalendar', 'IupCanvas',
+        'IupClipboard', 'IupClose', 'IupCloseOnEscape', 'IupControlsOpen',
+        'IupDatePick', 'IupDestroy', 'IupDialog', 'IupDrawArc', 'IupDrawBegin',
+        'IupDrawEnd', 'IupDrawGetSize', 'IupDrawGetTextSize', 'IupDrawLine',
+        'IupDrawRectangle', 'IupDrawText', 'IupExpander', 'IupFill',
+        'IupFlatLabel', 'IupFlatList', 'IupFlatTree', 'IupFlush', 'IupFrame',
+        'IupGetAttribute', 'IupGetAttributeId', 'IupGetAttributePtr',
+        'IupGetBrother', 'IupGetChild', 'IupGetChildCount', 'IupGetClassName',
+        'IupGetDialog', 'IupGetDialogChild', 'IupGetDouble', 'IupGetFocus',
+        'IupGetGlobal', 'IupGetGlobalInt', 'IupGetGlobalIntInt', 'IupGetInt',
+        'IupGetInt2', 'IupGetIntId', 'IupGetIntInt', 'IupGetParent',
+        'IupGLCanvas', 'IupGLCanvasOpen', 'IupGLMakeCurrent', 'IupGraph',
+        'IupHbox', 'IupHide', 'IupImage', 'IupImageRGBA', 'IupItem',
+        'iupKeyCodeToName', 'IupLabel', 'IupLink', 'IupList', 'IupMap',
+        'IupMenu', 'IupMenuItem', 'IupMessage', 'IupMessageDlg', 'IupMultiBox',
+        'IupMultiLine', 'IupNextField', 'IupNormaliser', 'IupOpen',
+        'IupPlayInput', 'IupPopup', 'IupPreviousField', 'IupProgressBar',
+        'IupRadio', 'IupRecordInput', 'IupRedraw', 'IupRefresh',
+        'IupRefreshChildren', 'IupSeparator', 'IupSetAttribute',
+        'IupSetAttributes', 'IupSetAttributeHandle', 'IupSetAttributeId',
+        'IupSetAttributePtr', 'IupSetCallback', 'IupSetCallbacks',
+        'IupSetDouble', 'IupSetFocus', 'IupSetGlobal', 'IupSetGlobalInt',
+        'IupSetGlobalFunction', 'IupSetHandle', 'IupSetInt',
+        'IupSetStrAttribute', 'IupSetStrGlobal', 'IupShow', 'IupShowXY',
+        'IupSplit', 'IupStoreAttribute', 'IupSubmenu', 'IupTable',
+        'IupTableClearSelected', 'IupTableClick_cb', 'IupTableGetSelected',
+        'IupTableResize_cb', 'IupTableSetData', 'IupTabs', 'IupText',
+        'IupTimer', 'IupToggle', 'IupTreeAddNodes', 'IupTreeView', 'IupUpdate',
+        'IupValuator', 'IupVbox', 'join', 'join_by', 'join_path', 'k_perm',
+        'largest', 'lcm', 'length', 'log', 'log10', 'log2', 'lower',
+        'm4_crossProduct', 'm4_inverse', 'm4_lookAt', 'm4_multiply',
+        'm4_normalize', 'm4_perspective', 'm4_subtractVectors', 'm4_xRotate',
+        'm4_yRotate', 'machine_bits', 'machine_word', 'match', 'match_all',
+        'match_replace', 'max', 'maxsq', 'min', 'minsq', 'mod', 'mpfr_add',
+        'mpfr_ceil', 'mpfr_cmp', 'mpfr_cmp_si', 'mpfr_const_pi', 'mpfr_div',
+        'mpfr_div_si', 'mpfr_div_z', 'mpfr_floor', 'mpfr_free', 'mpfr_get_d',
+        'mpfr_get_default_precision', 'mpfr_get_default_rounding_mode',
+        'mpfr_get_fixed', 'mpfr_get_precision', 'mpfr_get_si', 'mpfr_init',
+        'mpfr_inits', 'mpfr_init_set', 'mpfr_init_set_q', 'mpfr_init_set_z',
+        'mpfr_mul', 'mpfr_mul_si', 'mpfr_pow_si', 'mpfr_set', 'mpfr_set_d',
+        'mpfr_set_default_precision', 'mpfr_set_default_rounding_mode',
+        'mpfr_set_precision', 'mpfr_set_q', 'mpfr_set_si', 'mpfr_set_str',
+        'mpfr_set_z', 'mpfr_si_div', 'mpfr_si_sub', 'mpfr_sqrt', 'mpfr_sub',
+        'mpfr_sub_si', 'mpq_abs', 'mpq_add', 'mpq_add_si', 'mpq_canonicalize',
+        'mpq_cmp', 'mpq_cmp_si', 'mpq_div', 'mpq_div_2exp', 'mpq_free',
+        'mpq_get_den', 'mpq_get_num', 'mpq_get_str', 'mpq_init', 'mpq_init_set',
+        'mpq_init_set_si', 'mpq_init_set_str', 'mpq_init_set_z', 'mpq_inits',
+        'mpq_inv', 'mpq_mul', 'mpq_neg', 'mpq_set', 'mpq_set_si', 'mpq_set_str',
+        'mpq_set_z', 'mpq_sub', 'mpz_abs', 'mpz_add', 'mpz_addmul',
+        'mpz_addmul_ui', 'mpz_addmul_si', 'mpz_add_si', 'mpz_add_ui', 'mpz_and',
+        'mpz_bin_uiui', 'mpz_cdiv_q', 'mpz_cmp', 'mpz_cmp_si', 'mpz_divexact',
+        'mpz_divexact_ui', 'mpz_divisible_p', 'mpz_divisible_ui_p', 'mpz_even',
+        'mpz_fac_ui', 'mpz_factorstring', 'mpz_fdiv_q', 'mpz_fdiv_q_2exp',
+        'mpz_fdiv_q_ui', 'mpz_fdiv_qr', 'mpz_fdiv_r', 'mpz_fdiv_ui',
+        'mpz_fib_ui', 'mpz_fib2_ui', 'mpz_fits_atom', 'mpz_fits_integer',
+        'mpz_free', 'mpz_gcd', 'mpz_gcd_ui', 'mpz_get_atom', 'mpz_get_integer',
+        'mpz_get_short_str', 'mpz_get_str', 'mpz_init', 'mpz_init_set',
+        'mpz_inits', 'mpz_invert', 'mpz_lcm', 'mpz_lcm_ui', 'mpz_max',
+        'mpz_min', 'mpz_mod', 'mpz_mod_ui', 'mpz_mul', 'mpz_mul_2exp',
+        'mpz_mul_d', 'mpz_mul_si', 'mpz_neg', 'mpz_nthroot', 'mpz_odd',
+        'mpz_pollard_rho', 'mpz_pow_ui', 'mpz_powm', 'mpz_powm_ui', 'mpz_prime',
+        'mpz_prime_factors', 'mpz_prime_mr', 'mpz_rand', 'mpz_rand_ui',
+        'mpz_re_compose', 'mpz_remove', 'mpz_scan0', 'mpz_scan1', 'mpz_set',
+        'mpz_set_d', 'mpz_set_si', 'mpz_set_str', 'mpz_set_v', 'mpz_sign',
+        'mpz_sizeinbase', 'mpz_sqrt', 'mpz_sub', 'mpz_sub_si', 'mpz_sub_ui',
+        'mpz_si_sub', 'mpz_tdiv_q_2exp', 'mpz_tdiv_r_2exp', 'mpz_tstbit',
+        'mpz_ui_pow_ui', 'mpz_xor', 'named_dict', 'new_dict', 'new_queue',
+        'new_stack', 'not_bits', 'not_bitsu', 'odd', 'or_all', 'or_allu',
+        'or_bits', 'or_bitsu', 'ord', 'ordinal', 'ordinant',
+        'override_timezone', 'pad', 'pad_head', 'pad_tail', 'parse_date_string',
+        'papply', 'peep', 'peepn', 'peep_dict', 'permute', 'permutes',
+        'platform', 'pop', 'popn', 'pop_dict', 'power', 'pp', 'ppEx', 'ppExf',
+        'ppf', 'ppOpt', 'pq_add', 'pq_destroy', 'pq_empty', 'pq_new', 'pq_peek',
+        'pq_pop', 'pq_pop_data', 'pq_size', 'prepend', 'prime_factors',
+        'printf', 'product', 'proper', 'push', 'pushn', 'putd', 'puts',
+        'queue_empty', 'queue_size', 'rand', 'rand_range', 'reinstate',
+        'remainder', 'remove', 'remove_all', 'repeat', 'repeatch', 'replace',
+        'requires', 'reverse', 'rfind', 'rgb', 'rmatch', 'rmdr', 'rnd', 'round',
+        'routine_id', 'scanf', 'serialize', 'series', 'set_rand',
+        'set_test_abort', 'set_test_logfile', 'set_test_module',
+        'set_test_pause', 'set_test_verbosity', 'set_timedate_formats',
+        'set_timezone', 'setd', 'setd_default', 'shorten', 'sha256',
+        'shift_bits', 'shuffle', 'sign', 'sin', 'smallest', 'sort',
+        'sort_columns', 'speak', 'splice', 'split', 'split_any', 'split_by',
+        'sprint', 'sprintf', 'sq_abs', 'sq_add', 'sq_and', 'sq_and_bits',
+        'sq_arccos', 'sq_arcsin', 'sq_arctan', 'sq_atom', 'sq_ceil', 'sq_cmp',
+        'sq_cos', 'sq_div', 'sq_even', 'sq_eq', 'sq_floor', 'sq_floor_div',
+        'sq_ge', 'sq_gt', 'sq_int', 'sq_le', 'sq_log', 'sq_log10', 'sq_log2',
+        'sq_lt', 'sq_max', 'sq_min', 'sq_mod', 'sq_mul', 'sq_ne', 'sq_not',
+        'sq_not_bits', 'sq_odd', 'sq_or', 'sq_or_bits', 'sq_power', 'sq_rand',
+        'sq_remainder', 'sq_rmdr', 'sq_rnd', 'sq_round', 'sq_seq', 'sq_sign',
+        'sq_sin', 'sq_sqrt', 'sq_str', 'sq_sub', 'sq_tan', 'sq_trunc',
+        'sq_uminus', 'sq_xor', 'sq_xor_bits', 'sqrt', 'square_free',
+        'stack_empty', 'stack_size', 'substitute', 'substitute_all', 'sum',
+        'tail', 'tan', 'test_equal', 'test_fail', 'test_false',
+        'test_not_equal', 'test_pass', 'test_summary', 'test_true',
+        'text_color', 'throw', 'time', 'timedate_diff', 'timedelta',
+        'to_integer', 'to_number', 'to_rgb', 'to_string', 'traverse_dict',
+        'traverse_dict_partial_key', 'trim', 'trim_head', 'trim_tail', 'trunc',
+        'tagset', 'tagstart', 'typeof', 'unique', 'unix_dict', 'upper',
+        'utf8_to_utf32', 'utf32_to_utf8', 'version', 'vlookup', 'vslice',
+        'wglGetProcAddress', 'wildcard_file', 'wildcard_match', 'with_rho',
+        'with_theta', 'xml_new_doc', 'xml_new_element', 'xml_set_attribute',
+        'xml_sprint', 'xor_bits', 'xor_bitsu',
+        'accept', 'allocate', 'allocate_string', 'allow_break', 'ARM',
+        'atom_to_float80', 'c_func', 'c_proc', 'call_back', 'chdir',
+        'check_break', 'clearDib', 'close', 'closesocket', 'console',
+        'copy_file', 'create', 'create_directory', 'create_thread',
+        'curl_easy_cleanup', 'curl_easy_get_file', 'curl_easy_init',
+        'curl_easy_perform', 'curl_easy_perform_ex', 'curl_easy_setopt',
+        'curl_easy_strerror', 'curl_global_cleanup', 'curl_global_init',
+        'curl_slist_append', 'curl_slist_free_all', 'current_dir', 'cursor',
+        'define_c_func', 'define_c_proc', 'delete', 'delete_cs', 'delete_file',
+        'dir', 'DLL', 'drawDib', 'drawShadedPolygonToDib', 'ELF32', 'ELF64',
+        'enter_cs', 'eval', 'exit_thread', 'free', 'file_exists', 'final',
+        'float80_to_atom', 'format', 'get_bytes', 'get_file_date',
+        'get_file_size', 'get_file_type', 'get_interpreter', 'get_key',
+        'get_socket_error', 'get_text', 'get_thread_exitcode', 'get_thread_id',
+        'getc', 'getenv', 'gets', 'getsockaddr', 'glBegin', 'glCallList',
+        'glFrustum', 'glGenLists', 'glGetString', 'glLight', 'glMaterial',
+        'glNewList', 'glNormal', 'glPopMatrix', 'glPushMatrix', 'glRotate',
+        'glEnd', 'glEndList', 'glTexImage2D', 'goto', 'GUI', 'icons', 'ilASM',
+        'include_files', 'include_paths', 'init_cs', 'ip_to_string',
+        'IupConfig', 'IupConfigDialogClosed', 'IupConfigDialogShow',
+        'IupConfigGetVariableInt', 'IupConfigLoad', 'IupConfigSave',
+        'IupConfigSetVariableInt', 'IupExitLoop', 'IupFileDlg', 'IupFileList',
+        'IupGLSwapBuffers', 'IupHelp', 'IupLoopStep', 'IupMainLoop',
+        'IupNormalizer', 'IupPlot', 'IupPlotAdd', 'IupPlotBegin', 'IupPlotEnd',
+        'IupPlotInsert', 'IupSaveImage', 'IupTreeGetUserId', 'IupUser',
+        'IupVersion', 'IupVersionDate', 'IupVersionNumber', 'IupVersionShow',
+        'killDib', 'leave_cs', 'listen', 'manifest', 'mem_copy', 'mem_set',
+        'mpfr_gamma', 'mpfr_printf', 'mpfr_sprintf', 'mpz_export', 'mpz_import',
+        'namespace', 'new', 'newDib', 'open', 'open_dll', 'PE32', 'PE64',
+        'peek', 'peek_string', 'peek1s', 'peek1u', 'peek2s', 'peek2u', 'peek4s',
+        'peek4u', 'peek8s', 'peek8u', 'peekNS', 'peekns', 'peeknu', 'poke',
+        'poke2', 'poke4', 'poke8', 'pokeN', 'poke_string', 'poke_wstring',
+        'position', 'progress', 'prompt_number', 'prompt_string', 'read_file',
+        'read_lines', 'recv', 'resume_thread', 'seek', 'select', 'send',
+        'setHandler', 'shutdown', 'sleep', 'SO', 'sockaddr_in', 'socket',
+        'split_path', 'suspend_thread', 'system', 'system_exec', 'system_open',
+        'system_wait', 'task_clock_start', 'task_clock_stop', 'task_create',
+        'task_delay', 'task_list', 'task_schedule', 'task_self', 'task_status',
+        'task_suspend', 'task_yield', 'thread_safe_string', 'try_cs',
+        'utf8_to_utf16', 'utf16_to_utf8', 'utf16_to_utf32', 'utf32_to_utf16',
+        'video_config', 'WSACleanup', 'wait_thread', 'walk_dir', 'where',
+        'write_lines', 'wait_key'
+    )
+    constants = (
+        'ANY_QUEUE', 'ASCENDING', 'BLACK', 'BLOCK_CURSOR', 'BLUE',
+        'BRIGHT_CYAN', 'BRIGHT_BLUE', 'BRIGHT_GREEN', 'BRIGHT_MAGENTA',
+        'BRIGHT_RED', 'BRIGHT_WHITE', 'BROWN', 'C_DWORD', 'C_INT', 'C_POINTER',
+        'C_USHORT', 'C_WORD', 'CD_AMBER', 'CD_BLACK', 'CD_BLUE', 'CD_BOLD',
+        'CD_BOLD_ITALIC', 'CD_BOX', 'CD_CENTER', 'CD_CIRCLE', 'CD_CLOSED_LINES',
+        'CD_CONTINUOUS', 'CD_CUSTOM', 'CD_CYAN', 'CD_DARK_BLUE', 'CD_DARK_CYAN',
+        'CD_DARK_GRAY', 'CD_DARK_GREY', 'CD_DARK_GREEN', 'CD_DARK_MAGENTA',
+        'CD_DARK_RED', 'CD_DARK_YELLOW', 'CD_DASH_DOT', 'CD_DASH_DOT_DOT',
+        'CD_DASHED', 'CD_DBUFFER', 'CD_DEG2RAD', 'CD_DIAMOND', 'CD_DOTTED',
+        'CD_EAST', 'CD_EVENODD', 'CD_FILL', 'CD_GL', 'CD_GRAY', 'CD_GREY',
+        'CD_GREEN', 'CD_HATCH', 'CD_HOLLOW', 'CD_HOLLOW_BOX',
+        'CD_HOLLOW_CIRCLE', 'CD_HOLLOW_DIAMOND', 'CD_INDIGO', 'CD_ITALIC',
+        'CD_IUP', 'CD_IUPDBUFFER', 'CD_LIGHT_BLUE', 'CD_LIGHT_GRAY',
+        'CD_LIGHT_GREY', 'CD_LIGHT_GREEN', 'CD_LIGHT_PARCHMENT', 'CD_MAGENTA',
+        'CD_NAVY', 'CD_NORTH', 'CD_NORTH_EAST', 'CD_NORTH_WEST', 'CD_OLIVE',
+        'CD_OPEN_LINES', 'CD_ORANGE', 'CD_PARCHMENT', 'CD_PATTERN',
+        'CD_PRINTER', 'CD_PURPLE', 'CD_PLAIN', 'CD_PLUS', 'CD_QUERY',
+        'CD_RAD2DEG', 'CD_RED', 'CD_SILVER', 'CD_SOLID', 'CD_SOUTH_EAST',
+        'CD_SOUTH_WEST', 'CD_STAR', 'CD_STIPPLE', 'CD_STRIKEOUT',
+        'CD_UNDERLINE', 'CD_WEST', 'CD_WHITE', 'CD_WINDING', 'CD_VIOLET',
+        'CD_X', 'CD_YELLOW', 'CURLE_OK', 'CURLOPT_MAIL_FROM',
+        'CURLOPT_MAIL_RCPT', 'CURLOPT_PASSWORD', 'CURLOPT_READDATA',
+        'CURLOPT_READFUNCTION', 'CURLOPT_SSL_VERIFYPEER',
+        'CURLOPT_SSL_VERIFYHOST', 'CURLOPT_UPLOAD', 'CURLOPT_URL',
+        'CURLOPT_USE_SSL', 'CURLOPT_USERNAME', 'CURLOPT_VERBOSE',
+        'CURLOPT_WRITEFUNCTION', 'CURLUSESSL_ALL', 'CYAN', 'D_NAME',
+        'D_ATTRIBUTES', 'D_SIZE', 'D_YEAR', 'D_MONTH', 'D_DAY', 'D_HOUR',
+        'D_MINUTE', 'D_SECOND', 'D_CREATION', 'D_LASTACCESS', 'D_MODIFICATION',
+        'DT_YEAR', 'DT_MONTH', 'DT_DAY', 'DT_HOUR', 'DT_MINUTE', 'DT_SECOND',
+        'DT_DOW', 'DT_MSEC', 'DT_DOY', 'DT_GMT', 'EULER', 'E_CODE', 'E_ADDR',
+        'E_LINE', 'E_RTN', 'E_NAME', 'E_FILE', 'E_PATH', 'E_USER', 'false',
+        'False', 'FALSE', 'FIFO_QUEUE', 'FILETYPE_DIRECTORY', 'FILETYPE_FILE',
+        'GET_EOF', 'GET_FAIL', 'GET_IGNORE', 'GET_SUCCESS',
+        'GL_AMBIENT_AND_DIFFUSE', 'GL_ARRAY_BUFFER', 'GL_CLAMP',
+        'GL_CLAMP_TO_BORDER', 'GL_CLAMP_TO_EDGE', 'GL_COLOR_BUFFER_BIT',
+        'GL_COMPILE', 'GL_COMPILE_STATUS', 'GL_CULL_FACE',
+        'GL_DEPTH_BUFFER_BIT', 'GL_DEPTH_TEST', 'GL_EXTENSIONS', 'GL_FLAT',
+        'GL_FLOAT', 'GL_FRAGMENT_SHADER', 'GL_FRONT', 'GL_LIGHT0',
+        'GL_LIGHTING', 'GL_LINEAR', 'GL_LINK_STATUS', 'GL_MODELVIEW',
+        'GL_NEAREST', 'GL_NO_ERROR', 'GL_NORMALIZE', 'GL_POSITION',
+        'GL_PROJECTION', 'GL_QUAD_STRIP', 'GL_QUADS', 'GL_RENDERER',
+        'GL_REPEAT', 'GL_RGB', 'GL_RGBA', 'GL_SMOOTH', 'GL_STATIC_DRAW',
+        'GL_TEXTURE_2D', 'GL_TEXTURE_MAG_FILTER', 'GL_TEXTURE_MIN_FILTER',
+        'GL_TEXTURE_WRAP_S', 'GL_TEXTURE_WRAP_T', 'GL_TRIANGLES',
+        'GL_UNSIGNED_BYTE', 'GL_VENDOR', 'GL_VERSION', 'GL_VERTEX_SHADER',
+        'GRAY', 'GREEN', 'GT_LF_STRIPPED', 'GT_WHOLE_FILE', 'INVLN10',
+        'IUP_CLOSE', 'IUP_CONTINUE', 'IUP_DEFAULT', 'IUP_BLACK', 'IUP_BLUE',
+        'IUP_BUTTON1', 'IUP_BUTTON3', 'IUP_CENTER', 'IUP_CYAN', 'IUP_DARK_BLUE',
+        'IUP_DARK_CYAN', 'IUP_DARK_GRAY', 'IUP_DARK_GREY', 'IUP_DARK_GREEN',
+        'IUP_DARK_MAGENTA', 'IUP_DARK_RED', 'IUP_GRAY', 'IUP_GREY', 'IUP_GREEN',
+        'IUP_IGNORE', 'IUP_INDIGO', 'IUP_MAGENTA', 'IUP_MASK_INT',
+        'IUP_MASK_UINT', 'IUP_MOUSEPOS', 'IUP_NAVY', 'IUP_OLIVE', 'IUP_RECTEXT',
+        'IUP_RED', 'IUP_LIGHT_BLUE', 'IUP_LIGHT_GRAY', 'IUP_LIGHT_GREY',
+        'IUP_LIGHT_GREEN', 'IUP_ORANGE', 'IUP_PARCHMENT', 'IUP_PURPLE',
+        'IUP_SILVER', 'IUP_TEAL', 'IUP_VIOLET', 'IUP_WHITE', 'IUP_YELLOW',
+        'K_BS', 'K_cA', 'K_cC', 'K_cD', 'K_cF5', 'K_cK', 'K_cM', 'K_cN', 'K_cO',
+        'K_cP', 'K_cR', 'K_cS', 'K_cT', 'K_cW', 'K_CR', 'K_DEL', 'K_DOWN',
+        'K_END', 'K_ESC', 'K_F1', 'K_F2', 'K_F3', 'K_F4', 'K_F5', 'K_F6',
+        'K_F7', 'K_F8', 'K_F9', 'K_F10', 'K_F11', 'K_F12', 'K_HOME', 'K_INS',
+        'K_LEFT', 'K_MIDDLE', 'K_PGDN', 'K_PGUP', 'K_RIGHT', 'K_SP', 'K_TAB',
+        'K_UP', 'K_h', 'K_i', 'K_j', 'K_p', 'K_r', 'K_s', 'JS', 'LIFO_QUEUE',
+        'LINUX', 'MAX_HEAP', 'MAGENTA', 'MIN_HEAP', 'Nan', 'NO_CURSOR', 'null',
+        'NULL', 'PI', 'pp_Ascii', 'pp_Brkt', 'pp_Date', 'pp_File', 'pp_FltFmt',
+        'pp_Indent', 'pp_IntCh', 'pp_IntFmt', 'pp_Maxlen', 'pp_Nest',
+        'pp_Pause', 'pp_Q22', 'pp_StrFmt', 'RED', 'SEEK_OK', 'SLASH',
+        'TEST_ABORT', 'TEST_CRASH', 'TEST_PAUSE', 'TEST_PAUSE_FAIL',
+        'TEST_QUIET', 'TEST_SHOW_ALL', 'TEST_SHOW_FAILED', 'TEST_SUMMARY',
+        'true', 'True', 'TRUE', 'VC_SCRNLINES', 'WHITE', 'WINDOWS', 'YELLOW'
+    )
+
+    tokens = {
+        'root': [
+            (r"\s+", Whitespace),
+            (r'/\*|--/\*|#\[', Comment.Multiline, 'comment'),
+            (r'(?://|--|#!).*$', Comment.Single),
+#Alt:
+#           (r'//.*$|--.*$|#!.*$', Comment.Single),
+            (r'"([^"\\]|\\.)*"', String.Other),
+            (r'\'[^\']*\'', String.Other),
+            (r'`[^`]*`', String.Other),
+
+            (words(types, prefix=r'\b', suffix=r'\b'), Name.Function),
+            (words(routines, prefix=r'\b', suffix=r'\b'), Name.Function),
+            (words(preproc, prefix=r'\b', suffix=r'\b'), Keyword.Declaration),
+            (words(keywords, prefix=r'\b', suffix=r'\b'), Keyword.Declaration),
+            (words(constants, prefix=r'\b', suffix=r'\b'), Name.Constant),
+            # Aside: Phix only supports/uses the ascii/non-unicode tilde
+            (r'!=|==|<<|>>|:=|[-~+/*%=<>&^|\.(){},?:\[\]$\\;#]', Operator),
+            (r'[\w-]+', Text)
+        ],
+        'comment': [
+            (r'[^*/#]+', Comment.Multiline),
+            (r'/\*|#\[', Comment.Multiline, '#push'),
+            (r'\*/|#\]', Comment.Multiline, '#pop'),
+            (r'[*/#]', Comment.Multiline)
+        ]
+    }
diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/lexers/php.py b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/php.py
new file mode 100644
index 00000000..82d4aeb3
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/php.py
@@ -0,0 +1,334 @@
+"""
+    pygments.lexers.php
+    ~~~~~~~~~~~~~~~~~~~
+
+    Lexers for PHP and related languages.
+
+    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import Lexer, RegexLexer, include, bygroups, default, \
+    using, this, words, do_insertions, line_re
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+    Number, Punctuation, Other, Generic
+from pygments.util import get_bool_opt, get_list_opt, shebang_matches
+
+__all__ = ['ZephirLexer', 'PsyshConsoleLexer', 'PhpLexer']
+
+
+class ZephirLexer(RegexLexer):
+    """
+    For Zephir language source code.
+
+    Zephir is a compiled high level language aimed
+    to the creation of C-extensions for PHP.
+    """
+
+    name = 'Zephir'
+    url = 'http://zephir-lang.com/'
+    aliases = ['zephir']
+    filenames = ['*.zep']
+    version_added = '2.0'
+
+    zephir_keywords = ['fetch', 'echo', 'isset', 'empty']
+    zephir_type = ['bit', 'bits', 'string']
+
+    flags = re.DOTALL | re.MULTILINE
+
+    tokens = {
+        'commentsandwhitespace': [
+            (r'\s+', Text),
+            (r'//.*?\n', Comment.Single),
+            (r'/\*.*?\*/', Comment.Multiline)
+        ],
+        'slashstartsregex': [
+            include('commentsandwhitespace'),
+            (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
+             r'([gim]+\b|\B)', String.Regex, '#pop'),
+            (r'/', Operator, '#pop'),
+            default('#pop')
+        ],
+        'badregex': [
+            (r'\n', Text, '#pop')
+        ],
+        'root': [
+            (r'^(?=\s|/)', Text, 'slashstartsregex'),
+            include('commentsandwhitespace'),
+            (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
+             r'(<<|>>>?|==?|!=?|->|[-<>+*%&|^/])=?', Operator, 'slashstartsregex'),
+            (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
+            (r'[})\].]', Punctuation),
+            (r'(for|in|while|do|break|return|continue|switch|case|default|if|else|loop|'
+             r'require|inline|throw|try|catch|finally|new|delete|typeof|instanceof|void|'
+             r'namespace|use|extends|this|fetch|isset|unset|echo|fetch|likely|unlikely|'
+             r'empty)\b', Keyword, 'slashstartsregex'),
+            (r'(var|let|with|function)\b', Keyword.Declaration, 'slashstartsregex'),
+            (r'(abstract|boolean|bool|char|class|const|double|enum|export|extends|final|'
+             r'native|goto|implements|import|int|string|interface|long|ulong|char|uchar|'
+             r'float|unsigned|private|protected|public|short|static|self|throws|reverse|'
+             r'transient|volatile|readonly)\b', Keyword.Reserved),
+            (r'(true|false|null|undefined)\b', Keyword.Constant),
+            (r'(Array|Boolean|Date|_REQUEST|_COOKIE|_SESSION|'
+             r'_GET|_POST|_SERVER|this|stdClass|range|count|iterator|'
+             r'window)\b', Name.Builtin),
+            (r'[$a-zA-Z_][\w\\]*', Name.Other),
+            (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+            (r'0x[0-9a-fA-F]+', Number.Hex),
+            (r'[0-9]+', Number.Integer),
+            (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
+            (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
+        ]
+    }
+
+
+class PsyshConsoleLexer(Lexer):
+    """
+    For PsySH console output, such as:
+
+    .. sourcecode:: psysh
+
+        >>> $greeting = function($name): string {
+        ...     return "Hello, {$name}";
+        ... };
+        => Closure($name): string {#2371 …3}
+        >>> $greeting('World')
+        => "Hello, World"
+    """
+    name = 'PsySH console session for PHP'
+    url = 'https://psysh.org/'
+    aliases = ['psysh']
+    version_added = '2.7'
+
+    def __init__(self, **options):
+        options['startinline'] = True
+        Lexer.__init__(self, **options)
+
+    def get_tokens_unprocessed(self, text):
+        phplexer = PhpLexer(**self.options)
+        curcode = ''
+        insertions = []
+        for match in line_re.finditer(text):
+            line = match.group()
+            if line.startswith('>>> ') or line.startswith('... '):
+                insertions.append((len(curcode),
+                                   [(0, Generic.Prompt, line[:4])]))
+                curcode += line[4:]
+            elif line.rstrip() == '...':
+                insertions.append((len(curcode),
+                                   [(0, Generic.Prompt, '...')]))
+                curcode += line[3:]
+            else:
+                if curcode:
+                    yield from do_insertions(
+                        insertions, phplexer.get_tokens_unprocessed(curcode))
+                    curcode = ''
+                    insertions = []
+                yield match.start(), Generic.Output, line
+        if curcode:
+            yield from do_insertions(insertions,
+                                     phplexer.get_tokens_unprocessed(curcode))
+
+
+class PhpLexer(RegexLexer):
+    """
+    For PHP source code.
+    For PHP embedded in HTML, use the `HtmlPhpLexer`.
+
+    Additional options accepted:
+
+    `startinline`
+        If given and ``True`` the lexer starts highlighting with
+        php code (i.e.: no starting ``>> from pygments.lexers._php_builtins import MODULES
+            >>> MODULES.keys()
+            ['PHP Options/Info', 'Zip', 'dba', ...]
+
+        In fact the names of those modules match the module names from
+        the php documentation.
+    """
+
+    name = 'PHP'
+    url = 'https://www.php.net/'
+    aliases = ['php', 'php3', 'php4', 'php5']
+    filenames = ['*.php', '*.php[345]', '*.inc']
+    mimetypes = ['text/x-php']
+    version_added = ''
+
+    # Note that a backslash is included, PHP uses a backslash as a namespace
+    # separator.
+    _ident_inner = r'(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*'
+    # But not inside strings.
+    _ident_nons = r'(?:[_a-z]|[^\x00-\x7f])(?:\w|[^\x00-\x7f])*'
+
+    flags = re.IGNORECASE | re.DOTALL | re.MULTILINE
+    tokens = {
+        'root': [
+            (r'<\?(php)?', Comment.Preproc, 'php'),
+            (r'[^<]+', Other),
+            (r'<', Other)
+        ],
+        'php': [
+            (r'\?>', Comment.Preproc, '#pop'),
+            (r'(<<<)([\'"]?)(' + _ident_nons + r')(\2\n.*?\n\s*)(\3)(;?)(\n)',
+             bygroups(String, String, String.Delimiter, String, String.Delimiter,
+                      Punctuation, Text)),
+            (r'\s+', Text),
+            (r'#\[', Punctuation, 'attribute'),
+            (r'#.*?\n', Comment.Single),
+            (r'//.*?\n', Comment.Single),
+            # put the empty comment here, it is otherwise seen as
+            # the start of a docstring
+            (r'/\*\*/', Comment.Multiline),
+            (r'/\*\*.*?\*/', String.Doc),
+            (r'/\*.*?\*/', Comment.Multiline),
+            (r'(->|::)(\s*)(' + _ident_nons + ')',
+             bygroups(Operator, Text, Name.Attribute)),
+            (r'[~!%^&*+=|:.<>/@-]+', Operator),
+            (r'\?', Operator),  # don't add to the charclass above!
+            (r'[\[\]{}();,]+', Punctuation),
+            (r'(new)(\s+)(class)\b', bygroups(Keyword, Text, Keyword)),
+            (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
+            (r'(function)(\s*)(?=\()', bygroups(Keyword, Text)),
+            (r'(function)(\s+)(&?)(\s*)',
+             bygroups(Keyword, Text, Operator, Text), 'functionname'),
+            (r'(const)(\s+)(' + _ident_inner + ')',
+             bygroups(Keyword, Text, Name.Constant)),
+            (r'(and|E_PARSE|old_function|E_ERROR|or|as|E_WARNING|parent|'
+             r'eval|PHP_OS|break|exit|case|extends|PHP_VERSION|cfunction|'
+             r'FALSE|print|for|require|continue|foreach|require_once|'
+             r'declare|return|default|static|do|switch|die|stdClass|'
+             r'echo|else|TRUE|elseif|var|empty|if|xor|enddeclare|include|'
+             r'virtual|endfor|include_once|while|endforeach|global|'
+             r'endif|list|endswitch|new|endwhile|not|'
+             r'array|E_ALL|NULL|final|php_user_filter|interface|'
+             r'implements|public|private|protected|abstract|clone|try|'
+             r'catch|throw|this|use|namespace|trait|yield|'
+             r'finally|match)\b', Keyword),
+            (r'(true|false|null)\b', Keyword.Constant),
+            include('magicconstants'),
+            (r'\$\{', Name.Variable, 'variablevariable'),
+            (r'\$+' + _ident_inner, Name.Variable),
+            (_ident_inner, Name.Other),
+            (r'(\d+\.\d*|\d*\.\d+)(e[+-]?[0-9]+)?', Number.Float),
+            (r'\d+e[+-]?[0-9]+', Number.Float),
+            (r'0[0-7]+', Number.Oct),
+            (r'0x[a-f0-9]+', Number.Hex),
+            (r'\d+', Number.Integer),
+            (r'0b[01]+', Number.Bin),
+            (r"'([^'\\]*(?:\\.[^'\\]*)*)'", String.Single),
+            (r'`([^`\\]*(?:\\.[^`\\]*)*)`', String.Backtick),
+            (r'"', String.Double, 'string'),
+        ],
+        'variablevariable': [
+            (r'\}', Name.Variable, '#pop'),
+            include('php')
+        ],
+        'magicfuncs': [
+            # source: http://php.net/manual/en/language.oop5.magic.php
+            (words((
+                '__construct', '__destruct', '__call', '__callStatic', '__get', '__set',
+                '__isset', '__unset', '__sleep', '__wakeup', '__toString', '__invoke',
+                '__set_state', '__clone', '__debugInfo',), suffix=r'\b'),
+             Name.Function.Magic),
+        ],
+        'magicconstants': [
+            # source: http://php.net/manual/en/language.constants.predefined.php
+            (words((
+                '__LINE__', '__FILE__', '__DIR__', '__FUNCTION__', '__CLASS__',
+                '__TRAIT__', '__METHOD__', '__NAMESPACE__',),
+                suffix=r'\b'),
+             Name.Constant),
+        ],
+        'classname': [
+            (_ident_inner, Name.Class, '#pop')
+        ],
+        'functionname': [
+            include('magicfuncs'),
+            (_ident_inner, Name.Function, '#pop'),
+            default('#pop')
+        ],
+        'string': [
+            (r'"', String.Double, '#pop'),
+            (r'[^{$"\\]+', String.Double),
+            (r'\\([nrt"$\\]|[0-7]{1,3}|x[0-9a-f]{1,2})', String.Escape),
+            (r'\$' + _ident_nons + r'(\[\S+?\]|->' + _ident_nons + ')?',
+             String.Interpol),
+            (r'(\{\$\{)(.*?)(\}\})',
+             bygroups(String.Interpol, using(this, _startinline=True),
+                      String.Interpol)),
+            (r'(\{)(\$.*?)(\})',
+             bygroups(String.Interpol, using(this, _startinline=True),
+                      String.Interpol)),
+            (r'(\$\{)(\S+)(\})',
+             bygroups(String.Interpol, Name.Variable, String.Interpol)),
+            (r'[${\\]', String.Double)
+        ],
+        'attribute': [
+            (r'\]', Punctuation, '#pop'),
+            (r'\(', Punctuation, 'attributeparams'),
+            (_ident_inner, Name.Decorator),
+            include('php')
+        ],
+        'attributeparams': [
+            (r'\)', Punctuation, '#pop'),
+            include('php')
+        ],
+    }
+
+    def __init__(self, **options):
+        self.funcnamehighlighting = get_bool_opt(
+            options, 'funcnamehighlighting', True)
+        self.disabledmodules = get_list_opt(
+            options, 'disabledmodules', ['unknown'])
+        self.startinline = get_bool_opt(options, 'startinline', False)
+
+        # private option argument for the lexer itself
+        if '_startinline' in options:
+            self.startinline = options.pop('_startinline')
+
+        # collect activated functions in a set
+        self._functions = set()
+        if self.funcnamehighlighting:
+            from pygments.lexers._php_builtins import MODULES
+            for key, value in MODULES.items():
+                if key not in self.disabledmodules:
+                    self._functions.update(value)
+        RegexLexer.__init__(self, **options)
+
+    def get_tokens_unprocessed(self, text):
+        stack = ['root']
+        if self.startinline:
+            stack.append('php')
+        for index, token, value in \
+                RegexLexer.get_tokens_unprocessed(self, text, stack):
+            if token is Name.Other:
+                if value in self._functions:
+                    yield index, Name.Builtin, value
+                    continue
+            yield index, token, value
+
+    def analyse_text(text):
+        if shebang_matches(text, r'php'):
+            return True
+        rv = 0.0
+        if re.search(r'<\?(?!xml)', text):
+            rv += 0.3
+        return rv
diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/lexers/pointless.py b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/pointless.py
new file mode 100644
index 00000000..adedb757
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/pointless.py
@@ -0,0 +1,70 @@
+"""
+    pygments.lexers.pointless
+    ~~~~~~~~~~~~~~~~~~~~~~~~~
+
+    Lexers for Pointless.
+
+    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, words
+from pygments.token import Comment, Error, Keyword, Name, Number, Operator, \
+    Punctuation, String, Text
+
+__all__ = ['PointlessLexer']
+
+
+class PointlessLexer(RegexLexer):
+    """
+    For Pointless source code.
+    """
+
+    name = 'Pointless'
+    url = 'https://ptls.dev'
+    aliases = ['pointless']
+    filenames = ['*.ptls']
+    version_added = '2.7'
+
+    ops = words([
+        "+", "-", "*", "/", "**", "%", "+=", "-=", "*=",
+        "/=", "**=", "%=", "|>", "=", "==", "!=", "<", ">",
+        "<=", ">=", "=>", "$", "++",
+    ])
+
+    keywords = words([
+        "if", "then", "else", "where", "with", "cond",
+        "case", "and", "or", "not", "in", "as", "for",
+        "requires", "throw", "try", "catch", "when",
+        "yield", "upval",
+    ], suffix=r'\b')
+
+    tokens = {
+        'root': [
+            (r'[ \n\r]+', Text),
+            (r'--.*$', Comment.Single),
+            (r'"""', String, 'multiString'),
+            (r'"', String, 'string'),
+            (r'[\[\](){}:;,.]', Punctuation),
+            (ops, Operator),
+            (keywords, Keyword),
+            (r'\d+|\d*\.\d+', Number),
+            (r'(true|false)\b', Name.Builtin),
+            (r'[A-Z][a-zA-Z0-9]*\b', String.Symbol),
+            (r'output\b', Name.Variable.Magic),
+            (r'(export|import)\b', Keyword.Namespace),
+            (r'[a-z][a-zA-Z0-9]*\b', Name.Variable)
+        ],
+        'multiString': [
+            (r'\\.', String.Escape),
+            (r'"""', String, '#pop'),
+            (r'"', String),
+            (r'[^\\"]+', String),
+        ],
+        'string': [
+            (r'\\.', String.Escape),
+            (r'"', String, '#pop'),
+            (r'\n', Error),
+            (r'[^\\"]+', String),
+        ],
+    }
diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/lexers/pony.py b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/pony.py
new file mode 100644
index 00000000..055423a4
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/pony.py
@@ -0,0 +1,93 @@
+"""
+    pygments.lexers.pony
+    ~~~~~~~~~~~~~~~~~~~~
+
+    Lexers for Pony and related languages.
+
+    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, bygroups, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+    Number, Punctuation
+
+__all__ = ['PonyLexer']
+
+
+class PonyLexer(RegexLexer):
+    """
+    For Pony source code.
+    """
+
+    name = 'Pony'
+    aliases = ['pony']
+    filenames = ['*.pony']
+    url = 'https://www.ponylang.io'
+    version_added = '2.4'
+
+    _caps = r'(iso|trn|ref|val|box|tag)'
+
+    tokens = {
+        'root': [
+            (r'\n', Text),
+            (r'[^\S\n]+', Text),
+            (r'//.*\n', Comment.Single),
+            (r'/\*', Comment.Multiline, 'nested_comment'),
+            (r'"""(?:.|\n)*?"""', String.Doc),
+            (r'"', String, 'string'),
+            (r'\'.*\'', String.Char),
+            (r'=>|[]{}:().~;,|&!^?[]', Punctuation),
+            (words((
+                'addressof', 'and', 'as', 'consume', 'digestof', 'is', 'isnt',
+                'not', 'or'),
+                suffix=r'\b'),
+             Operator.Word),
+            (r'!=|==|<<|>>|[-+/*%=<>]', Operator),
+            (words((
+                'box', 'break', 'compile_error', 'compile_intrinsic',
+                'continue', 'do', 'else', 'elseif', 'embed', 'end', 'error',
+                'for', 'if', 'ifdef', 'in', 'iso', 'lambda', 'let', 'match',
+                'object', 'recover', 'ref', 'repeat', 'return', 'tag', 'then',
+                'this', 'trn', 'try', 'until', 'use', 'var', 'val', 'where',
+                'while', 'with', '#any', '#read', '#send', '#share'),
+                suffix=r'\b'),
+             Keyword),
+            (r'(actor|class|struct|primitive|interface|trait|type)((?:\s)+)',
+             bygroups(Keyword, Text), 'typename'),
+            (r'(new|fun|be)((?:\s)+)', bygroups(Keyword, Text), 'methodname'),
+            (words((
+                'I8', 'U8', 'I16', 'U16', 'I32', 'U32', 'I64', 'U64', 'I128',
+                'U128', 'ILong', 'ULong', 'ISize', 'USize', 'F32', 'F64',
+                'Bool', 'Pointer', 'None', 'Any', 'Array', 'String',
+                'Iterator'),
+                suffix=r'\b'),
+             Name.Builtin.Type),
+            (r'_?[A-Z]\w*', Name.Type),
+            (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+', Number.Float),
+            (r'0x[0-9a-fA-F]+', Number.Hex),
+            (r'\d+', Number.Integer),
+            (r'(true|false)\b', Name.Builtin),
+            (r'_\d*', Name),
+            (r'_?[a-z][\w\']*', Name)
+        ],
+        'typename': [
+            (_caps + r'?((?:\s)*)(_?[A-Z]\w*)',
+             bygroups(Keyword, Text, Name.Class), '#pop')
+        ],
+        'methodname': [
+            (_caps + r'?((?:\s)*)(_?[a-z]\w*)',
+             bygroups(Keyword, Text, Name.Function), '#pop')
+        ],
+        'nested_comment': [
+            (r'[^*/]+', Comment.Multiline),
+            (r'/\*', Comment.Multiline, '#push'),
+            (r'\*/', Comment.Multiline, '#pop'),
+            (r'[*/]', Comment.Multiline)
+        ],
+        'string': [
+            (r'"', String, '#pop'),
+            (r'\\"', String),
+            (r'[^\\"]+', String)
+        ]
+    }
diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/lexers/praat.py b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/praat.py
new file mode 100644
index 00000000..054f5b61
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/praat.py
@@ -0,0 +1,303 @@
+"""
+    pygments.lexers.praat
+    ~~~~~~~~~~~~~~~~~~~~~
+
+    Lexer for Praat
+
+    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, words, bygroups, include
+from pygments.token import Name, Text, Comment, Keyword, String, Punctuation, \
+    Number, Operator, Whitespace
+
+__all__ = ['PraatLexer']
+
+
+class PraatLexer(RegexLexer):
+    """
+    For Praat scripts.
+    """
+
+    name = 'Praat'
+    url = 'http://www.praat.org'
+    aliases = ['praat']
+    filenames = ['*.praat', '*.proc', '*.psc']
+    version_added = '2.1'
+
+    keywords = (
+        'if', 'then', 'else', 'elsif', 'elif', 'endif', 'fi', 'for', 'from', 'to',
+        'endfor', 'endproc', 'while', 'endwhile', 'repeat', 'until', 'select', 'plus',
+        'minus', 'demo', 'assert', 'stopwatch', 'nocheck', 'nowarn', 'noprogress',
+        'editor', 'endeditor', 'clearinfo',
+    )
+
+    functions_string = (
+        'backslashTrigraphsToUnicode', 'chooseDirectory', 'chooseReadFile',
+        'chooseWriteFile', 'date', 'demoKey', 'do', 'environment', 'extractLine',
+        'extractWord', 'fixed', 'info', 'left', 'mid', 'percent', 'readFile', 'replace',
+        'replace_regex', 'right', 'selected', 'string', 'unicodeToBackslashTrigraphs',
+    )
+
+    functions_numeric = (
+        'abs', 'appendFile', 'appendFileLine', 'appendInfo', 'appendInfoLine', 'arccos',
+        'arccosh', 'arcsin', 'arcsinh', 'arctan', 'arctan2', 'arctanh', 'barkToHertz',
+        'beginPause', 'beginSendPraat', 'besselI', 'besselK', 'beta', 'beta2',
+        'binomialP', 'binomialQ', 'boolean', 'ceiling', 'chiSquareP', 'chiSquareQ',
+        'choice', 'comment', 'cos', 'cosh', 'createDirectory', 'deleteFile',
+        'demoClicked', 'demoClickedIn', 'demoCommandKeyPressed',
+        'demoExtraControlKeyPressed', 'demoInput', 'demoKeyPressed',
+        'demoOptionKeyPressed', 'demoShiftKeyPressed', 'demoShow', 'demoWaitForInput',
+        'demoWindowTitle', 'demoX', 'demoY', 'differenceLimensToPhon', 'do', 'editor',
+        'endPause', 'endSendPraat', 'endsWith', 'erb', 'erbToHertz', 'erf', 'erfc',
+        'exitScript', 'exp', 'extractNumber', 'fileReadable', 'fisherP', 'fisherQ',
+        'floor', 'gaussP', 'gaussQ', 'hertzToBark', 'hertzToErb', 'hertzToMel',
+        'hertzToSemitones', 'imax', 'imin', 'incompleteBeta', 'incompleteGammaP', 'index',
+        'index_regex', 'integer', 'invBinomialP', 'invBinomialQ', 'invChiSquareQ', 'invFisherQ',
+        'invGaussQ', 'invSigmoid', 'invStudentQ', 'length', 'ln', 'lnBeta', 'lnGamma',
+        'log10', 'log2', 'max', 'melToHertz', 'min', 'minusObject', 'natural', 'number',
+        'numberOfColumns', 'numberOfRows', 'numberOfSelected', 'objectsAreIdentical',
+        'option', 'optionMenu', 'pauseScript', 'phonToDifferenceLimens', 'plusObject',
+        'positive', 'randomBinomial', 'randomGauss', 'randomInteger', 'randomPoisson',
+        'randomUniform', 'real', 'readFile', 'removeObject', 'rindex', 'rindex_regex',
+        'round', 'runScript', 'runSystem', 'runSystem_nocheck', 'selectObject',
+        'selected', 'semitonesToHertz', 'sentence', 'sentencetext', 'sigmoid', 'sin', 'sinc',
+        'sincpi', 'sinh', 'soundPressureToPhon', 'sqrt', 'startsWith', 'studentP',
+        'studentQ', 'tan', 'tanh', 'text', 'variableExists', 'word', 'writeFile', 'writeFileLine',
+        'writeInfo', 'writeInfoLine',
+    )
+
+    functions_array = (
+        'linear', 'randomGauss', 'randomInteger', 'randomUniform', 'zero',
+    )
+
+    objects = (
+        'Activation', 'AffineTransform', 'AmplitudeTier', 'Art', 'Artword',
+        'Autosegment', 'BarkFilter', 'BarkSpectrogram', 'CCA', 'Categories',
+        'Cepstrogram', 'Cepstrum', 'Cepstrumc', 'ChebyshevSeries', 'ClassificationTable',
+        'Cochleagram', 'Collection', 'ComplexSpectrogram', 'Configuration', 'Confusion',
+        'ContingencyTable', 'Corpus', 'Correlation', 'Covariance',
+        'CrossCorrelationTable', 'CrossCorrelationTables', 'DTW', 'DataModeler',
+        'Diagonalizer', 'Discriminant', 'Dissimilarity', 'Distance', 'Distributions',
+        'DurationTier', 'EEG', 'ERP', 'ERPTier', 'EditCostsTable', 'EditDistanceTable',
+        'Eigen', 'Excitation', 'Excitations', 'ExperimentMFC', 'FFNet', 'FeatureWeights',
+        'FileInMemory', 'FilesInMemory', 'Formant', 'FormantFilter', 'FormantGrid',
+        'FormantModeler', 'FormantPoint', 'FormantTier', 'GaussianMixture', 'HMM',
+        'HMM_Observation', 'HMM_ObservationSequence', 'HMM_State', 'HMM_StateSequence',
+        'Harmonicity', 'ISpline', 'Index', 'Intensity', 'IntensityTier', 'IntervalTier',
+        'KNN', 'KlattGrid', 'KlattTable', 'LFCC', 'LPC', 'Label', 'LegendreSeries',
+        'LinearRegression', 'LogisticRegression', 'LongSound', 'Ltas', 'MFCC', 'MSpline',
+        'ManPages', 'Manipulation', 'Matrix', 'MelFilter', 'MelSpectrogram',
+        'MixingMatrix', 'Movie', 'Network', 'Object', 'OTGrammar', 'OTHistory', 'OTMulti',
+        'PCA', 'PairDistribution', 'ParamCurve', 'Pattern', 'Permutation', 'Photo',
+        'Pitch', 'PitchModeler', 'PitchTier', 'PointProcess', 'Polygon', 'Polynomial',
+        'PowerCepstrogram', 'PowerCepstrum', 'Procrustes', 'RealPoint', 'RealTier',
+        'ResultsMFC', 'Roots', 'SPINET', 'SSCP', 'SVD', 'Salience', 'ScalarProduct',
+        'Similarity', 'SimpleString', 'SortedSetOfString', 'Sound', 'Speaker',
+        'Spectrogram', 'Spectrum', 'SpectrumTier', 'SpeechSynthesizer', 'SpellingChecker',
+        'Strings', 'StringsIndex', 'Table', 'TableOfReal', 'TextGrid', 'TextInterval',
+        'TextPoint', 'TextTier', 'Tier', 'Transition', 'VocalTract', 'VocalTractTier',
+        'Weight', 'WordList',
+    )
+
+    variables_numeric = (
+        'macintosh', 'windows', 'unix', 'praatVersion', 'pi', 'e', 'undefined',
+    )
+
+    variables_string = (
+        'praatVersion', 'tab', 'shellDirectory', 'homeDirectory',
+        'preferencesDirectory', 'newline', 'temporaryDirectory',
+        'defaultDirectory',
+    )
+
+    object_attributes = (
+        'ncol', 'nrow', 'xmin', 'ymin', 'xmax', 'ymax', 'nx', 'ny', 'dx', 'dy',
+    )
+
+    tokens = {
+        'root': [
+            (r'(\s+)(#.*?$)',  bygroups(Whitespace, Comment.Single)),
+            (r'^#.*?$',        Comment.Single),
+            (r';[^\n]*',       Comment.Single),
+            (r'\s+',           Whitespace),
+
+            (r'\bprocedure\b', Keyword,       'procedure_definition'),
+            (r'\bcall\b',      Keyword,       'procedure_call'),
+            (r'@',             Name.Function, 'procedure_call'),
+
+            include('function_call'),
+
+            (words(keywords, suffix=r'\b'), Keyword),
+
+            (r'(\bform\b)(\s+)([^\n]+)',
+             bygroups(Keyword, Whitespace, String), 'old_form'),
+
+            (r'(print(?:line|tab)?|echo|exit|asserterror|pause|send(?:praat|socket)|'
+             r'include|execute|system(?:_nocheck)?)(\s+)',
+             bygroups(Keyword, Whitespace), 'string_unquoted'),
+
+            (r'(goto|label)(\s+)(\w+)', bygroups(Keyword, Whitespace, Name.Label)),
+
+            include('variable_name'),
+            include('number'),
+
+            (r'"', String, 'string'),
+
+            (words((objects), suffix=r'(?=\s+\S+\n)'), Name.Class, 'string_unquoted'),
+
+            (r'\b[A-Z]', Keyword, 'command'),
+            (r'(\.{3}|[)(,])', Punctuation),
+        ],
+        'command': [
+            (r'( ?[\w()-]+ ?)', Keyword),
+
+            include('string_interpolated'),
+
+            (r'\.{3}', Keyword, ('#pop', 'old_arguments')),
+            (r':', Keyword, ('#pop', 'comma_list')),
+            (r'\s', Whitespace, '#pop'),
+        ],
+        'procedure_call': [
+            (r'\s+', Whitespace),
+            (r'([\w.]+)(?:(:)|(?:(\s*)(\()))',
+             bygroups(Name.Function, Punctuation,
+                      Text.Whitespace, Punctuation), '#pop'),
+            (r'([\w.]+)', Name.Function, ('#pop', 'old_arguments')),
+        ],
+        'procedure_definition': [
+            (r'\s', Whitespace),
+            (r'([\w.]+)(\s*?[(:])',
+             bygroups(Name.Function, Whitespace), '#pop'),
+            (r'([\w.]+)([^\n]*)',
+             bygroups(Name.Function, Text), '#pop'),
+        ],
+        'function_call': [
+            (words(functions_string, suffix=r'\$(?=\s*[:(])'), Name.Function, 'function'),
+            (words(functions_array, suffix=r'#(?=\s*[:(])'),   Name.Function, 'function'),
+            (words(functions_numeric, suffix=r'(?=\s*[:(])'),  Name.Function, 'function'),
+        ],
+        'function': [
+            (r'\s+',   Whitespace),
+            (r':',     Punctuation, ('#pop', 'comma_list')),
+            (r'\s*\(', Punctuation, ('#pop', 'comma_list')),
+        ],
+        'comma_list': [
+            (r'(\s*\n\s*)(\.{3})', bygroups(Whitespace, Punctuation)),
+
+            (r'(\s*)(?:([)\]])|(\n))', bygroups(
+                Whitespace, Punctuation, Whitespace), '#pop'),
+
+            (r'\s+', Whitespace),
+            (r'"',   String, 'string'),
+            (r'\b(if|then|else|fi|endif)\b', Keyword),
+
+            include('function_call'),
+            include('variable_name'),
+            include('operator'),
+            include('number'),
+
+            (r'[()]', Text),
+            (r',', Punctuation),
+        ],
+        'old_arguments': [
+            (r'\n', Whitespace, '#pop'),
+
+            include('variable_name'),
+            include('operator'),
+            include('number'),
+
+            (r'"', String, 'string'),
+            (r'[^\n]', Text),
+        ],
+        'number': [
+            (r'\n', Whitespace, '#pop'),
+            (r'\b\d+(\.\d*)?([eE][-+]?\d+)?%?', Number),
+        ],
+        'object_reference': [
+            include('string_interpolated'),
+            (r'([a-z][a-zA-Z0-9_]*|\d+)', Name.Builtin),
+
+            (words(object_attributes, prefix=r'\.'), Name.Builtin, '#pop'),
+
+            (r'\$', Name.Builtin),
+            (r'\[', Text, '#pop'),
+        ],
+        'variable_name': [
+            include('operator'),
+            include('number'),
+
+            (words(variables_string,  suffix=r'\$'), Name.Variable.Global),
+            (words(variables_numeric,
+             suffix=r'(?=[^a-zA-Z0-9_."\'$#\[:(]|\s|^|$)'),
+             Name.Variable.Global),
+
+            (words(objects, prefix=r'\b', suffix=r"(_)"),
+             bygroups(Name.Builtin, Name.Builtin),
+             'object_reference'),
+
+            (r'\.?_?[a-z][\w.]*(\$|#)?', Text),
+            (r'[\[\]]', Punctuation, 'comma_list'),
+
+            include('string_interpolated'),
+        ],
+        'operator': [
+            (r'([+\/*<>=!-]=?|[&*|][&*|]?|\^|<>)',       Operator),
+            (r'(?', Punctuation),
+            (r'"(?:\\x[0-9a-fA-F]+\\|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|'
+             r'\\[0-7]+\\|\\["\\abcefnrstv]|[^\\"])*"', String.Double),
+            (r"'(?:''|[^'])*'", String.Atom),  # quoted atom
+            # Needs to not be followed by an atom.
+            # (r'=(?=\s|[a-zA-Z\[])', Operator),
+            (r'is\b', Operator),
+            (r'(<|>|=<|>=|==|=:=|=|/|//|\*|\+|-)(?=\s|[a-zA-Z0-9\[])',
+             Operator),
+            (r'(mod|div|not)\b', Operator),
+            (r'_', Keyword),  # The don't-care variable
+            (r'([a-z]+)(:)', bygroups(Name.Namespace, Punctuation)),
+            (r'([a-z\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]'
+             r'[\w$\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]*)'
+             r'(\s*)(:-|-->)',
+             bygroups(Name.Function, Text, Operator)),  # function defn
+            (r'([a-z\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]'
+             r'[\w$\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]*)'
+             r'(\s*)(\()',
+             bygroups(Name.Function, Text, Punctuation)),
+            (r'[a-z\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]'
+             r'[\w$\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]*',
+             String.Atom),  # atom, characters
+            # This one includes !
+            (r'[#&*+\-./:<=>?@\\^~\u00a1-\u00bf\u2010-\u303f]+',
+             String.Atom),  # atom, graphics
+            (r'[A-Z_]\w*', Name.Variable),
+            (r'\s+|[\u2000-\u200f\ufff0-\ufffe\uffef]', Text),
+        ],
+        'nested-comment': [
+            (r'\*/', Comment.Multiline, '#pop'),
+            (r'/\*', Comment.Multiline, '#push'),
+            (r'[^*/]+', Comment.Multiline),
+            (r'[*/]', Comment.Multiline),
+        ],
+    }
+
+    def analyse_text(text):
+        """Competes with IDL and Visual Prolog on *.pro"""
+        if ':-' in text:
+            # Visual Prolog also uses :-
+            return 0.5
+        else:
+            return 0
+
+
+class LogtalkLexer(RegexLexer):
+    """
+    For Logtalk source code.
+    """
+
+    name = 'Logtalk'
+    url = 'http://logtalk.org/'
+    aliases = ['logtalk']
+    filenames = ['*.lgt', '*.logtalk']
+    mimetypes = ['text/x-logtalk']
+    version_added = '0.10'
+
+    tokens = {
+        'root': [
+            # Directives
+            (r'^\s*:-\s', Punctuation, 'directive'),
+            # Comments
+            (r'%.*?\n', Comment),
+            (r'/\*(.|\n)*?\*/', Comment),
+            # Whitespace
+            (r'\n', Text),
+            (r'\s+', Text),
+            # Numbers
+            (r"0'[\\]?.", Number),
+            (r'0b[01]+', Number.Bin),
+            (r'0o[0-7]+', Number.Oct),
+            (r'0x[0-9a-fA-F]+', Number.Hex),
+            (r'\d+\.?\d*((e|E)(\+|-)?\d+)?', Number),
+            # Variables
+            (r'([A-Z_][a-zA-Z0-9_]*)', Name.Variable),
+            # Event handlers
+            (r'(after|before)(?=[(])', Keyword),
+            # Message forwarding handler
+            (r'forward(?=[(])', Keyword),
+            # Execution-context methods
+            (r'(context|parameter|this|se(lf|nder))(?=[(])', Keyword),
+            # Reflection
+            (r'(current_predicate|predicate_property)(?=[(])', Keyword),
+            # DCGs and term expansion
+            (r'(expand_(goal|term)|(goal|term)_expansion|phrase)(?=[(])', Keyword),
+            # Entity
+            (r'(abolish|c(reate|urrent))_(object|protocol|category)(?=[(])', Keyword),
+            (r'(object|protocol|category)_property(?=[(])', Keyword),
+            # Entity relations
+            (r'co(mplements_object|nforms_to_protocol)(?=[(])', Keyword),
+            (r'extends_(object|protocol|category)(?=[(])', Keyword),
+            (r'imp(lements_protocol|orts_category)(?=[(])', Keyword),
+            (r'(instantiat|specializ)es_class(?=[(])', Keyword),
+            # Events
+            (r'(current_event|(abolish|define)_events)(?=[(])', Keyword),
+            # Flags
+            (r'(create|current|set)_logtalk_flag(?=[(])', Keyword),
+            # Compiling, loading, and library paths
+            (r'logtalk_(compile|l(ibrary_path|oad|oad_context)|make(_target_action)?)(?=[(])', Keyword),
+            (r'\blogtalk_make\b', Keyword),
+            # Database
+            (r'(clause|retract(all)?)(?=[(])', Keyword),
+            (r'a(bolish|ssert(a|z))(?=[(])', Keyword),
+            # Control constructs
+            (r'(ca(ll|tch)|throw)(?=[(])', Keyword),
+            (r'(fa(il|lse)|true|(instantiation|system)_error)\b', Keyword),
+            (r'(uninstantiation|type|domain|existence|permission|representation|evaluation|resource|syntax)_error(?=[(])', Keyword),
+            # All solutions
+            (r'((bag|set)of|f(ind|or)all)(?=[(])', Keyword),
+            # Multi-threading predicates
+            (r'threaded(_(ca(ll|ncel)|once|ignore|exit|peek|wait|notify))?(?=[(])', Keyword),
+            # Engine predicates
+            (r'threaded_engine(_(create|destroy|self|next|next_reified|yield|post|fetch))?(?=[(])', Keyword),
+            # Term unification
+            (r'(subsumes_term|unify_with_occurs_check)(?=[(])', Keyword),
+            # Term creation and decomposition
+            (r'(functor|arg|copy_term|numbervars|term_variables)(?=[(])', Keyword),
+            # Evaluable functors
+            (r'(div|rem|m(ax|in|od)|abs|sign)(?=[(])', Keyword),
+            (r'float(_(integer|fractional)_part)?(?=[(])', Keyword),
+            (r'(floor|t(an|runcate)|round|ceiling)(?=[(])', Keyword),
+            # Other arithmetic functors
+            (r'(cos|a(cos|sin|tan|tan2)|exp|log|s(in|qrt)|xor)(?=[(])', Keyword),
+            # Term testing
+            (r'(var|atom(ic)?|integer|float|c(allable|ompound)|n(onvar|umber)|ground|acyclic_term)(?=[(])', Keyword),
+            # Term comparison
+            (r'compare(?=[(])', Keyword),
+            # Stream selection and control
+            (r'(curren|se)t_(in|out)put(?=[(])', Keyword),
+            (r'(open|close)(?=[(])', Keyword),
+            (r'flush_output(?=[(])', Keyword),
+            (r'(at_end_of_stream|flush_output)\b', Keyword),
+            (r'(stream_property|at_end_of_stream|set_stream_position)(?=[(])', Keyword),
+            # Character and byte input/output
+            (r'(nl|(get|peek|put)_(byte|c(har|ode)))(?=[(])', Keyword),
+            (r'\bnl\b', Keyword),
+            # Term input/output
+            (r'read(_term)?(?=[(])', Keyword),
+            (r'write(q|_(canonical|term))?(?=[(])', Keyword),
+            (r'(current_)?op(?=[(])', Keyword),
+            (r'(current_)?char_conversion(?=[(])', Keyword),
+            # Atomic term processing
+            (r'atom_(length|c(hars|o(ncat|des)))(?=[(])', Keyword),
+            (r'(char_code|sub_atom)(?=[(])', Keyword),
+            (r'number_c(har|ode)s(?=[(])', Keyword),
+            # Implementation defined hooks functions
+            (r'(se|curren)t_prolog_flag(?=[(])', Keyword),
+            (r'\bhalt\b', Keyword),
+            (r'halt(?=[(])', Keyword),
+            # Message sending operators
+            (r'(::|:|\^\^)', Operator),
+            # External call
+            (r'[{}]', Keyword),
+            # Logic and control
+            (r'(ignore|once)(?=[(])', Keyword),
+            (r'\brepeat\b', Keyword),
+            # Sorting
+            (r'(key)?sort(?=[(])', Keyword),
+            # Bitwise functors
+            (r'(>>|<<|/\\|\\\\|\\)', Operator),
+            # Predicate aliases
+            (r'\bas\b', Operator),
+            # Arithmetic evaluation
+            (r'\bis\b', Keyword),
+            # Arithmetic comparison
+            (r'(=:=|=\\=|<|=<|>=|>)', Operator),
+            # Term creation and decomposition
+            (r'=\.\.', Operator),
+            # Term unification
+            (r'(=|\\=)', Operator),
+            # Term comparison
+            (r'(==|\\==|@=<|@<|@>=|@>)', Operator),
+            # Evaluable functors
+            (r'(//|[-+*/])', Operator),
+            (r'\b(e|pi|div|mod|rem)\b', Operator),
+            # Other arithmetic functors
+            (r'\b\*\*\b', Operator),
+            # DCG rules
+            (r'-->', Operator),
+            # Control constructs
+            (r'([!;]|->)', Operator),
+            # Logic and control
+            (r'\\+', Operator),
+            # Mode operators
+            (r'[?@]', Operator),
+            # Existential quantifier
+            (r'\^', Operator),
+            # Punctuation
+            (r'[()\[\],.|]', Text),
+            # Atoms
+            (r"[a-z][a-zA-Z0-9_]*", Text),
+            (r"'", String, 'quoted_atom'),
+            # Double-quoted terms
+            (r'"', String, 'double_quoted_term'),
+        ],
+
+        'quoted_atom': [
+            (r"''", String),
+            (r"'", String, '#pop'),
+            (r'\\([\\abfnrtv"\']|(x[a-fA-F0-9]+|[0-7]+)\\)', String.Escape),
+            (r"[^\\'\n]+", String),
+            (r'\\', String),
+        ],
+
+        'double_quoted_term': [
+            (r'""', String),
+            (r'"', String, '#pop'),
+            (r'\\([\\abfnrtv"\']|(x[a-fA-F0-9]+|[0-7]+)\\)', String.Escape),
+            (r'[^\\"\n]+', String),
+            (r'\\', String),
+        ],
+
+        'directive': [
+            # Conditional compilation directives
+            (r'(el)?if(?=[(])', Keyword, 'root'),
+            (r'(e(lse|ndif))(?=[.])', Keyword, 'root'),
+            # Entity directives
+            (r'(category|object|protocol)(?=[(])', Keyword, 'entityrelations'),
+            (r'(end_(category|object|protocol))(?=[.])', Keyword, 'root'),
+            # Predicate scope directives
+            (r'(public|protected|private)(?=[(])', Keyword, 'root'),
+            # Other directives
+            (r'e(n(coding|sure_loaded)|xport)(?=[(])', Keyword, 'root'),
+            (r'in(clude|itialization|fo)(?=[(])', Keyword, 'root'),
+            (r'(built_in|dynamic|synchronized|threaded)(?=[.])', Keyword, 'root'),
+            (r'(alias|d(ynamic|iscontiguous)|m(eta_(non_terminal|predicate)|ode|ultifile)|s(et_(logtalk|prolog)_flag|ynchronized))(?=[(])', Keyword, 'root'),
+            (r'op(?=[(])', Keyword, 'root'),
+            (r'(c(alls|oinductive)|module|reexport|use(s|_module))(?=[(])', Keyword, 'root'),
+            (r'[a-z][a-zA-Z0-9_]*(?=[(])', Text, 'root'),
+            (r'[a-z][a-zA-Z0-9_]*(?=[.])', Text, 'root'),
+        ],
+
+        'entityrelations': [
+            (r'(complements|extends|i(nstantiates|mp(lements|orts))|specializes)(?=[(])', Keyword),
+            # Numbers
+            (r"0'[\\]?.", Number),
+            (r'0b[01]+', Number.Bin),
+            (r'0o[0-7]+', Number.Oct),
+            (r'0x[0-9a-fA-F]+', Number.Hex),
+            (r'\d+\.?\d*((e|E)(\+|-)?\d+)?', Number),
+            # Variables
+            (r'([A-Z_][a-zA-Z0-9_]*)', Name.Variable),
+            # Atoms
+            (r"[a-z][a-zA-Z0-9_]*", Text),
+            (r"'", String, 'quoted_atom'),
+            # Double-quoted terms
+            (r'"', String, 'double_quoted_term'),
+            # End of entity-opening directive
+            (r'([)]\.)', Text, 'root'),
+            # Scope operator
+            (r'(::)', Operator),
+            # Punctuation
+            (r'[()\[\],.|]', Text),
+            # Comments
+            (r'%.*?\n', Comment),
+            (r'/\*(.|\n)*?\*/', Comment),
+            # Whitespace
+            (r'\n', Text),
+            (r'\s+', Text),
+        ]
+    }
+
+    def analyse_text(text):
+        if ':- object(' in text:
+            return 1.0
+        elif ':- protocol(' in text:
+            return 1.0
+        elif ':- category(' in text:
+            return 1.0
+        elif re.search(r'^:-\s[a-z]', text, re.M):
+            return 0.9
+        else:
+            return 0.0
diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/lexers/promql.py b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/promql.py
new file mode 100644
index 00000000..cad3c254
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/promql.py
@@ -0,0 +1,176 @@
+"""
+    pygments.lexers.promql
+    ~~~~~~~~~~~~~~~~~~~~~~
+
+    Lexer for Prometheus Query Language.
+
+    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, bygroups, default, words
+from pygments.token import Comment, Keyword, Name, Number, Operator, \
+    Punctuation, String, Whitespace
+
+__all__ = ["PromQLLexer"]
+
+
+class PromQLLexer(RegexLexer):
+    """
+    For PromQL queries.
+
+    For details about the grammar see:
+    https://github.com/prometheus/prometheus/tree/master/promql/parser
+
+    .. versionadded: 2.7
+    """
+
+    name = "PromQL"
+    url = 'https://prometheus.io/docs/prometheus/latest/querying/basics/'
+    aliases = ["promql"]
+    filenames = ["*.promql"]
+    version_added = ''
+
+    base_keywords = (
+        words(
+            (
+                "bool",
+                "by",
+                "group_left",
+                "group_right",
+                "ignoring",
+                "offset",
+                "on",
+                "without",
+            ),
+            suffix=r"\b",
+        ),
+        Keyword,
+    )
+
+    aggregator_keywords = (
+        words(
+            (
+                "sum",
+                "min",
+                "max",
+                "avg",
+                "group",
+                "stddev",
+                "stdvar",
+                "count",
+                "count_values",
+                "bottomk",
+                "topk",
+                "quantile",
+            ),
+            suffix=r"\b",
+        ),
+        Keyword,
+    )
+
+    function_keywords = (
+        words(
+            (
+                "abs",
+                "absent",
+                "absent_over_time",
+                "avg_over_time",
+                "ceil",
+                "changes",
+                "clamp_max",
+                "clamp_min",
+                "count_over_time",
+                "day_of_month",
+                "day_of_week",
+                "days_in_month",
+                "delta",
+                "deriv",
+                "exp",
+                "floor",
+                "histogram_quantile",
+                "holt_winters",
+                "hour",
+                "idelta",
+                "increase",
+                "irate",
+                "label_join",
+                "label_replace",
+                "ln",
+                "log10",
+                "log2",
+                "max_over_time",
+                "min_over_time",
+                "minute",
+                "month",
+                "predict_linear",
+                "quantile_over_time",
+                "rate",
+                "resets",
+                "round",
+                "scalar",
+                "sort",
+                "sort_desc",
+                "sqrt",
+                "stddev_over_time",
+                "stdvar_over_time",
+                "sum_over_time",
+                "time",
+                "timestamp",
+                "vector",
+                "year",
+            ),
+            suffix=r"\b",
+        ),
+        Keyword.Reserved,
+    )
+
+    tokens = {
+        "root": [
+            (r"\n", Whitespace),
+            (r"\s+", Whitespace),
+            (r",", Punctuation),
+            # Keywords
+            base_keywords,
+            aggregator_keywords,
+            function_keywords,
+            # Offsets
+            (r"[1-9][0-9]*[smhdwy]", String),
+            # Numbers
+            (r"-?[0-9]+\.[0-9]+", Number.Float),
+            (r"-?[0-9]+", Number.Integer),
+            # Comments
+            (r"#.*?$", Comment.Single),
+            # Operators
+            (r"(\+|\-|\*|\/|\%|\^)", Operator),
+            (r"==|!=|>=|<=|<|>", Operator),
+            (r"and|or|unless", Operator.Word),
+            # Metrics
+            (r"[_a-zA-Z][a-zA-Z0-9_]+", Name.Variable),
+            # Params
+            (r'(["\'])(.*?)(["\'])', bygroups(Punctuation, String, Punctuation)),
+            # Other states
+            (r"\(", Operator, "function"),
+            (r"\)", Operator),
+            (r"\{", Punctuation, "labels"),
+            (r"\[", Punctuation, "range"),
+        ],
+        "labels": [
+            (r"\}", Punctuation, "#pop"),
+            (r"\n", Whitespace),
+            (r"\s+", Whitespace),
+            (r",", Punctuation),
+            (r'([_a-zA-Z][a-zA-Z0-9_]*?)(\s*?)(=~|!=|=|!~)(\s*?)("|\')(.*?)("|\')',
+             bygroups(Name.Label, Whitespace, Operator, Whitespace,
+                      Punctuation, String, Punctuation)),
+        ],
+        "range": [
+            (r"\]", Punctuation, "#pop"),
+            (r"[1-9][0-9]*[smhdwy]", String),
+        ],
+        "function": [
+            (r"\)", Operator, "#pop"),
+            (r"\(", Operator, "#push"),
+            default("#pop"),
+        ],
+    }
diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/lexers/prql.py b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/prql.py
new file mode 100644
index 00000000..ee95d2d4
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/prql.py
@@ -0,0 +1,251 @@
+"""
+    pygments.lexers.prql
+    ~~~~~~~~~~~~~~~~~~~~
+
+    Lexer for the PRQL query language.
+
+    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, combined, words, include, bygroups
+from pygments.token import Comment, Literal, Keyword, Name, Number, Operator, \
+    Punctuation, String, Text, Whitespace
+
+__all__ = ['PrqlLexer']
+
+
+class PrqlLexer(RegexLexer):
+    """
+    For PRQL source code.
+
+    grammar: https://github.com/PRQL/prql/tree/main/grammars
+    """
+
+    name = 'PRQL'
+    url = 'https://prql-lang.org/'
+    aliases = ['prql']
+    filenames = ['*.prql']
+    mimetypes = ['application/prql', 'application/x-prql']
+    version_added = '2.17'
+
+    builtinTypes = words((
+        "bool",
+        "int",
+        "int8", "int16", "int32", "int64", "int128",
+        "float",
+        "text",
+        "set"), suffix=r'\b')
+
+    def innerstring_rules(ttype):
+        return [
+            # the new style '{}'.format(...) string formatting
+            (r'\{'
+             r'((\w+)((\.\w+)|(\[[^\]]+\]))*)?'  # field name
+             r'(\:(.?[<>=\^])?[-+ ]?#?0?(\d+)?,?(\.\d+)?[E-GXb-gnosx%]?)?'
+             r'\}', String.Interpol),
+
+            (r'[^\\\'"%{\n]+', ttype),
+            (r'[\'"\\]', ttype),
+            (r'%|(\{{1,2})', ttype)
+        ]
+
+    def fstring_rules(ttype):
+        return [
+            (r'\}', String.Interpol),
+            (r'\{', String.Interpol, 'expr-inside-fstring'),
+            (r'[^\\\'"{}\n]+', ttype),
+            (r'[\'"\\]', ttype),
+        ]
+
+    tokens = {
+        'root': [
+
+            # Comments
+            (r'#!.*', String.Doc),
+            (r'#.*', Comment.Single),
+
+            # Whitespace
+            (r'\s+', Whitespace),
+
+            # Modules
+            (r'^(\s*)(module)(\s*)',
+             bygroups(Whitespace, Keyword.Namespace, Whitespace),
+             'imports'),
+
+            (builtinTypes, Keyword.Type),
+
+            # Main
+            (r'^prql ', Keyword.Reserved),
+
+            ('let', Keyword.Declaration),
+
+            include('keywords'),
+            include('expr'),
+
+            # Transforms
+            (r'^[A-Za-z_][a-zA-Z0-9_]*', Keyword),
+        ],
+        'expr': [
+            # non-raw f-strings
+            ('(f)(""")', bygroups(String.Affix, String.Double),
+             combined('fstringescape', 'tdqf')),
+            ("(f)(''')", bygroups(String.Affix, String.Single),
+             combined('fstringescape', 'tsqf')),
+            ('(f)(")', bygroups(String.Affix, String.Double),
+             combined('fstringescape', 'dqf')),
+            ("(f)(')", bygroups(String.Affix, String.Single),
+             combined('fstringescape', 'sqf')),
+
+            # non-raw s-strings
+            ('(s)(""")', bygroups(String.Affix, String.Double),
+             combined('stringescape', 'tdqf')),
+            ("(s)(''')", bygroups(String.Affix, String.Single),
+             combined('stringescape', 'tsqf')),
+            ('(s)(")', bygroups(String.Affix, String.Double),
+             combined('stringescape', 'dqf')),
+            ("(s)(')", bygroups(String.Affix, String.Single),
+             combined('stringescape', 'sqf')),
+
+            # raw strings
+            ('(?i)(r)(""")',
+             bygroups(String.Affix, String.Double), 'tdqs'),
+            ("(?i)(r)(''')",
+             bygroups(String.Affix, String.Single), 'tsqs'),
+            ('(?i)(r)(")',
+             bygroups(String.Affix, String.Double), 'dqs'),
+            ("(?i)(r)(')",
+             bygroups(String.Affix, String.Single), 'sqs'),
+
+            # non-raw strings
+            ('"""', String.Double, combined('stringescape', 'tdqs')),
+            ("'''", String.Single, combined('stringescape', 'tsqs')),
+            ('"', String.Double, combined('stringescape', 'dqs')),
+            ("'", String.Single, combined('stringescape', 'sqs')),
+
+            # Time and dates
+            (r'@\d{4}-\d{2}-\d{2}T\d{2}(:\d{2})?(:\d{2})?(\.\d{1,6})?(Z|[+-]\d{1,2}(:\d{1,2})?)?', Literal.Date),
+            (r'@\d{4}-\d{2}-\d{2}', Literal.Date),
+            (r'@\d{2}(:\d{2})?(:\d{2})?(\.\d{1,6})?(Z|[+-]\d{1,2}(:\d{1,2})?)?', Literal.Date),
+
+            (r'[^\S\n]+', Text),
+            include('numbers'),
+            (r'->|=>|==|!=|>=|<=|~=|&&|\|\||\?\?|\/\/', Operator),
+            (r'[-~+/*%=<>&^|.@]', Operator),
+            (r'[]{}:(),;[]', Punctuation),
+            include('functions'),
+
+            # Variable Names
+            (r'[A-Za-z_][a-zA-Z0-9_]*', Name.Variable),
+        ],
+        'numbers': [
+            (r'(\d(?:_?\d)*\.(?:\d(?:_?\d)*)?|(?:\d(?:_?\d)*)?\.\d(?:_?\d)*)'
+             r'([eE][+-]?\d(?:_?\d)*)?', Number.Float),
+            (r'\d(?:_?\d)*[eE][+-]?\d(?:_?\d)*j?', Number.Float),
+            (r'0[oO](?:_?[0-7])+', Number.Oct),
+            (r'0[bB](?:_?[01])+', Number.Bin),
+            (r'0[xX](?:_?[a-fA-F0-9])+', Number.Hex),
+            (r'\d(?:_?\d)*', Number.Integer),
+        ],
+        'fstringescape': [
+            include('stringescape'),
+        ],
+        'bytesescape': [
+            (r'\\([\\bfnrt"\']|\n|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
+        ],
+        'stringescape': [
+            (r'\\(N\{.*?\}|u\{[a-fA-F0-9]{1,6}\})', String.Escape),
+            include('bytesescape')
+        ],
+        'fstrings-single': fstring_rules(String.Single),
+        'fstrings-double': fstring_rules(String.Double),
+        'strings-single': innerstring_rules(String.Single),
+        'strings-double': innerstring_rules(String.Double),
+        'dqf': [
+            (r'"', String.Double, '#pop'),
+            (r'\\\\|\\"|\\\n', String.Escape),  # included here for raw strings
+            include('fstrings-double')
+        ],
+        'sqf': [
+            (r"'", String.Single, '#pop'),
+            (r"\\\\|\\'|\\\n", String.Escape),  # included here for raw strings
+            include('fstrings-single')
+        ],
+        'dqs': [
+            (r'"', String.Double, '#pop'),
+            (r'\\\\|\\"|\\\n', String.Escape),  # included here for raw strings
+            include('strings-double')
+        ],
+        'sqs': [
+            (r"'", String.Single, '#pop'),
+            (r"\\\\|\\'|\\\n", String.Escape),  # included here for raw strings
+            include('strings-single')
+        ],
+        'tdqf': [
+            (r'"""', String.Double, '#pop'),
+            include('fstrings-double'),
+            (r'\n', String.Double)
+        ],
+        'tsqf': [
+            (r"'''", String.Single, '#pop'),
+            include('fstrings-single'),
+            (r'\n', String.Single)
+        ],
+        'tdqs': [
+            (r'"""', String.Double, '#pop'),
+            include('strings-double'),
+            (r'\n', String.Double)
+        ],
+        'tsqs': [
+            (r"'''", String.Single, '#pop'),
+            include('strings-single'),
+            (r'\n', String.Single)
+        ],
+
+        'expr-inside-fstring': [
+            (r'[{([]', Punctuation, 'expr-inside-fstring-inner'),
+            # without format specifier
+            (r'(=\s*)?'         # debug (https://bugs.python.org/issue36817)
+             r'\}', String.Interpol, '#pop'),
+            # with format specifier
+            # we'll catch the remaining '}' in the outer scope
+            (r'(=\s*)?'         # debug (https://bugs.python.org/issue36817)
+             r':', String.Interpol, '#pop'),
+            (r'\s+', Whitespace),  # allow new lines
+            include('expr'),
+        ],
+        'expr-inside-fstring-inner': [
+            (r'[{([]', Punctuation, 'expr-inside-fstring-inner'),
+            (r'[])}]', Punctuation, '#pop'),
+            (r'\s+', Whitespace),  # allow new lines
+            include('expr'),
+        ],
+        'keywords': [
+            (words((
+                'into', 'case', 'type', 'module', 'internal',
+            ), suffix=r'\b'),
+                Keyword),
+            (words(('true', 'false', 'null'), suffix=r'\b'), Keyword.Constant),
+        ],
+        'functions': [
+            (words((
+                "min", "max", "sum", "average", "stddev", "every", "any",
+                "concat_array", "count", "lag", "lead", "first", "last",
+                "rank", "rank_dense", "row_number", "round", "as", "in",
+                "tuple_every", "tuple_map", "tuple_zip", "_eq", "_is_null",
+                "from_text", "lower", "upper", "read_parquet", "read_csv"),
+                suffix=r'\b'),
+             Name.Function),
+        ],
+
+        'comment': [
+            (r'-(?!\})', Comment.Multiline),
+            (r'\{-', Comment.Multiline, 'comment'),
+            (r'[^-}]', Comment.Multiline),
+            (r'-\}', Comment.Multiline, '#pop'),
+        ],
+
+        'imports': [
+            (r'\w+(\.\w+)*', Name.Class, '#pop'),
+        ],
+    }
diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/lexers/ptx.py b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/ptx.py
new file mode 100644
index 00000000..784ca13a
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/ptx.py
@@ -0,0 +1,119 @@
+"""
+    pygments.lexers.ptx
+    ~~~~~~~~~~~~~~~~~~~
+
+    Lexer for other PTX language.
+
+    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, include, words
+from pygments.token import Comment, Keyword, Name, String, Number, \
+    Punctuation, Whitespace, Operator
+
+__all__ = ["PtxLexer"]
+
+
+class PtxLexer(RegexLexer):
+    """
+    For NVIDIA `PTX `_
+    source.
+    """
+    name = 'PTX'
+    url = "https://docs.nvidia.com/cuda/parallel-thread-execution/"
+    filenames = ['*.ptx']
+    aliases = ['ptx']
+    mimetypes = ['text/x-ptx']
+    version_added = '2.16'
+
+    #: optional Comment or Whitespace
+    string = r'"[^"]*?"'
+    followsym = r'[a-zA-Z0-9_$]'
+    identifier = r'([-a-zA-Z$._][\w\-$.]*|' + string + ')'
+    block_label = r'(' + identifier + r'|(\d+))'
+
+    tokens = {
+        'root': [
+            include('whitespace'),
+
+            (block_label + r'\s*:', Name.Label),
+
+            include('keyword'),
+
+            (r'%' + identifier, Name.Variable),
+            (r'%\d+', Name.Variable.Anonymous),
+            (r'c?' + string, String),
+            (identifier, Name.Variable),
+            (r';', Punctuation),
+            (r'[*+-/]', Operator),
+
+            (r'0[xX][a-fA-F0-9]+', Number),
+            (r'-?\d+(?:[.]\d+)?(?:[eE][-+]?\d+(?:[.]\d+)?)?', Number),
+
+            (r'[=<>{}\[\]()*.,!]|x\b', Punctuation)
+
+        ],
+        'whitespace': [
+            (r'(\n|\s+)+', Whitespace),
+            (r'//.*?\n', Comment)
+        ],
+
+        'keyword': [
+            # Instruction keywords
+            (words((
+                'abs', 'discard', 'min', 'shf', 'vadd',
+                'activemask', 'div', 'mma', 'shfl', 'vadd2',
+                'add', 'dp2a', 'mov', 'shl', 'vadd4',
+                'addc', 'dp4a', 'movmatrix', 'shr', 'vavrg2',
+                'alloca', 'elect', 'mul', 'sin', 'vavrg4',
+                'and', 'ex2', 'mul24', 'slct', 'vmad',
+                'applypriority', 'exit', 'multimem', 'sqrt', 'vmax',
+                'atom', 'fence', 'nanosleep', 'st', 'vmax2',
+                'bar', 'fma', 'neg', 'stackrestore', 'vmax4',
+                'barrier', 'fns', 'not', 'stacksave', 'vmin',
+                'bfe', 'getctarank', 'or', 'stmatrix', 'vmin2',
+                'bfi', 'griddepcontrol', 'pmevent', 'sub', 'vmin4',
+                'bfind', 'isspacep', 'popc', 'subc', 'vote',
+                'bmsk', 'istypep', 'prefetch', 'suld', 'vset',
+                'bra', 'ld', 'prefetchu', 'suq', 'vset2',
+                'brev', 'ldmatrix', 'prmt', 'sured', 'vset4',
+                'brkpt', 'ldu', 'rcp', 'sust', 'vshl',
+                'brx', 'lg2', 'red', 'szext', 'vshr',
+                'call', 'lop3', 'redux', 'tanh', 'vsub',
+                'clz', 'mad', 'rem', 'testp', 'vsub2',
+                'cnot', 'mad24', 'ret', 'tex', 'vsub4',
+                'copysign', 'madc', 'rsqrt', 'tld4', 'wgmma',
+                'cos', 'mapa', 'sad', 'trap', 'wmma',
+                'cp', 'match', 'selp', 'txq', 'xor',
+                'createpolicy', 'max', 'set', 'vabsdiff', 'cvt',
+                'mbarrier', 'setmaxnreg', 'vabsdiff2', 'cvta',
+                'membar', 'setp', 'vabsdiff4')), Keyword),
+            # State Spaces and Suffixes
+            (words((
+                'reg', '.sreg', '.const', '.global',
+                '.local', '.param', '.shared', '.tex',
+                '.wide', '.loc'
+            )), Keyword.Pseudo),
+            # PTX Directives
+            (words((
+                '.address_size', '.explicitcluster', '.maxnreg', '.section',
+                '.alias', '.extern', '.maxntid', '.shared',
+                '.align', '.file', '.minnctapersm', '.sreg',
+                '.branchtargets', '.func', '.noreturn', '.target',
+                '.callprototype', '.global', '.param', '.tex',
+                '.calltargets', '.loc', '.pragma', '.version',
+                '.common', '.local', '.reg', '.visible',
+                '.const', '.maxclusterrank', '.reqnctapercluster', '.weak',
+                '.entry', '.maxnctapersm', '.reqntid')), Keyword.Reserved),
+            # Fundamental Types
+            (words((
+                '.s8', '.s16', '.s32', '.s64',
+                '.u8', '.u16', '.u32', '.u64',
+                '.f16', '.f16x2', '.f32', '.f64',
+                '.b8', '.b16', '.b32', '.b64',
+                '.pred'
+            )), Keyword.Type)
+        ],
+
+    }
diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/lexers/python.py b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/python.py
new file mode 100644
index 00000000..805f6ff2
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/python.py
@@ -0,0 +1,1201 @@
+"""
+    pygments.lexers.python
+    ~~~~~~~~~~~~~~~~~~~~~~
+
+    Lexers for Python and related languages.
+
+    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+import keyword
+
+from pygments.lexer import DelegatingLexer, RegexLexer, include, \
+    bygroups, using, default, words, combined, this
+from pygments.util import get_bool_opt, shebang_matches
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+    Number, Punctuation, Generic, Other, Error, Whitespace
+from pygments import unistring as uni
+
+__all__ = ['PythonLexer', 'PythonConsoleLexer', 'PythonTracebackLexer',
+           'Python2Lexer', 'Python2TracebackLexer',
+           'CythonLexer', 'DgLexer', 'NumPyLexer']
+
+
+class PythonLexer(RegexLexer):
+    """
+    For Python source code (version 3.x).
+
+    .. versionchanged:: 2.5
+       This is now the default ``PythonLexer``.  It is still available as the
+       alias ``Python3Lexer``.
+    """
+
+    name = 'Python'
+    url = 'https://www.python.org'
+    aliases = ['python', 'py', 'sage', 'python3', 'py3', 'bazel', 'starlark', 'pyi']
+    filenames = [
+        '*.py',
+        '*.pyw',
+        # Type stubs
+        '*.pyi',
+        # Jython
+        '*.jy',
+        # Sage
+        '*.sage',
+        # SCons
+        '*.sc',
+        'SConstruct',
+        'SConscript',
+        # Skylark/Starlark (used by Bazel, Buck, and Pants)
+        '*.bzl',
+        'BUCK',
+        'BUILD',
+        'BUILD.bazel',
+        'WORKSPACE',
+        # Twisted Application infrastructure
+        '*.tac',
+    ]
+    mimetypes = ['text/x-python', 'application/x-python',
+                 'text/x-python3', 'application/x-python3']
+    version_added = '0.10'
+
+    uni_name = f"[{uni.xid_start}][{uni.xid_continue}]*"
+
+    def innerstring_rules(ttype):
+        return [
+            # the old style '%s' % (...) string formatting (still valid in Py3)
+            (r'%(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?'
+             '[hlL]?[E-GXc-giorsaux%]', String.Interpol),
+            # the new style '{}'.format(...) string formatting
+            (r'\{'
+             r'((\w+)((\.\w+)|(\[[^\]]+\]))*)?'  # field name
+             r'(\![sra])?'                       # conversion
+             r'(\:(.?[<>=\^])?[-+ ]?#?0?(\d+)?,?(\.\d+)?[E-GXb-gnosx%]?)?'
+             r'\}', String.Interpol),
+
+            # backslashes, quotes and formatting signs must be parsed one at a time
+            (r'[^\\\'"%{\n]+', ttype),
+            (r'[\'"\\]', ttype),
+            # unhandled string formatting sign
+            (r'%|(\{{1,2})', ttype)
+            # newlines are an error (use "nl" state)
+        ]
+
+    def fstring_rules(ttype):
+        return [
+            # Assuming that a '}' is the closing brace after format specifier.
+            # Sadly, this means that we won't detect syntax error. But it's
+            # more important to parse correct syntax correctly, than to
+            # highlight invalid syntax.
+            (r'\}', String.Interpol),
+            (r'\{', String.Interpol, 'expr-inside-fstring'),
+            # backslashes, quotes and formatting signs must be parsed one at a time
+            (r'[^\\\'"{}\n]+', ttype),
+            (r'[\'"\\]', ttype),
+            # newlines are an error (use "nl" state)
+        ]
+
+    tokens = {
+        'root': [
+            (r'\n', Whitespace),
+            (r'^(\s*)([rRuUbB]{,2})("""(?:.|\n)*?""")',
+             bygroups(Whitespace, String.Affix, String.Doc)),
+            (r"^(\s*)([rRuUbB]{,2})('''(?:.|\n)*?''')",
+             bygroups(Whitespace, String.Affix, String.Doc)),
+            (r'\A#!.+$', Comment.Hashbang),
+            (r'#.*$', Comment.Single),
+            (r'\\\n', Text),
+            (r'\\', Text),
+            include('keywords'),
+            include('soft-keywords'),
+            (r'(def)((?:\s|\\\s)+)', bygroups(Keyword, Whitespace), 'funcname'),
+            (r'(class)((?:\s|\\\s)+)', bygroups(Keyword, Whitespace), 'classname'),
+            (r'(from)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Whitespace),
+             'fromimport'),
+            (r'(import)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Whitespace),
+             'import'),
+            include('expr'),
+        ],
+        'expr': [
+            # raw f-strings
+            ('(?i)(rf|fr)(""")',
+             bygroups(String.Affix, String.Double),
+             combined('rfstringescape', 'tdqf')),
+            ("(?i)(rf|fr)(''')",
+             bygroups(String.Affix, String.Single),
+             combined('rfstringescape', 'tsqf')),
+            ('(?i)(rf|fr)(")',
+             bygroups(String.Affix, String.Double),
+             combined('rfstringescape', 'dqf')),
+            ("(?i)(rf|fr)(')",
+             bygroups(String.Affix, String.Single),
+             combined('rfstringescape', 'sqf')),
+            # non-raw f-strings
+            ('([fF])(""")', bygroups(String.Affix, String.Double),
+             combined('fstringescape', 'tdqf')),
+            ("([fF])(''')", bygroups(String.Affix, String.Single),
+             combined('fstringescape', 'tsqf')),
+            ('([fF])(")', bygroups(String.Affix, String.Double),
+             combined('fstringescape', 'dqf')),
+            ("([fF])(')", bygroups(String.Affix, String.Single),
+             combined('fstringescape', 'sqf')),
+            # raw bytes and strings
+            ('(?i)(rb|br|r)(""")',
+             bygroups(String.Affix, String.Double), 'tdqs'),
+            ("(?i)(rb|br|r)(''')",
+             bygroups(String.Affix, String.Single), 'tsqs'),
+            ('(?i)(rb|br|r)(")',
+             bygroups(String.Affix, String.Double), 'dqs'),
+            ("(?i)(rb|br|r)(')",
+             bygroups(String.Affix, String.Single), 'sqs'),
+            # non-raw strings
+            ('([uU]?)(""")', bygroups(String.Affix, String.Double),
+             combined('stringescape', 'tdqs')),
+            ("([uU]?)(''')", bygroups(String.Affix, String.Single),
+             combined('stringescape', 'tsqs')),
+            ('([uU]?)(")', bygroups(String.Affix, String.Double),
+             combined('stringescape', 'dqs')),
+            ("([uU]?)(')", bygroups(String.Affix, String.Single),
+             combined('stringescape', 'sqs')),
+            # non-raw bytes
+            ('([bB])(""")', bygroups(String.Affix, String.Double),
+             combined('bytesescape', 'tdqs')),
+            ("([bB])(''')", bygroups(String.Affix, String.Single),
+             combined('bytesescape', 'tsqs')),
+            ('([bB])(")', bygroups(String.Affix, String.Double),
+             combined('bytesescape', 'dqs')),
+            ("([bB])(')", bygroups(String.Affix, String.Single),
+             combined('bytesescape', 'sqs')),
+
+            (r'[^\S\n]+', Text),
+            include('numbers'),
+            (r'!=|==|<<|>>|:=|[-~+/*%=<>&^|.]', Operator),
+            (r'[]{}:(),;[]', Punctuation),
+            (r'(in|is|and|or|not)\b', Operator.Word),
+            include('expr-keywords'),
+            include('builtins'),
+            include('magicfuncs'),
+            include('magicvars'),
+            include('name'),
+        ],
+        'expr-inside-fstring': [
+            (r'[{([]', Punctuation, 'expr-inside-fstring-inner'),
+            # without format specifier
+            (r'(=\s*)?'         # debug (https://bugs.python.org/issue36817)
+             r'(\![sraf])?'     # conversion
+             r'\}', String.Interpol, '#pop'),
+            # with format specifier
+            # we'll catch the remaining '}' in the outer scope
+            (r'(=\s*)?'         # debug (https://bugs.python.org/issue36817)
+             r'(\![sraf])?'     # conversion
+             r':', String.Interpol, '#pop'),
+            (r'\s+', Whitespace),  # allow new lines
+            include('expr'),
+        ],
+        'expr-inside-fstring-inner': [
+            (r'[{([]', Punctuation, 'expr-inside-fstring-inner'),
+            (r'[])}]', Punctuation, '#pop'),
+            (r'\s+', Whitespace),  # allow new lines
+            include('expr'),
+        ],
+        'expr-keywords': [
+            # Based on https://docs.python.org/3/reference/expressions.html
+            (words((
+                'async for', 'await', 'else', 'for', 'if', 'lambda',
+                'yield', 'yield from'), suffix=r'\b'),
+             Keyword),
+            (words(('True', 'False', 'None'), suffix=r'\b'), Keyword.Constant),
+        ],
+        'keywords': [
+            (words((
+                'assert', 'async', 'await', 'break', 'continue', 'del', 'elif',
+                'else', 'except', 'finally', 'for', 'global', 'if', 'lambda',
+                'pass', 'raise', 'nonlocal', 'return', 'try', 'while', 'yield',
+                'yield from', 'as', 'with'), suffix=r'\b'),
+             Keyword),
+            (words(('True', 'False', 'None'), suffix=r'\b'), Keyword.Constant),
+        ],
+        'soft-keywords': [
+            # `match`, `case` and `_` soft keywords
+            (r'(^[ \t]*)'              # at beginning of line + possible indentation
+             r'(match|case)\b'         # a possible keyword
+             r'(?![ \t]*(?:'           # not followed by...
+             r'[:,;=^&|@~)\]}]|(?:' +  # characters and keywords that mean this isn't
+                                       # pattern matching (but None/True/False is ok)
+             r'|'.join(k for k in keyword.kwlist if k[0].islower()) + r')\b))',
+             bygroups(Text, Keyword), 'soft-keywords-inner'),
+        ],
+        'soft-keywords-inner': [
+            # optional `_` keyword
+            (r'(\s+)([^\n_]*)(_\b)', bygroups(Whitespace, using(this), Keyword)),
+            default('#pop')
+        ],
+        'builtins': [
+            (words((
+                '__import__', 'abs', 'aiter', 'all', 'any', 'bin', 'bool', 'bytearray',
+                'breakpoint', 'bytes', 'callable', 'chr', 'classmethod', 'compile',
+                'complex', 'delattr', 'dict', 'dir', 'divmod', 'enumerate', 'eval',
+                'filter', 'float', 'format', 'frozenset', 'getattr', 'globals',
+                'hasattr', 'hash', 'hex', 'id', 'input', 'int', 'isinstance',
+                'issubclass', 'iter', 'len', 'list', 'locals', 'map', 'max',
+                'memoryview', 'min', 'next', 'object', 'oct', 'open', 'ord', 'pow',
+                'print', 'property', 'range', 'repr', 'reversed', 'round', 'set',
+                'setattr', 'slice', 'sorted', 'staticmethod', 'str', 'sum', 'super',
+                'tuple', 'type', 'vars', 'zip'), prefix=r'(?>|[-~+/*%=<>&^|.]', Operator),
+            include('keywords'),
+            (r'(def)((?:\s|\\\s)+)', bygroups(Keyword, Whitespace), 'funcname'),
+            (r'(class)((?:\s|\\\s)+)', bygroups(Keyword, Whitespace), 'classname'),
+            (r'(from)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Whitespace),
+             'fromimport'),
+            (r'(import)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Whitespace),
+             'import'),
+            include('builtins'),
+            include('magicfuncs'),
+            include('magicvars'),
+            include('backtick'),
+            ('([rR]|[uUbB][rR]|[rR][uUbB])(""")',
+             bygroups(String.Affix, String.Double), 'tdqs'),
+            ("([rR]|[uUbB][rR]|[rR][uUbB])(''')",
+             bygroups(String.Affix, String.Single), 'tsqs'),
+            ('([rR]|[uUbB][rR]|[rR][uUbB])(")',
+             bygroups(String.Affix, String.Double), 'dqs'),
+            ("([rR]|[uUbB][rR]|[rR][uUbB])(')",
+             bygroups(String.Affix, String.Single), 'sqs'),
+            ('([uUbB]?)(""")', bygroups(String.Affix, String.Double),
+             combined('stringescape', 'tdqs')),
+            ("([uUbB]?)(''')", bygroups(String.Affix, String.Single),
+             combined('stringescape', 'tsqs')),
+            ('([uUbB]?)(")', bygroups(String.Affix, String.Double),
+             combined('stringescape', 'dqs')),
+            ("([uUbB]?)(')", bygroups(String.Affix, String.Single),
+             combined('stringescape', 'sqs')),
+            include('name'),
+            include('numbers'),
+        ],
+        'keywords': [
+            (words((
+                'assert', 'break', 'continue', 'del', 'elif', 'else', 'except',
+                'exec', 'finally', 'for', 'global', 'if', 'lambda', 'pass',
+                'print', 'raise', 'return', 'try', 'while', 'yield',
+                'yield from', 'as', 'with'), suffix=r'\b'),
+             Keyword),
+        ],
+        'builtins': [
+            (words((
+                '__import__', 'abs', 'all', 'any', 'apply', 'basestring', 'bin',
+                'bool', 'buffer', 'bytearray', 'bytes', 'callable', 'chr', 'classmethod',
+                'cmp', 'coerce', 'compile', 'complex', 'delattr', 'dict', 'dir', 'divmod',
+                'enumerate', 'eval', 'execfile', 'exit', 'file', 'filter', 'float',
+                'frozenset', 'getattr', 'globals', 'hasattr', 'hash', 'hex', 'id',
+                'input', 'int', 'intern', 'isinstance', 'issubclass', 'iter', 'len',
+                'list', 'locals', 'long', 'map', 'max', 'min', 'next', 'object',
+                'oct', 'open', 'ord', 'pow', 'property', 'range', 'raw_input', 'reduce',
+                'reload', 'repr', 'reversed', 'round', 'set', 'setattr', 'slice',
+                'sorted', 'staticmethod', 'str', 'sum', 'super', 'tuple', 'type',
+                'unichr', 'unicode', 'vars', 'xrange', 'zip'),
+                prefix=r'(?>> )(.*\n)', bygroups(Generic.Prompt, Other.Code), 'continuations'),
+            # This happens, e.g., when tracebacks are embedded in documentation;
+            # trailing whitespaces are often stripped in such contexts.
+            (r'(>>>)(\n)', bygroups(Generic.Prompt, Whitespace)),
+            (r'(\^C)?Traceback \(most recent call last\):\n', Other.Traceback, 'traceback'),
+            # SyntaxError starts with this
+            (r'  File "[^"]+", line \d+', Other.Traceback, 'traceback'),
+            (r'.*\n', Generic.Output),
+        ],
+        'continuations': [
+            (r'(\.\.\. )(.*\n)', bygroups(Generic.Prompt, Other.Code)),
+            # See above.
+            (r'(\.\.\.)(\n)', bygroups(Generic.Prompt, Whitespace)),
+            default('#pop'),
+        ],
+        'traceback': [
+            # As soon as we see a traceback, consume everything until the next
+            # >>> prompt.
+            (r'(?=>>>( |$))', Text, '#pop'),
+            (r'(KeyboardInterrupt)(\n)', bygroups(Name.Class, Whitespace)),
+            (r'.*\n', Other.Traceback),
+        ],
+    }
+
+
+class PythonConsoleLexer(DelegatingLexer):
+    """
+    For Python console output or doctests, such as:
+
+    .. sourcecode:: pycon
+
+        >>> a = 'foo'
+        >>> print(a)
+        foo
+        >>> 1 / 0
+        Traceback (most recent call last):
+          File "", line 1, in 
+        ZeroDivisionError: integer division or modulo by zero
+
+    Additional options:
+
+    `python3`
+        Use Python 3 lexer for code.  Default is ``True``.
+
+        .. versionadded:: 1.0
+        .. versionchanged:: 2.5
+           Now defaults to ``True``.
+    """
+
+    name = 'Python console session'
+    aliases = ['pycon', 'python-console']
+    mimetypes = ['text/x-python-doctest']
+    url = 'https://python.org'
+    version_added = ''
+
+    def __init__(self, **options):
+        python3 = get_bool_opt(options, 'python3', True)
+        if python3:
+            pylexer = PythonLexer
+            tblexer = PythonTracebackLexer
+        else:
+            pylexer = Python2Lexer
+            tblexer = Python2TracebackLexer
+        # We have two auxiliary lexers. Use DelegatingLexer twice with
+        # different tokens.  TODO: DelegatingLexer should support this
+        # directly, by accepting a tuplet of auxiliary lexers and a tuple of
+        # distinguishing tokens. Then we wouldn't need this intermediary
+        # class.
+        class _ReplaceInnerCode(DelegatingLexer):
+            def __init__(self, **options):
+                super().__init__(pylexer, _PythonConsoleLexerBase, Other.Code, **options)
+        super().__init__(tblexer, _ReplaceInnerCode, Other.Traceback, **options)
+
+
+class PythonTracebackLexer(RegexLexer):
+    """
+    For Python 3.x tracebacks, with support for chained exceptions.
+
+    .. versionchanged:: 2.5
+       This is now the default ``PythonTracebackLexer``.  It is still available
+       as the alias ``Python3TracebackLexer``.
+    """
+
+    name = 'Python Traceback'
+    aliases = ['pytb', 'py3tb']
+    filenames = ['*.pytb', '*.py3tb']
+    mimetypes = ['text/x-python-traceback', 'text/x-python3-traceback']
+    url = 'https://python.org'
+    version_added = '1.0'
+
+    tokens = {
+        'root': [
+            (r'\n', Whitespace),
+            (r'^(\^C)?Traceback \(most recent call last\):\n', Generic.Traceback, 'intb'),
+            (r'^During handling of the above exception, another '
+             r'exception occurred:\n\n', Generic.Traceback),
+            (r'^The above exception was the direct cause of the '
+             r'following exception:\n\n', Generic.Traceback),
+            (r'^(?=  File "[^"]+", line \d+)', Generic.Traceback, 'intb'),
+            (r'^.*\n', Other),
+        ],
+        'intb': [
+            (r'^(  File )("[^"]+")(, line )(\d+)(, in )(.+)(\n)',
+             bygroups(Text, Name.Builtin, Text, Number, Text, Name, Whitespace)),
+            (r'^(  File )("[^"]+")(, line )(\d+)(\n)',
+             bygroups(Text, Name.Builtin, Text, Number, Whitespace)),
+            (r'^(    )(.+)(\n)',
+             bygroups(Whitespace, using(PythonLexer), Whitespace), 'markers'),
+            (r'^([ \t]*)(\.\.\.)(\n)',
+             bygroups(Whitespace, Comment, Whitespace)),  # for doctests...
+            (r'^([^:]+)(: )(.+)(\n)',
+             bygroups(Generic.Error, Text, Name, Whitespace), '#pop'),
+            (r'^([a-zA-Z_][\w.]*)(:?\n)',
+             bygroups(Generic.Error, Whitespace), '#pop'),
+            default('#pop'),
+        ],
+        'markers': [
+            # Either `PEP 657 `
+            # error locations in Python 3.11+, or single-caret markers
+            # for syntax errors before that.
+            (r'^( {4,})([~^]+)(\n)',
+             bygroups(Whitespace, Punctuation.Marker, Whitespace),
+             '#pop'),
+            default('#pop'),
+        ],
+    }
+
+
+Python3TracebackLexer = PythonTracebackLexer
+
+
+class Python2TracebackLexer(RegexLexer):
+    """
+    For Python tracebacks.
+
+    .. versionchanged:: 2.5
+       This class has been renamed from ``PythonTracebackLexer``.
+       ``PythonTracebackLexer`` now refers to the Python 3 variant.
+    """
+
+    name = 'Python 2.x Traceback'
+    aliases = ['py2tb']
+    filenames = ['*.py2tb']
+    mimetypes = ['text/x-python2-traceback']
+    url = 'https://python.org'
+    version_added = '0.7'
+
+    tokens = {
+        'root': [
+            # Cover both (most recent call last) and (innermost last)
+            # The optional ^C allows us to catch keyboard interrupt signals.
+            (r'^(\^C)?(Traceback.*\n)',
+             bygroups(Text, Generic.Traceback), 'intb'),
+            # SyntaxError starts with this.
+            (r'^(?=  File "[^"]+", line \d+)', Generic.Traceback, 'intb'),
+            (r'^.*\n', Other),
+        ],
+        'intb': [
+            (r'^(  File )("[^"]+")(, line )(\d+)(, in )(.+)(\n)',
+             bygroups(Text, Name.Builtin, Text, Number, Text, Name, Whitespace)),
+            (r'^(  File )("[^"]+")(, line )(\d+)(\n)',
+             bygroups(Text, Name.Builtin, Text, Number, Whitespace)),
+            (r'^(    )(.+)(\n)',
+             bygroups(Text, using(Python2Lexer), Whitespace), 'marker'),
+            (r'^([ \t]*)(\.\.\.)(\n)',
+             bygroups(Text, Comment, Whitespace)),  # for doctests...
+            (r'^([^:]+)(: )(.+)(\n)',
+             bygroups(Generic.Error, Text, Name, Whitespace), '#pop'),
+            (r'^([a-zA-Z_]\w*)(:?\n)',
+             bygroups(Generic.Error, Whitespace), '#pop')
+        ],
+        'marker': [
+            # For syntax errors.
+            (r'( {4,})(\^)', bygroups(Text, Punctuation.Marker), '#pop'),
+            default('#pop'),
+        ],
+    }
+
+
+class CythonLexer(RegexLexer):
+    """
+    For Pyrex and Cython source code.
+    """
+
+    name = 'Cython'
+    url = 'https://cython.org'
+    aliases = ['cython', 'pyx', 'pyrex']
+    filenames = ['*.pyx', '*.pxd', '*.pxi']
+    mimetypes = ['text/x-cython', 'application/x-cython']
+    version_added = '1.1'
+
+    tokens = {
+        'root': [
+            (r'\n', Whitespace),
+            (r'^(\s*)("""(?:.|\n)*?""")', bygroups(Whitespace, String.Doc)),
+            (r"^(\s*)('''(?:.|\n)*?''')", bygroups(Whitespace, String.Doc)),
+            (r'[^\S\n]+', Text),
+            (r'#.*$', Comment),
+            (r'[]{}:(),;[]', Punctuation),
+            (r'\\\n', Whitespace),
+            (r'\\', Text),
+            (r'(in|is|and|or|not)\b', Operator.Word),
+            (r'(<)([a-zA-Z0-9.?]+)(>)',
+             bygroups(Punctuation, Keyword.Type, Punctuation)),
+            (r'!=|==|<<|>>|[-~+/*%=<>&^|.?]', Operator),
+            (r'(from)(\d+)(<=)(\s+)(<)(\d+)(:)',
+             bygroups(Keyword, Number.Integer, Operator, Whitespace, Operator,
+                      Name, Punctuation)),
+            include('keywords'),
+            (r'(def|property)(\s+)', bygroups(Keyword, Whitespace), 'funcname'),
+            (r'(cp?def)(\s+)', bygroups(Keyword, Whitespace), 'cdef'),
+            # (should actually start a block with only cdefs)
+            (r'(cdef)(:)', bygroups(Keyword, Punctuation)),
+            (r'(class|struct)(\s+)', bygroups(Keyword, Whitespace), 'classname'),
+            (r'(from)(\s+)', bygroups(Keyword, Whitespace), 'fromimport'),
+            (r'(c?import)(\s+)', bygroups(Keyword, Whitespace), 'import'),
+            include('builtins'),
+            include('backtick'),
+            ('(?:[rR]|[uU][rR]|[rR][uU])"""', String, 'tdqs'),
+            ("(?:[rR]|[uU][rR]|[rR][uU])'''", String, 'tsqs'),
+            ('(?:[rR]|[uU][rR]|[rR][uU])"', String, 'dqs'),
+            ("(?:[rR]|[uU][rR]|[rR][uU])'", String, 'sqs'),
+            ('[uU]?"""', String, combined('stringescape', 'tdqs')),
+            ("[uU]?'''", String, combined('stringescape', 'tsqs')),
+            ('[uU]?"', String, combined('stringescape', 'dqs')),
+            ("[uU]?'", String, combined('stringescape', 'sqs')),
+            include('name'),
+            include('numbers'),
+        ],
+        'keywords': [
+            (words((
+                'assert', 'async', 'await', 'break', 'by', 'continue', 'ctypedef', 'del', 'elif',
+                'else', 'except', 'except?', 'exec', 'finally', 'for', 'fused', 'gil',
+                'global', 'if', 'include', 'lambda', 'nogil', 'pass', 'print',
+                'raise', 'return', 'try', 'while', 'yield', 'as', 'with'), suffix=r'\b'),
+             Keyword),
+            (r'(DEF|IF|ELIF|ELSE)\b', Comment.Preproc),
+        ],
+        'builtins': [
+            (words((
+                '__import__', 'abs', 'all', 'any', 'apply', 'basestring', 'bin', 'bint',
+                'bool', 'buffer', 'bytearray', 'bytes', 'callable', 'chr',
+                'classmethod', 'cmp', 'coerce', 'compile', 'complex', 'delattr',
+                'dict', 'dir', 'divmod', 'enumerate', 'eval', 'execfile', 'exit',
+                'file', 'filter', 'float', 'frozenset', 'getattr', 'globals',
+                'hasattr', 'hash', 'hex', 'id', 'input', 'int', 'intern', 'isinstance',
+                'issubclass', 'iter', 'len', 'list', 'locals', 'long', 'map', 'max',
+                'min', 'next', 'object', 'oct', 'open', 'ord', 'pow', 'property', 'Py_ssize_t',
+                'range', 'raw_input', 'reduce', 'reload', 'repr', 'reversed',
+                'round', 'set', 'setattr', 'slice', 'sorted', 'staticmethod',
+                'str', 'sum', 'super', 'tuple', 'type', 'unichr', 'unicode', 'unsigned',
+                'vars', 'xrange', 'zip'), prefix=r'(??/\\:']?:)(\s*)(\{)",
+             bygroups(Name.Function, Whitespace, Operator, Whitespace, Punctuation),
+             "functions"),
+            # Variable Names
+            (r"([.]?[a-zA-Z][\w.]*)(\s*)([-.~=!@#$%^&*_+|,<>?/\\:']?:)",
+             bygroups(Name.Variable, Whitespace, Operator)),
+            # Functions
+            (r"\{", Punctuation, "functions"),
+            # Parentheses
+            (r"\(", Punctuation, "parentheses"),
+            # Brackets
+            (r"\[", Punctuation, "brackets"),
+            # Errors
+            (r"'`([a-zA-Z][\w.]*)?", Name.Exception),
+            # File Symbols
+            (r"`:([a-zA-Z/][\w./]*)?", String.Symbol),
+            # Symbols
+            (r"`([a-zA-Z][\w.]*)?", String.Symbol),
+            # Numbers
+            include("numbers"),
+            # Variable Names
+            (r"[a-zA-Z][\w.]*", Name),
+            # Operators
+            (r"[-=+*#$%@!~^&:.,<>'\\|/?_]", Operator),
+            # Punctuation
+            (r";", Punctuation),
+        ],
+        "functions": [
+            include("root"),
+            (r"\}", Punctuation, "#pop"),
+        ],
+        "parentheses": [
+            include("root"),
+            (r"\)", Punctuation, "#pop"),
+        ],
+        "brackets": [
+            include("root"),
+            (r"\]", Punctuation, "#pop"),
+        ],
+        "numbers": [
+            # Binary Values
+            (r"[01]+b", Number.Bin),
+            # Nulls/Infinities
+            (r"0[nNwW][cefghijmndzuvtp]?", Number),
+            # Timestamps
+            ((r"(?:[0-9]{4}[.][0-9]{2}[.][0-9]{2}|[0-9]+)"
+              "D(?:[0-9](?:[0-9](?::[0-9]{2}"
+              "(?::[0-9]{2}(?:[.][0-9]*)?)?)?)?)?"), Literal.Date),
+            # Datetimes
+            ((r"[0-9]{4}[.][0-9]{2}"
+              "(?:m|[.][0-9]{2}(?:T(?:[0-9]{2}:[0-9]{2}"
+              "(?::[0-9]{2}(?:[.][0-9]*)?)?)?)?)"), Literal.Date),
+            # Times
+            (r"[0-9]{2}:[0-9]{2}(?::[0-9]{2}(?:[.][0-9]{1,3})?)?",
+             Literal.Date),
+            # GUIDs
+            (r"[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}",
+             Number.Hex),
+            # Byte Vectors
+            (r"0x[0-9a-fA-F]+", Number.Hex),
+            # Floats
+            (r"([0-9]*[.]?[0-9]+|[0-9]+[.]?[0-9]*)[eE][+-]?[0-9]+[ef]?",
+             Number.Float),
+            (r"([0-9]*[.][0-9]+|[0-9]+[.][0-9]*)[ef]?", Number.Float),
+            (r"[0-9]+[ef]", Number.Float),
+            # Characters
+            (r"[0-9]+c", Number),
+            # Integers
+            (r"[0-9]+[ihtuv]", Number.Integer),
+            # Long Integers
+            (r"[0-9]+[jnp]?", Number.Integer.Long),
+        ],
+        "comments": [
+            (r"[^\\]+", Comment.Multiline),
+            (r"^\\", Comment.Multiline, "#pop"),
+            (r"\\", Comment.Multiline),
+        ],
+        "strings": [
+            (r'[^"\\]+', String.Double),
+            (r"\\.", String.Escape),
+            (r'"', String.Double, "#pop"),
+        ],
+    }
+
+
+class QLexer(KLexer):
+    """
+    For `Q `_ source code.
+    """
+
+    name = "Q"
+    aliases = ["q"]
+    filenames = ["*.q"]
+    version_added = '2.12'
+
+    tokens = {
+        "root": [
+            (words(("aj", "aj0", "ajf", "ajf0", "all", "and", "any", "asc",
+                    "asof", "attr", "avgs", "ceiling", "cols", "count", "cross",
+                    "csv", "cut", "deltas", "desc", "differ", "distinct", "dsave",
+                    "each", "ej", "ema", "eval", "except", "fby", "fills", "first",
+                    "fkeys", "flip", "floor", "get", "group", "gtime", "hclose",
+                    "hcount", "hdel", "hsym", "iasc", "idesc", "ij", "ijf",
+                    "inter", "inv", "key", "keys", "lj", "ljf", "load", "lower",
+                    "lsq", "ltime", "ltrim", "mavg", "maxs", "mcount", "md5",
+                    "mdev", "med", "meta", "mins", "mmax", "mmin", "mmu", "mod",
+                    "msum", "neg", "next", "not", "null", "or", "over", "parse",
+                    "peach", "pj", "prds", "prior", "prev", "rand", "rank", "ratios",
+                    "raze", "read0", "read1", "reciprocal", "reval", "reverse",
+                    "rload", "rotate", "rsave", "rtrim", "save", "scan", "scov",
+                    "sdev", "set", "show", "signum", "ssr", "string", "sublist",
+                    "sums", "sv", "svar", "system", "tables", "til", "trim", "txf",
+                    "type", "uj", "ujf", "ungroup", "union", "upper", "upsert",
+                    "value", "view", "views", "vs", "where", "wj", "wj1", "ww",
+                    "xasc", "xbar", "xcol", "xcols", "xdesc", "xgroup", "xkey",
+                    "xlog", "xprev", "xrank"),
+                    suffix=r"\b"), Name.Builtin,
+            ),
+            inherit,
+        ],
+    }
diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/lexers/qlik.py b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/qlik.py
new file mode 100644
index 00000000..a29f89f3
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/qlik.py
@@ -0,0 +1,117 @@
+"""
+    pygments.lexers.qlik
+    ~~~~~~~~~~~~~~~~~~~~
+
+    Lexer for the qlik scripting language
+
+    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups, words
+from pygments.token import Comment, Keyword, Name, Number, Operator, \
+    Punctuation, String, Text
+from pygments.lexers._qlik_builtins import OPERATORS_LIST, STATEMENT_LIST, \
+    SCRIPT_FUNCTIONS, CONSTANT_LIST
+
+__all__ = ["QlikLexer"]
+
+
+class QlikLexer(RegexLexer):
+    """
+    Lexer for qlik code, including .qvs files
+    """
+
+    name = "Qlik"
+    aliases = ["qlik", "qlikview", "qliksense", "qlikscript"]
+    filenames = ["*.qvs", "*.qvw"]
+    url = "https://qlik.com"
+    version_added = '2.12'
+
+    flags = re.IGNORECASE
+
+    tokens = {
+        # Handle multi-line comments
+        "comment": [
+            (r"\*/", Comment.Multiline, "#pop"),
+            (r"[^*]+", Comment.Multiline),
+        ],
+        # Handle numbers
+        "numerics": [
+            (r"\b\d+\.\d+(e\d+)?[fd]?\b", Number.Float),
+            (r"\b\d+\b", Number.Integer),
+        ],
+        # Handle variable names in things
+        "interp": [
+            (
+                r"(\$\()(\w+)(\))",
+                bygroups(String.Interpol, Name.Variable, String.Interpol),
+            ),
+        ],
+        # Handle strings
+        "string": [
+            (r"'", String, "#pop"),
+            include("interp"),
+            (r"[^'$]+", String),
+            (r"\$", String),
+        ],
+        #
+        "assignment": [
+            (r";", Punctuation, "#pop"),
+            include("root"),
+        ],
+        "field_name_quote": [
+            (r'"', String.Symbol, "#pop"),
+            include("interp"),
+            (r"[^\"$]+", String.Symbol),
+            (r"\$", String.Symbol),
+        ],
+        "field_name_bracket": [
+            (r"\]", String.Symbol, "#pop"),
+            include("interp"),
+            (r"[^\]$]+", String.Symbol),
+            (r"\$", String.Symbol),
+        ],
+        "function": [(r"\)", Punctuation, "#pop"), include("root")],
+        "root": [
+            # Whitespace and comments
+            (r"\s+", Text.Whitespace),
+            (r"/\*", Comment.Multiline, "comment"),
+            (r"//.*\n", Comment.Single),
+            # variable assignment
+            (r"(let|set)(\s+)", bygroups(Keyword.Declaration, Text.Whitespace),
+             "assignment"),
+            # Word operators
+            (words(OPERATORS_LIST["words"], prefix=r"\b", suffix=r"\b"),
+             Operator.Word),
+            # Statements
+            (words(STATEMENT_LIST, suffix=r"\b"), Keyword),
+            # Table names
+            (r"[a-z]\w*:", Keyword.Declaration),
+            # Constants
+            (words(CONSTANT_LIST, suffix=r"\b"), Keyword.Constant),
+            # Functions
+            (words(SCRIPT_FUNCTIONS, suffix=r"(?=\s*\()"), Name.Builtin,
+             "function"),
+            # interpolation - e.g. $(variableName)
+            include("interp"),
+            # Quotes denote a field/file name
+            (r'"', String.Symbol, "field_name_quote"),
+            # Square brackets denote a field/file name
+            (r"\[", String.Symbol, "field_name_bracket"),
+            # Strings
+            (r"'", String, "string"),
+            # Numbers
+            include("numerics"),
+            # Operator symbols
+            (words(OPERATORS_LIST["symbols"]), Operator),
+            # Strings denoted by single quotes
+            (r"'.+?'", String),
+            # Words as text
+            (r"\b\w+\b", Text),
+            # Basic punctuation
+            (r"[,;.()\\/]", Punctuation),
+        ],
+    }
diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/lexers/qvt.py b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/qvt.py
new file mode 100644
index 00000000..302d1b6e
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/qvt.py
@@ -0,0 +1,153 @@
+"""
+    pygments.lexers.qvt
+    ~~~~~~~~~~~~~~~~~~~
+
+    Lexer for QVT Operational language.
+
+    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, bygroups, include, combined, default, \
+    words
+from pygments.token import Text, Comment, Operator, Keyword, Punctuation, \
+    Name, String, Number
+
+__all__ = ['QVToLexer']
+
+
+class QVToLexer(RegexLexer):
+    """
+    For the QVT Operational Mapping language.
+
+    Reference for implementing this: «Meta Object Facility (MOF) 2.0
+    Query/View/Transformation Specification», Version 1.1 - January 2011
+    (https://www.omg.org/spec/QVT/1.1/), see §8.4, «Concrete Syntax» in
+    particular.
+
+    Notable tokens assignments:
+
+    - Name.Class is assigned to the identifier following any of the following
+      keywords: metamodel, class, exception, primitive, enum, transformation
+      or library
+
+    - Name.Function is assigned to the names of mappings and queries
+
+    - Name.Builtin.Pseudo is assigned to the pre-defined variables 'this',
+      'self' and 'result'.
+    """
+    # With obvious borrowings & inspiration from the Java, Python and C lexers
+
+    name = 'QVTO'
+    aliases = ['qvto', 'qvt']
+    filenames = ['*.qvto']
+    url = 'https://www.omg.org/spec/QVT/1.1'
+    version_added = ''
+
+    tokens = {
+        'root': [
+            (r'\n', Text),
+            (r'[^\S\n]+', Text),
+            (r'(--|//)(\s*)(directive:)?(.*)$',
+             bygroups(Comment, Comment, Comment.Preproc, Comment)),
+            # Uncomment the following if you want to distinguish between
+            # '/*' and '/**', à la javadoc
+            # (r'/[*]{2}(.|\n)*?[*]/', Comment.Multiline),
+            (r'/[*](.|\n)*?[*]/', Comment.Multiline),
+            (r'\\\n', Text),
+            (r'(and|not|or|xor|##?)\b', Operator.Word),
+            (r'(:{1,2}=|[-+]=)\b', Operator.Word),
+            (r'(@|<<|>>)\b', Keyword),  # stereotypes
+            (r'!=|<>|==|=|!->|->|>=|<=|[.]{3}|[+/*%=<>&|.~]', Operator),
+            (r'[]{}:(),;[]', Punctuation),
+            (r'(true|false|unlimited|null)\b', Keyword.Constant),
+            (r'(this|self|result)\b', Name.Builtin.Pseudo),
+            (r'(var)\b', Keyword.Declaration),
+            (r'(from|import)\b', Keyword.Namespace, 'fromimport'),
+            (r'(metamodel|class|exception|primitive|enum|transformation|'
+             r'library)(\s+)(\w+)',
+             bygroups(Keyword.Word, Text, Name.Class)),
+            (r'(exception)(\s+)(\w+)',
+             bygroups(Keyword.Word, Text, Name.Exception)),
+            (r'(main)\b', Name.Function),
+            (r'(mapping|helper|query)(\s+)',
+             bygroups(Keyword.Declaration, Text), 'operation'),
+            (r'(assert)(\s+)\b', bygroups(Keyword, Text), 'assert'),
+            (r'(Bag|Collection|Dict|OrderedSet|Sequence|Set|Tuple|List)\b',
+             Keyword.Type),
+            include('keywords'),
+            ('"', String, combined('stringescape', 'dqs')),
+            ("'", String, combined('stringescape', 'sqs')),
+            include('name'),
+            include('numbers'),
+            # (r'([a-zA-Z_]\w*)(::)([a-zA-Z_]\w*)',
+            # bygroups(Text, Text, Text)),
+        ],
+
+        'fromimport': [
+            (r'(?:[ \t]|\\\n)+', Text),
+            (r'[a-zA-Z_][\w.]*', Name.Namespace),
+            default('#pop'),
+        ],
+
+        'operation': [
+            (r'::', Text),
+            (r'(.*::)([a-zA-Z_]\w*)([ \t]*)(\()',
+             bygroups(Text, Name.Function, Text, Punctuation), '#pop')
+        ],
+
+        'assert': [
+            (r'(warning|error|fatal)\b', Keyword, '#pop'),
+            default('#pop'),  # all else: go back
+        ],
+
+        'keywords': [
+            (words((
+                'abstract', 'access', 'any', 'assert', 'blackbox', 'break',
+                'case', 'collect', 'collectNested', 'collectOne', 'collectselect',
+                'collectselectOne', 'composes', 'compute', 'configuration',
+                'constructor', 'continue', 'datatype', 'default', 'derived',
+                'disjuncts', 'do', 'elif', 'else', 'end', 'endif', 'except',
+                'exists', 'extends', 'forAll', 'forEach', 'forOne', 'from', 'if',
+                'implies', 'in', 'inherits', 'init', 'inout', 'intermediate',
+                'invresolve', 'invresolveIn', 'invresolveone', 'invresolveoneIn',
+                'isUnique', 'iterate', 'late', 'let', 'literal', 'log', 'map',
+                'merges', 'modeltype', 'new', 'object', 'one', 'ordered', 'out',
+                'package', 'population', 'property', 'raise', 'readonly',
+                'references', 'refines', 'reject', 'resolve', 'resolveIn',
+                'resolveone', 'resolveoneIn', 'return', 'select', 'selectOne',
+                'sortedBy', 'static', 'switch', 'tag', 'then', 'try', 'typedef',
+                'unlimited', 'uses', 'when', 'where', 'while', 'with', 'xcollect',
+                'xmap', 'xselect'), suffix=r'\b'), Keyword),
+        ],
+
+        # There is no need to distinguish between String.Single and
+        # String.Double: 'strings' is factorised for 'dqs' and 'sqs'
+        'strings': [
+            (r'[^\\\'"\n]+', String),
+            # quotes, percents and backslashes must be parsed one at a time
+            (r'[\'"\\]', String),
+        ],
+        'stringescape': [
+            (r'\\([\\btnfr"\']|u[0-3][0-7]{2}|u[0-7]{1,2})', String.Escape)
+        ],
+        'dqs': [  # double-quoted string
+            (r'"', String, '#pop'),
+            (r'\\\\|\\"', String.Escape),
+            include('strings')
+        ],
+        'sqs': [  # single-quoted string
+            (r"'", String, '#pop'),
+            (r"\\\\|\\'", String.Escape),
+            include('strings')
+        ],
+        'name': [
+            (r'[a-zA-Z_]\w*', Name),
+        ],
+        # numbers: excerpt taken from the python lexer
+        'numbers': [
+            (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
+            (r'\d+[eE][+-]?[0-9]+', Number.Float),
+            (r'\d+', Number.Integer)
+        ],
+    }
diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/lexers/r.py b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/r.py
new file mode 100644
index 00000000..d3f65ba2
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/r.py
@@ -0,0 +1,196 @@
+"""
+    pygments.lexers.r
+    ~~~~~~~~~~~~~~~~~
+
+    Lexers for the R/S languages.
+
+    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import Lexer, RegexLexer, include, do_insertions
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+    Number, Punctuation, Generic, Whitespace
+
+__all__ = ['RConsoleLexer', 'SLexer', 'RdLexer']
+
+
+line_re  = re.compile('.*?\n')
+
+
+class RConsoleLexer(Lexer):
+    """
+    For R console transcripts or R CMD BATCH output files.
+    """
+
+    name = 'RConsole'
+    aliases = ['rconsole', 'rout']
+    filenames = ['*.Rout']
+    url = 'https://www.r-project.org'
+    version_added = ''
+    _example = "rconsole/r-console-transcript.Rout"
+
+    def get_tokens_unprocessed(self, text):
+        slexer = SLexer(**self.options)
+
+        current_code_block = ''
+        insertions = []
+
+        for match in line_re.finditer(text):
+            line = match.group()
+            if line.startswith('>') or line.startswith('+'):
+                # Colorize the prompt as such,
+                # then put rest of line into current_code_block
+                insertions.append((len(current_code_block),
+                                   [(0, Generic.Prompt, line[:2])]))
+                current_code_block += line[2:]
+            else:
+                # We have reached a non-prompt line!
+                # If we have stored prompt lines, need to process them first.
+                if current_code_block:
+                    # Weave together the prompts and highlight code.
+                    yield from do_insertions(
+                        insertions, slexer.get_tokens_unprocessed(current_code_block))
+                    # Reset vars for next code block.
+                    current_code_block = ''
+                    insertions = []
+                # Now process the actual line itself, this is output from R.
+                yield match.start(), Generic.Output, line
+
+        # If we happen to end on a code block with nothing after it, need to
+        # process the last code block. This is neither elegant nor DRY so
+        # should be changed.
+        if current_code_block:
+            yield from do_insertions(
+                insertions, slexer.get_tokens_unprocessed(current_code_block))
+
+
+class SLexer(RegexLexer):
+    """
+    For S, S-plus, and R source code.
+    """
+
+    name = 'S'
+    aliases = ['splus', 's', 'r']
+    filenames = ['*.S', '*.R', '.Rhistory', '.Rprofile', '.Renviron']
+    mimetypes = ['text/S-plus', 'text/S', 'text/x-r-source', 'text/x-r',
+                 'text/x-R', 'text/x-r-history', 'text/x-r-profile']
+    url = 'https://www.r-project.org'
+    version_added = '0.10'
+
+    valid_name = r'`[^`\\]*(?:\\.[^`\\]*)*`|(?:[a-zA-Z]|\.[A-Za-z_.])[\w.]*|\.'
+    tokens = {
+        'comments': [
+            (r'#.*$', Comment.Single),
+        ],
+        'valid_name': [
+            (valid_name, Name),
+        ],
+        'function_name': [
+            (rf'({valid_name})\s*(?=\()', Name.Function),
+        ],
+        'punctuation': [
+            (r'\[{1,2}|\]{1,2}|\(|\)|;|,', Punctuation),
+        ],
+        'keywords': [
+            (r'(if|else|for|while|repeat|in|next|break|return|switch|function)'
+             r'(?![\w.])',
+             Keyword.Reserved),
+        ],
+        'operators': [
+            (r'<>?|-|==|<=|>=|\|>|<|>|&&?|!=|\|\|?|\?', Operator),
+            (r'\*|\+|\^|/|!|%[^%]*%|=|~|\$|@|:{1,3}', Operator),
+        ],
+        'builtin_symbols': [
+            (r'(NULL|NA(_(integer|real|complex|character)_)?|'
+             r'letters|LETTERS|Inf|TRUE|FALSE|NaN|pi|\.\.(\.|[0-9]+))'
+             r'(?![\w.])',
+             Keyword.Constant),
+            (r'(T|F)\b', Name.Builtin.Pseudo),
+        ],
+        'numbers': [
+            # hex number
+            (r'0[xX][a-fA-F0-9]+([pP][0-9]+)?[Li]?', Number.Hex),
+            # decimal number
+            (r'[+-]?([0-9]+(\.[0-9]+)?|\.[0-9]+|\.)([eE][+-]?[0-9]+)?[Li]?',
+             Number),
+        ],
+        'statements': [
+            include('comments'),
+            # whitespaces
+            (r'\s+', Whitespace),
+            (r'\'', String, 'string_squote'),
+            (r'\"', String, 'string_dquote'),
+            include('builtin_symbols'),
+            include('keywords'),
+            include('function_name'),
+            include('valid_name'),
+            include('numbers'),
+            include('punctuation'),
+            include('operators'),
+        ],
+        'root': [
+            # calls:
+            include('statements'),
+            # blocks:
+            (r'\{|\}', Punctuation),
+            # (r'\{', Punctuation, 'block'),
+            (r'.', Text),
+        ],
+        # 'block': [
+        #    include('statements'),
+        #    ('\{', Punctuation, '#push'),
+        #    ('\}', Punctuation, '#pop')
+        # ],
+        'string_squote': [
+            (r'([^\'\\]|\\.)*\'', String, '#pop'),
+        ],
+        'string_dquote': [
+            (r'([^"\\]|\\.)*"', String, '#pop'),
+        ],
+    }
+
+    def analyse_text(text):
+        if re.search(r'[a-z0-9_\])\s]<-(?!-)', text):
+            return 0.11
+
+
+class RdLexer(RegexLexer):
+    """
+    Pygments Lexer for R documentation (Rd) files
+
+    This is a very minimal implementation, highlighting little more
+    than the macros. A description of Rd syntax is found in `Writing R
+    Extensions `_
+    and `Parsing Rd files `_.
+    """
+    name = 'Rd'
+    aliases = ['rd']
+    filenames = ['*.Rd']
+    mimetypes = ['text/x-r-doc']
+    url = 'http://cran.r-project.org/doc/manuals/R-exts.html'
+    version_added = '1.6'
+
+    # To account for verbatim / LaTeX-like / and R-like areas
+    # would require parsing.
+    tokens = {
+        'root': [
+            # catch escaped brackets and percent sign
+            (r'\\[\\{}%]', String.Escape),
+            # comments
+            (r'%.*$', Comment),
+            # special macros with no arguments
+            (r'\\(?:cr|l?dots|R|tab)\b', Keyword.Constant),
+            # macros
+            (r'\\[a-zA-Z]+\b', Keyword),
+            # special preprocessor macros
+            (r'^\s*#(?:ifn?def|endif).*\b', Comment.Preproc),
+            # non-escaped brackets
+            (r'[{}]', Name.Builtin),
+            # everything else
+            (r'[^\\%\n{}]+', Text),
+            (r'.', Text),
+        ]
+    }
diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/lexers/rdf.py b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/rdf.py
new file mode 100644
index 00000000..4930c1b3
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/rdf.py
@@ -0,0 +1,468 @@
+"""
+    pygments.lexers.rdf
+    ~~~~~~~~~~~~~~~~~~~
+
+    Lexers for semantic web and RDF query languages and markup.
+
+    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, bygroups, default
+from pygments.token import Keyword, Punctuation, String, Number, Operator, \
+    Generic, Whitespace, Name, Literal, Comment, Text
+
+__all__ = ['SparqlLexer', 'TurtleLexer', 'ShExCLexer']
+
+
+class SparqlLexer(RegexLexer):
+    """
+    Lexer for SPARQL query language.
+    """
+    name = 'SPARQL'
+    aliases = ['sparql']
+    filenames = ['*.rq', '*.sparql']
+    mimetypes = ['application/sparql-query']
+    url = 'https://www.w3.org/TR/sparql11-query'
+    version_added = '2.0'
+
+    # character group definitions ::
+
+    PN_CHARS_BASE_GRP = ('a-zA-Z'
+                         '\u00c0-\u00d6'
+                         '\u00d8-\u00f6'
+                         '\u00f8-\u02ff'
+                         '\u0370-\u037d'
+                         '\u037f-\u1fff'
+                         '\u200c-\u200d'
+                         '\u2070-\u218f'
+                         '\u2c00-\u2fef'
+                         '\u3001-\ud7ff'
+                         '\uf900-\ufdcf'
+                         '\ufdf0-\ufffd')
+
+    PN_CHARS_U_GRP = (PN_CHARS_BASE_GRP + '_')
+
+    PN_CHARS_GRP = (PN_CHARS_U_GRP +
+                    r'\-' +
+                    r'0-9' +
+                    '\u00b7' +
+                    '\u0300-\u036f' +
+                    '\u203f-\u2040')
+
+    HEX_GRP = '0-9A-Fa-f'
+
+    PN_LOCAL_ESC_CHARS_GRP = r' _~.\-!$&"()*+,;=/?#@%'
+
+    # terminal productions ::
+
+    PN_CHARS_BASE = '[' + PN_CHARS_BASE_GRP + ']'
+
+    PN_CHARS_U = '[' + PN_CHARS_U_GRP + ']'
+
+    PN_CHARS = '[' + PN_CHARS_GRP + ']'
+
+    HEX = '[' + HEX_GRP + ']'
+
+    PN_LOCAL_ESC_CHARS = '[' + PN_LOCAL_ESC_CHARS_GRP + ']'
+
+    IRIREF = r'<(?:[^<>"{}|^`\\\x00-\x20])*>'
+
+    BLANK_NODE_LABEL = '_:[0-9' + PN_CHARS_U_GRP + '](?:[' + PN_CHARS_GRP + \
+                       '.]*' + PN_CHARS + ')?'
+
+    PN_PREFIX = PN_CHARS_BASE + '(?:[' + PN_CHARS_GRP + '.]*' + PN_CHARS + ')?'
+
+    VARNAME = '[0-9' + PN_CHARS_U_GRP + '][' + PN_CHARS_U_GRP + \
+              '0-9\u00b7\u0300-\u036f\u203f-\u2040]*'
+
+    PERCENT = '%' + HEX + HEX
+
+    PN_LOCAL_ESC = r'\\' + PN_LOCAL_ESC_CHARS
+
+    PLX = '(?:' + PERCENT + ')|(?:' + PN_LOCAL_ESC + ')'
+
+    PN_LOCAL = ('(?:[' + PN_CHARS_U_GRP + ':0-9' + ']|' + PLX + ')' +
+                '(?:(?:[' + PN_CHARS_GRP + '.:]|' + PLX + ')*(?:[' +
+                PN_CHARS_GRP + ':]|' + PLX + '))?')
+
+    EXPONENT = r'[eE][+-]?\d+'
+
+    # Lexer token definitions ::
+
+    tokens = {
+        'root': [
+            (r'\s+', Text),
+            # keywords ::
+            (r'(?i)(select|construct|describe|ask|where|filter|group\s+by|minus|'
+             r'distinct|reduced|from\s+named|from|order\s+by|desc|asc|limit|'
+             r'offset|values|bindings|load|into|clear|drop|create|add|move|copy|'
+             r'insert\s+data|delete\s+data|delete\s+where|with|delete|insert|'
+             r'using\s+named|using|graph|default|named|all|optional|service|'
+             r'silent|bind|undef|union|not\s+in|in|as|having|to|prefix|base)\b', Keyword),
+            (r'(a)\b', Keyword),
+            # IRIs ::
+            ('(' + IRIREF + ')', Name.Label),
+            # blank nodes ::
+            ('(' + BLANK_NODE_LABEL + ')', Name.Label),
+            #  # variables ::
+            ('[?$]' + VARNAME, Name.Variable),
+            # prefixed names ::
+            (r'(' + PN_PREFIX + r')?(\:)(' + PN_LOCAL + r')?',
+             bygroups(Name.Namespace, Punctuation, Name.Tag)),
+            # function names ::
+            (r'(?i)(str|lang|langmatches|datatype|bound|iri|uri|bnode|rand|abs|'
+             r'ceil|floor|round|concat|strlen|ucase|lcase|encode_for_uri|'
+             r'contains|strstarts|strends|strbefore|strafter|year|month|day|'
+             r'hours|minutes|seconds|timezone|tz|now|uuid|struuid|md5|sha1|sha256|sha384|'
+             r'sha512|coalesce|if|strlang|strdt|sameterm|isiri|isuri|isblank|'
+             r'isliteral|isnumeric|regex|substr|replace|exists|not\s+exists|'
+             r'count|sum|min|max|avg|sample|group_concat|separator)\b',
+             Name.Function),
+            # boolean literals ::
+            (r'(true|false)', Keyword.Constant),
+            # double literals ::
+            (r'[+\-]?(\d+\.\d*' + EXPONENT + r'|\.?\d+' + EXPONENT + ')', Number.Float),
+            # decimal literals ::
+            (r'[+\-]?(\d+\.\d*|\.\d+)', Number.Float),
+            # integer literals ::
+            (r'[+\-]?\d+', Number.Integer),
+            # operators ::
+            (r'(\|\||&&|=|\*|\-|\+|/|!=|<=|>=|!|<|>)', Operator),
+            # punctuation characters ::
+            (r'[(){}.;,:^\[\]]', Punctuation),
+            # line comments ::
+            (r'#[^\n]*', Comment),
+            # strings ::
+            (r'"""', String, 'triple-double-quoted-string'),
+            (r'"', String, 'single-double-quoted-string'),
+            (r"'''", String, 'triple-single-quoted-string'),
+            (r"'", String, 'single-single-quoted-string'),
+        ],
+        'triple-double-quoted-string': [
+            (r'"""', String, 'end-of-string'),
+            (r'[^\\]+', String),
+            (r'\\', String, 'string-escape'),
+        ],
+        'single-double-quoted-string': [
+            (r'"', String, 'end-of-string'),
+            (r'[^"\\\n]+', String),
+            (r'\\', String, 'string-escape'),
+        ],
+        'triple-single-quoted-string': [
+            (r"'''", String, 'end-of-string'),
+            (r'[^\\]+', String),
+            (r'\\', String.Escape, 'string-escape'),
+        ],
+        'single-single-quoted-string': [
+            (r"'", String, 'end-of-string'),
+            (r"[^'\\\n]+", String),
+            (r'\\', String, 'string-escape'),
+        ],
+        'string-escape': [
+            (r'u' + HEX + '{4}', String.Escape, '#pop'),
+            (r'U' + HEX + '{8}', String.Escape, '#pop'),
+            (r'.', String.Escape, '#pop'),
+        ],
+        'end-of-string': [
+            (r'(@)([a-zA-Z]+(?:-[a-zA-Z0-9]+)*)',
+             bygroups(Operator, Name.Function), '#pop:2'),
+            (r'\^\^', Operator, '#pop:2'),
+            default('#pop:2'),
+        ],
+    }
+
+
+class TurtleLexer(RegexLexer):
+    """
+    Lexer for Turtle data language.
+    """
+    name = 'Turtle'
+    aliases = ['turtle']
+    filenames = ['*.ttl']
+    mimetypes = ['text/turtle', 'application/x-turtle']
+    url = 'https://www.w3.org/TR/turtle'
+    version_added = '2.1'
+
+    # character group definitions ::
+    PN_CHARS_BASE_GRP = ('a-zA-Z'
+                         '\u00c0-\u00d6'
+                         '\u00d8-\u00f6'
+                         '\u00f8-\u02ff'
+                         '\u0370-\u037d'
+                         '\u037f-\u1fff'
+                         '\u200c-\u200d'
+                         '\u2070-\u218f'
+                         '\u2c00-\u2fef'
+                         '\u3001-\ud7ff'
+                         '\uf900-\ufdcf'
+                         '\ufdf0-\ufffd')
+
+    PN_CHARS_U_GRP = (PN_CHARS_BASE_GRP + '_')
+
+    PN_CHARS_GRP = (PN_CHARS_U_GRP +
+                    r'\-' +
+                    r'0-9' +
+                    '\u00b7' +
+                    '\u0300-\u036f' +
+                    '\u203f-\u2040')
+
+    PN_CHARS = '[' + PN_CHARS_GRP + ']'
+
+    PN_CHARS_BASE = '[' + PN_CHARS_BASE_GRP + ']'
+
+    PN_PREFIX = PN_CHARS_BASE + '(?:[' + PN_CHARS_GRP + '.]*' + PN_CHARS + ')?'
+
+    HEX_GRP = '0-9A-Fa-f'
+
+    HEX = '[' + HEX_GRP + ']'
+
+    PERCENT = '%' + HEX + HEX
+
+    PN_LOCAL_ESC_CHARS_GRP = r' _~.\-!$&"()*+,;=/?#@%'
+
+    PN_LOCAL_ESC_CHARS = '[' + PN_LOCAL_ESC_CHARS_GRP + ']'
+
+    PN_LOCAL_ESC = r'\\' + PN_LOCAL_ESC_CHARS
+
+    PLX = '(?:' + PERCENT + ')|(?:' + PN_LOCAL_ESC + ')'
+
+    PN_LOCAL = ('(?:[' + PN_CHARS_U_GRP + ':0-9' + ']|' + PLX + ')' +
+                '(?:(?:[' + PN_CHARS_GRP + '.:]|' + PLX + ')*(?:[' +
+                PN_CHARS_GRP + ':]|' + PLX + '))?')
+
+    patterns = {
+        'PNAME_NS': r'((?:[a-zA-Z][\w-]*)?\:)',  # Simplified character range
+        'IRIREF': r'(<[^<>"{}|^`\\\x00-\x20]*>)'
+    }
+
+    tokens = {
+        'root': [
+            (r'\s+', Text),
+
+            # Base / prefix
+            (r'(@base|BASE)(\s+){IRIREF}(\s*)(\.?)'.format(**patterns),
+             bygroups(Keyword, Whitespace, Name.Variable, Whitespace,
+                      Punctuation)),
+            (r'(@prefix|PREFIX)(\s+){PNAME_NS}(\s+){IRIREF}(\s*)(\.?)'.format(**patterns),
+             bygroups(Keyword, Whitespace, Name.Namespace, Whitespace,
+                      Name.Variable, Whitespace, Punctuation)),
+
+            # The shorthand predicate 'a'
+            (r'(?<=\s)a(?=\s)', Keyword.Type),
+
+            # IRIREF
+            (r'{IRIREF}'.format(**patterns), Name.Variable),
+
+            # PrefixedName
+            (r'(' + PN_PREFIX + r')?(\:)(' + PN_LOCAL + r')?',
+             bygroups(Name.Namespace, Punctuation, Name.Tag)),
+
+            # BlankNodeLabel
+            (r'(_)(:)([' + PN_CHARS_U_GRP + r'0-9]([' + PN_CHARS_GRP + r'.]*' + PN_CHARS + ')?)',
+             bygroups(Name.Namespace, Punctuation, Name.Tag)),
+
+            # Comment
+            (r'#[^\n]+', Comment),
+
+            (r'\b(true|false)\b', Literal),
+            (r'[+\-]?\d*\.\d+', Number.Float),
+            (r'[+\-]?\d*(:?\.\d+)?E[+\-]?\d+', Number.Float),
+            (r'[+\-]?\d+', Number.Integer),
+            (r'[\[\](){}.;,:^]', Punctuation),
+
+            (r'"""', String, 'triple-double-quoted-string'),
+            (r'"', String, 'single-double-quoted-string'),
+            (r"'''", String, 'triple-single-quoted-string'),
+            (r"'", String, 'single-single-quoted-string'),
+        ],
+        'triple-double-quoted-string': [
+            (r'"""', String, 'end-of-string'),
+            (r'[^\\]+(?=""")', String),
+            (r'\\', String, 'string-escape'),
+        ],
+        'single-double-quoted-string': [
+            (r'"', String, 'end-of-string'),
+            (r'[^"\\\n]+', String),
+            (r'\\', String, 'string-escape'),
+        ],
+        'triple-single-quoted-string': [
+            (r"'''", String, 'end-of-string'),
+            (r"[^\\]+(?=''')", String),
+            (r'\\', String, 'string-escape'),
+        ],
+        'single-single-quoted-string': [
+            (r"'", String, 'end-of-string'),
+            (r"[^'\\\n]+", String),
+            (r'\\', String, 'string-escape'),
+        ],
+        'string-escape': [
+            (r'.', String, '#pop'),
+        ],
+        'end-of-string': [
+            (r'(@)([a-zA-Z]+(?:-[a-zA-Z0-9]+)*)',
+             bygroups(Operator, Generic.Emph), '#pop:2'),
+
+            (r'(\^\^){IRIREF}'.format(**patterns), bygroups(Operator, Generic.Emph), '#pop:2'),
+
+            default('#pop:2'),
+
+        ],
+    }
+
+    # Turtle and Tera Term macro files share the same file extension
+    # but each has a recognizable and distinct syntax.
+    def analyse_text(text):
+        for t in ('@base ', 'BASE ', '@prefix ', 'PREFIX '):
+            if re.search(rf'^\s*{t}', text):
+                return 0.80
+
+
+class ShExCLexer(RegexLexer):
+    """
+    Lexer for ShExC shape expressions language syntax.
+    """
+    name = 'ShExC'
+    aliases = ['shexc', 'shex']
+    filenames = ['*.shex']
+    mimetypes = ['text/shex']
+    url = 'https://shex.io/shex-semantics/#shexc'
+    version_added = ''
+
+    # character group definitions ::
+
+    PN_CHARS_BASE_GRP = ('a-zA-Z'
+                         '\u00c0-\u00d6'
+                         '\u00d8-\u00f6'
+                         '\u00f8-\u02ff'
+                         '\u0370-\u037d'
+                         '\u037f-\u1fff'
+                         '\u200c-\u200d'
+                         '\u2070-\u218f'
+                         '\u2c00-\u2fef'
+                         '\u3001-\ud7ff'
+                         '\uf900-\ufdcf'
+                         '\ufdf0-\ufffd')
+
+    PN_CHARS_U_GRP = (PN_CHARS_BASE_GRP + '_')
+
+    PN_CHARS_GRP = (PN_CHARS_U_GRP +
+                    r'\-' +
+                    r'0-9' +
+                    '\u00b7' +
+                    '\u0300-\u036f' +
+                    '\u203f-\u2040')
+
+    HEX_GRP = '0-9A-Fa-f'
+
+    PN_LOCAL_ESC_CHARS_GRP = r"_~.\-!$&'()*+,;=/?#@%"
+
+    # terminal productions ::
+
+    PN_CHARS_BASE = '[' + PN_CHARS_BASE_GRP + ']'
+
+    PN_CHARS_U = '[' + PN_CHARS_U_GRP + ']'
+
+    PN_CHARS = '[' + PN_CHARS_GRP + ']'
+
+    HEX = '[' + HEX_GRP + ']'
+
+    PN_LOCAL_ESC_CHARS = '[' + PN_LOCAL_ESC_CHARS_GRP + ']'
+
+    UCHAR_NO_BACKSLASH = '(?:u' + HEX + '{4}|U' + HEX + '{8})'
+
+    UCHAR = r'\\' + UCHAR_NO_BACKSLASH
+
+    IRIREF = r'<(?:[^\x00-\x20<>"{}|^`\\]|' + UCHAR + ')*>'
+
+    BLANK_NODE_LABEL = '_:[0-9' + PN_CHARS_U_GRP + '](?:[' + PN_CHARS_GRP + \
+                       '.]*' + PN_CHARS + ')?'
+
+    PN_PREFIX = PN_CHARS_BASE + '(?:[' + PN_CHARS_GRP + '.]*' + PN_CHARS + ')?'
+
+    PERCENT = '%' + HEX + HEX
+
+    PN_LOCAL_ESC = r'\\' + PN_LOCAL_ESC_CHARS
+
+    PLX = '(?:' + PERCENT + ')|(?:' + PN_LOCAL_ESC + ')'
+
+    PN_LOCAL = ('(?:[' + PN_CHARS_U_GRP + ':0-9' + ']|' + PLX + ')' +
+                '(?:(?:[' + PN_CHARS_GRP + '.:]|' + PLX + ')*(?:[' +
+                PN_CHARS_GRP + ':]|' + PLX + '))?')
+
+    EXPONENT = r'[eE][+-]?\d+'
+
+    # Lexer token definitions ::
+
+    tokens = {
+        'root': [
+            (r'\s+', Text),
+            # keywords ::
+            (r'(?i)(base|prefix|start|external|'
+             r'literal|iri|bnode|nonliteral|length|minlength|maxlength|'
+             r'mininclusive|minexclusive|maxinclusive|maxexclusive|'
+             r'totaldigits|fractiondigits|'
+             r'closed|extra)\b', Keyword),
+            (r'(a)\b', Keyword),
+            # IRIs ::
+            ('(' + IRIREF + ')', Name.Label),
+            # blank nodes ::
+            ('(' + BLANK_NODE_LABEL + ')', Name.Label),
+            # prefixed names ::
+            (r'(' + PN_PREFIX + r')?(\:)(' + PN_LOCAL + ')?',
+             bygroups(Name.Namespace, Punctuation, Name.Tag)),
+            # boolean literals ::
+            (r'(true|false)', Keyword.Constant),
+            # double literals ::
+            (r'[+\-]?(\d+\.\d*' + EXPONENT + r'|\.?\d+' + EXPONENT + ')', Number.Float),
+            # decimal literals ::
+            (r'[+\-]?(\d+\.\d*|\.\d+)', Number.Float),
+            # integer literals ::
+            (r'[+\-]?\d+', Number.Integer),
+            # operators ::
+            (r'[@|$&=*+?^\-~]', Operator),
+            # operator keywords ::
+            (r'(?i)(and|or|not)\b', Operator.Word),
+            # punctuation characters ::
+            (r'[(){}.;,:^\[\]]', Punctuation),
+            # line comments ::
+            (r'#[^\n]*', Comment),
+            # strings ::
+            (r'"""', String, 'triple-double-quoted-string'),
+            (r'"', String, 'single-double-quoted-string'),
+            (r"'''", String, 'triple-single-quoted-string'),
+            (r"'", String, 'single-single-quoted-string'),
+        ],
+        'triple-double-quoted-string': [
+            (r'"""', String, 'end-of-string'),
+            (r'[^\\]+', String),
+            (r'\\', String, 'string-escape'),
+        ],
+        'single-double-quoted-string': [
+            (r'"', String, 'end-of-string'),
+            (r'[^"\\\n]+', String),
+            (r'\\', String, 'string-escape'),
+        ],
+        'triple-single-quoted-string': [
+            (r"'''", String, 'end-of-string'),
+            (r'[^\\]+', String),
+            (r'\\', String.Escape, 'string-escape'),
+        ],
+        'single-single-quoted-string': [
+            (r"'", String, 'end-of-string'),
+            (r"[^'\\\n]+", String),
+            (r'\\', String, 'string-escape'),
+        ],
+        'string-escape': [
+            (UCHAR_NO_BACKSLASH, String.Escape, '#pop'),
+            (r'.', String.Escape, '#pop'),
+        ],
+        'end-of-string': [
+            (r'(@)([a-zA-Z]+(?:-[a-zA-Z0-9]+)*)',
+             bygroups(Operator, Name.Function), '#pop:2'),
+            (r'\^\^', Operator, '#pop:2'),
+            default('#pop:2'),
+        ],
+    }
diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/lexers/rebol.py b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/rebol.py
new file mode 100644
index 00000000..4b37a749
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/rebol.py
@@ -0,0 +1,419 @@
+"""
+    pygments.lexers.rebol
+    ~~~~~~~~~~~~~~~~~~~~~
+
+    Lexers for the REBOL and related languages.
+
+    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, bygroups
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+    Number, Generic, Whitespace
+
+__all__ = ['RebolLexer', 'RedLexer']
+
+
+class RebolLexer(RegexLexer):
+    """
+    A REBOL lexer.
+    """
+    name = 'REBOL'
+    aliases = ['rebol']
+    filenames = ['*.r', '*.r3', '*.reb']
+    mimetypes = ['text/x-rebol']
+    url = 'http://www.rebol.com'
+    version_added = '1.1'
+
+    flags = re.IGNORECASE | re.MULTILINE
+
+    escape_re = r'(?:\^\([0-9a-f]{1,4}\)*)'
+
+    def word_callback(lexer, match):
+        word = match.group()
+
+        if re.match(".*:$", word):
+            yield match.start(), Generic.Subheading, word
+        elif re.match(
+            r'(native|alias|all|any|as-string|as-binary|bind|bound\?|case|'
+            r'catch|checksum|comment|debase|dehex|exclude|difference|disarm|'
+            r'either|else|enbase|foreach|remove-each|form|free|get|get-env|if|'
+            r'in|intersect|loop|minimum-of|maximum-of|mold|new-line|'
+            r'new-line\?|not|now|prin|print|reduce|compose|construct|repeat|'
+            r'reverse|save|script\?|set|shift|switch|throw|to-hex|trace|try|'
+            r'type\?|union|unique|unless|unprotect|unset|until|use|value\?|'
+            r'while|compress|decompress|secure|open|close|read|read-io|'
+            r'write-io|write|update|query|wait|input\?|exp|log-10|log-2|'
+            r'log-e|square-root|cosine|sine|tangent|arccosine|arcsine|'
+            r'arctangent|protect|lowercase|uppercase|entab|detab|connected\?|'
+            r'browse|launch|stats|get-modes|set-modes|to-local-file|'
+            r'to-rebol-file|encloak|decloak|create-link|do-browser|bind\?|'
+            r'hide|draw|show|size-text|textinfo|offset-to-caret|'
+            r'caret-to-offset|local-request-file|rgb-to-hsv|hsv-to-rgb|'
+            r'crypt-strength\?|dh-make-key|dh-generate-key|dh-compute-key|'
+            r'dsa-make-key|dsa-generate-key|dsa-make-signature|'
+            r'dsa-verify-signature|rsa-make-key|rsa-generate-key|'
+            r'rsa-encrypt)$', word):
+            yield match.start(), Name.Builtin, word
+        elif re.match(
+            r'(add|subtract|multiply|divide|remainder|power|and~|or~|xor~|'
+            r'minimum|maximum|negate|complement|absolute|random|head|tail|'
+            r'next|back|skip|at|pick|first|second|third|fourth|fifth|sixth|'
+            r'seventh|eighth|ninth|tenth|last|path|find|select|make|to|copy\*|'
+            r'insert|remove|change|poke|clear|trim|sort|min|max|abs|cp|'
+            r'copy)$', word):
+            yield match.start(), Name.Function, word
+        elif re.match(
+            r'(error|source|input|license|help|install|echo|Usage|with|func|'
+            r'throw-on-error|function|does|has|context|probe|\?\?|as-pair|'
+            r'mod|modulo|round|repend|about|set-net|append|join|rejoin|reform|'
+            r'remold|charset|array|replace|move|extract|forskip|forall|alter|'
+            r'first+|also|take|for|forever|dispatch|attempt|what-dir|'
+            r'change-dir|clean-path|list-dir|dirize|rename|split-path|delete|'
+            r'make-dir|delete-dir|in-dir|confirm|dump-obj|upgrade|what|'
+            r'build-tag|process-source|build-markup|decode-cgi|read-cgi|'
+            r'write-user|save-user|set-user-name|protect-system|parse-xml|'
+            r'cvs-date|cvs-version|do-boot|get-net-info|desktop|layout|'
+            r'scroll-para|get-face|alert|set-face|uninstall|unfocus|'
+            r'request-dir|center-face|do-events|net-error|decode-url|'
+            r'parse-header|parse-header-date|parse-email-addrs|import-email|'
+            r'send|build-attach-body|resend|show-popup|hide-popup|open-events|'
+            r'find-key-face|do-face|viewtop|confine|find-window|'
+            r'insert-event-func|remove-event-func|inform|dump-pane|dump-face|'
+            r'flag-face|deflag-face|clear-fields|read-net|vbug|path-thru|'
+            r'read-thru|load-thru|do-thru|launch-thru|load-image|'
+            r'request-download|do-face-alt|set-font|set-para|get-style|'
+            r'set-style|make-face|stylize|choose|hilight-text|hilight-all|'
+            r'unlight-text|focus|scroll-drag|clear-face|reset-face|scroll-face|'
+            r'resize-face|load-stock|load-stock-block|notify|request|flash|'
+            r'request-color|request-pass|request-text|request-list|'
+            r'request-date|request-file|dbug|editor|link-relative-path|'
+            r'emailer|parse-error)$', word):
+            yield match.start(), Keyword.Namespace, word
+        elif re.match(
+            r'(halt|quit|do|load|q|recycle|call|run|ask|parse|view|unview|'
+            r'return|exit|break)$', word):
+            yield match.start(), Name.Exception, word
+        elif re.match('REBOL$', word):
+            yield match.start(), Generic.Heading, word
+        elif re.match("to-.*", word):
+            yield match.start(), Keyword, word
+        elif re.match(r'(\+|-|\*|/|//|\*\*|and|or|xor|=\?|=|==|<>|<|>|<=|>=)$',
+                      word):
+            yield match.start(), Operator, word
+        elif re.match(r".*\?$", word):
+            yield match.start(), Keyword, word
+        elif re.match(r".*\!$", word):
+            yield match.start(), Keyword.Type, word
+        elif re.match("'.*", word):
+            yield match.start(), Name.Variable.Instance, word  # lit-word
+        elif re.match("#.*", word):
+            yield match.start(), Name.Label, word  # issue
+        elif re.match("%.*", word):
+            yield match.start(), Name.Decorator, word  # file
+        else:
+            yield match.start(), Name.Variable, word
+
+    tokens = {
+        'root': [
+            (r'\s+', Text),
+            (r'#"', String.Char, 'char'),
+            (r'#\{[0-9a-f]*\}', Number.Hex),
+            (r'2#\{', Number.Hex, 'bin2'),
+            (r'64#\{[0-9a-z+/=\s]*\}', Number.Hex),
+            (r'"', String, 'string'),
+            (r'\{', String, 'string2'),
+            (r';#+.*\n', Comment.Special),
+            (r';\*+.*\n', Comment.Preproc),
+            (r';.*\n', Comment),
+            (r'%"', Name.Decorator, 'stringFile'),
+            (r'%[^(^{")\s\[\]]+', Name.Decorator),
+            (r'[+-]?([a-z]{1,3})?\$\d+(\.\d+)?', Number.Float),  # money
+            (r'[+-]?\d+\:\d+(\:\d+)?(\.\d+)?', String.Other),    # time
+            (r'\d+[\-/][0-9a-z]+[\-/]\d+(\/\d+\:\d+((\:\d+)?'
+             r'([.\d+]?([+-]?\d+:\d+)?)?)?)?', String.Other),   # date
+            (r'\d+(\.\d+)+\.\d+', Keyword.Constant),             # tuple
+            (r'\d+X\d+', Keyword.Constant),                   # pair
+            (r'[+-]?\d+(\'\d+)?([.,]\d*)?E[+-]?\d+', Number.Float),
+            (r'[+-]?\d+(\'\d+)?[.,]\d*', Number.Float),
+            (r'[+-]?\d+(\'\d+)?', Number),
+            (r'[\[\]()]', Generic.Strong),
+            (r'[a-z]+[^(^{"\s:)]*://[^(^{"\s)]*', Name.Decorator),  # url
+            (r'mailto:[^(^{"@\s)]+@[^(^{"@\s)]+', Name.Decorator),  # url
+            (r'[^(^{"@\s)]+@[^(^{"@\s)]+', Name.Decorator),         # email
+            (r'comment\s"', Comment, 'commentString1'),
+            (r'comment\s\{', Comment, 'commentString2'),
+            (r'comment\s\[', Comment, 'commentBlock'),
+            (r'comment\s[^(\s{"\[]+', Comment),
+            (r'/[^(^{")\s/[\]]*', Name.Attribute),
+            (r'([^(^{")\s/[\]]+)(?=[:({"\s/\[\]])', word_callback),
+            (r'<[\w:.-]*>', Name.Tag),
+            (r'<[^(<>\s")]+', Name.Tag, 'tag'),
+            (r'([^(^{")\s]+)', Text),
+        ],
+        'string': [
+            (r'[^(^")]+', String),
+            (escape_re, String.Escape),
+            (r'[(|)]+', String),
+            (r'\^.', String.Escape),
+            (r'"', String, '#pop'),
+        ],
+        'string2': [
+            (r'[^(^{})]+', String),
+            (escape_re, String.Escape),
+            (r'[(|)]+', String),
+            (r'\^.', String.Escape),
+            (r'\{', String, '#push'),
+            (r'\}', String, '#pop'),
+        ],
+        'stringFile': [
+            (r'[^(^")]+', Name.Decorator),
+            (escape_re, Name.Decorator),
+            (r'\^.', Name.Decorator),
+            (r'"', Name.Decorator, '#pop'),
+        ],
+        'char': [
+            (escape_re + '"', String.Char, '#pop'),
+            (r'\^."', String.Char, '#pop'),
+            (r'."', String.Char, '#pop'),
+        ],
+        'tag': [
+            (escape_re, Name.Tag),
+            (r'"', Name.Tag, 'tagString'),
+            (r'[^(<>\r\n")]+', Name.Tag),
+            (r'>', Name.Tag, '#pop'),
+        ],
+        'tagString': [
+            (r'[^(^")]+', Name.Tag),
+            (escape_re, Name.Tag),
+            (r'[(|)]+', Name.Tag),
+            (r'\^.', Name.Tag),
+            (r'"', Name.Tag, '#pop'),
+        ],
+        'tuple': [
+            (r'(\d+\.)+', Keyword.Constant),
+            (r'\d+', Keyword.Constant, '#pop'),
+        ],
+        'bin2': [
+            (r'\s+', Number.Hex),
+            (r'([01]\s*){8}', Number.Hex),
+            (r'\}', Number.Hex, '#pop'),
+        ],
+        'commentString1': [
+            (r'[^(^")]+', Comment),
+            (escape_re, Comment),
+            (r'[(|)]+', Comment),
+            (r'\^.', Comment),
+            (r'"', Comment, '#pop'),
+        ],
+        'commentString2': [
+            (r'[^(^{})]+', Comment),
+            (escape_re, Comment),
+            (r'[(|)]+', Comment),
+            (r'\^.', Comment),
+            (r'\{', Comment, '#push'),
+            (r'\}', Comment, '#pop'),
+        ],
+        'commentBlock': [
+            (r'\[', Comment, '#push'),
+            (r'\]', Comment, '#pop'),
+            (r'"', Comment, "commentString1"),
+            (r'\{', Comment, "commentString2"),
+            (r'[^(\[\]"{)]+', Comment),
+        ],
+    }
+
+    def analyse_text(text):
+        """
+        Check if code contains REBOL header and so it probably not R code
+        """
+        if re.match(r'^\s*REBOL\s*\[', text, re.IGNORECASE):
+            # The code starts with REBOL header
+            return 1.0
+        elif re.search(r'\s*REBOL\s*\[', text, re.IGNORECASE):
+            # The code contains REBOL header but also some text before it
+            return 0.5
+
+
+class RedLexer(RegexLexer):
+    """
+    A Red-language lexer.
+    """
+    name = 'Red'
+    aliases = ['red', 'red/system']
+    filenames = ['*.red', '*.reds']
+    mimetypes = ['text/x-red', 'text/x-red-system']
+    url = 'https://www.red-lang.org'
+    version_added = '2.0'
+
+    flags = re.IGNORECASE | re.MULTILINE
+
+    escape_re = r'(?:\^\([0-9a-f]{1,4}\)*)'
+
+    def word_callback(lexer, match):
+        word = match.group()
+
+        if re.match(".*:$", word):
+            yield match.start(), Generic.Subheading, word
+        elif re.match(r'(if|unless|either|any|all|while|until|loop|repeat|'
+                      r'foreach|forall|func|function|does|has|switch|'
+                      r'case|reduce|compose|get|set|print|prin|equal\?|'
+                      r'not-equal\?|strict-equal\?|lesser\?|greater\?|lesser-or-equal\?|'
+                      r'greater-or-equal\?|same\?|not|type\?|stats|'
+                      r'bind|union|replace|charset|routine)$', word):
+            yield match.start(), Name.Builtin, word
+        elif re.match(r'(make|random|reflect|to|form|mold|absolute|add|divide|multiply|negate|'
+                      r'power|remainder|round|subtract|even\?|odd\?|and~|complement|or~|xor~|'
+                      r'append|at|back|change|clear|copy|find|head|head\?|index\?|insert|'
+                      r'length\?|next|pick|poke|remove|reverse|select|sort|skip|swap|tail|tail\?|'
+                      r'take|trim|create|close|delete|modify|open|open\?|query|read|rename|'
+                      r'update|write)$', word):
+            yield match.start(), Name.Function, word
+        elif re.match(r'(yes|on|no|off|true|false|tab|cr|lf|newline|escape|slash|sp|space|null|'
+                      r'none|crlf|dot|null-byte)$', word):
+            yield match.start(), Name.Builtin.Pseudo, word
+        elif re.match(r'(#system-global|#include|#enum|#define|#either|#if|#import|#export|'
+                      r'#switch|#default|#get-definition)$', word):
+            yield match.start(), Keyword.Namespace, word
+        elif re.match(r'(system|halt|quit|quit-return|do|load|q|recycle|call|run|ask|parse|'
+                      r'raise-error|return|exit|break|alias|push|pop|probe|\?\?|spec-of|body-of|'
+                      r'quote|forever)$', word):
+            yield match.start(), Name.Exception, word
+        elif re.match(r'(action\?|block\?|char\?|datatype\?|file\?|function\?|get-path\?|zero\?|'
+                      r'get-word\?|integer\?|issue\?|lit-path\?|lit-word\?|logic\?|native\?|'
+                      r'op\?|paren\?|path\?|refinement\?|set-path\?|set-word\?|string\?|unset\?|'
+                      r'any-struct\?|none\?|word\?|any-series\?)$', word):
+            yield match.start(), Keyword, word
+        elif re.match(r'(JNICALL|stdcall|cdecl|infix)$', word):
+            yield match.start(), Keyword.Namespace, word
+        elif re.match("to-.*", word):
+            yield match.start(), Keyword, word
+        elif re.match(r'(\+|-\*\*|-|\*\*|//|/|\*|and|or|xor|=\?|===|==|=|<>|<=|>=|'
+                      r'<<<|>>>|<<|>>|<|>%)$', word):
+            yield match.start(), Operator, word
+        elif re.match(r".*\!$", word):
+            yield match.start(), Keyword.Type, word
+        elif re.match("'.*", word):
+            yield match.start(), Name.Variable.Instance, word  # lit-word
+        elif re.match("#.*", word):
+            yield match.start(), Name.Label, word  # issue
+        elif re.match("%.*", word):
+            yield match.start(), Name.Decorator, word  # file
+        elif re.match(":.*", word):
+            yield match.start(), Generic.Subheading, word  # get-word
+        else:
+            yield match.start(), Name.Variable, word
+
+    tokens = {
+        'root': [
+            (r'\s+', Text),
+            (r'#"', String.Char, 'char'),
+            (r'#\{[0-9a-f\s]*\}', Number.Hex),
+            (r'2#\{', Number.Hex, 'bin2'),
+            (r'64#\{[0-9a-z+/=\s]*\}', Number.Hex),
+            (r'([0-9a-f]+)(h)((\s)|(?=[\[\]{}"()]))',
+             bygroups(Number.Hex, Name.Variable, Whitespace)),
+            (r'"', String, 'string'),
+            (r'\{', String, 'string2'),
+            (r';#+.*\n', Comment.Special),
+            (r';\*+.*\n', Comment.Preproc),
+            (r';.*\n', Comment),
+            (r'%"', Name.Decorator, 'stringFile'),
+            (r'%[^(^{")\s\[\]]+', Name.Decorator),
+            (r'[+-]?([a-z]{1,3})?\$\d+(\.\d+)?', Number.Float),  # money
+            (r'[+-]?\d+\:\d+(\:\d+)?(\.\d+)?', String.Other),    # time
+            (r'\d+[\-/][0-9a-z]+[\-/]\d+(/\d+:\d+((:\d+)?'
+             r'([\.\d+]?([+-]?\d+:\d+)?)?)?)?', String.Other),   # date
+            (r'\d+(\.\d+)+\.\d+', Keyword.Constant),             # tuple
+            (r'\d+X\d+', Keyword.Constant),                   # pair
+            (r'[+-]?\d+(\'\d+)?([.,]\d*)?E[+-]?\d+', Number.Float),
+            (r'[+-]?\d+(\'\d+)?[.,]\d*', Number.Float),
+            (r'[+-]?\d+(\'\d+)?', Number),
+            (r'[\[\]()]', Generic.Strong),
+            (r'[a-z]+[^(^{"\s:)]*://[^(^{"\s)]*', Name.Decorator),  # url
+            (r'mailto:[^(^{"@\s)]+@[^(^{"@\s)]+', Name.Decorator),  # url
+            (r'[^(^{"@\s)]+@[^(^{"@\s)]+', Name.Decorator),         # email
+            (r'comment\s"', Comment, 'commentString1'),
+            (r'comment\s\{', Comment, 'commentString2'),
+            (r'comment\s\[', Comment, 'commentBlock'),
+            (r'comment\s[^(\s{"\[]+', Comment),
+            (r'/[^(^{^")\s/[\]]*', Name.Attribute),
+            (r'([^(^{^")\s/[\]]+)(?=[:({"\s/\[\]])', word_callback),
+            (r'<[\w:.-]*>', Name.Tag),
+            (r'<[^(<>\s")]+', Name.Tag, 'tag'),
+            (r'([^(^{")\s]+)', Text),
+        ],
+        'string': [
+            (r'[^(^")]+', String),
+            (escape_re, String.Escape),
+            (r'[(|)]+', String),
+            (r'\^.', String.Escape),
+            (r'"', String, '#pop'),
+        ],
+        'string2': [
+            (r'[^(^{})]+', String),
+            (escape_re, String.Escape),
+            (r'[(|)]+', String),
+            (r'\^.', String.Escape),
+            (r'\{', String, '#push'),
+            (r'\}', String, '#pop'),
+        ],
+        'stringFile': [
+            (r'[^(^")]+', Name.Decorator),
+            (escape_re, Name.Decorator),
+            (r'\^.', Name.Decorator),
+            (r'"', Name.Decorator, '#pop'),
+        ],
+        'char': [
+            (escape_re + '"', String.Char, '#pop'),
+            (r'\^."', String.Char, '#pop'),
+            (r'."', String.Char, '#pop'),
+        ],
+        'tag': [
+            (escape_re, Name.Tag),
+            (r'"', Name.Tag, 'tagString'),
+            (r'[^(<>\r\n")]+', Name.Tag),
+            (r'>', Name.Tag, '#pop'),
+        ],
+        'tagString': [
+            (r'[^(^")]+', Name.Tag),
+            (escape_re, Name.Tag),
+            (r'[(|)]+', Name.Tag),
+            (r'\^.', Name.Tag),
+            (r'"', Name.Tag, '#pop'),
+        ],
+        'tuple': [
+            (r'(\d+\.)+', Keyword.Constant),
+            (r'\d+', Keyword.Constant, '#pop'),
+        ],
+        'bin2': [
+            (r'\s+', Number.Hex),
+            (r'([01]\s*){8}', Number.Hex),
+            (r'\}', Number.Hex, '#pop'),
+        ],
+        'commentString1': [
+            (r'[^(^")]+', Comment),
+            (escape_re, Comment),
+            (r'[(|)]+', Comment),
+            (r'\^.', Comment),
+            (r'"', Comment, '#pop'),
+        ],
+        'commentString2': [
+            (r'[^(^{})]+', Comment),
+            (escape_re, Comment),
+            (r'[(|)]+', Comment),
+            (r'\^.', Comment),
+            (r'\{', Comment, '#push'),
+            (r'\}', Comment, '#pop'),
+        ],
+        'commentBlock': [
+            (r'\[', Comment, '#push'),
+            (r'\]', Comment, '#pop'),
+            (r'"', Comment, "commentString1"),
+            (r'\{', Comment, "commentString2"),
+            (r'[^(\[\]"{)]+', Comment),
+        ],
+    }
diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/lexers/rego.py b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/rego.py
new file mode 100644
index 00000000..6f2e3e9e
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/rego.py
@@ -0,0 +1,57 @@
+"""
+    pygments.lexers.rego
+    ~~~~~~~~~~~~~~~~~~~~
+
+    Lexers for the Rego policy languages.
+
+    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, words
+from pygments.token import Comment, Operator, Keyword, Name, String, Number, Punctuation, Whitespace
+
+class RegoLexer(RegexLexer):
+    """
+    For Rego source.
+    """
+    name = 'Rego'
+    url = 'https://www.openpolicyagent.org/docs/latest/policy-language/'
+    filenames = ['*.rego']
+    aliases = ['rego']
+    mimetypes = ['text/x-rego']
+    version_added = '2.19'
+
+    reserved_words = (
+        'as', 'contains', 'data', 'default', 'else', 'every', 'false',
+        'if', 'in', 'import', 'package', 'not', 'null',
+        'some', 'true', 'with'
+    )
+
+    builtins = (
+        # https://www.openpolicyagent.org/docs/latest/philosophy/#the-opa-document-model
+        'data',  # Global variable for accessing base and virtual documents
+        'input', # Represents synchronously pushed base documents
+    )
+
+    tokens = {
+        'root': [
+            (r'\n', Whitespace),
+            (r'\s+', Whitespace),
+            (r'#.*?$', Comment.Single),
+            (words(reserved_words, suffix=r'\b'), Keyword),
+            (words(builtins, suffix=r'\b'), Name.Builtin),
+            (r'[a-zA-Z_][a-zA-Z0-9_]*', Name),
+            (r'"(\\\\|\\"|[^"])*"', String.Double),
+            (r'`[^`]*`', String.Backtick),
+            (r'-?\d+(\.\d+)?', Number),
+            (r'(==|!=|<=|>=|:=)', Operator),  # Compound operators
+            (r'[=<>+\-*/%&|]', Operator),     # Single-character operators
+            (r'[\[\]{}(),.:;]', Punctuation),
+        ]
+    }
+
+__all__ = ['RegoLexer']
+
+
+
diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/lexers/resource.py b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/resource.py
new file mode 100644
index 00000000..9593c212
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/resource.py
@@ -0,0 +1,83 @@
+"""
+    pygments.lexers.resource
+    ~~~~~~~~~~~~~~~~~~~~~~~~
+
+    Lexer for resource definition files.
+
+    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, bygroups, words
+from pygments.token import Comment, String, Number, Operator, Text, \
+    Keyword, Name
+
+__all__ = ['ResourceLexer']
+
+
+class ResourceLexer(RegexLexer):
+    """Lexer for ICU Resource bundles.
+    """
+    name = 'ResourceBundle'
+    aliases = ['resourcebundle', 'resource']
+    filenames = []
+    url = 'https://unicode-org.github.io/icu/userguide/locale/resources.html'
+    version_added = '2.0'
+
+    _types = (':table', ':array', ':string', ':bin', ':import', ':intvector',
+              ':int', ':alias')
+
+    flags = re.MULTILINE | re.IGNORECASE
+    tokens = {
+        'root': [
+            (r'//.*?$', Comment),
+            (r'"', String, 'string'),
+            (r'-?\d+', Number.Integer),
+            (r'[,{}]', Operator),
+            (r'([^\s{{:]+)(\s*)({}?)'.format('|'.join(_types)),
+             bygroups(Name, Text, Keyword)),
+            (r'\s+', Text),
+            (words(_types), Keyword),
+        ],
+        'string': [
+            (r'(\\x[0-9a-f]{2}|\\u[0-9a-f]{4}|\\U00[0-9a-f]{6}|'
+             r'\\[0-7]{1,3}|\\c.|\\[abtnvfre\'"?\\]|\\\{|[^"{\\])+', String),
+            (r'\{', String.Escape, 'msgname'),
+            (r'"', String, '#pop')
+        ],
+        'msgname': [
+            (r'([^{},]+)(\s*)', bygroups(Name, String.Escape), ('#pop', 'message'))
+        ],
+        'message': [
+            (r'\{', String.Escape, 'msgname'),
+            (r'\}', String.Escape, '#pop'),
+            (r'(,)(\s*)([a-z]+)(\s*\})',
+             bygroups(Operator, String.Escape, Keyword, String.Escape), '#pop'),
+            (r'(,)(\s*)([a-z]+)(\s*)(,)(\s*)(offset)(\s*)(:)(\s*)(-?\d+)(\s*)',
+             bygroups(Operator, String.Escape, Keyword, String.Escape, Operator,
+                      String.Escape, Operator.Word, String.Escape, Operator,
+                      String.Escape, Number.Integer, String.Escape), 'choice'),
+            (r'(,)(\s*)([a-z]+)(\s*)(,)(\s*)',
+             bygroups(Operator, String.Escape, Keyword, String.Escape, Operator,
+                      String.Escape), 'choice'),
+            (r'\s+', String.Escape)
+        ],
+        'choice': [
+            (r'(=|<|>|<=|>=|!=)(-?\d+)(\s*\{)',
+             bygroups(Operator, Number.Integer, String.Escape), 'message'),
+            (r'([a-z]+)(\s*\{)', bygroups(Keyword.Type, String.Escape), 'str'),
+            (r'\}', String.Escape, ('#pop', '#pop')),
+            (r'\s+', String.Escape)
+        ],
+        'str': [
+            (r'\}', String.Escape, '#pop'),
+            (r'\{', String.Escape, 'msgname'),
+            (r'[^{}]+', String)
+        ]
+    }
+
+    def analyse_text(text):
+        if text.startswith('root:table'):
+            return 1.0
diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/lexers/ride.py b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/ride.py
new file mode 100644
index 00000000..4d60c29c
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/ride.py
@@ -0,0 +1,138 @@
+"""
+    pygments.lexers.ride
+    ~~~~~~~~~~~~~~~~~~~~
+
+    Lexer for the Ride programming language.
+
+    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, words, include
+from pygments.token import Comment, Keyword, Name, Number, Punctuation, \
+    String, Text
+
+__all__ = ['RideLexer']
+
+
+class RideLexer(RegexLexer):
+    """
+    For Ride source code.
+    """
+
+    name = 'Ride'
+    aliases = ['ride']
+    filenames = ['*.ride']
+    mimetypes = ['text/x-ride']
+    url = 'https://docs.waves.tech/en/ride'
+    version_added = '2.6'
+
+    validName = r'[a-zA-Z_][a-zA-Z0-9_\']*'
+
+    builtinOps = (
+        '||', '|', '>=', '>', '==', '!',
+        '=', '<=', '<', '::', ':+', ':', '!=', '/',
+        '.', '=>', '-', '+', '*', '&&', '%', '++',
+    )
+
+    globalVariablesName = (
+        'NOALG', 'MD5', 'SHA1', 'SHA224', 'SHA256', 'SHA384', 'SHA512',
+        'SHA3224', 'SHA3256', 'SHA3384', 'SHA3512', 'nil', 'this', 'unit',
+        'height', 'lastBlock', 'Buy', 'Sell', 'CEILING', 'FLOOR', 'DOWN',
+        'HALFDOWN', 'HALFEVEN', 'HALFUP', 'UP',
+    )
+
+    typesName = (
+        'Unit', 'Int', 'Boolean', 'ByteVector', 'String', 'Address', 'Alias',
+        'Transfer', 'AssetPair', 'DataEntry', 'Order', 'Transaction',
+        'GenesisTransaction', 'PaymentTransaction', 'ReissueTransaction',
+        'BurnTransaction', 'MassTransferTransaction', 'ExchangeTransaction',
+        'TransferTransaction', 'SetAssetScriptTransaction',
+        'InvokeScriptTransaction', 'IssueTransaction', 'LeaseTransaction',
+        'LeaseCancelTransaction', 'CreateAliasTransaction',
+        'SetScriptTransaction', 'SponsorFeeTransaction', 'DataTransaction',
+        'WriteSet', 'AttachedPayment', 'ScriptTransfer', 'TransferSet',
+        'ScriptResult', 'Invocation', 'Asset', 'BlockInfo', 'Issue', 'Reissue',
+        'Burn', 'NoAlg', 'Md5', 'Sha1', 'Sha224', 'Sha256', 'Sha384', 'Sha512',
+        'Sha3224', 'Sha3256', 'Sha3384', 'Sha3512', 'BinaryEntry',
+        'BooleanEntry', 'IntegerEntry', 'StringEntry', 'List', 'Ceiling',
+        'Down', 'Floor', 'HalfDown', 'HalfEven', 'HalfUp', 'Up',
+    )
+
+    functionsName = (
+        'fraction', 'size', 'toBytes', 'take', 'drop', 'takeRight', 'dropRight',
+        'toString', 'isDefined', 'extract', 'throw', 'getElement', 'value',
+        'cons', 'toUtf8String', 'toInt', 'indexOf', 'lastIndexOf', 'split',
+        'parseInt', 'parseIntValue', 'keccak256', 'blake2b256', 'sha256',
+        'sigVerify', 'toBase58String', 'fromBase58String', 'toBase64String',
+        'fromBase64String', 'transactionById', 'transactionHeightById',
+        'getInteger', 'getBoolean', 'getBinary', 'getString',
+        'addressFromPublicKey', 'addressFromString', 'addressFromRecipient',
+        'assetBalance', 'wavesBalance', 'getIntegerValue', 'getBooleanValue',
+        'getBinaryValue', 'getStringValue', 'addressFromStringValue',
+        'assetInfo', 'rsaVerify', 'checkMerkleProof', 'median',
+        'valueOrElse', 'valueOrErrorMessage', 'contains', 'log', 'pow',
+        'toBase16String', 'fromBase16String', 'blockInfoByHeight',
+        'transferTransactionById',
+    )
+
+    reservedWords = words((
+        'match', 'case', 'else', 'func', 'if',
+        'let', 'then', '@Callable', '@Verifier',
+    ), suffix=r'\b')
+
+    tokens = {
+        'root': [
+            # Comments
+            (r'#.*', Comment.Single),
+            # Whitespace
+            (r'\s+', Text),
+            # Strings
+            (r'"', String, 'doublequote'),
+            (r'utf8\'', String, 'utf8quote'),
+            (r'base(58|64|16)\'', String, 'singlequote'),
+            # Keywords
+            (reservedWords, Keyword.Reserved),
+            (r'\{-#.*?#-\}', Keyword.Reserved),
+            (r'FOLD<\d+>', Keyword.Reserved),
+            # Types
+            (words(typesName), Keyword.Type),
+            # Main
+            # (specialName, Keyword.Reserved),
+            # Prefix Operators
+            (words(builtinOps, prefix=r'\(', suffix=r'\)'), Name.Function),
+            # Infix Operators
+            (words(builtinOps), Name.Function),
+            (words(globalVariablesName), Name.Function),
+            (words(functionsName), Name.Function),
+            # Numbers
+            include('numbers'),
+            # Variable Names
+            (validName, Name.Variable),
+            # Parens
+            (r'[,()\[\]{}]', Punctuation),
+        ],
+
+        'doublequote': [
+            (r'\\u[0-9a-fA-F]{4}', String.Escape),
+            (r'\\[nrfvb\\"]', String.Escape),
+            (r'[^"]', String),
+            (r'"', String, '#pop'),
+        ],
+
+        'utf8quote': [
+            (r'\\u[0-9a-fA-F]{4}', String.Escape),
+            (r'\\[nrfvb\\\']', String.Escape),
+            (r'[^\']', String),
+            (r'\'', String, '#pop'),
+        ],
+
+        'singlequote': [
+            (r'[^\']', String),
+            (r'\'', String, '#pop'),
+        ],
+
+        'numbers': [
+            (r'_?\d+', Number.Integer),
+        ],
+    }
diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/lexers/rita.py b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/rita.py
new file mode 100644
index 00000000..536aafff
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/rita.py
@@ -0,0 +1,42 @@
+"""
+    pygments.lexers.rita
+    ~~~~~~~~~~~~~~~~~~~~
+
+    Lexers for RITA language
+
+    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer
+from pygments.token import Comment, Operator, Keyword, Name, Literal, \
+    Punctuation, Whitespace
+
+__all__ = ['RitaLexer']
+
+
+class RitaLexer(RegexLexer):
+    """
+    Lexer for RITA.
+    """
+    name = 'Rita'
+    url = 'https://github.com/zaibacu/rita-dsl'
+    filenames = ['*.rita']
+    aliases = ['rita']
+    mimetypes = ['text/rita']
+    version_added = '2.11'
+
+    tokens = {
+        'root': [
+            (r'\n', Whitespace),
+            (r'\s+', Whitespace),
+            (r'#(.*?)\n', Comment.Single),
+            (r'@(.*?)\n', Operator),  # Yes, whole line as an operator
+            (r'"(\w|\d|\s|(\\")|[\'_\-./,\?\!])+?"', Literal),
+            (r'\'(\w|\d|\s|(\\\')|["_\-./,\?\!])+?\'', Literal),
+            (r'([A-Z_]+)', Keyword),
+            (r'([a-z0-9_]+)', Name),
+            (r'((->)|[!?+*|=])', Operator),
+            (r'[\(\),\{\}]', Punctuation)
+        ]
+    }
diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/lexers/rnc.py b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/rnc.py
new file mode 100644
index 00000000..b7a06bb9
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/rnc.py
@@ -0,0 +1,66 @@
+"""
+    pygments.lexers.rnc
+    ~~~~~~~~~~~~~~~~~~~
+
+    Lexer for Relax-NG Compact syntax
+
+    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+    Punctuation
+
+__all__ = ['RNCCompactLexer']
+
+
+class RNCCompactLexer(RegexLexer):
+    """
+    For RelaxNG-compact syntax.
+    """
+
+    name = 'Relax-NG Compact'
+    url = 'http://relaxng.org'
+    aliases = ['rng-compact', 'rnc']
+    filenames = ['*.rnc']
+    version_added = '2.2'
+
+    tokens = {
+        'root': [
+            (r'namespace\b', Keyword.Namespace),
+            (r'(?:default|datatypes)\b', Keyword.Declaration),
+            (r'##.*$', Comment.Preproc),
+            (r'#.*$', Comment.Single),
+            (r'"[^"]*"', String.Double),
+            # TODO single quoted strings and escape sequences outside of
+            # double-quoted strings
+            (r'(?:element|attribute|mixed)\b', Keyword.Declaration, 'variable'),
+            (r'(text\b|xsd:[^ ]+)', Keyword.Type, 'maybe_xsdattributes'),
+            (r'[,?&*=|~]|>>', Operator),
+            (r'[(){}]', Punctuation),
+            (r'.', Text),
+        ],
+
+        # a variable has been declared using `element` or `attribute`
+        'variable': [
+            (r'[^{]+', Name.Variable),
+            (r'\{', Punctuation, '#pop'),
+        ],
+
+        # after an xsd: declaration there may be attributes
+        'maybe_xsdattributes': [
+            (r'\{', Punctuation, 'xsdattributes'),
+            (r'\}', Punctuation, '#pop'),
+            (r'.', Text),
+        ],
+
+        # attributes take the form { key1 = value1 key2 = value2 ... }
+        'xsdattributes': [
+            (r'[^ =}]', Name.Attribute),
+            (r'=', Operator),
+            (r'"[^"]*"', String.Double),
+            (r'\}', Punctuation, '#pop'),
+            (r'.', Text),
+        ],
+    }
diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/lexers/roboconf.py b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/roboconf.py
new file mode 100644
index 00000000..31adba9f
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/roboconf.py
@@ -0,0 +1,81 @@
+"""
+    pygments.lexers.roboconf
+    ~~~~~~~~~~~~~~~~~~~~~~~~
+
+    Lexers for Roboconf DSL.
+
+    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, words, re
+from pygments.token import Text, Operator, Keyword, Name, Comment
+
+__all__ = ['RoboconfGraphLexer', 'RoboconfInstancesLexer']
+
+
+class RoboconfGraphLexer(RegexLexer):
+    """
+    Lexer for Roboconf graph files.
+    """
+    name = 'Roboconf Graph'
+    aliases = ['roboconf-graph']
+    filenames = ['*.graph']
+    url = 'https://roboconf.github.io/en/user-guide/graph-definition.html'
+    version_added = '2.1'
+
+    flags = re.IGNORECASE | re.MULTILINE
+    tokens = {
+        'root': [
+            # Skip white spaces
+            (r'\s+', Text),
+
+            # There is one operator
+            (r'=', Operator),
+
+            # Keywords
+            (words(('facet', 'import'), suffix=r'\s*\b', prefix=r'\b'), Keyword),
+            (words((
+                'installer', 'extends', 'exports', 'imports', 'facets',
+                'children'), suffix=r'\s*:?', prefix=r'\b'), Name),
+
+            # Comments
+            (r'#.*\n', Comment),
+
+            # Default
+            (r'[^#]', Text),
+            (r'.*\n', Text)
+        ]
+    }
+
+
+class RoboconfInstancesLexer(RegexLexer):
+    """
+    Lexer for Roboconf instances files.
+    """
+    name = 'Roboconf Instances'
+    aliases = ['roboconf-instances']
+    filenames = ['*.instances']
+    url = 'https://roboconf.github.io'
+    version_added = '2.1'
+
+    flags = re.IGNORECASE | re.MULTILINE
+    tokens = {
+        'root': [
+
+            # Skip white spaces
+            (r'\s+', Text),
+
+            # Keywords
+            (words(('instance of', 'import'), suffix=r'\s*\b', prefix=r'\b'), Keyword),
+            (words(('name', 'count'), suffix=r's*:?', prefix=r'\b'), Name),
+            (r'\s*[\w.-]+\s*:', Name),
+
+            # Comments
+            (r'#.*\n', Comment),
+
+            # Default
+            (r'[^#]', Text),
+            (r'.*\n', Text)
+        ]
+    }
diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/lexers/robotframework.py b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/robotframework.py
new file mode 100644
index 00000000..f92d5675
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/robotframework.py
@@ -0,0 +1,551 @@
+"""
+    pygments.lexers.robotframework
+    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+    Lexer for Robot Framework.
+
+    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+#  Copyright 2012 Nokia Siemens Networks Oyj
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+
+import re
+
+from pygments.lexer import Lexer
+from pygments.token import Token
+
+__all__ = ['RobotFrameworkLexer']
+
+
+HEADING = Token.Generic.Heading
+SETTING = Token.Keyword.Namespace
+IMPORT = Token.Name.Namespace
+TC_KW_NAME = Token.Generic.Subheading
+KEYWORD = Token.Name.Function
+ARGUMENT = Token.String
+VARIABLE = Token.Name.Variable
+COMMENT = Token.Comment
+SEPARATOR = Token.Punctuation
+SYNTAX = Token.Punctuation
+GHERKIN = Token.Generic.Emph
+ERROR = Token.Error
+
+
+def normalize(string, remove=''):
+    string = string.lower()
+    for char in remove + ' ':
+        if char in string:
+            string = string.replace(char, '')
+    return string
+
+
+class RobotFrameworkLexer(Lexer):
+    """
+    For Robot Framework test data.
+
+    Supports both space and pipe separated plain text formats.
+    """
+    name = 'RobotFramework'
+    url = 'http://robotframework.org'
+    aliases = ['robotframework']
+    filenames = ['*.robot', '*.resource']
+    mimetypes = ['text/x-robotframework']
+    version_added = '1.6'
+
+    def __init__(self, **options):
+        options['tabsize'] = 2
+        options['encoding'] = 'UTF-8'
+        Lexer.__init__(self, **options)
+
+    def get_tokens_unprocessed(self, text):
+        row_tokenizer = RowTokenizer()
+        var_tokenizer = VariableTokenizer()
+        index = 0
+        for row in text.splitlines():
+            for value, token in row_tokenizer.tokenize(row):
+                for value, token in var_tokenizer.tokenize(value, token):
+                    if value:
+                        yield index, token, str(value)
+                        index += len(value)
+
+
+class VariableTokenizer:
+
+    def tokenize(self, string, token):
+        var = VariableSplitter(string, identifiers='$@%&')
+        if var.start < 0 or token in (COMMENT, ERROR):
+            yield string, token
+            return
+        for value, token in self._tokenize(var, string, token):
+            if value:
+                yield value, token
+
+    def _tokenize(self, var, string, orig_token):
+        before = string[:var.start]
+        yield before, orig_token
+        yield var.identifier + '{', SYNTAX
+        yield from self.tokenize(var.base, VARIABLE)
+        yield '}', SYNTAX
+        if var.index is not None:
+            yield '[', SYNTAX
+            yield from self.tokenize(var.index, VARIABLE)
+            yield ']', SYNTAX
+        yield from self.tokenize(string[var.end:], orig_token)
+
+
+class RowTokenizer:
+
+    def __init__(self):
+        self._table = UnknownTable()
+        self._splitter = RowSplitter()
+        testcases = TestCaseTable()
+        settings = SettingTable(testcases.set_default_template)
+        variables = VariableTable()
+        keywords = KeywordTable()
+        self._tables = {'settings': settings, 'setting': settings,
+                        'metadata': settings,
+                        'variables': variables, 'variable': variables,
+                        'testcases': testcases, 'testcase': testcases,
+                        'tasks': testcases, 'task': testcases,
+                        'keywords': keywords, 'keyword': keywords,
+                        'userkeywords': keywords, 'userkeyword': keywords}
+
+    def tokenize(self, row):
+        commented = False
+        heading = False
+        for index, value in enumerate(self._splitter.split(row)):
+            # First value, and every second after that, is a separator.
+            index, separator = divmod(index-1, 2)
+            if value.startswith('#'):
+                commented = True
+            elif index == 0 and value.startswith('*'):
+                self._table = self._start_table(value)
+                heading = True
+            yield from self._tokenize(value, index, commented,
+                                      separator, heading)
+        self._table.end_row()
+
+    def _start_table(self, header):
+        name = normalize(header, remove='*')
+        return self._tables.get(name, UnknownTable())
+
+    def _tokenize(self, value, index, commented, separator, heading):
+        if commented:
+            yield value, COMMENT
+        elif separator:
+            yield value, SEPARATOR
+        elif heading:
+            yield value, HEADING
+        else:
+            yield from self._table.tokenize(value, index)
+
+
+class RowSplitter:
+    _space_splitter = re.compile('( {2,})')
+    _pipe_splitter = re.compile(r'((?:^| +)\|(?: +|$))')
+
+    def split(self, row):
+        splitter = (row.startswith('| ') and self._split_from_pipes
+                    or self._split_from_spaces)
+        yield from splitter(row)
+        yield '\n'
+
+    def _split_from_spaces(self, row):
+        yield ''  # Start with (pseudo)separator similarly as with pipes
+        yield from self._space_splitter.split(row)
+
+    def _split_from_pipes(self, row):
+        _, separator, rest = self._pipe_splitter.split(row, 1)
+        yield separator
+        while self._pipe_splitter.search(rest):
+            cell, separator, rest = self._pipe_splitter.split(rest, 1)
+            yield cell
+            yield separator
+        yield rest
+
+
+class Tokenizer:
+    _tokens = None
+
+    def __init__(self):
+        self._index = 0
+
+    def tokenize(self, value):
+        values_and_tokens = self._tokenize(value, self._index)
+        self._index += 1
+        if isinstance(values_and_tokens, type(Token)):
+            values_and_tokens = [(value, values_and_tokens)]
+        return values_and_tokens
+
+    def _tokenize(self, value, index):
+        index = min(index, len(self._tokens) - 1)
+        return self._tokens[index]
+
+    def _is_assign(self, value):
+        if value.endswith('='):
+            value = value[:-1].strip()
+        var = VariableSplitter(value, identifiers='$@&')
+        return var.start == 0 and var.end == len(value)
+
+
+class Comment(Tokenizer):
+    _tokens = (COMMENT,)
+
+
+class Setting(Tokenizer):
+    _tokens = (SETTING, ARGUMENT)
+    _keyword_settings = ('suitesetup', 'suiteprecondition', 'suiteteardown',
+                         'suitepostcondition', 'testsetup', 'tasksetup', 'testprecondition',
+                         'testteardown','taskteardown', 'testpostcondition', 'testtemplate', 'tasktemplate')
+    _import_settings = ('library', 'resource', 'variables')
+    _other_settings = ('documentation', 'metadata', 'forcetags', 'defaulttags',
+                       'testtimeout','tasktimeout')
+    _custom_tokenizer = None
+
+    def __init__(self, template_setter=None):
+        Tokenizer.__init__(self)
+        self._template_setter = template_setter
+
+    def _tokenize(self, value, index):
+        if index == 1 and self._template_setter:
+            self._template_setter(value)
+        if index == 0:
+            normalized = normalize(value)
+            if normalized in self._keyword_settings:
+                self._custom_tokenizer = KeywordCall(support_assign=False)
+            elif normalized in self._import_settings:
+                self._custom_tokenizer = ImportSetting()
+            elif normalized not in self._other_settings:
+                return ERROR
+        elif self._custom_tokenizer:
+            return self._custom_tokenizer.tokenize(value)
+        return Tokenizer._tokenize(self, value, index)
+
+
+class ImportSetting(Tokenizer):
+    _tokens = (IMPORT, ARGUMENT)
+
+
+class TestCaseSetting(Setting):
+    _keyword_settings = ('setup', 'precondition', 'teardown', 'postcondition',
+                         'template')
+    _import_settings = ()
+    _other_settings = ('documentation', 'tags', 'timeout')
+
+    def _tokenize(self, value, index):
+        if index == 0:
+            type = Setting._tokenize(self, value[1:-1], index)
+            return [('[', SYNTAX), (value[1:-1], type), (']', SYNTAX)]
+        return Setting._tokenize(self, value, index)
+
+
+class KeywordSetting(TestCaseSetting):
+    _keyword_settings = ('teardown',)
+    _other_settings = ('documentation', 'arguments', 'return', 'timeout', 'tags')
+
+
+class Variable(Tokenizer):
+    _tokens = (SYNTAX, ARGUMENT)
+
+    def _tokenize(self, value, index):
+        if index == 0 and not self._is_assign(value):
+            return ERROR
+        return Tokenizer._tokenize(self, value, index)
+
+
+class KeywordCall(Tokenizer):
+    _tokens = (KEYWORD, ARGUMENT)
+
+    def __init__(self, support_assign=True):
+        Tokenizer.__init__(self)
+        self._keyword_found = not support_assign
+        self._assigns = 0
+
+    def _tokenize(self, value, index):
+        if not self._keyword_found and self._is_assign(value):
+            self._assigns += 1
+            return SYNTAX  # VariableTokenizer tokenizes this later.
+        if self._keyword_found:
+            return Tokenizer._tokenize(self, value, index - self._assigns)
+        self._keyword_found = True
+        return GherkinTokenizer().tokenize(value, KEYWORD)
+
+
+class GherkinTokenizer:
+    _gherkin_prefix = re.compile('^(Given|When|Then|And|But) ', re.IGNORECASE)
+
+    def tokenize(self, value, token):
+        match = self._gherkin_prefix.match(value)
+        if not match:
+            return [(value, token)]
+        end = match.end()
+        return [(value[:end], GHERKIN), (value[end:], token)]
+
+
+class TemplatedKeywordCall(Tokenizer):
+    _tokens = (ARGUMENT,)
+
+
+class ForLoop(Tokenizer):
+
+    def __init__(self):
+        Tokenizer.__init__(self)
+        self._in_arguments = False
+
+    def _tokenize(self, value, index):
+        token = self._in_arguments and ARGUMENT or SYNTAX
+        if value.upper() in ('IN', 'IN RANGE'):
+            self._in_arguments = True
+        return token
+
+
+class _Table:
+    _tokenizer_class = None
+
+    def __init__(self, prev_tokenizer=None):
+        self._tokenizer = self._tokenizer_class()
+        self._prev_tokenizer = prev_tokenizer
+        self._prev_values_on_row = []
+
+    def tokenize(self, value, index):
+        if self._continues(value, index):
+            self._tokenizer = self._prev_tokenizer
+            yield value, SYNTAX
+        else:
+            yield from self._tokenize(value, index)
+        self._prev_values_on_row.append(value)
+
+    def _continues(self, value, index):
+        return value == '...' and all(self._is_empty(t)
+                                      for t in self._prev_values_on_row)
+
+    def _is_empty(self, value):
+        return value in ('', '\\')
+
+    def _tokenize(self, value, index):
+        return self._tokenizer.tokenize(value)
+
+    def end_row(self):
+        self.__init__(prev_tokenizer=self._tokenizer)
+
+
+class UnknownTable(_Table):
+    _tokenizer_class = Comment
+
+    def _continues(self, value, index):
+        return False
+
+
+class VariableTable(_Table):
+    _tokenizer_class = Variable
+
+
+class SettingTable(_Table):
+    _tokenizer_class = Setting
+
+    def __init__(self, template_setter, prev_tokenizer=None):
+        _Table.__init__(self, prev_tokenizer)
+        self._template_setter = template_setter
+
+    def _tokenize(self, value, index):
+        if index == 0 and normalize(value) == 'testtemplate':
+            self._tokenizer = Setting(self._template_setter)
+        return _Table._tokenize(self, value, index)
+
+    def end_row(self):
+        self.__init__(self._template_setter, prev_tokenizer=self._tokenizer)
+
+
+class TestCaseTable(_Table):
+    _setting_class = TestCaseSetting
+    _test_template = None
+    _default_template = None
+
+    @property
+    def _tokenizer_class(self):
+        if self._test_template or (self._default_template and
+                                   self._test_template is not False):
+            return TemplatedKeywordCall
+        return KeywordCall
+
+    def _continues(self, value, index):
+        return index > 0 and _Table._continues(self, value, index)
+
+    def _tokenize(self, value, index):
+        if index == 0:
+            if value:
+                self._test_template = None
+            return GherkinTokenizer().tokenize(value, TC_KW_NAME)
+        if index == 1 and self._is_setting(value):
+            if self._is_template(value):
+                self._test_template = False
+                self._tokenizer = self._setting_class(self.set_test_template)
+            else:
+                self._tokenizer = self._setting_class()
+        if index == 1 and self._is_for_loop(value):
+            self._tokenizer = ForLoop()
+        if index == 1 and self._is_empty(value):
+            return [(value, SYNTAX)]
+        return _Table._tokenize(self, value, index)
+
+    def _is_setting(self, value):
+        return value.startswith('[') and value.endswith(']')
+
+    def _is_template(self, value):
+        return normalize(value) == '[template]'
+
+    def _is_for_loop(self, value):
+        return value.startswith(':') and normalize(value, remove=':') == 'for'
+
+    def set_test_template(self, template):
+        self._test_template = self._is_template_set(template)
+
+    def set_default_template(self, template):
+        self._default_template = self._is_template_set(template)
+
+    def _is_template_set(self, template):
+        return normalize(template) not in ('', '\\', 'none', '${empty}')
+
+
+class KeywordTable(TestCaseTable):
+    _tokenizer_class = KeywordCall
+    _setting_class = KeywordSetting
+
+    def _is_template(self, value):
+        return False
+
+
+# Following code copied directly from Robot Framework 2.7.5.
+
+class VariableSplitter:
+
+    def __init__(self, string, identifiers):
+        self.identifier = None
+        self.base = None
+        self.index = None
+        self.start = -1
+        self.end = -1
+        self._identifiers = identifiers
+        self._may_have_internal_variables = False
+        try:
+            self._split(string)
+        except ValueError:
+            pass
+        else:
+            self._finalize()
+
+    def get_replaced_base(self, variables):
+        if self._may_have_internal_variables:
+            return variables.replace_string(self.base)
+        return self.base
+
+    def _finalize(self):
+        self.identifier = self._variable_chars[0]
+        self.base = ''.join(self._variable_chars[2:-1])
+        self.end = self.start + len(self._variable_chars)
+        if self._has_list_or_dict_variable_index():
+            self.index = ''.join(self._list_and_dict_variable_index_chars[1:-1])
+            self.end += len(self._list_and_dict_variable_index_chars)
+
+    def _has_list_or_dict_variable_index(self):
+        return self._list_and_dict_variable_index_chars\
+        and self._list_and_dict_variable_index_chars[-1] == ']'
+
+    def _split(self, string):
+        start_index, max_index = self._find_variable(string)
+        self.start = start_index
+        self._open_curly = 1
+        self._state = self._variable_state
+        self._variable_chars = [string[start_index], '{']
+        self._list_and_dict_variable_index_chars = []
+        self._string = string
+        start_index += 2
+        for index, char in enumerate(string[start_index:]):
+            index += start_index  # Giving start to enumerate only in Py 2.6+
+            try:
+                self._state(char, index)
+            except StopIteration:
+                return
+            if index  == max_index and not self._scanning_list_variable_index():
+                return
+
+    def _scanning_list_variable_index(self):
+        return self._state in [self._waiting_list_variable_index_state,
+                               self._list_variable_index_state]
+
+    def _find_variable(self, string):
+        max_end_index = string.rfind('}')
+        if max_end_index == -1:
+            raise ValueError('No variable end found')
+        if self._is_escaped(string, max_end_index):
+            return self._find_variable(string[:max_end_index])
+        start_index = self._find_start_index(string, 1, max_end_index)
+        if start_index == -1:
+            raise ValueError('No variable start found')
+        return start_index, max_end_index
+
+    def _find_start_index(self, string, start, end):
+        index = string.find('{', start, end) - 1
+        if index < 0:
+            return -1
+        if self._start_index_is_ok(string, index):
+            return index
+        return self._find_start_index(string, index+2, end)
+
+    def _start_index_is_ok(self, string, index):
+        return string[index] in self._identifiers\
+        and not self._is_escaped(string, index)
+
+    def _is_escaped(self, string, index):
+        escaped = False
+        while index > 0 and string[index-1] == '\\':
+            index -= 1
+            escaped = not escaped
+        return escaped
+
+    def _variable_state(self, char, index):
+        self._variable_chars.append(char)
+        if char == '}' and not self._is_escaped(self._string, index):
+            self._open_curly -= 1
+            if self._open_curly == 0:
+                if not self._is_list_or_dict_variable():
+                    raise StopIteration
+                self._state = self._waiting_list_variable_index_state
+        elif char in self._identifiers:
+            self._state = self._internal_variable_start_state
+
+    def _is_list_or_dict_variable(self):
+        return self._variable_chars[0] in ('@','&')
+
+    def _internal_variable_start_state(self, char, index):
+        self._state = self._variable_state
+        if char == '{':
+            self._variable_chars.append(char)
+            self._open_curly += 1
+            self._may_have_internal_variables = True
+        else:
+            self._variable_state(char, index)
+
+    def _waiting_list_variable_index_state(self, char, index):
+        if char != '[':
+            raise StopIteration
+        self._list_and_dict_variable_index_chars.append(char)
+        self._state = self._list_variable_index_state
+
+    def _list_variable_index_state(self, char, index):
+        self._list_and_dict_variable_index_chars.append(char)
+        if char == ']':
+            raise StopIteration
diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/lexers/ruby.py b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/ruby.py
new file mode 100644
index 00000000..72aaeb5f
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/ruby.py
@@ -0,0 +1,518 @@
+"""
+    pygments.lexers.ruby
+    ~~~~~~~~~~~~~~~~~~~~
+
+    Lexers for Ruby and related languages.
+
+    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import Lexer, RegexLexer, ExtendedRegexLexer, include, \
+    bygroups, default, LexerContext, do_insertions, words, line_re
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+    Number, Punctuation, Error, Generic, Whitespace
+from pygments.util import shebang_matches
+
+__all__ = ['RubyLexer', 'RubyConsoleLexer', 'FancyLexer']
+
+
+RUBY_OPERATORS = (
+    '*', '**', '-', '+', '-@', '+@', '/', '%', '&', '|', '^', '`', '~',
+    '[]', '[]=', '<<', '>>', '<', '<>', '<=>', '>', '>=', '==', '==='
+)
+
+
+class RubyLexer(ExtendedRegexLexer):
+    """
+    For Ruby source code.
+    """
+
+    name = 'Ruby'
+    url = 'http://www.ruby-lang.org'
+    aliases = ['ruby', 'rb', 'duby']
+    filenames = ['*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec',
+                 '*.rbx', '*.duby', 'Gemfile', 'Vagrantfile']
+    mimetypes = ['text/x-ruby', 'application/x-ruby']
+    version_added = ''
+
+    flags = re.DOTALL | re.MULTILINE
+
+    def heredoc_callback(self, match, ctx):
+        # okay, this is the hardest part of parsing Ruby...
+        # match: 1 = <<[-~]?, 2 = quote? 3 = name 4 = quote? 5 = rest of line
+
+        start = match.start(1)
+        yield start, Operator, match.group(1)        # <<[-~]?
+        yield match.start(2), String.Heredoc, match.group(2)   # quote ", ', `
+        yield match.start(3), String.Delimiter, match.group(3) # heredoc name
+        yield match.start(4), String.Heredoc, match.group(4)   # quote again
+
+        heredocstack = ctx.__dict__.setdefault('heredocstack', [])
+        outermost = not bool(heredocstack)
+        heredocstack.append((match.group(1) in ('<<-', '<<~'), match.group(3)))
+
+        ctx.pos = match.start(5)
+        ctx.end = match.end(5)
+        # this may find other heredocs, so limit the recursion depth
+        if len(heredocstack) < 100:
+            yield from self.get_tokens_unprocessed(context=ctx)
+        else:
+            yield ctx.pos, String.Heredoc, match.group(5)
+        ctx.pos = match.end()
+
+        if outermost:
+            # this is the outer heredoc again, now we can process them all
+            for tolerant, hdname in heredocstack:
+                lines = []
+                for match in line_re.finditer(ctx.text, ctx.pos):
+                    if tolerant:
+                        check = match.group().strip()
+                    else:
+                        check = match.group().rstrip()
+                    if check == hdname:
+                        for amatch in lines:
+                            yield amatch.start(), String.Heredoc, amatch.group()
+                        yield match.start(), String.Delimiter, match.group()
+                        ctx.pos = match.end()
+                        break
+                    else:
+                        lines.append(match)
+                else:
+                    # end of heredoc not found -- error!
+                    for amatch in lines:
+                        yield amatch.start(), Error, amatch.group()
+            ctx.end = len(ctx.text)
+            del heredocstack[:]
+
+    def gen_rubystrings_rules():
+        def intp_regex_callback(self, match, ctx):
+            yield match.start(1), String.Regex, match.group(1)  # begin
+            nctx = LexerContext(match.group(3), 0, ['interpolated-regex'])
+            for i, t, v in self.get_tokens_unprocessed(context=nctx):
+                yield match.start(3)+i, t, v
+            yield match.start(4), String.Regex, match.group(4)  # end[mixounse]*
+            ctx.pos = match.end()
+
+        def intp_string_callback(self, match, ctx):
+            yield match.start(1), String.Other, match.group(1)
+            nctx = LexerContext(match.group(3), 0, ['interpolated-string'])
+            for i, t, v in self.get_tokens_unprocessed(context=nctx):
+                yield match.start(3)+i, t, v
+            yield match.start(4), String.Other, match.group(4)  # end
+            ctx.pos = match.end()
+
+        states = {}
+        states['strings'] = [
+            # easy ones
+            (r'\:@{0,2}[a-zA-Z_]\w*[!?]?', String.Symbol),
+            (words(RUBY_OPERATORS, prefix=r'\:@{0,2}'), String.Symbol),
+            (r":'(\\\\|\\[^\\]|[^'\\])*'", String.Symbol),
+            (r':"', String.Symbol, 'simple-sym'),
+            (r'([a-zA-Z_]\w*)(:)(?!:)',
+             bygroups(String.Symbol, Punctuation)),  # Since Ruby 1.9
+            (r'"', String.Double, 'simple-string-double'),
+            (r"'", String.Single, 'simple-string-single'),
+            (r'(?', '<>', 'ab'):
+            states[name+'-intp-string'] = [
+                (r'\\[\\' + bracecc + ']', String.Other),
+                (lbrace, String.Other, '#push'),
+                (rbrace, String.Other, '#pop'),
+                include('string-intp-escaped'),
+                (r'[\\#' + bracecc + ']', String.Other),
+                (r'[^\\#' + bracecc + ']+', String.Other),
+            ]
+            states['strings'].append((r'%[QWx]?' + lbrace, String.Other,
+                                      name+'-intp-string'))
+            states[name+'-string'] = [
+                (r'\\[\\' + bracecc + ']', String.Other),
+                (lbrace, String.Other, '#push'),
+                (rbrace, String.Other, '#pop'),
+                (r'[\\#' + bracecc + ']', String.Other),
+                (r'[^\\#' + bracecc + ']+', String.Other),
+            ]
+            states['strings'].append((r'%[qsw]' + lbrace, String.Other,
+                                      name+'-string'))
+            states[name+'-regex'] = [
+                (r'\\[\\' + bracecc + ']', String.Regex),
+                (lbrace, String.Regex, '#push'),
+                (rbrace + '[mixounse]*', String.Regex, '#pop'),
+                include('string-intp'),
+                (r'[\\#' + bracecc + ']', String.Regex),
+                (r'[^\\#' + bracecc + ']+', String.Regex),
+            ]
+            states['strings'].append((r'%r' + lbrace, String.Regex,
+                                      name+'-regex'))
+
+        # these must come after %!
+        states['strings'] += [
+            # %r regex
+            (r'(%r([\W_]))((?:\\\2|(?!\2).)*)(\2[mixounse]*)',
+             intp_regex_callback),
+            # regular fancy strings with qsw
+            (r'%[qsw]([\W_])((?:\\\1|(?!\1).)*)\1', String.Other),
+            (r'(%[QWx]([\W_]))((?:\\\2|(?!\2).)*)(\2)',
+             intp_string_callback),
+            # special forms of fancy strings after operators or
+            # in method calls with braces
+            (r'(?<=[-+/*%=<>&!^|~,(])(\s*)(%([\t ])(?:(?:\\\3|(?!\3).)*)\3)',
+             bygroups(Whitespace, String.Other, None)),
+            # and because of fixed width lookbehinds the whole thing a
+            # second time for line startings...
+            (r'^(\s*)(%([\t ])(?:(?:\\\3|(?!\3).)*)\3)',
+             bygroups(Whitespace, String.Other, None)),
+            # all regular fancy strings without qsw
+            (r'(%([^a-zA-Z0-9\s]))((?:\\\2|(?!\2).)*)(\2)',
+             intp_string_callback),
+        ]
+
+        return states
+
+    tokens = {
+        'root': [
+            (r'\A#!.+?$', Comment.Hashbang),
+            (r'#.*?$', Comment.Single),
+            (r'=begin\s.*?\n=end.*?$', Comment.Multiline),
+            # keywords
+            (words((
+                'BEGIN', 'END', 'alias', 'begin', 'break', 'case', 'defined?',
+                'do', 'else', 'elsif', 'end', 'ensure', 'for', 'if', 'in', 'next', 'redo',
+                'rescue', 'raise', 'retry', 'return', 'super', 'then', 'undef',
+                'unless', 'until', 'when', 'while', 'yield'), suffix=r'\b'),
+             Keyword),
+            # start of function, class and module names
+            (r'(module)(\s+)([a-zA-Z_]\w*'
+             r'(?:::[a-zA-Z_]\w*)*)',
+             bygroups(Keyword, Whitespace, Name.Namespace)),
+            (r'(def)(\s+)', bygroups(Keyword, Whitespace), 'funcname'),
+            (r'def(?=[*%&^`~+-/\[<>=])', Keyword, 'funcname'),
+            (r'(class)(\s+)', bygroups(Keyword, Whitespace), 'classname'),
+            # special methods
+            (words((
+                'initialize', 'new', 'loop', 'include', 'extend', 'raise', 'attr_reader',
+                'attr_writer', 'attr_accessor', 'attr', 'catch', 'throw', 'private',
+                'module_function', 'public', 'protected', 'true', 'false', 'nil'),
+                suffix=r'\b'),
+             Keyword.Pseudo),
+            (r'(not|and|or)\b', Operator.Word),
+            (words((
+                'autoload', 'block_given', 'const_defined', 'eql', 'equal', 'frozen', 'include',
+                'instance_of', 'is_a', 'iterator', 'kind_of', 'method_defined', 'nil',
+                'private_method_defined', 'protected_method_defined',
+                'public_method_defined', 'respond_to', 'tainted'), suffix=r'\?'),
+             Name.Builtin),
+            (r'(chomp|chop|exit|gsub|sub)!', Name.Builtin),
+            (words((
+                'Array', 'Float', 'Integer', 'String', '__id__', '__send__', 'abort',
+                'ancestors', 'at_exit', 'autoload', 'binding', 'callcc', 'caller',
+                'catch', 'chomp', 'chop', 'class_eval', 'class_variables',
+                'clone', 'const_defined?', 'const_get', 'const_missing', 'const_set',
+                'constants', 'display', 'dup', 'eval', 'exec', 'exit', 'extend', 'fail', 'fork',
+                'format', 'freeze', 'getc', 'gets', 'global_variables', 'gsub',
+                'hash', 'id', 'included_modules', 'inspect', 'instance_eval',
+                'instance_method', 'instance_methods',
+                'instance_variable_get', 'instance_variable_set', 'instance_variables',
+                'lambda', 'load', 'local_variables', 'loop',
+                'method', 'method_missing', 'methods', 'module_eval', 'name',
+                'object_id', 'open', 'p', 'print', 'printf', 'private_class_method',
+                'private_instance_methods',
+                'private_methods', 'proc', 'protected_instance_methods',
+                'protected_methods', 'public_class_method',
+                'public_instance_methods', 'public_methods',
+                'putc', 'puts', 'raise', 'rand', 'readline', 'readlines', 'require',
+                'scan', 'select', 'self', 'send', 'set_trace_func', 'singleton_methods', 'sleep',
+                'split', 'sprintf', 'srand', 'sub', 'syscall', 'system', 'taint',
+                'test', 'throw', 'to_a', 'to_s', 'trace_var', 'trap', 'untaint',
+                'untrace_var', 'warn'), prefix=r'(?~!:])|'
+             r'(?<=(?:\s|;)when\s)|'
+             r'(?<=(?:\s|;)or\s)|'
+             r'(?<=(?:\s|;)and\s)|'
+             r'(?<=\.index\s)|'
+             r'(?<=\.scan\s)|'
+             r'(?<=\.sub\s)|'
+             r'(?<=\.sub!\s)|'
+             r'(?<=\.gsub\s)|'
+             r'(?<=\.gsub!\s)|'
+             r'(?<=\.match\s)|'
+             r'(?<=(?:\s|;)if\s)|'
+             r'(?<=(?:\s|;)elsif\s)|'
+             r'(?<=^when\s)|'
+             r'(?<=^index\s)|'
+             r'(?<=^scan\s)|'
+             r'(?<=^sub\s)|'
+             r'(?<=^gsub\s)|'
+             r'(?<=^sub!\s)|'
+             r'(?<=^gsub!\s)|'
+             r'(?<=^match\s)|'
+             r'(?<=^if\s)|'
+             r'(?<=^elsif\s)'
+             r')(\s*)(/)', bygroups(Text, String.Regex), 'multiline-regex'),
+            # multiline regex (in method calls or subscripts)
+            (r'(?<=\(|,|\[)/', String.Regex, 'multiline-regex'),
+            # multiline regex (this time the funny no whitespace rule)
+            (r'(\s+)(/)(?![\s=])', bygroups(Whitespace, String.Regex),
+             'multiline-regex'),
+            # lex numbers and ignore following regular expressions which
+            # are division operators in fact (grrrr. i hate that. any
+            # better ideas?)
+            # since pygments 0.7 we also eat a "?" operator after numbers
+            # so that the char operator does not work. Chars are not allowed
+            # there so that you can use the ternary operator.
+            # stupid example:
+            #   x>=0?n[x]:""
+            (r'(0_?[0-7]+(?:_[0-7]+)*)(\s*)([/?])?',
+             bygroups(Number.Oct, Whitespace, Operator)),
+            (r'(0x[0-9A-Fa-f]+(?:_[0-9A-Fa-f]+)*)(\s*)([/?])?',
+             bygroups(Number.Hex, Whitespace, Operator)),
+            (r'(0b[01]+(?:_[01]+)*)(\s*)([/?])?',
+             bygroups(Number.Bin, Whitespace, Operator)),
+            (r'([\d]+(?:_\d+)*)(\s*)([/?])?',
+             bygroups(Number.Integer, Whitespace, Operator)),
+            # Names
+            (r'@@[a-zA-Z_]\w*', Name.Variable.Class),
+            (r'@[a-zA-Z_]\w*', Name.Variable.Instance),
+            (r'\$\w+', Name.Variable.Global),
+            (r'\$[!@&`\'+~=/\\,;.<>_*$?:"^-]', Name.Variable.Global),
+            (r'\$-[0adFiIlpvw]', Name.Variable.Global),
+            (r'::', Operator),
+            include('strings'),
+            # chars
+            (r'\?(\\[MC]-)*'  # modifiers
+             r'(\\([\\abefnrstv#"\']|x[a-fA-F0-9]{1,2}|[0-7]{1,3})|\S)'
+             r'(?!\w)',
+             String.Char),
+            (r'[A-Z]\w+', Name.Constant),
+            # this is needed because ruby attributes can look
+            # like keywords (class) or like this: ` ?!?
+            (words(RUBY_OPERATORS, prefix=r'(\.|::)'),
+             bygroups(Operator, Name.Operator)),
+            (r'(\.|::)([a-zA-Z_]\w*[!?]?|[*%&^`~+\-/\[<>=])',
+             bygroups(Operator, Name)),
+            (r'[a-zA-Z_]\w*[!?]?', Name),
+            (r'(\[|\]|\*\*|<>?|>=|<=|<=>|=~|={3}|'
+             r'!~|&&?|\|\||\.{1,3})', Operator),
+            (r'[-+/*%=<>&!^|~]=?', Operator),
+            (r'[(){};,/?:\\]', Punctuation),
+            (r'\s+', Whitespace)
+        ],
+        'funcname': [
+            (r'\(', Punctuation, 'defexpr'),
+            (r'(?:([a-zA-Z_]\w*)(\.))?'  # optional scope name, like "self."
+             r'('
+                r'[a-zA-Z\u0080-\uffff][a-zA-Z0-9_\u0080-\uffff]*[!?=]?'  # method name
+                r'|!=|!~|=~|\*\*?|[-+!~]@?|[/%&|^]|<=>|<[<=]?|>[>=]?|===?'  # or operator override
+                r'|\[\]=?'  # or element reference/assignment override
+                r'|`'  # or the undocumented backtick override
+             r')',
+             bygroups(Name.Class, Operator, Name.Function), '#pop'),
+            default('#pop')
+        ],
+        'classname': [
+            (r'\(', Punctuation, 'defexpr'),
+            (r'<<', Operator, '#pop'),
+            (r'[A-Z_]\w*', Name.Class, '#pop'),
+            default('#pop')
+        ],
+        'defexpr': [
+            (r'(\))(\.|::)?', bygroups(Punctuation, Operator), '#pop'),
+            (r'\(', Operator, '#push'),
+            include('root')
+        ],
+        'in-intp': [
+            (r'\{', String.Interpol, '#push'),
+            (r'\}', String.Interpol, '#pop'),
+            include('root'),
+        ],
+        'string-intp': [
+            (r'#\{', String.Interpol, 'in-intp'),
+            (r'#@@?[a-zA-Z_]\w*', String.Interpol),
+            (r'#\$[a-zA-Z_]\w*', String.Interpol)
+        ],
+        'string-intp-escaped': [
+            include('string-intp'),
+            (r'\\([\\abefnrstv#"\']|x[a-fA-F0-9]{1,2}|[0-7]{1,3})',
+             String.Escape)
+        ],
+        'interpolated-regex': [
+            include('string-intp'),
+            (r'[\\#]', String.Regex),
+            (r'[^\\#]+', String.Regex),
+        ],
+        'interpolated-string': [
+            include('string-intp'),
+            (r'[\\#]', String.Other),
+            (r'[^\\#]+', String.Other),
+        ],
+        'multiline-regex': [
+            include('string-intp'),
+            (r'\\\\', String.Regex),
+            (r'\\/', String.Regex),
+            (r'[\\#]', String.Regex),
+            (r'[^\\/#]+', String.Regex),
+            (r'/[mixounse]*', String.Regex, '#pop'),
+        ],
+        'end-part': [
+            (r'.+', Comment.Preproc, '#pop')
+        ]
+    }
+    tokens.update(gen_rubystrings_rules())
+
+    def analyse_text(text):
+        return shebang_matches(text, r'ruby(1\.\d)?')
+
+
+class RubyConsoleLexer(Lexer):
+    """
+    For Ruby interactive console (**irb**) output.
+    """
+    name = 'Ruby irb session'
+    aliases = ['rbcon', 'irb']
+    mimetypes = ['text/x-ruby-shellsession']
+    url = 'https://www.ruby-lang.org'
+    version_added = ''
+    _example = 'rbcon/console'
+
+    _prompt_re = re.compile(r'irb\([a-zA-Z_]\w*\):\d{3}:\d+[>*"\'] '
+                            r'|>> |\?> ')
+
+    def get_tokens_unprocessed(self, text):
+        rblexer = RubyLexer(**self.options)
+
+        curcode = ''
+        insertions = []
+        for match in line_re.finditer(text):
+            line = match.group()
+            m = self._prompt_re.match(line)
+            if m is not None:
+                end = m.end()
+                insertions.append((len(curcode),
+                                   [(0, Generic.Prompt, line[:end])]))
+                curcode += line[end:]
+            else:
+                if curcode:
+                    yield from do_insertions(
+                        insertions, rblexer.get_tokens_unprocessed(curcode))
+                    curcode = ''
+                    insertions = []
+                yield match.start(), Generic.Output, line
+        if curcode:
+            yield from do_insertions(
+                insertions, rblexer.get_tokens_unprocessed(curcode))
+
+
+class FancyLexer(RegexLexer):
+    """
+    Pygments Lexer For Fancy.
+
+    Fancy is a self-hosted, pure object-oriented, dynamic,
+    class-based, concurrent general-purpose programming language
+    running on Rubinius, the Ruby VM.
+    """
+    name = 'Fancy'
+    url = 'https://github.com/bakkdoor/fancy'
+    filenames = ['*.fy', '*.fancypack']
+    aliases = ['fancy', 'fy']
+    mimetypes = ['text/x-fancysrc']
+    version_added = '1.5'
+
+    tokens = {
+        # copied from PerlLexer:
+        'balanced-regex': [
+            (r'/(\\\\|\\[^\\]|[^/\\])*/[egimosx]*', String.Regex, '#pop'),
+            (r'!(\\\\|\\[^\\]|[^!\\])*![egimosx]*', String.Regex, '#pop'),
+            (r'\\(\\\\|[^\\])*\\[egimosx]*', String.Regex, '#pop'),
+            (r'\{(\\\\|\\[^\\]|[^}\\])*\}[egimosx]*', String.Regex, '#pop'),
+            (r'<(\\\\|\\[^\\]|[^>\\])*>[egimosx]*', String.Regex, '#pop'),
+            (r'\[(\\\\|\\[^\\]|[^\]\\])*\][egimosx]*', String.Regex, '#pop'),
+            (r'\((\\\\|\\[^\\]|[^)\\])*\)[egimosx]*', String.Regex, '#pop'),
+            (r'@(\\\\|\\[^\\]|[^@\\])*@[egimosx]*', String.Regex, '#pop'),
+            (r'%(\\\\|\\[^\\]|[^%\\])*%[egimosx]*', String.Regex, '#pop'),
+            (r'\$(\\\\|\\[^\\]|[^$\\])*\$[egimosx]*', String.Regex, '#pop'),
+        ],
+        'root': [
+            (r'\s+', Whitespace),
+
+            # balanced delimiters (copied from PerlLexer):
+            (r's\{(\\\\|\\[^\\]|[^}\\])*\}\s*', String.Regex, 'balanced-regex'),
+            (r's<(\\\\|\\[^\\]|[^>\\])*>\s*', String.Regex, 'balanced-regex'),
+            (r's\[(\\\\|\\[^\\]|[^\]\\])*\]\s*', String.Regex, 'balanced-regex'),
+            (r's\((\\\\|\\[^\\]|[^)\\])*\)\s*', String.Regex, 'balanced-regex'),
+            (r'm?/(\\\\|\\[^\\]|[^///\n])*/[gcimosx]*', String.Regex),
+            (r'm(?=[/!\\{<\[(@%$])', String.Regex, 'balanced-regex'),
+
+            # Comments
+            (r'#(.*?)\n', Comment.Single),
+            # Symbols
+            (r'\'([^\'\s\[\](){}]+|\[\])', String.Symbol),
+            # Multi-line DoubleQuotedString
+            (r'"""(\\\\|\\[^\\]|[^\\])*?"""', String),
+            # DoubleQuotedString
+            (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
+            # keywords
+            (r'(def|class|try|catch|finally|retry|return|return_local|match|'
+             r'case|->|=>)\b', Keyword),
+            # constants
+            (r'(self|super|nil|false|true)\b', Name.Constant),
+            (r'[(){};,/?|:\\]', Punctuation),
+            # names
+            (words((
+                'Object', 'Array', 'Hash', 'Directory', 'File', 'Class', 'String',
+                'Number', 'Enumerable', 'FancyEnumerable', 'Block', 'TrueClass',
+                'NilClass', 'FalseClass', 'Tuple', 'Symbol', 'Stack', 'Set',
+                'FancySpec', 'Method', 'Package', 'Range'), suffix=r'\b'),
+             Name.Builtin),
+            # functions
+            (r'[a-zA-Z](\w|[-+?!=*/^><%])*:', Name.Function),
+            # operators, must be below functions
+            (r'[-+*/~,<>=&!?%^\[\].$]+', Operator),
+            (r'[A-Z]\w*', Name.Constant),
+            (r'@[a-zA-Z_]\w*', Name.Variable.Instance),
+            (r'@@[a-zA-Z_]\w*', Name.Variable.Class),
+            ('@@?', Operator),
+            (r'[a-zA-Z_]\w*', Name),
+            # numbers - / checks are necessary to avoid mismarking regexes,
+            # see comment in RubyLexer
+            (r'(0[oO]?[0-7]+(?:_[0-7]+)*)(\s*)([/?])?',
+             bygroups(Number.Oct, Whitespace, Operator)),
+            (r'(0[xX][0-9A-Fa-f]+(?:_[0-9A-Fa-f]+)*)(\s*)([/?])?',
+             bygroups(Number.Hex, Whitespace, Operator)),
+            (r'(0[bB][01]+(?:_[01]+)*)(\s*)([/?])?',
+             bygroups(Number.Bin, Whitespace, Operator)),
+            (r'([\d]+(?:_\d+)*)(\s*)([/?])?',
+             bygroups(Number.Integer, Whitespace, Operator)),
+            (r'\d+([eE][+-]?[0-9]+)|\d+\.\d+([eE][+-]?[0-9]+)?', Number.Float),
+            (r'\d+', Number.Integer)
+        ]
+    }
diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/lexers/rust.py b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/rust.py
new file mode 100644
index 00000000..63410475
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/rust.py
@@ -0,0 +1,222 @@
+"""
+    pygments.lexers.rust
+    ~~~~~~~~~~~~~~~~~~~~
+
+    Lexers for the Rust language.
+
+    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, include, bygroups, words, default
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+    Number, Punctuation, Whitespace
+
+__all__ = ['RustLexer']
+
+
+class RustLexer(RegexLexer):
+    """
+    Lexer for the Rust programming language (version 1.47).
+    """
+    name = 'Rust'
+    url = 'https://www.rust-lang.org/'
+    filenames = ['*.rs', '*.rs.in']
+    aliases = ['rust', 'rs']
+    mimetypes = ['text/rust', 'text/x-rust']
+    version_added = '1.6'
+
+    keyword_types = (words((
+        'u8', 'u16', 'u32', 'u64', 'u128', 'i8', 'i16', 'i32', 'i64', 'i128',
+        'usize', 'isize', 'f32', 'f64', 'char', 'str', 'bool',
+    ), suffix=r'\b'), Keyword.Type)
+
+    builtin_funcs_types = (words((
+        'Copy', 'Send', 'Sized', 'Sync', 'Unpin',
+        'Drop', 'Fn', 'FnMut', 'FnOnce', 'drop',
+        'Box', 'ToOwned', 'Clone',
+        'PartialEq', 'PartialOrd', 'Eq', 'Ord',
+        'AsRef', 'AsMut', 'Into', 'From', 'Default',
+        'Iterator', 'Extend', 'IntoIterator', 'DoubleEndedIterator',
+        'ExactSizeIterator',
+        'Option', 'Some', 'None',
+        'Result', 'Ok', 'Err',
+        'String', 'ToString', 'Vec',
+    ), suffix=r'\b'), Name.Builtin)
+
+    builtin_macros = (words((
+        'asm', 'assert', 'assert_eq', 'assert_ne', 'cfg', 'column',
+        'compile_error', 'concat', 'concat_idents', 'dbg', 'debug_assert',
+        'debug_assert_eq', 'debug_assert_ne', 'env', 'eprint', 'eprintln',
+        'file', 'format', 'format_args', 'format_args_nl', 'global_asm',
+        'include', 'include_bytes', 'include_str',
+        'is_aarch64_feature_detected',
+        'is_arm_feature_detected',
+        'is_mips64_feature_detected',
+        'is_mips_feature_detected',
+        'is_powerpc64_feature_detected',
+        'is_powerpc_feature_detected',
+        'is_x86_feature_detected',
+        'line', 'llvm_asm', 'log_syntax', 'macro_rules', 'matches',
+        'module_path', 'option_env', 'panic', 'print', 'println', 'stringify',
+        'thread_local', 'todo', 'trace_macros', 'unimplemented', 'unreachable',
+        'vec', 'write', 'writeln',
+    ), suffix=r'!'), Name.Function.Magic)
+
+    tokens = {
+        'root': [
+            # rust allows a file to start with a shebang, but if the first line
+            # starts with #![ then it's not a shebang but a crate attribute.
+            (r'#![^[\r\n].*$', Comment.Preproc),
+            default('base'),
+        ],
+        'base': [
+            # Whitespace and Comments
+            (r'\n', Whitespace),
+            (r'\s+', Whitespace),
+            (r'//!.*?\n', String.Doc),
+            (r'///(\n|[^/].*?\n)', String.Doc),
+            (r'//(.*?)\n', Comment.Single),
+            (r'/\*\*(\n|[^/*])', String.Doc, 'doccomment'),
+            (r'/\*!', String.Doc, 'doccomment'),
+            (r'/\*', Comment.Multiline, 'comment'),
+
+            # Macro parameters
+            (r"""\$([a-zA-Z_]\w*|\(,?|\),?|,?)""", Comment.Preproc),
+            # Keywords
+            (words(('as', 'async', 'await', 'box', 'const', 'crate', 'dyn',
+                    'else', 'extern', 'for', 'if', 'impl', 'in', 'loop',
+                    'match', 'move', 'mut', 'pub', 'ref', 'return', 'static',
+                    'super', 'trait', 'unsafe', 'use', 'where', 'while'),
+                   suffix=r'\b'), Keyword),
+            (words(('abstract', 'become', 'do', 'final', 'macro', 'override',
+                    'priv', 'typeof', 'try', 'unsized', 'virtual', 'yield'),
+                   suffix=r'\b'), Keyword.Reserved),
+            (r'(true|false)\b', Keyword.Constant),
+            (r'self\b', Name.Builtin.Pseudo),
+            (r'mod\b', Keyword, 'modname'),
+            (r'let\b', Keyword.Declaration),
+            (r'fn\b', Keyword, 'funcname'),
+            (r'(struct|enum|type|union)\b', Keyword, 'typename'),
+            (r'(default)(\s+)(type|fn)\b', bygroups(Keyword, Whitespace, Keyword)),
+            keyword_types,
+            (r'[sS]elf\b', Name.Builtin.Pseudo),
+            # Prelude (taken from Rust's src/libstd/prelude.rs)
+            builtin_funcs_types,
+            builtin_macros,
+            # Path separators, so types don't catch them.
+            (r'::\b', Punctuation),
+            # Types in positions.
+            (r'(?::|->)', Punctuation, 'typename'),
+            # Labels
+            (r'(break|continue)(\b\s*)(\'[A-Za-z_]\w*)?',
+             bygroups(Keyword, Text.Whitespace, Name.Label)),
+
+            # Character literals
+            (r"""'(\\['"\\nrt]|\\x[0-7][0-9a-fA-F]|\\0"""
+             r"""|\\u\{[0-9a-fA-F]{1,6}\}|.)'""",
+             String.Char),
+            (r"""b'(\\['"\\nrt]|\\x[0-9a-fA-F]{2}|\\0"""
+             r"""|\\u\{[0-9a-fA-F]{1,6}\}|.)'""",
+             String.Char),
+
+            # Binary literals
+            (r'0b[01_]+', Number.Bin, 'number_lit'),
+            # Octal literals
+            (r'0o[0-7_]+', Number.Oct, 'number_lit'),
+            # Hexadecimal literals
+            (r'0[xX][0-9a-fA-F_]+', Number.Hex, 'number_lit'),
+            # Decimal literals
+            (r'[0-9][0-9_]*(\.[0-9_]+[eE][+\-]?[0-9_]+|'
+             r'\.[0-9_]*(?!\.)|[eE][+\-]?[0-9_]+)', Number.Float,
+             'number_lit'),
+            (r'[0-9][0-9_]*', Number.Integer, 'number_lit'),
+
+            # String literals
+            (r'b"', String, 'bytestring'),
+            (r'"', String, 'string'),
+            (r'(?s)b?r(#*)".*?"\1', String),
+
+            # Lifetime names
+            (r"'", Operator, 'lifetime'),
+
+            # Operators and Punctuation
+            (r'\.\.=?', Operator),
+            (r'[{}()\[\],.;]', Punctuation),
+            (r'[+\-*/%&|<>^!~@=:?]', Operator),
+
+            # Identifiers
+            (r'[a-zA-Z_]\w*', Name),
+            # Raw identifiers
+            (r'r#[a-zA-Z_]\w*', Name),
+
+            # Attributes
+            (r'#!?\[', Comment.Preproc, 'attribute['),
+
+            # Misc
+            # Lone hashes: not used in Rust syntax, but allowed in macro
+            # arguments, most famously for quote::quote!()
+            (r'#', Punctuation),
+        ],
+        'comment': [
+            (r'[^*/]+', Comment.Multiline),
+            (r'/\*', Comment.Multiline, '#push'),
+            (r'\*/', Comment.Multiline, '#pop'),
+            (r'[*/]', Comment.Multiline),
+        ],
+        'doccomment': [
+            (r'[^*/]+', String.Doc),
+            (r'/\*', String.Doc, '#push'),
+            (r'\*/', String.Doc, '#pop'),
+            (r'[*/]', String.Doc),
+        ],
+        'modname': [
+            (r'\s+', Whitespace),
+            (r'[a-zA-Z_]\w*', Name.Namespace, '#pop'),
+            default('#pop'),
+        ],
+        'funcname': [
+            (r'\s+', Whitespace),
+            (r'[a-zA-Z_]\w*', Name.Function, '#pop'),
+            default('#pop'),
+        ],
+        'typename': [
+            (r'\s+', Whitespace),
+            (r'&', Keyword.Pseudo),
+            (r"'", Operator, 'lifetime'),
+            builtin_funcs_types,
+            keyword_types,
+            (r'[a-zA-Z_]\w*', Name.Class, '#pop'),
+            default('#pop'),
+        ],
+        'lifetime': [
+            (r"(static|_)", Name.Builtin),
+            (r"[a-zA-Z_]+\w*", Name.Attribute),
+            default('#pop'),
+        ],
+        'number_lit': [
+            (r'[ui](8|16|32|64|size)', Keyword, '#pop'),
+            (r'f(32|64)', Keyword, '#pop'),
+            default('#pop'),
+        ],
+        'string': [
+            (r'"', String, '#pop'),
+            (r"""\\['"\\nrt]|\\x[0-7][0-9a-fA-F]|\\0"""
+             r"""|\\u\{[0-9a-fA-F]{1,6}\}""", String.Escape),
+            (r'[^\\"]+', String),
+            (r'\\', String),
+        ],
+        'bytestring': [
+            (r"""\\x[89a-fA-F][0-9a-fA-F]""", String.Escape),
+            include('string'),
+        ],
+        'attribute_common': [
+            (r'"', String, 'string'),
+            (r'\[', Comment.Preproc, 'attribute['),
+        ],
+        'attribute[': [
+            include('attribute_common'),
+            (r'\]', Comment.Preproc, '#pop'),
+            (r'[^"\]\[]+', Comment.Preproc),
+        ],
+    }
diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/lexers/sas.py b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/sas.py
new file mode 100644
index 00000000..1b2ad432
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/sas.py
@@ -0,0 +1,227 @@
+"""
+    pygments.lexers.sas
+    ~~~~~~~~~~~~~~~~~~~
+
+    Lexer for SAS.
+
+    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+import re
+from pygments.lexer import RegexLexer, include, words
+from pygments.token import Comment, Keyword, Name, Number, String, Text, \
+    Other, Generic
+
+__all__ = ['SASLexer']
+
+
+class SASLexer(RegexLexer):
+    """
+    For SAS files.
+    """
+    # Syntax from syntax/sas.vim by James Kidd 
+
+    name      = 'SAS'
+    aliases   = ['sas']
+    filenames = ['*.SAS', '*.sas']
+    mimetypes = ['text/x-sas', 'text/sas', 'application/x-sas']
+    url = 'https://en.wikipedia.org/wiki/SAS_(software)'
+    version_added = '2.2'
+    flags     = re.IGNORECASE | re.MULTILINE
+
+    builtins_macros = (
+        "bquote", "nrbquote", "cmpres", "qcmpres", "compstor", "datatyp",
+        "display", "do", "else", "end", "eval", "global", "goto", "if",
+        "index", "input", "keydef", "label", "left", "length", "let",
+        "local", "lowcase", "macro", "mend", "nrquote",
+        "nrstr", "put", "qleft", "qlowcase", "qscan",
+        "qsubstr", "qsysfunc", "qtrim", "quote", "qupcase", "scan",
+        "str", "substr", "superq", "syscall", "sysevalf", "sysexec",
+        "sysfunc", "sysget", "syslput", "sysprod", "sysrc", "sysrput",
+        "then", "to", "trim", "unquote", "until", "upcase", "verify",
+        "while", "window"
+    )
+
+    builtins_conditionals = (
+        "do", "if", "then", "else", "end", "until", "while"
+    )
+
+    builtins_statements = (
+        "abort", "array", "attrib", "by", "call", "cards", "cards4",
+        "catname", "continue", "datalines", "datalines4", "delete", "delim",
+        "delimiter", "display", "dm", "drop", "endsas", "error", "file",
+        "filename", "footnote", "format", "goto", "in", "infile", "informat",
+        "input", "keep", "label", "leave", "length", "libname", "link",
+        "list", "lostcard", "merge", "missing", "modify", "options", "output",
+        "out", "page", "put", "redirect", "remove", "rename", "replace",
+        "retain", "return", "select", "set", "skip", "startsas", "stop",
+        "title", "update", "waitsas", "where", "window", "x", "systask"
+    )
+
+    builtins_sql = (
+        "add", "and", "alter", "as", "cascade", "check", "create",
+        "delete", "describe", "distinct", "drop", "foreign", "from",
+        "group", "having", "index", "insert", "into", "in", "key", "like",
+        "message", "modify", "msgtype", "not", "null", "on", "or",
+        "order", "primary", "references", "reset", "restrict", "select",
+        "set", "table", "unique", "update", "validate", "view", "where"
+    )
+
+    builtins_functions = (
+        "abs", "addr", "airy", "arcos", "arsin", "atan", "attrc",
+        "attrn", "band", "betainv", "blshift", "bnot", "bor",
+        "brshift", "bxor", "byte", "cdf", "ceil", "cexist", "cinv",
+        "close", "cnonct", "collate", "compbl", "compound",
+        "compress", "cos", "cosh", "css", "curobs", "cv", "daccdb",
+        "daccdbsl", "daccsl", "daccsyd", "dacctab", "dairy", "date",
+        "datejul", "datepart", "datetime", "day", "dclose", "depdb",
+        "depdbsl", "depsl", "depsyd",
+        "deptab", "dequote", "dhms", "dif", "digamma",
+        "dim", "dinfo", "dnum", "dopen", "doptname", "doptnum",
+        "dread", "dropnote", "dsname", "erf", "erfc", "exist", "exp",
+        "fappend", "fclose", "fcol", "fdelete", "fetch", "fetchobs",
+        "fexist", "fget", "fileexist", "filename", "fileref",
+        "finfo", "finv", "fipname", "fipnamel", "fipstate", "floor",
+        "fnonct", "fnote", "fopen", "foptname", "foptnum", "fpoint",
+        "fpos", "fput", "fread", "frewind", "frlen", "fsep", "fuzz",
+        "fwrite", "gaminv", "gamma", "getoption", "getvarc", "getvarn",
+        "hbound", "hms", "hosthelp", "hour", "ibessel", "index",
+        "indexc", "indexw", "input", "inputc", "inputn", "int",
+        "intck", "intnx", "intrr", "irr", "jbessel", "juldate",
+        "kurtosis", "lag", "lbound", "left", "length", "lgamma",
+        "libname", "libref", "log", "log10", "log2", "logpdf", "logpmf",
+        "logsdf", "lowcase", "max", "mdy", "mean", "min", "minute",
+        "mod", "month", "mopen", "mort", "n", "netpv", "nmiss",
+        "normal", "note", "npv", "open", "ordinal", "pathname",
+        "pdf", "peek", "peekc", "pmf", "point", "poisson", "poke",
+        "probbeta", "probbnml", "probchi", "probf", "probgam",
+        "probhypr", "probit", "probnegb", "probnorm", "probt",
+        "put", "putc", "putn", "qtr", "quote", "ranbin", "rancau",
+        "ranexp", "rangam", "range", "rank", "rannor", "ranpoi",
+        "rantbl", "rantri", "ranuni", "repeat", "resolve", "reverse",
+        "rewind", "right", "round", "saving", "scan", "sdf", "second",
+        "sign", "sin", "sinh", "skewness", "soundex", "spedis",
+        "sqrt", "std", "stderr", "stfips", "stname", "stnamel",
+        "substr", "sum", "symget", "sysget", "sysmsg", "sysprod",
+        "sysrc", "system", "tan", "tanh", "time", "timepart", "tinv",
+        "tnonct", "today", "translate", "tranwrd", "trigamma",
+        "trim", "trimn", "trunc", "uniform", "upcase", "uss", "var",
+        "varfmt", "varinfmt", "varlabel", "varlen", "varname",
+        "varnum", "varray", "varrayx", "vartype", "verify", "vformat",
+        "vformatd", "vformatdx", "vformatn", "vformatnx", "vformatw",
+        "vformatwx", "vformatx", "vinarray", "vinarrayx", "vinformat",
+        "vinformatd", "vinformatdx", "vinformatn", "vinformatnx",
+        "vinformatw", "vinformatwx", "vinformatx", "vlabel",
+        "vlabelx", "vlength", "vlengthx", "vname", "vnamex", "vtype",
+        "vtypex", "weekday", "year", "yyq", "zipfips", "zipname",
+        "zipnamel", "zipstate"
+    )
+
+    tokens = {
+        'root': [
+            include('comments'),
+            include('proc-data'),
+            include('cards-datalines'),
+            include('logs'),
+            include('general'),
+            (r'.', Text),
+        ],
+        # SAS is multi-line regardless, but * is ended by ;
+        'comments': [
+            (r'^\s*\*.*?;', Comment),
+            (r'/\*.*?\*/', Comment),
+            (r'^\s*\*(.|\n)*?;', Comment.Multiline),
+            (r'/[*](.|\n)*?[*]/', Comment.Multiline),
+        ],
+        # Special highlight for proc, data, quit, run
+        'proc-data': [
+            (r'(^|;)\s*(proc \w+|data|run|quit)[\s;]',
+             Keyword.Reserved),
+        ],
+        # Special highlight cards and datalines
+        'cards-datalines': [
+            (r'^\s*(datalines|cards)\s*;\s*$', Keyword, 'data'),
+        ],
+        'data': [
+            (r'(.|\n)*^\s*;\s*$', Other, '#pop'),
+        ],
+        # Special highlight for put NOTE|ERROR|WARNING (order matters)
+        'logs': [
+            (r'\n?^\s*%?put ', Keyword, 'log-messages'),
+        ],
+        'log-messages': [
+            (r'NOTE(:|-).*', Generic, '#pop'),
+            (r'WARNING(:|-).*', Generic.Emph, '#pop'),
+            (r'ERROR(:|-).*', Generic.Error, '#pop'),
+            include('general'),
+        ],
+        'general': [
+            include('keywords'),
+            include('vars-strings'),
+            include('special'),
+            include('numbers'),
+        ],
+        # Keywords, statements, functions, macros
+        'keywords': [
+            (words(builtins_statements,
+                   prefix = r'\b',
+                   suffix = r'\b'),
+             Keyword),
+            (words(builtins_sql,
+                   prefix = r'\b',
+                   suffix = r'\b'),
+             Keyword),
+            (words(builtins_conditionals,
+                   prefix = r'\b',
+                   suffix = r'\b'),
+             Keyword),
+            (words(builtins_macros,
+                   prefix = r'%',
+                   suffix = r'\b'),
+             Name.Builtin),
+            (words(builtins_functions,
+                   prefix = r'\b',
+                   suffix = r'\('),
+             Name.Builtin),
+        ],
+        # Strings and user-defined variables and macros (order matters)
+        'vars-strings': [
+            (r'&[a-z_]\w{0,31}\.?', Name.Variable),
+            (r'%[a-z_]\w{0,31}', Name.Function),
+            (r'\'', String, 'string_squote'),
+            (r'"', String, 'string_dquote'),
+        ],
+        'string_squote': [
+            ('\'', String, '#pop'),
+            (r'\\\\|\\"|\\\n', String.Escape),
+            # AFAIK, macro variables are not evaluated in single quotes
+            # (r'&', Name.Variable, 'validvar'),
+            (r'[^$\'\\]+', String),
+            (r'[$\'\\]', String),
+        ],
+        'string_dquote': [
+            (r'"', String, '#pop'),
+            (r'\\\\|\\"|\\\n', String.Escape),
+            (r'&', Name.Variable, 'validvar'),
+            (r'[^$&"\\]+', String),
+            (r'[$"\\]', String),
+        ],
+        'validvar': [
+            (r'[a-z_]\w{0,31}\.?', Name.Variable, '#pop'),
+        ],
+        # SAS numbers and special variables
+        'numbers': [
+            (r'\b[+-]?([0-9]+(\.[0-9]+)?|\.[0-9]+|\.)(E[+-]?[0-9]+)?i?\b',
+             Number),
+        ],
+        'special': [
+            (r'(null|missing|_all_|_automatic_|_character_|_n_|'
+             r'_infile_|_name_|_null_|_numeric_|_user_|_webout_)',
+             Keyword.Constant),
+        ],
+        # 'operators': [
+        #     (r'(-|=|<=|>=|<|>|<>|&|!=|'
+        #      r'\||\*|\+|\^|/|!|~|~=)', Operator)
+        # ],
+    }
diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/lexers/savi.py b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/savi.py
new file mode 100644
index 00000000..1e443ae3
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/savi.py
@@ -0,0 +1,171 @@
+"""
+    pygments.lexers.savi
+    ~~~~~~~~~~~~~~~~~~~~
+
+    Lexer for Savi.
+
+    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, bygroups, include
+from pygments.token import Whitespace, Keyword, Name, String, Number, \
+  Operator, Punctuation, Comment, Generic, Error
+
+__all__ = ['SaviLexer']
+
+
+# The canonical version of this file can be found in the following repository,
+# where it is kept in sync with any language changes, as well as the other
+# pygments-like lexers that are maintained for use with other tools:
+# - https://github.com/savi-lang/savi/blob/main/tooling/pygments/lexers/savi.py
+#
+# If you're changing this file in the pygments repository, please ensure that
+# any changes you make are also propagated to the official Savi repository,
+# in order to avoid accidental clobbering of your changes later when an update
+# from the Savi repository flows forward into the pygments repository.
+#
+# If you're changing this file in the Savi repository, please ensure that
+# any changes you make are also reflected in the other pygments-like lexers
+# (rouge, vscode, etc) so that all of the lexers can be kept cleanly in sync.
+
+class SaviLexer(RegexLexer):
+    """
+    For Savi source code.
+
+    .. versionadded: 2.10
+    """
+
+    name = 'Savi'
+    url = 'https://github.com/savi-lang/savi'
+    aliases = ['savi']
+    filenames = ['*.savi']
+    version_added = ''
+
+    tokens = {
+      "root": [
+        # Line Comment
+        (r'//.*?$', Comment.Single),
+
+        # Doc Comment
+        (r'::.*?$', Comment.Single),
+
+        # Capability Operator
+        (r'(\')(\w+)(?=[^\'])', bygroups(Operator, Name)),
+
+        # Double-Quote String
+        (r'\w?"', String.Double, "string.double"),
+
+        # Single-Char String
+        (r"'", String.Char, "string.char"),
+
+        # Type Name
+        (r'(_?[A-Z]\w*)', Name.Class),
+
+        # Nested Type Name
+        (r'(\.)(\s*)(_?[A-Z]\w*)', bygroups(Punctuation, Whitespace, Name.Class)),
+
+        # Declare
+        (r'^([ \t]*)(:\w+)',
+          bygroups(Whitespace, Name.Tag),
+          "decl"),
+
+        # Error-Raising Calls/Names
+        (r'((\w+|\+|\-|\*)\!)', Generic.Deleted),
+
+        # Numeric Values
+        (r'\b\d([\d_]*(\.[\d_]+)?)\b', Number),
+
+        # Hex Numeric Values
+        (r'\b0x([0-9a-fA-F_]+)\b', Number.Hex),
+
+        # Binary Numeric Values
+        (r'\b0b([01_]+)\b', Number.Bin),
+
+        # Function Call (with braces)
+        (r'\w+(?=\()', Name.Function),
+
+        # Function Call (with receiver)
+        (r'(\.)(\s*)(\w+)', bygroups(Punctuation, Whitespace, Name.Function)),
+
+        # Function Call (with self receiver)
+        (r'(@)(\w+)', bygroups(Punctuation, Name.Function)),
+
+        # Parenthesis
+        (r'\(', Punctuation, "root"),
+        (r'\)', Punctuation, "#pop"),
+
+        # Brace
+        (r'\{', Punctuation, "root"),
+        (r'\}', Punctuation, "#pop"),
+
+        # Bracket
+        (r'\[', Punctuation, "root"),
+        (r'(\])(\!)', bygroups(Punctuation, Generic.Deleted), "#pop"),
+        (r'\]', Punctuation, "#pop"),
+
+        # Punctuation
+        (r'[,;:\.@]', Punctuation),
+
+        # Piping Operators
+        (r'(\|\>)', Operator),
+
+        # Branching Operators
+        (r'(\&\&|\|\||\?\?|\&\?|\|\?|\.\?)', Operator),
+
+        # Comparison Operators
+        (r'(\<\=\>|\=\~|\=\=|\<\=|\>\=|\<|\>)', Operator),
+
+        # Arithmetic Operators
+        (r'(\+|\-|\/|\*|\%)', Operator),
+
+        # Assignment Operators
+        (r'(\=)', Operator),
+
+        # Other Operators
+        (r'(\!|\<\<|\<|\&|\|)', Operator),
+
+        # Identifiers
+        (r'\b\w+\b', Name),
+
+        # Whitespace
+        (r'[ \t\r]+\n*|\n+', Whitespace),
+      ],
+
+      # Declare (nested rules)
+      "decl": [
+        (r'\b[a-z_]\w*\b(?!\!)', Keyword.Declaration),
+        (r':', Punctuation, "#pop"),
+        (r'\n', Whitespace, "#pop"),
+        include("root"),
+      ],
+
+      # Double-Quote String (nested rules)
+      "string.double": [
+        (r'\\\(', String.Interpol, "string.interpolation"),
+        (r'\\u[0-9a-fA-F]{4}', String.Escape),
+        (r'\\x[0-9a-fA-F]{2}', String.Escape),
+        (r'\\[bfnrt\\\']', String.Escape),
+        (r'\\"', String.Escape),
+        (r'"', String.Double, "#pop"),
+        (r'[^\\"]+', String.Double),
+        (r'.', Error),
+      ],
+
+      # Single-Char String (nested rules)
+      "string.char": [
+        (r'\\u[0-9a-fA-F]{4}', String.Escape),
+        (r'\\x[0-9a-fA-F]{2}', String.Escape),
+        (r'\\[bfnrt\\\']', String.Escape),
+        (r"\\'", String.Escape),
+        (r"'", String.Char, "#pop"),
+        (r"[^\\']+", String.Char),
+        (r'.', Error),
+      ],
+
+      # Interpolation inside String (nested rules)
+      "string.interpolation": [
+        (r"\)", String.Interpol, "#pop"),
+        include("root"),
+      ]
+    }
diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/lexers/scdoc.py b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/scdoc.py
new file mode 100644
index 00000000..8e850d02
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/scdoc.py
@@ -0,0 +1,85 @@
+"""
+    pygments.lexers.scdoc
+    ~~~~~~~~~~~~~~~~~~~~~
+
+    Lexer for scdoc, a simple man page generator.
+
+    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups, using, this
+from pygments.token import Text, Comment, Keyword, String, Generic
+
+__all__ = ['ScdocLexer']
+
+
+class ScdocLexer(RegexLexer):
+    """
+    `scdoc` is a simple man page generator for POSIX systems written in C99.
+    """
+    name = 'scdoc'
+    url = 'https://git.sr.ht/~sircmpwn/scdoc'
+    aliases = ['scdoc', 'scd']
+    filenames = ['*.scd', '*.scdoc']
+    version_added = '2.5'
+    flags = re.MULTILINE
+
+    tokens = {
+        'root': [
+            # comment
+            (r'^(;.+\n)', bygroups(Comment)),
+
+            # heading with pound prefix
+            (r'^(#)([^#].+\n)', bygroups(Generic.Heading, Text)),
+            (r'^(#{2})(.+\n)', bygroups(Generic.Subheading, Text)),
+            # bulleted lists
+            (r'^(\s*)([*-])(\s)(.+\n)',
+            bygroups(Text, Keyword, Text, using(this, state='inline'))),
+            # numbered lists
+            (r'^(\s*)(\.+\.)( .+\n)',
+            bygroups(Text, Keyword, using(this, state='inline'))),
+            # quote
+            (r'^(\s*>\s)(.+\n)', bygroups(Keyword, Generic.Emph)),
+            # text block
+            (r'^(```\n)([\w\W]*?)(^```$)', bygroups(String, Text, String)),
+
+            include('inline'),
+        ],
+        'inline': [
+            # escape
+            (r'\\.', Text),
+            # underlines
+            (r'(\s)(_[^_]+_)(\W|\n)', bygroups(Text, Generic.Emph, Text)),
+            # bold
+            (r'(\s)(\*[^*]+\*)(\W|\n)', bygroups(Text, Generic.Strong, Text)),
+            # inline code
+            (r'`[^`]+`', String.Backtick),
+
+            # general text, must come last!
+            (r'[^\\\s]+', Text),
+            (r'.', Text),
+        ],
+    }
+
+    def analyse_text(text):
+        """We checks for bold and underline text with * and _. Also
+        every scdoc file must start with a strictly defined first line."""
+        result = 0
+
+        if '*' in text:
+            result += 0.01
+
+        if '_' in text:
+            result += 0.01
+
+        # name(section) ["left_footer" ["center_header"]]
+        first_line = text.partition('\n')[0]
+        scdoc_preamble_pattern = r'^.*\([1-7]\)( "[^"]+"){0,2}$'
+
+        if re.search(scdoc_preamble_pattern, first_line):
+            result += 0.5
+
+        return result
diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/lexers/scripting.py b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/scripting.py
new file mode 100644
index 00000000..6e494c33
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/scripting.py
@@ -0,0 +1,1616 @@
+"""
+    pygments.lexers.scripting
+    ~~~~~~~~~~~~~~~~~~~~~~~~~
+
+    Lexer for scripting and embedded languages.
+
+    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups, default, combined, \
+    words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+    Number, Punctuation, Error, Whitespace, Other
+from pygments.util import get_bool_opt, get_list_opt
+
+__all__ = ['LuaLexer', 'LuauLexer', 'MoonScriptLexer', 'ChaiscriptLexer', 'LSLLexer',
+           'AppleScriptLexer', 'RexxLexer', 'MOOCodeLexer', 'HybrisLexer',
+           'EasytrieveLexer', 'JclLexer', 'MiniScriptLexer']
+
+
+def all_lua_builtins():
+    from pygments.lexers._lua_builtins import MODULES
+    return [w for values in MODULES.values() for w in values]
+
+class LuaLexer(RegexLexer):
+    """
+    For Lua source code.
+
+    Additional options accepted:
+
+    `func_name_highlighting`
+        If given and ``True``, highlight builtin function names
+        (default: ``True``).
+    `disabled_modules`
+        If given, must be a list of module names whose function names
+        should not be highlighted. By default all modules are highlighted.
+
+        To get a list of allowed modules have a look into the
+        `_lua_builtins` module:
+
+        .. sourcecode:: pycon
+
+            >>> from pygments.lexers._lua_builtins import MODULES
+            >>> MODULES.keys()
+            ['string', 'coroutine', 'modules', 'io', 'basic', ...]
+    """
+
+    name = 'Lua'
+    url = 'https://www.lua.org/'
+    aliases = ['lua']
+    filenames = ['*.lua', '*.wlua']
+    mimetypes = ['text/x-lua', 'application/x-lua']
+    version_added = ''
+
+    _comment_multiline = r'(?:--\[(?P=*)\[[\w\W]*?\](?P=level)\])'
+    _comment_single = r'(?:--.*$)'
+    _space = r'(?:\s+(?!\s))'
+    _s = rf'(?:{_comment_multiline}|{_comment_single}|{_space})'
+    _name = r'(?:[^\W\d]\w*)'
+
+    tokens = {
+        'root': [
+            # Lua allows a file to start with a shebang.
+            (r'#!.*', Comment.Preproc),
+            default('base'),
+        ],
+        'ws': [
+            (_comment_multiline, Comment.Multiline),
+            (_comment_single, Comment.Single),
+            (_space, Whitespace),
+        ],
+        'base': [
+            include('ws'),
+
+            (r'(?i)0x[\da-f]*(\.[\da-f]*)?(p[+-]?\d+)?', Number.Hex),
+            (r'(?i)(\d*\.\d+|\d+\.\d*)(e[+-]?\d+)?', Number.Float),
+            (r'(?i)\d+e[+-]?\d+', Number.Float),
+            (r'\d+', Number.Integer),
+
+            # multiline strings
+            (r'(?s)\[(=*)\[.*?\]\1\]', String),
+
+            (r'::', Punctuation, 'label'),
+            (r'\.{3}', Punctuation),
+            (r'[=<>|~&+\-*/%#^]+|\.\.', Operator),
+            (r'[\[\]{}().,:;]+', Punctuation),
+            (r'(and|or|not)\b', Operator.Word),
+
+            (words([
+                'break', 'do', 'else', 'elseif', 'end', 'for', 'if', 'in',
+                'repeat', 'return', 'then', 'until', 'while'
+            ], suffix=r'\b'), Keyword.Reserved),
+            (r'goto\b', Keyword.Reserved, 'goto'),
+            (r'(local)\b', Keyword.Declaration),
+            (r'(true|false|nil)\b', Keyword.Constant),
+
+            (r'(function)\b', Keyword.Reserved, 'funcname'),
+
+            (words(all_lua_builtins(), suffix=r"\b"), Name.Builtin),
+            (fr'[A-Za-z_]\w*(?={_s}*[.:])', Name.Variable, 'varname'),
+            (fr'[A-Za-z_]\w*(?={_s}*\()', Name.Function),
+            (r'[A-Za-z_]\w*', Name.Variable),
+
+            ("'", String.Single, combined('stringescape', 'sqs')),
+            ('"', String.Double, combined('stringescape', 'dqs'))
+        ],
+
+        'varname': [
+            include('ws'),
+            (r'\.\.', Operator, '#pop'),
+            (r'[.:]', Punctuation),
+            (rf'{_name}(?={_s}*[.:])', Name.Property),
+            (rf'{_name}(?={_s}*\()', Name.Function, '#pop'),
+            (_name, Name.Property, '#pop'),
+        ],
+
+        'funcname': [
+            include('ws'),
+            (r'[.:]', Punctuation),
+            (rf'{_name}(?={_s}*[.:])', Name.Class),
+            (_name, Name.Function, '#pop'),
+            # inline function
+            (r'\(', Punctuation, '#pop'),
+        ],
+
+        'goto': [
+            include('ws'),
+            (_name, Name.Label, '#pop'),
+        ],
+
+        'label': [
+            include('ws'),
+            (r'::', Punctuation, '#pop'),
+            (_name, Name.Label),
+        ],
+
+        'stringescape': [
+            (r'\\([abfnrtv\\"\']|[\r\n]{1,2}|z\s*|x[0-9a-fA-F]{2}|\d{1,3}|'
+             r'u\{[0-9a-fA-F]+\})', String.Escape),
+        ],
+
+        'sqs': [
+            (r"'", String.Single, '#pop'),
+            (r"[^\\']+", String.Single),
+        ],
+
+        'dqs': [
+            (r'"', String.Double, '#pop'),
+            (r'[^\\"]+', String.Double),
+        ]
+    }
+
+    def __init__(self, **options):
+        self.func_name_highlighting = get_bool_opt(
+            options, 'func_name_highlighting', True)
+        self.disabled_modules = get_list_opt(options, 'disabled_modules', [])
+
+        self._functions = set()
+        if self.func_name_highlighting:
+            from pygments.lexers._lua_builtins import MODULES
+            for mod, func in MODULES.items():
+                if mod not in self.disabled_modules:
+                    self._functions.update(func)
+        RegexLexer.__init__(self, **options)
+
+    def get_tokens_unprocessed(self, text):
+        for index, token, value in \
+                RegexLexer.get_tokens_unprocessed(self, text):
+            if token is Name.Builtin and value not in self._functions:
+                if '.' in value:
+                    a, b = value.split('.')
+                    yield index, Name, a
+                    yield index + len(a), Punctuation, '.'
+                    yield index + len(a) + 1, Name, b
+                else:
+                    yield index, Name, value
+                continue
+            yield index, token, value
+
+def _luau_make_expression(should_pop, _s):
+    temp_list = [
+        (r'0[xX][\da-fA-F_]*', Number.Hex, '#pop'),
+        (r'0[bB][\d_]*', Number.Bin, '#pop'),
+        (r'\.?\d[\d_]*(?:\.[\d_]*)?(?:[eE][+-]?[\d_]+)?', Number.Float, '#pop'),
+
+        (words((
+            'true', 'false', 'nil'
+        ), suffix=r'\b'), Keyword.Constant, '#pop'),
+
+        (r'\[(=*)\[[.\n]*?\]\1\]', String, '#pop'),
+
+        (r'(\.)([a-zA-Z_]\w*)(?=%s*[({"\'])', bygroups(Punctuation, Name.Function), '#pop'),
+        (r'(\.)([a-zA-Z_]\w*)', bygroups(Punctuation, Name.Variable), '#pop'),
+
+        (rf'[a-zA-Z_]\w*(?:\.[a-zA-Z_]\w*)*(?={_s}*[({{"\'])', Name.Other, '#pop'),
+        (r'[a-zA-Z_]\w*(?:\.[a-zA-Z_]\w*)*', Name, '#pop'),
+    ]
+    if should_pop:
+        return temp_list
+    return [entry[:2] for entry in temp_list]
+
+def _luau_make_expression_special(should_pop):
+    temp_list = [
+        (r'\{', Punctuation, ('#pop', 'closing_brace_base', 'expression')),
+        (r'\(', Punctuation, ('#pop', 'closing_parenthesis_base', 'expression')),
+
+        (r'::?', Punctuation, ('#pop', 'type_end', 'type_start')),
+
+        (r"'", String.Single, ('#pop', 'string_single')),
+        (r'"', String.Double, ('#pop', 'string_double')),
+        (r'`', String.Backtick, ('#pop', 'string_interpolated')),
+    ]
+    if should_pop:
+        return temp_list
+    return [(entry[0], entry[1], entry[2][1:]) for entry in temp_list]
+
+class LuauLexer(RegexLexer):
+    """
+    For Luau source code.
+
+    Additional options accepted:
+
+    `include_luau_builtins`
+        If given and ``True``, automatically highlight Luau builtins
+        (default: ``True``).
+    `include_roblox_builtins`
+        If given and ``True``, automatically highlight Roblox-specific builtins
+        (default: ``False``).
+    `additional_builtins`
+        If given, must be a list of additional builtins to highlight.
+    `disabled_builtins`
+        If given, must be a list of builtins that will not be highlighted.
+    """
+
+    name = 'Luau'
+    url = 'https://luau-lang.org/'
+    aliases = ['luau']
+    filenames = ['*.luau']
+    version_added = '2.18'
+
+    _comment_multiline = r'(?:--\[(?P=*)\[[\w\W]*?\](?P=level)\])'
+    _comment_single = r'(?:--.*$)'
+    _s = r'(?:{}|{}|{})'.format(_comment_multiline, _comment_single, r'\s+')
+
+    tokens = {
+        'root': [
+            (r'#!.*', Comment.Hashbang, 'base'),
+            default('base'),
+        ],
+
+        'ws': [
+            (_comment_multiline, Comment.Multiline),
+            (_comment_single, Comment.Single),
+            (r'\s+', Whitespace),
+        ],
+
+        'base': [
+            include('ws'),
+
+            *_luau_make_expression_special(False),
+            (r'\.\.\.', Punctuation),
+
+            (rf'type\b(?={_s}+[a-zA-Z_])', Keyword.Reserved, 'type_declaration'),
+            (rf'export\b(?={_s}+[a-zA-Z_])', Keyword.Reserved),
+
+            (r'(?:\.\.|//|[+\-*\/%^<>=])=?', Operator, 'expression'),
+            (r'~=', Operator, 'expression'),
+
+            (words((
+                'and', 'or', 'not'
+            ), suffix=r'\b'), Operator.Word, 'expression'),
+
+            (words((
+                'elseif', 'for', 'if', 'in', 'repeat', 'return', 'until',
+                'while'), suffix=r'\b'), Keyword.Reserved, 'expression'),
+            (r'local\b', Keyword.Declaration, 'expression'),
+
+            (r'function\b', Keyword.Reserved, ('expression', 'func_name')),
+
+            (r'[\])};]+', Punctuation),
+
+            include('expression_static'),
+            *_luau_make_expression(False, _s),
+
+            (r'[\[.,]', Punctuation, 'expression'),
+        ],
+        'expression_static': [
+            (words((
+                'break', 'continue', 'do', 'else', 'elseif', 'end', 'for',
+                'if', 'in', 'repeat', 'return', 'then', 'until', 'while'),
+                suffix=r'\b'), Keyword.Reserved),
+        ],
+        'expression': [
+            include('ws'),
+
+            (r'if\b', Keyword.Reserved, ('ternary', 'expression')),
+
+            (r'local\b', Keyword.Declaration),
+            *_luau_make_expression_special(True),
+            (r'\.\.\.', Punctuation, '#pop'),
+
+            (r'function\b', Keyword.Reserved, 'func_name'),
+
+            include('expression_static'),
+            *_luau_make_expression(True, _s),
+
+            default('#pop'),
+        ],
+        'ternary': [
+            include('ws'),
+
+            (r'else\b', Keyword.Reserved, '#pop'),
+            (words((
+                'then', 'elseif',
+            ), suffix=r'\b'), Operator.Reserved, 'expression'),
+
+            default('#pop'),
+        ],
+
+        'closing_brace_pop': [
+            (r'\}', Punctuation, '#pop'),
+        ],
+        'closing_parenthesis_pop': [
+            (r'\)', Punctuation, '#pop'),
+        ],
+        'closing_gt_pop': [
+            (r'>', Punctuation, '#pop'),
+        ],
+
+        'closing_parenthesis_base': [
+            include('closing_parenthesis_pop'),
+            include('base'),
+        ],
+        'closing_parenthesis_type': [
+            include('closing_parenthesis_pop'),
+            include('type'),
+        ],
+        'closing_brace_base': [
+            include('closing_brace_pop'),
+            include('base'),
+        ],
+        'closing_brace_type': [
+            include('closing_brace_pop'),
+            include('type'),
+        ],
+        'closing_gt_type': [
+            include('closing_gt_pop'),
+            include('type'),
+        ],
+
+        'string_escape': [
+            (r'\\z\s*', String.Escape),
+            (r'\\(?:[abfnrtvz\\"\'`\{\n])|[\r\n]{1,2}|x[\da-fA-F]{2}|\d{1,3}|'
+             r'u\{\}[\da-fA-F]*\}', String.Escape),
+        ],
+        'string_single': [
+            include('string_escape'),
+
+            (r"'", String.Single, "#pop"),
+            (r"[^\\']+", String.Single),
+        ],
+        'string_double': [
+            include('string_escape'),
+
+            (r'"', String.Double, "#pop"),
+            (r'[^\\"]+', String.Double),
+        ],
+        'string_interpolated': [
+            include('string_escape'),
+
+            (r'\{', Punctuation, ('closing_brace_base', 'expression')),
+
+            (r'`', String.Backtick, "#pop"),
+            (r'[^\\`\{]+', String.Backtick),
+        ],
+
+        'func_name': [
+            include('ws'),
+
+            (r'[.:]', Punctuation),
+            (rf'[a-zA-Z_]\w*(?={_s}*[.:])', Name.Class),
+            (r'[a-zA-Z_]\w*', Name.Function),
+
+            (r'<', Punctuation, 'closing_gt_type'),
+
+            (r'\(', Punctuation, '#pop'),
+        ],
+
+        'type': [
+            include('ws'),
+
+            (r'\(', Punctuation, 'closing_parenthesis_type'),
+            (r'\{', Punctuation, 'closing_brace_type'),
+            (r'<', Punctuation, 'closing_gt_type'),
+
+            (r"'", String.Single, 'string_single'),
+            (r'"', String.Double, 'string_double'),
+
+            (r'[|&\.,\[\]:=]+', Punctuation),
+            (r'->', Punctuation),
+
+            (r'typeof\(', Name.Builtin, ('closing_parenthesis_base',
+                                         'expression')),
+            (r'[a-zA-Z_]\w*', Name.Class),
+        ],
+        'type_start': [
+            include('ws'),
+
+            (r'\(', Punctuation, ('#pop', 'closing_parenthesis_type')),
+            (r'\{', Punctuation, ('#pop', 'closing_brace_type')),
+            (r'<', Punctuation, ('#pop', 'closing_gt_type')),
+
+            (r"'", String.Single, ('#pop', 'string_single')),
+            (r'"', String.Double, ('#pop', 'string_double')),
+
+            (r'typeof\(', Name.Builtin, ('#pop', 'closing_parenthesis_base',
+                                         'expression')),
+            (r'[a-zA-Z_]\w*', Name.Class, '#pop'),
+        ],
+        'type_end': [
+            include('ws'),
+
+            (r'[|&\.]', Punctuation, 'type_start'),
+            (r'->', Punctuation, 'type_start'),
+
+            (r'<', Punctuation, 'closing_gt_type'),
+
+            default('#pop'),
+        ],
+        'type_declaration': [
+            include('ws'),
+
+            (r'[a-zA-Z_]\w*', Name.Class),
+            (r'<', Punctuation, 'closing_gt_type'),
+
+            (r'=', Punctuation, ('#pop', 'type_end', 'type_start')),
+        ],
+    }
+
+    def __init__(self, **options):
+        self.include_luau_builtins = get_bool_opt(
+            options, 'include_luau_builtins', True)
+        self.include_roblox_builtins = get_bool_opt(
+            options, 'include_roblox_builtins', False)
+        self.additional_builtins = get_list_opt(options, 'additional_builtins', [])
+        self.disabled_builtins = get_list_opt(options, 'disabled_builtins', [])
+
+        self._builtins = set(self.additional_builtins)
+        if self.include_luau_builtins:
+            from pygments.lexers._luau_builtins import LUAU_BUILTINS
+            self._builtins.update(LUAU_BUILTINS)
+        if self.include_roblox_builtins:
+            from pygments.lexers._luau_builtins import ROBLOX_BUILTINS
+            self._builtins.update(ROBLOX_BUILTINS)
+        if self.additional_builtins:
+            self._builtins.update(self.additional_builtins)
+        self._builtins.difference_update(self.disabled_builtins)
+
+        RegexLexer.__init__(self, **options)
+
+    def get_tokens_unprocessed(self, text):
+        for index, token, value in \
+                RegexLexer.get_tokens_unprocessed(self, text):
+            if token is Name or token is Name.Other:
+                split_value = value.split('.')
+                complete_value = []
+                new_index = index
+                for position in range(len(split_value), 0, -1):
+                    potential_string = '.'.join(split_value[:position])
+                    if potential_string in self._builtins:
+                        yield index, Name.Builtin, potential_string
+                        new_index += len(potential_string)
+
+                        if complete_value:
+                            yield new_index, Punctuation, '.'
+                            new_index += 1
+                        break
+                    complete_value.insert(0, split_value[position - 1])
+
+                for position, substring in enumerate(complete_value):
+                    if position + 1 == len(complete_value):
+                        if token is Name:
+                            yield new_index, Name.Variable, substring
+                            continue
+                        yield new_index, Name.Function, substring
+                        continue
+                    yield new_index, Name.Variable, substring
+                    new_index += len(substring)
+                    yield new_index, Punctuation, '.'
+                    new_index += 1
+
+                continue
+            yield index, token, value
+
+class MoonScriptLexer(LuaLexer):
+    """
+    For MoonScript source code.
+    """
+
+    name = 'MoonScript'
+    url = 'http://moonscript.org'
+    aliases = ['moonscript', 'moon']
+    filenames = ['*.moon']
+    mimetypes = ['text/x-moonscript', 'application/x-moonscript']
+    version_added = '1.5'
+
+    tokens = {
+        'root': [
+            (r'#!(.*?)$', Comment.Preproc),
+            default('base'),
+        ],
+        'base': [
+            ('--.*$', Comment.Single),
+            (r'(?i)(\d*\.\d+|\d+\.\d*)(e[+-]?\d+)?', Number.Float),
+            (r'(?i)\d+e[+-]?\d+', Number.Float),
+            (r'(?i)0x[0-9a-f]*', Number.Hex),
+            (r'\d+', Number.Integer),
+            (r'\n', Whitespace),
+            (r'[^\S\n]+', Text),
+            (r'(?s)\[(=*)\[.*?\]\1\]', String),
+            (r'(->|=>)', Name.Function),
+            (r':[a-zA-Z_]\w*', Name.Variable),
+            (r'(==|!=|~=|<=|>=|\.\.\.|\.\.|[=+\-*/%^<>#!.\\:])', Operator),
+            (r'[;,]', Punctuation),
+            (r'[\[\]{}()]', Keyword.Type),
+            (r'[a-zA-Z_]\w*:', Name.Variable),
+            (words((
+                'class', 'extends', 'if', 'then', 'super', 'do', 'with',
+                'import', 'export', 'while', 'elseif', 'return', 'for', 'in',
+                'from', 'when', 'using', 'else', 'and', 'or', 'not', 'switch',
+                'break'), suffix=r'\b'),
+             Keyword),
+            (r'(true|false|nil)\b', Keyword.Constant),
+            (r'(and|or|not)\b', Operator.Word),
+            (r'(self)\b', Name.Builtin.Pseudo),
+            (r'@@?([a-zA-Z_]\w*)?', Name.Variable.Class),
+            (r'[A-Z]\w*', Name.Class),  # proper name
+            (words(all_lua_builtins(), suffix=r"\b"), Name.Builtin),
+            (r'[A-Za-z_]\w*', Name),
+            ("'", String.Single, combined('stringescape', 'sqs')),
+            ('"', String.Double, combined('stringescape', 'dqs'))
+        ],
+        'stringescape': [
+            (r'''\\([abfnrtv\\"']|\d{1,3})''', String.Escape)
+        ],
+        'sqs': [
+            ("'", String.Single, '#pop'),
+            ("[^']+", String)
+        ],
+        'dqs': [
+            ('"', String.Double, '#pop'),
+            ('[^"]+', String)
+        ]
+    }
+
+    def get_tokens_unprocessed(self, text):
+        # set . as Operator instead of Punctuation
+        for index, token, value in LuaLexer.get_tokens_unprocessed(self, text):
+            if token == Punctuation and value == ".":
+                token = Operator
+            yield index, token, value
+
+
+class ChaiscriptLexer(RegexLexer):
+    """
+    For ChaiScript source code.
+    """
+
+    name = 'ChaiScript'
+    url = 'http://chaiscript.com/'
+    aliases = ['chaiscript', 'chai']
+    filenames = ['*.chai']
+    mimetypes = ['text/x-chaiscript', 'application/x-chaiscript']
+    version_added = '2.0'
+
+    flags = re.DOTALL | re.MULTILINE
+
+    tokens = {
+        'commentsandwhitespace': [
+            (r'\s+', Text),
+            (r'//.*?\n', Comment.Single),
+            (r'/\*.*?\*/', Comment.Multiline),
+            (r'^\#.*?\n', Comment.Single)
+        ],
+        'slashstartsregex': [
+            include('commentsandwhitespace'),
+            (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
+             r'([gim]+\b|\B)', String.Regex, '#pop'),
+            (r'(?=/)', Text, ('#pop', 'badregex')),
+            default('#pop')
+        ],
+        'badregex': [
+            (r'\n', Text, '#pop')
+        ],
+        'root': [
+            include('commentsandwhitespace'),
+            (r'\n', Text),
+            (r'[^\S\n]+', Text),
+            (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|\.\.'
+             r'(<<|>>>?|==?|!=?|[-<>+*%&|^/])=?', Operator, 'slashstartsregex'),
+            (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
+            (r'[})\].]', Punctuation),
+            (r'[=+\-*/]', Operator),
+            (r'(for|in|while|do|break|return|continue|if|else|'
+             r'throw|try|catch'
+             r')\b', Keyword, 'slashstartsregex'),
+            (r'(var)\b', Keyword.Declaration, 'slashstartsregex'),
+            (r'(attr|def|fun)\b', Keyword.Reserved),
+            (r'(true|false)\b', Keyword.Constant),
+            (r'(eval|throw)\b', Name.Builtin),
+            (r'`\S+`', Name.Builtin),
+            (r'[$a-zA-Z_]\w*', Name.Other),
+            (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+            (r'0x[0-9a-fA-F]+', Number.Hex),
+            (r'[0-9]+', Number.Integer),
+            (r'"', String.Double, 'dqstring'),
+            (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
+        ],
+        'dqstring': [
+            (r'\$\{[^"}]+?\}', String.Interpol),
+            (r'\$', String.Double),
+            (r'\\\\', String.Double),
+            (r'\\"', String.Double),
+            (r'[^\\"$]+', String.Double),
+            (r'"', String.Double, '#pop'),
+        ],
+    }
+
+
+class LSLLexer(RegexLexer):
+    """
+    For Second Life's Linden Scripting Language source code.
+    """
+
+    name = 'LSL'
+    aliases = ['lsl']
+    filenames = ['*.lsl']
+    mimetypes = ['text/x-lsl']
+    url = 'https://wiki.secondlife.com/wiki/Linden_Scripting_Language'
+    version_added = '2.0'
+
+    flags = re.MULTILINE
+
+    lsl_keywords = r'\b(?:do|else|for|if|jump|return|while)\b'
+    lsl_types = r'\b(?:float|integer|key|list|quaternion|rotation|string|vector)\b'
+    lsl_states = r'\b(?:(?:state)\s+\w+|default)\b'
+    lsl_events = r'\b(?:state_(?:entry|exit)|touch(?:_(?:start|end))?|(?:land_)?collision(?:_(?:start|end))?|timer|listen|(?:no_)?sensor|control|(?:not_)?at_(?:rot_)?target|money|email|run_time_permissions|changed|attach|dataserver|moving_(?:start|end)|link_message|(?:on|object)_rez|remote_data|http_re(?:sponse|quest)|path_update|transaction_result)\b'
+    lsl_functions_builtin = r'\b(?:ll(?:ReturnObjectsBy(?:ID|Owner)|Json(?:2List|[GS]etValue|ValueType)|Sin|Cos|Tan|Atan2|Sqrt|Pow|Abs|Fabs|Frand|Floor|Ceil|Round|Vec(?:Mag|Norm|Dist)|Rot(?:Between|2(?:Euler|Fwd|Left|Up))|(?:Euler|Axes)2Rot|Whisper|(?:Region|Owner)?Say|Shout|Listen(?:Control|Remove)?|Sensor(?:Repeat|Remove)?|Detected(?:Name|Key|Owner|Type|Pos|Vel|Grab|Rot|Group|LinkNumber)|Die|Ground|Wind|(?:[GS]et)(?:AnimationOverride|MemoryLimit|PrimMediaParams|ParcelMusicURL|Object(?:Desc|Name)|PhysicsMaterial|Status|Scale|Color|Alpha|Texture|Pos|Rot|Force|Torque)|ResetAnimationOverride|(?:Scale|Offset|Rotate)Texture|(?:Rot)?Target(?:Remove)?|(?:Stop)?MoveToTarget|Apply(?:Rotational)?Impulse|Set(?:KeyframedMotion|ContentType|RegionPos|(?:Angular)?Velocity|Buoyancy|HoverHeight|ForceAndTorque|TimerEvent|ScriptState|Damage|TextureAnim|Sound(?:Queueing|Radius)|Vehicle(?:Type|(?:Float|Vector|Rotation)Param)|(?:Touch|Sit)?Text|Camera(?:Eye|At)Offset|PrimitiveParams|ClickAction|Link(?:Alpha|Color|PrimitiveParams(?:Fast)?|Texture(?:Anim)?|Camera|Media)|RemoteScriptAccessPin|PayPrice|LocalRot)|ScaleByFactor|Get(?:(?:Max|Min)ScaleFactor|ClosestNavPoint|StaticPath|SimStats|Env|PrimitiveParams|Link(?:PrimitiveParams|Number(?:OfSides)?|Key|Name|Media)|HTTPHeader|FreeURLs|Object(?:Details|PermMask|PrimCount)|Parcel(?:MaxPrims|Details|Prim(?:Count|Owners))|Attached|(?:SPMax|Free|Used)Memory|Region(?:Name|TimeDilation|FPS|Corner|AgentCount)|Root(?:Position|Rotation)|UnixTime|(?:Parcel|Region)Flags|(?:Wall|GMT)clock|SimulatorHostname|BoundingBox|GeometricCenter|Creator|NumberOf(?:Prims|NotecardLines|Sides)|Animation(?:List)?|(?:Camera|Local)(?:Pos|Rot)|Vel|Accel|Omega|Time(?:stamp|OfDay)|(?:Object|CenterOf)?Mass|MassMKS|Energy|Owner|(?:Owner)?Key|SunDirection|Texture(?:Offset|Scale|Rot)|Inventory(?:Number|Name|Key|Type|Creator|PermMask)|Permissions(?:Key)?|StartParameter|List(?:Length|EntryType)|Date|Agent(?:Size|Info|Language|List)|LandOwnerAt|NotecardLine|Script(?:Name|State))|(?:Get|Reset|GetAndReset)Time|PlaySound(?:Slave)?|LoopSound(?:Master|Slave)?|(?:Trigger|Stop|Preload)Sound|(?:(?:Get|Delete)Sub|Insert)String|To(?:Upper|Lower)|Give(?:InventoryList|Money)|RezObject|(?:Stop)?LookAt|Sleep|CollisionFilter|(?:Take|Release)Controls|DetachFromAvatar|AttachToAvatar(?:Temp)?|InstantMessage|(?:GetNext)?Email|StopHover|MinEventDelay|RotLookAt|String(?:Length|Trim)|(?:Start|Stop)Animation|TargetOmega|RequestPermissions|(?:Create|Break)Link|BreakAllLinks|(?:Give|Remove)Inventory|Water|PassTouches|Request(?:Agent|Inventory)Data|TeleportAgent(?:Home|GlobalCoords)?|ModifyLand|CollisionSound|ResetScript|MessageLinked|PushObject|PassCollisions|AxisAngle2Rot|Rot2(?:Axis|Angle)|A(?:cos|sin)|AngleBetween|AllowInventoryDrop|SubStringIndex|List2(?:CSV|Integer|Json|Float|String|Key|Vector|Rot|List(?:Strided)?)|DeleteSubList|List(?:Statistics|Sort|Randomize|(?:Insert|Find|Replace)List)|EdgeOfWorld|AdjustSoundVolume|Key2Name|TriggerSoundLimited|EjectFromLand|(?:CSV|ParseString)2List|OverMyLand|SameGroup|UnSit|Ground(?:Slope|Normal|Contour)|GroundRepel|(?:Set|Remove)VehicleFlags|(?:AvatarOn)?(?:Link)?SitTarget|Script(?:Danger|Profiler)|Dialog|VolumeDetect|ResetOtherScript|RemoteLoadScriptPin|(?:Open|Close)RemoteDataChannel|SendRemoteData|RemoteDataReply|(?:Integer|String)ToBase64|XorBase64|Log(?:10)?|Base64To(?:String|Integer)|ParseStringKeepNulls|RezAtRoot|RequestSimulatorData|ForceMouselook|(?:Load|Release|(?:E|Une)scape)URL|ParcelMedia(?:CommandList|Query)|ModPow|MapDestination|(?:RemoveFrom|AddTo|Reset)Land(?:Pass|Ban)List|(?:Set|Clear)CameraParams|HTTP(?:Request|Response)|TextBox|DetectedTouch(?:UV|Face|Pos|(?:N|Bin)ormal|ST)|(?:MD5|SHA1|DumpList2)String|Request(?:Secure)?URL|Clear(?:Prim|Link)Media|(?:Link)?ParticleSystem|(?:Get|Request)(?:Username|DisplayName)|RegionSayTo|CastRay|GenerateKey|TransferLindenDollars|ManageEstateAccess|(?:Create|Delete)Character|ExecCharacterCmd|Evade|FleeFrom|NavigateTo|PatrolPoints|Pursue|UpdateCharacter|WanderWithin))\b'
+    lsl_constants_float = r'\b(?:DEG_TO_RAD|PI(?:_BY_TWO)?|RAD_TO_DEG|SQRT2|TWO_PI)\b'
+    lsl_constants_integer = r'\b(?:JSON_APPEND|STATUS_(?:PHYSICS|ROTATE_[XYZ]|PHANTOM|SANDBOX|BLOCK_GRAB(?:_OBJECT)?|(?:DIE|RETURN)_AT_EDGE|CAST_SHADOWS|OK|MALFORMED_PARAMS|TYPE_MISMATCH|BOUNDS_ERROR|NOT_(?:FOUND|SUPPORTED)|INTERNAL_ERROR|WHITELIST_FAILED)|AGENT(?:_(?:BY_(?:LEGACY_|USER)NAME|FLYING|ATTACHMENTS|SCRIPTED|MOUSELOOK|SITTING|ON_OBJECT|AWAY|WALKING|IN_AIR|TYPING|CROUCHING|BUSY|ALWAYS_RUN|AUTOPILOT|LIST_(?:PARCEL(?:_OWNER)?|REGION)))?|CAMERA_(?:PITCH|DISTANCE|BEHINDNESS_(?:ANGLE|LAG)|(?:FOCUS|POSITION)(?:_(?:THRESHOLD|LOCKED|LAG))?|FOCUS_OFFSET|ACTIVE)|ANIM_ON|LOOP|REVERSE|PING_PONG|SMOOTH|ROTATE|SCALE|ALL_SIDES|LINK_(?:ROOT|SET|ALL_(?:OTHERS|CHILDREN)|THIS)|ACTIVE|PASSIVE|SCRIPTED|CONTROL_(?:FWD|BACK|(?:ROT_)?(?:LEFT|RIGHT)|UP|DOWN|(?:ML_)?LBUTTON)|PERMISSION_(?:RETURN_OBJECTS|DEBIT|OVERRIDE_ANIMATIONS|SILENT_ESTATE_MANAGEMENT|TAKE_CONTROLS|TRIGGER_ANIMATION|ATTACH|CHANGE_LINKS|(?:CONTROL|TRACK)_CAMERA|TELEPORT)|INVENTORY_(?:TEXTURE|SOUND|OBJECT|SCRIPT|LANDMARK|CLOTHING|NOTECARD|BODYPART|ANIMATION|GESTURE|ALL|NONE)|CHANGED_(?:INVENTORY|COLOR|SHAPE|SCALE|TEXTURE|LINK|ALLOWED_DROP|OWNER|REGION(?:_START)?|TELEPORT|MEDIA)|OBJECT_(?:(?:PHYSICS|SERVER|STREAMING)_COST|UNKNOWN_DETAIL|CHARACTER_TIME|PHANTOM|PHYSICS|TEMP_ON_REZ|NAME|DESC|POS|PRIM_EQUIVALENCE|RETURN_(?:PARCEL(?:_OWNER)?|REGION)|ROO?T|VELOCITY|OWNER|GROUP|CREATOR|ATTACHED_POINT|RENDER_WEIGHT|PATHFINDING_TYPE|(?:RUNNING|TOTAL)_SCRIPT_COUNT|SCRIPT_(?:MEMORY|TIME))|TYPE_(?:INTEGER|FLOAT|STRING|KEY|VECTOR|ROTATION|INVALID)|(?:DEBUG|PUBLIC)_CHANNEL|ATTACH_(?:AVATAR_CENTER|CHEST|HEAD|BACK|PELVIS|MOUTH|CHIN|NECK|NOSE|BELLY|[LR](?:SHOULDER|HAND|FOOT|EAR|EYE|[UL](?:ARM|LEG)|HIP)|(?:LEFT|RIGHT)_PEC|HUD_(?:CENTER_[12]|TOP_(?:RIGHT|CENTER|LEFT)|BOTTOM(?:_(?:RIGHT|LEFT))?))|LAND_(?:LEVEL|RAISE|LOWER|SMOOTH|NOISE|REVERT)|DATA_(?:ONLINE|NAME|BORN|SIM_(?:POS|STATUS|RATING)|PAYINFO)|PAYMENT_INFO_(?:ON_FILE|USED)|REMOTE_DATA_(?:CHANNEL|REQUEST|REPLY)|PSYS_(?:PART_(?:BF_(?:ZERO|ONE(?:_MINUS_(?:DEST_COLOR|SOURCE_(ALPHA|COLOR)))?|DEST_COLOR|SOURCE_(ALPHA|COLOR))|BLEND_FUNC_(DEST|SOURCE)|FLAGS|(?:START|END)_(?:COLOR|ALPHA|SCALE|GLOW)|MAX_AGE|(?:RIBBON|WIND|INTERP_(?:COLOR|SCALE)|BOUNCE|FOLLOW_(?:SRC|VELOCITY)|TARGET_(?:POS|LINEAR)|EMISSIVE)_MASK)|SRC_(?:MAX_AGE|PATTERN|ANGLE_(?:BEGIN|END)|BURST_(?:RATE|PART_COUNT|RADIUS|SPEED_(?:MIN|MAX))|ACCEL|TEXTURE|TARGET_KEY|OMEGA|PATTERN_(?:DROP|EXPLODE|ANGLE(?:_CONE(?:_EMPTY)?)?)))|VEHICLE_(?:REFERENCE_FRAME|TYPE_(?:NONE|SLED|CAR|BOAT|AIRPLANE|BALLOON)|(?:LINEAR|ANGULAR)_(?:FRICTION_TIMESCALE|MOTOR_DIRECTION)|LINEAR_MOTOR_OFFSET|HOVER_(?:HEIGHT|EFFICIENCY|TIMESCALE)|BUOYANCY|(?:LINEAR|ANGULAR)_(?:DEFLECTION_(?:EFFICIENCY|TIMESCALE)|MOTOR_(?:DECAY_)?TIMESCALE)|VERTICAL_ATTRACTION_(?:EFFICIENCY|TIMESCALE)|BANKING_(?:EFFICIENCY|MIX|TIMESCALE)|FLAG_(?:NO_DEFLECTION_UP|LIMIT_(?:ROLL_ONLY|MOTOR_UP)|HOVER_(?:(?:WATER|TERRAIN|UP)_ONLY|GLOBAL_HEIGHT)|MOUSELOOK_(?:STEER|BANK)|CAMERA_DECOUPLED))|PRIM_(?:TYPE(?:_(?:BOX|CYLINDER|PRISM|SPHERE|TORUS|TUBE|RING|SCULPT))?|HOLE_(?:DEFAULT|CIRCLE|SQUARE|TRIANGLE)|MATERIAL(?:_(?:STONE|METAL|GLASS|WOOD|FLESH|PLASTIC|RUBBER))?|SHINY_(?:NONE|LOW|MEDIUM|HIGH)|BUMP_(?:NONE|BRIGHT|DARK|WOOD|BARK|BRICKS|CHECKER|CONCRETE|TILE|STONE|DISKS|GRAVEL|BLOBS|SIDING|LARGETILE|STUCCO|SUCTION|WEAVE)|TEXGEN_(?:DEFAULT|PLANAR)|SCULPT_(?:TYPE_(?:SPHERE|TORUS|PLANE|CYLINDER|MASK)|FLAG_(?:MIRROR|INVERT))|PHYSICS(?:_(?:SHAPE_(?:CONVEX|NONE|PRIM|TYPE)))?|(?:POS|ROT)_LOCAL|SLICE|TEXT|FLEXIBLE|POINT_LIGHT|TEMP_ON_REZ|PHANTOM|POSITION|SIZE|ROTATION|TEXTURE|NAME|OMEGA|DESC|LINK_TARGET|COLOR|BUMP_SHINY|FULLBRIGHT|TEXGEN|GLOW|MEDIA_(?:ALT_IMAGE_ENABLE|CONTROLS|(?:CURRENT|HOME)_URL|AUTO_(?:LOOP|PLAY|SCALE|ZOOM)|FIRST_CLICK_INTERACT|(?:WIDTH|HEIGHT)_PIXELS|WHITELIST(?:_ENABLE)?|PERMS_(?:INTERACT|CONTROL)|PARAM_MAX|CONTROLS_(?:STANDARD|MINI)|PERM_(?:NONE|OWNER|GROUP|ANYONE)|MAX_(?:URL_LENGTH|WHITELIST_(?:SIZE|COUNT)|(?:WIDTH|HEIGHT)_PIXELS)))|MASK_(?:BASE|OWNER|GROUP|EVERYONE|NEXT)|PERM_(?:TRANSFER|MODIFY|COPY|MOVE|ALL)|PARCEL_(?:MEDIA_COMMAND_(?:STOP|PAUSE|PLAY|LOOP|TEXTURE|URL|TIME|AGENT|UNLOAD|AUTO_ALIGN|TYPE|SIZE|DESC|LOOP_SET)|FLAG_(?:ALLOW_(?:FLY|(?:GROUP_)?SCRIPTS|LANDMARK|TERRAFORM|DAMAGE|CREATE_(?:GROUP_)?OBJECTS)|USE_(?:ACCESS_(?:GROUP|LIST)|BAN_LIST|LAND_PASS_LIST)|LOCAL_SOUND_ONLY|RESTRICT_PUSHOBJECT|ALLOW_(?:GROUP|ALL)_OBJECT_ENTRY)|COUNT_(?:TOTAL|OWNER|GROUP|OTHER|SELECTED|TEMP)|DETAILS_(?:NAME|DESC|OWNER|GROUP|AREA|ID|SEE_AVATARS))|LIST_STAT_(?:MAX|MIN|MEAN|MEDIAN|STD_DEV|SUM(?:_SQUARES)?|NUM_COUNT|GEOMETRIC_MEAN|RANGE)|PAY_(?:HIDE|DEFAULT)|REGION_FLAG_(?:ALLOW_DAMAGE|FIXED_SUN|BLOCK_TERRAFORM|SANDBOX|DISABLE_(?:COLLISIONS|PHYSICS)|BLOCK_FLY|ALLOW_DIRECT_TELEPORT|RESTRICT_PUSHOBJECT)|HTTP_(?:METHOD|MIMETYPE|BODY_(?:MAXLENGTH|TRUNCATED)|CUSTOM_HEADER|PRAGMA_NO_CACHE|VERBOSE_THROTTLE|VERIFY_CERT)|STRING_(?:TRIM(?:_(?:HEAD|TAIL))?)|CLICK_ACTION_(?:NONE|TOUCH|SIT|BUY|PAY|OPEN(?:_MEDIA)?|PLAY|ZOOM)|TOUCH_INVALID_FACE|PROFILE_(?:NONE|SCRIPT_MEMORY)|RC_(?:DATA_FLAGS|DETECT_PHANTOM|GET_(?:LINK_NUM|NORMAL|ROOT_KEY)|MAX_HITS|REJECT_(?:TYPES|AGENTS|(?:NON)?PHYSICAL|LAND))|RCERR_(?:CAST_TIME_EXCEEDED|SIM_PERF_LOW|UNKNOWN)|ESTATE_ACCESS_(?:ALLOWED_(?:AGENT|GROUP)_(?:ADD|REMOVE)|BANNED_AGENT_(?:ADD|REMOVE))|DENSITY|FRICTION|RESTITUTION|GRAVITY_MULTIPLIER|KFM_(?:COMMAND|CMD_(?:PLAY|STOP|PAUSE|SET_MODE)|MODE|FORWARD|LOOP|PING_PONG|REVERSE|DATA|ROTATION|TRANSLATION)|ERR_(?:GENERIC|PARCEL_PERMISSIONS|MALFORMED_PARAMS|RUNTIME_PERMISSIONS|THROTTLED)|CHARACTER_(?:CMD_(?:(?:SMOOTH_)?STOP|JUMP)|DESIRED_(?:TURN_)?SPEED|RADIUS|STAY_WITHIN_PARCEL|LENGTH|ORIENTATION|ACCOUNT_FOR_SKIPPED_FRAMES|AVOIDANCE_MODE|TYPE(?:_(?:[A-D]|NONE))?|MAX_(?:DECEL|TURN_RADIUS|(?:ACCEL|SPEED)))|PURSUIT_(?:OFFSET|FUZZ_FACTOR|GOAL_TOLERANCE|INTERCEPT)|REQUIRE_LINE_OF_SIGHT|FORCE_DIRECT_PATH|VERTICAL|HORIZONTAL|AVOID_(?:CHARACTERS|DYNAMIC_OBSTACLES|NONE)|PU_(?:EVADE_(?:HIDDEN|SPOTTED)|FAILURE_(?:DYNAMIC_PATHFINDING_DISABLED|INVALID_(?:GOAL|START)|NO_(?:NAVMESH|VALID_DESTINATION)|OTHER|TARGET_GONE|(?:PARCEL_)?UNREACHABLE)|(?:GOAL|SLOWDOWN_DISTANCE)_REACHED)|TRAVERSAL_TYPE(?:_(?:FAST|NONE|SLOW))?|CONTENT_TYPE_(?:ATOM|FORM|HTML|JSON|LLSD|RSS|TEXT|XHTML|XML)|GCNP_(?:RADIUS|STATIC)|(?:PATROL|WANDER)_PAUSE_AT_WAYPOINTS|OPT_(?:AVATAR|CHARACTER|EXCLUSION_VOLUME|LEGACY_LINKSET|MATERIAL_VOLUME|OTHER|STATIC_OBSTACLE|WALKABLE)|SIM_STAT_PCT_CHARS_STEPPED)\b'
+    lsl_constants_integer_boolean = r'\b(?:FALSE|TRUE)\b'
+    lsl_constants_rotation = r'\b(?:ZERO_ROTATION)\b'
+    lsl_constants_string = r'\b(?:EOF|JSON_(?:ARRAY|DELETE|FALSE|INVALID|NULL|NUMBER|OBJECT|STRING|TRUE)|NULL_KEY|TEXTURE_(?:BLANK|DEFAULT|MEDIA|PLYWOOD|TRANSPARENT)|URL_REQUEST_(?:GRANTED|DENIED))\b'
+    lsl_constants_vector = r'\b(?:TOUCH_INVALID_(?:TEXCOORD|VECTOR)|ZERO_VECTOR)\b'
+    lsl_invalid_broken = r'\b(?:LAND_(?:LARGE|MEDIUM|SMALL)_BRUSH)\b'
+    lsl_invalid_deprecated = r'\b(?:ATTACH_[LR]PEC|DATA_RATING|OBJECT_ATTACHMENT_(?:GEOMETRY_BYTES|SURFACE_AREA)|PRIM_(?:CAST_SHADOWS|MATERIAL_LIGHT|TYPE_LEGACY)|PSYS_SRC_(?:INNER|OUTER)ANGLE|VEHICLE_FLAG_NO_FLY_UP|ll(?:Cloud|Make(?:Explosion|Fountain|Smoke|Fire)|RemoteDataSetRegion|Sound(?:Preload)?|XorBase64Strings(?:Correct)?))\b'
+    lsl_invalid_illegal = r'\b(?:event)\b'
+    lsl_invalid_unimplemented = r'\b(?:CHARACTER_(?:MAX_ANGULAR_(?:ACCEL|SPEED)|TURN_SPEED_MULTIPLIER)|PERMISSION_(?:CHANGE_(?:JOINTS|PERMISSIONS)|RELEASE_OWNERSHIP|REMAP_CONTROLS)|PRIM_PHYSICS_MATERIAL|PSYS_SRC_OBJ_REL_MASK|ll(?:CollisionSprite|(?:Stop)?PointAt|(?:(?:Refresh|Set)Prim)URL|(?:Take|Release)Camera|RemoteLoadScript))\b'
+    lsl_reserved_godmode = r'\b(?:ll(?:GodLikeRezObject|Set(?:Inventory|Object)PermMask))\b'
+    lsl_reserved_log = r'\b(?:print)\b'
+    lsl_operators = r'\+\+|\-\-|<<|>>|&&?|\|\|?|\^|~|[!%<>=*+\-/]=?'
+
+    tokens = {
+        'root':
+        [
+            (r'//.*?\n',                          Comment.Single),
+            (r'/\*',                              Comment.Multiline, 'comment'),
+            (r'"',                                String.Double, 'string'),
+            (lsl_keywords,                        Keyword),
+            (lsl_types,                           Keyword.Type),
+            (lsl_states,                          Name.Class),
+            (lsl_events,                          Name.Builtin),
+            (lsl_functions_builtin,               Name.Function),
+            (lsl_constants_float,                 Keyword.Constant),
+            (lsl_constants_integer,               Keyword.Constant),
+            (lsl_constants_integer_boolean,       Keyword.Constant),
+            (lsl_constants_rotation,              Keyword.Constant),
+            (lsl_constants_string,                Keyword.Constant),
+            (lsl_constants_vector,                Keyword.Constant),
+            (lsl_invalid_broken,                  Error),
+            (lsl_invalid_deprecated,              Error),
+            (lsl_invalid_illegal,                 Error),
+            (lsl_invalid_unimplemented,           Error),
+            (lsl_reserved_godmode,                Keyword.Reserved),
+            (lsl_reserved_log,                    Keyword.Reserved),
+            (r'\b([a-zA-Z_]\w*)\b',     Name.Variable),
+            (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d*', Number.Float),
+            (r'(\d+\.\d*|\.\d+)',                 Number.Float),
+            (r'0[xX][0-9a-fA-F]+',                Number.Hex),
+            (r'\d+',                              Number.Integer),
+            (lsl_operators,                       Operator),
+            (r':=?',                              Error),
+            (r'[,;{}()\[\]]',                     Punctuation),
+            (r'\n+',                              Whitespace),
+            (r'\s+',                              Whitespace)
+        ],
+        'comment':
+        [
+            (r'[^*/]+',                           Comment.Multiline),
+            (r'/\*',                              Comment.Multiline, '#push'),
+            (r'\*/',                              Comment.Multiline, '#pop'),
+            (r'[*/]',                             Comment.Multiline)
+        ],
+        'string':
+        [
+            (r'\\([nt"\\])',                      String.Escape),
+            (r'"',                                String.Double, '#pop'),
+            (r'\\.',                              Error),
+            (r'[^"\\]+',                          String.Double),
+        ]
+    }
+
+
+class AppleScriptLexer(RegexLexer):
+    """
+    For AppleScript source code,
+    including `AppleScript Studio
+    `_.
+    Contributed by Andreas Amann .
+    """
+
+    name = 'AppleScript'
+    url = 'https://developer.apple.com/library/archive/documentation/AppleScript/Conceptual/AppleScriptLangGuide/introduction/ASLR_intro.html'
+    aliases = ['applescript']
+    filenames = ['*.applescript']
+    version_added = '1.0'
+
+    flags = re.MULTILINE | re.DOTALL
+
+    Identifiers = r'[a-zA-Z]\w*'
+
+    # XXX: use words() for all of these
+    Literals = ('AppleScript', 'current application', 'false', 'linefeed',
+                'missing value', 'pi', 'quote', 'result', 'return', 'space',
+                'tab', 'text item delimiters', 'true', 'version')
+    Classes = ('alias ', 'application ', 'boolean ', 'class ', 'constant ',
+               'date ', 'file ', 'integer ', 'list ', 'number ', 'POSIX file ',
+               'real ', 'record ', 'reference ', 'RGB color ', 'script ',
+               'text ', 'unit types', '(?:Unicode )?text', 'string')
+    BuiltIn = ('attachment', 'attribute run', 'character', 'day', 'month',
+               'paragraph', 'word', 'year')
+    HandlerParams = ('about', 'above', 'against', 'apart from', 'around',
+                     'aside from', 'at', 'below', 'beneath', 'beside',
+                     'between', 'for', 'given', 'instead of', 'on', 'onto',
+                     'out of', 'over', 'since')
+    Commands = ('ASCII (character|number)', 'activate', 'beep', 'choose URL',
+                'choose application', 'choose color', 'choose file( name)?',
+                'choose folder', 'choose from list',
+                'choose remote application', 'clipboard info',
+                'close( access)?', 'copy', 'count', 'current date', 'delay',
+                'delete', 'display (alert|dialog)', 'do shell script',
+                'duplicate', 'exists', 'get eof', 'get volume settings',
+                'info for', 'launch', 'list (disks|folder)', 'load script',
+                'log', 'make', 'mount volume', 'new', 'offset',
+                'open( (for access|location))?', 'path to', 'print', 'quit',
+                'random number', 'read', 'round', 'run( script)?',
+                'say', 'scripting components',
+                'set (eof|the clipboard to|volume)', 'store script',
+                'summarize', 'system attribute', 'system info',
+                'the clipboard', 'time to GMT', 'write', 'quoted form')
+    References = ('(in )?back of', '(in )?front of', '[0-9]+(st|nd|rd|th)',
+                  'first', 'second', 'third', 'fourth', 'fifth', 'sixth',
+                  'seventh', 'eighth', 'ninth', 'tenth', 'after', 'back',
+                  'before', 'behind', 'every', 'front', 'index', 'last',
+                  'middle', 'some', 'that', 'through', 'thru', 'where', 'whose')
+    Operators = ("and", "or", "is equal", "equals", "(is )?equal to", "is not",
+                 "isn't", "isn't equal( to)?", "is not equal( to)?",
+                 "doesn't equal", "does not equal", "(is )?greater than",
+                 "comes after", "is not less than or equal( to)?",
+                 "isn't less than or equal( to)?", "(is )?less than",
+                 "comes before", "is not greater than or equal( to)?",
+                 "isn't greater than or equal( to)?",
+                 "(is  )?greater than or equal( to)?", "is not less than",
+                 "isn't less than", "does not come before",
+                 "doesn't come before", "(is )?less than or equal( to)?",
+                 "is not greater than", "isn't greater than",
+                 "does not come after", "doesn't come after", "starts? with",
+                 "begins? with", "ends? with", "contains?", "does not contain",
+                 "doesn't contain", "is in", "is contained by", "is not in",
+                 "is not contained by", "isn't contained by", "div", "mod",
+                 "not", "(a  )?(ref( to)?|reference to)", "is", "does")
+    Control = ('considering', 'else', 'error', 'exit', 'from', 'if',
+               'ignoring', 'in', 'repeat', 'tell', 'then', 'times', 'to',
+               'try', 'until', 'using terms from', 'while', 'whith',
+               'with timeout( of)?', 'with transaction', 'by', 'continue',
+               'end', 'its?', 'me', 'my', 'return', 'of', 'as')
+    Declarations = ('global', 'local', 'prop(erty)?', 'set', 'get')
+    Reserved = ('but', 'put', 'returning', 'the')
+    StudioClasses = ('action cell', 'alert reply', 'application', 'box',
+                     'browser( cell)?', 'bundle', 'button( cell)?', 'cell',
+                     'clip view', 'color well', 'color-panel',
+                     'combo box( item)?', 'control',
+                     'data( (cell|column|item|row|source))?', 'default entry',
+                     'dialog reply', 'document', 'drag info', 'drawer',
+                     'event', 'font(-panel)?', 'formatter',
+                     'image( (cell|view))?', 'matrix', 'menu( item)?', 'item',
+                     'movie( view)?', 'open-panel', 'outline view', 'panel',
+                     'pasteboard', 'plugin', 'popup button',
+                     'progress indicator', 'responder', 'save-panel',
+                     'scroll view', 'secure text field( cell)?', 'slider',
+                     'sound', 'split view', 'stepper', 'tab view( item)?',
+                     'table( (column|header cell|header view|view))',
+                     'text( (field( cell)?|view))?', 'toolbar( item)?',
+                     'user-defaults', 'view', 'window')
+    StudioEvents = ('accept outline drop', 'accept table drop', 'action',
+                    'activated', 'alert ended', 'awake from nib', 'became key',
+                    'became main', 'begin editing', 'bounds changed',
+                    'cell value', 'cell value changed', 'change cell value',
+                    'change item value', 'changed', 'child of item',
+                    'choose menu item', 'clicked', 'clicked toolbar item',
+                    'closed', 'column clicked', 'column moved',
+                    'column resized', 'conclude drop', 'data representation',
+                    'deminiaturized', 'dialog ended', 'document nib name',
+                    'double clicked', 'drag( (entered|exited|updated))?',
+                    'drop', 'end editing', 'exposed', 'idle', 'item expandable',
+                    'item value', 'item value changed', 'items changed',
+                    'keyboard down', 'keyboard up', 'launched',
+                    'load data representation', 'miniaturized', 'mouse down',
+                    'mouse dragged', 'mouse entered', 'mouse exited',
+                    'mouse moved', 'mouse up', 'moved',
+                    'number of browser rows', 'number of items',
+                    'number of rows', 'open untitled', 'opened', 'panel ended',
+                    'parameters updated', 'plugin loaded', 'prepare drop',
+                    'prepare outline drag', 'prepare outline drop',
+                    'prepare table drag', 'prepare table drop',
+                    'read from file', 'resigned active', 'resigned key',
+                    'resigned main', 'resized( sub views)?',
+                    'right mouse down', 'right mouse dragged',
+                    'right mouse up', 'rows changed', 'scroll wheel',
+                    'selected tab view item', 'selection changed',
+                    'selection changing', 'should begin editing',
+                    'should close', 'should collapse item',
+                    'should end editing', 'should expand item',
+                    'should open( untitled)?',
+                    'should quit( after last window closed)?',
+                    'should select column', 'should select item',
+                    'should select row', 'should select tab view item',
+                    'should selection change', 'should zoom', 'shown',
+                    'update menu item', 'update parameters',
+                    'update toolbar item', 'was hidden', 'was miniaturized',
+                    'will become active', 'will close', 'will dismiss',
+                    'will display browser cell', 'will display cell',
+                    'will display item cell', 'will display outline cell',
+                    'will finish launching', 'will hide', 'will miniaturize',
+                    'will move', 'will open', 'will pop up', 'will quit',
+                    'will resign active', 'will resize( sub views)?',
+                    'will select tab view item', 'will show', 'will zoom',
+                    'write to file', 'zoomed')
+    StudioCommands = ('animate', 'append', 'call method', 'center',
+                      'close drawer', 'close panel', 'display',
+                      'display alert', 'display dialog', 'display panel', 'go',
+                      'hide', 'highlight', 'increment', 'item for',
+                      'load image', 'load movie', 'load nib', 'load panel',
+                      'load sound', 'localized string', 'lock focus', 'log',
+                      'open drawer', 'path for', 'pause', 'perform action',
+                      'play', 'register', 'resume', 'scroll', 'select( all)?',
+                      'show', 'size to fit', 'start', 'step back',
+                      'step forward', 'stop', 'synchronize', 'unlock focus',
+                      'update')
+    StudioProperties = ('accepts arrow key', 'action method', 'active',
+                        'alignment', 'allowed identifiers',
+                        'allows branch selection', 'allows column reordering',
+                        'allows column resizing', 'allows column selection',
+                        'allows customization',
+                        'allows editing text attributes',
+                        'allows empty selection', 'allows mixed state',
+                        'allows multiple selection', 'allows reordering',
+                        'allows undo', 'alpha( value)?', 'alternate image',
+                        'alternate increment value', 'alternate title',
+                        'animation delay', 'associated file name',
+                        'associated object', 'auto completes', 'auto display',
+                        'auto enables items', 'auto repeat',
+                        'auto resizes( outline column)?',
+                        'auto save expanded items', 'auto save name',
+                        'auto save table columns', 'auto saves configuration',
+                        'auto scroll', 'auto sizes all columns to fit',
+                        'auto sizes cells', 'background color', 'bezel state',
+                        'bezel style', 'bezeled', 'border rect', 'border type',
+                        'bordered', 'bounds( rotation)?', 'box type',
+                        'button returned', 'button type',
+                        'can choose directories', 'can choose files',
+                        'can draw', 'can hide',
+                        'cell( (background color|size|type))?', 'characters',
+                        'class', 'click count', 'clicked( data)? column',
+                        'clicked data item', 'clicked( data)? row',
+                        'closeable', 'collating', 'color( (mode|panel))',
+                        'command key down', 'configuration',
+                        'content(s| (size|view( margins)?))?', 'context',
+                        'continuous', 'control key down', 'control size',
+                        'control tint', 'control view',
+                        'controller visible', 'coordinate system',
+                        'copies( on scroll)?', 'corner view', 'current cell',
+                        'current column', 'current( field)?  editor',
+                        'current( menu)? item', 'current row',
+                        'current tab view item', 'data source',
+                        'default identifiers', 'delta (x|y|z)',
+                        'destination window', 'directory', 'display mode',
+                        'displayed cell', 'document( (edited|rect|view))?',
+                        'double value', 'dragged column', 'dragged distance',
+                        'dragged items', 'draws( cell)? background',
+                        'draws grid', 'dynamically scrolls', 'echos bullets',
+                        'edge', 'editable', 'edited( data)? column',
+                        'edited data item', 'edited( data)? row', 'enabled',
+                        'enclosing scroll view', 'ending page',
+                        'error handling', 'event number', 'event type',
+                        'excluded from windows menu', 'executable path',
+                        'expanded', 'fax number', 'field editor', 'file kind',
+                        'file name', 'file type', 'first responder',
+                        'first visible column', 'flipped', 'floating',
+                        'font( panel)?', 'formatter', 'frameworks path',
+                        'frontmost', 'gave up', 'grid color', 'has data items',
+                        'has horizontal ruler', 'has horizontal scroller',
+                        'has parent data item', 'has resize indicator',
+                        'has shadow', 'has sub menu', 'has vertical ruler',
+                        'has vertical scroller', 'header cell', 'header view',
+                        'hidden', 'hides when deactivated', 'highlights by',
+                        'horizontal line scroll', 'horizontal page scroll',
+                        'horizontal ruler view', 'horizontally resizable',
+                        'icon image', 'id', 'identifier',
+                        'ignores multiple clicks',
+                        'image( (alignment|dims when disabled|frame style|scaling))?',
+                        'imports graphics', 'increment value',
+                        'indentation per level', 'indeterminate', 'index',
+                        'integer value', 'intercell spacing', 'item height',
+                        'key( (code|equivalent( modifier)?|window))?',
+                        'knob thickness', 'label', 'last( visible)? column',
+                        'leading offset', 'leaf', 'level', 'line scroll',
+                        'loaded', 'localized sort', 'location', 'loop mode',
+                        'main( (bunde|menu|window))?', 'marker follows cell',
+                        'matrix mode', 'maximum( content)? size',
+                        'maximum visible columns',
+                        'menu( form representation)?', 'miniaturizable',
+                        'miniaturized', 'minimized image', 'minimized title',
+                        'minimum column width', 'minimum( content)? size',
+                        'modal', 'modified', 'mouse down state',
+                        'movie( (controller|file|rect))?', 'muted', 'name',
+                        'needs display', 'next state', 'next text',
+                        'number of tick marks', 'only tick mark values',
+                        'opaque', 'open panel', 'option key down',
+                        'outline table column', 'page scroll', 'pages across',
+                        'pages down', 'palette label', 'pane splitter',
+                        'parent data item', 'parent window', 'pasteboard',
+                        'path( (names|separator))?', 'playing',
+                        'plays every frame', 'plays selection only', 'position',
+                        'preferred edge', 'preferred type', 'pressure',
+                        'previous text', 'prompt', 'properties',
+                        'prototype cell', 'pulls down', 'rate',
+                        'released when closed', 'repeated',
+                        'requested print time', 'required file type',
+                        'resizable', 'resized column', 'resource path',
+                        'returns records', 'reuses columns', 'rich text',
+                        'roll over', 'row height', 'rulers visible',
+                        'save panel', 'scripts path', 'scrollable',
+                        'selectable( identifiers)?', 'selected cell',
+                        'selected( data)? columns?', 'selected data items?',
+                        'selected( data)? rows?', 'selected item identifier',
+                        'selection by rect', 'send action on arrow key',
+                        'sends action when done editing', 'separates columns',
+                        'separator item', 'sequence number', 'services menu',
+                        'shared frameworks path', 'shared support path',
+                        'sheet', 'shift key down', 'shows alpha',
+                        'shows state by', 'size( mode)?',
+                        'smart insert delete enabled', 'sort case sensitivity',
+                        'sort column', 'sort order', 'sort type',
+                        'sorted( data rows)?', 'sound', 'source( mask)?',
+                        'spell checking enabled', 'starting page', 'state',
+                        'string value', 'sub menu', 'super menu', 'super view',
+                        'tab key traverses cells', 'tab state', 'tab type',
+                        'tab view', 'table view', 'tag', 'target( printer)?',
+                        'text color', 'text container insert',
+                        'text container origin', 'text returned',
+                        'tick mark position', 'time stamp',
+                        'title(d| (cell|font|height|position|rect))?',
+                        'tool tip', 'toolbar', 'trailing offset', 'transparent',
+                        'treat packages as directories', 'truncated labels',
+                        'types', 'unmodified characters', 'update views',
+                        'use sort indicator', 'user defaults',
+                        'uses data source', 'uses ruler',
+                        'uses threaded animation',
+                        'uses title from previous column', 'value wraps',
+                        'version',
+                        'vertical( (line scroll|page scroll|ruler view))?',
+                        'vertically resizable', 'view',
+                        'visible( document rect)?', 'volume', 'width', 'window',
+                        'windows menu', 'wraps', 'zoomable', 'zoomed')
+
+    tokens = {
+        'root': [
+            (r'\s+', Text),
+            (r'¬\n', String.Escape),
+            (r"'s\s+", Text),  # This is a possessive, consider moving
+            (r'(--|#).*?$', Comment),
+            (r'\(\*', Comment.Multiline, 'comment'),
+            (r'[(){}!,.:]', Punctuation),
+            (r'(«)([^»]+)(»)',
+             bygroups(Text, Name.Builtin, Text)),
+            (r'\b((?:considering|ignoring)\s*)'
+             r'(application responses|case|diacriticals|hyphens|'
+             r'numeric strings|punctuation|white space)',
+             bygroups(Keyword, Name.Builtin)),
+            (r'(-|\*|\+|&|≠|>=?|<=?|=|≥|≤|/|÷|\^)', Operator),
+            (r"\b({})\b".format('|'.join(Operators)), Operator.Word),
+            (r'^(\s*(?:on|end)\s+)'
+             r'({})'.format('|'.join(StudioEvents[::-1])),
+             bygroups(Keyword, Name.Function)),
+            (r'^(\s*)(in|on|script|to)(\s+)', bygroups(Text, Keyword, Text)),
+            (r'\b(as )({})\b'.format('|'.join(Classes)),
+             bygroups(Keyword, Name.Class)),
+            (r'\b({})\b'.format('|'.join(Literals)), Name.Constant),
+            (r'\b({})\b'.format('|'.join(Commands)), Name.Builtin),
+            (r'\b({})\b'.format('|'.join(Control)), Keyword),
+            (r'\b({})\b'.format('|'.join(Declarations)), Keyword),
+            (r'\b({})\b'.format('|'.join(Reserved)), Name.Builtin),
+            (r'\b({})s?\b'.format('|'.join(BuiltIn)), Name.Builtin),
+            (r'\b({})\b'.format('|'.join(HandlerParams)), Name.Builtin),
+            (r'\b({})\b'.format('|'.join(StudioProperties)), Name.Attribute),
+            (r'\b({})s?\b'.format('|'.join(StudioClasses)), Name.Builtin),
+            (r'\b({})\b'.format('|'.join(StudioCommands)), Name.Builtin),
+            (r'\b({})\b'.format('|'.join(References)), Name.Builtin),
+            (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
+            (rf'\b({Identifiers})\b', Name.Variable),
+            (r'[-+]?(\d+\.\d*|\d*\.\d+)(E[-+][0-9]+)?', Number.Float),
+            (r'[-+]?\d+', Number.Integer),
+        ],
+        'comment': [
+            (r'\(\*', Comment.Multiline, '#push'),
+            (r'\*\)', Comment.Multiline, '#pop'),
+            ('[^*(]+', Comment.Multiline),
+            ('[*(]', Comment.Multiline),
+        ],
+    }
+
+
+class RexxLexer(RegexLexer):
+    """
+    Rexx is a scripting language available for
+    a wide range of different platforms with its roots found on mainframe
+    systems. It is popular for I/O- and data based tasks and can act as glue
+    language to bind different applications together.
+    """
+    name = 'Rexx'
+    url = 'http://www.rexxinfo.org/'
+    aliases = ['rexx', 'arexx']
+    filenames = ['*.rexx', '*.rex', '*.rx', '*.arexx']
+    mimetypes = ['text/x-rexx']
+    version_added = '2.0'
+    flags = re.IGNORECASE
+
+    tokens = {
+        'root': [
+            (r'\s+', Whitespace),
+            (r'/\*', Comment.Multiline, 'comment'),
+            (r'"', String, 'string_double'),
+            (r"'", String, 'string_single'),
+            (r'[0-9]+(\.[0-9]+)?(e[+-]?[0-9])?', Number),
+            (r'([a-z_]\w*)(\s*)(:)(\s*)(procedure)\b',
+             bygroups(Name.Function, Whitespace, Operator, Whitespace,
+                      Keyword.Declaration)),
+            (r'([a-z_]\w*)(\s*)(:)',
+             bygroups(Name.Label, Whitespace, Operator)),
+            include('function'),
+            include('keyword'),
+            include('operator'),
+            (r'[a-z_]\w*', Text),
+        ],
+        'function': [
+            (words((
+                'abbrev', 'abs', 'address', 'arg', 'b2x', 'bitand', 'bitor', 'bitxor',
+                'c2d', 'c2x', 'center', 'charin', 'charout', 'chars', 'compare',
+                'condition', 'copies', 'd2c', 'd2x', 'datatype', 'date', 'delstr',
+                'delword', 'digits', 'errortext', 'form', 'format', 'fuzz', 'insert',
+                'lastpos', 'left', 'length', 'linein', 'lineout', 'lines', 'max',
+                'min', 'overlay', 'pos', 'queued', 'random', 'reverse', 'right', 'sign',
+                'sourceline', 'space', 'stream', 'strip', 'substr', 'subword', 'symbol',
+                'time', 'trace', 'translate', 'trunc', 'value', 'verify', 'word',
+                'wordindex', 'wordlength', 'wordpos', 'words', 'x2b', 'x2c', 'x2d',
+                'xrange'), suffix=r'(\s*)(\()'),
+             bygroups(Name.Builtin, Whitespace, Operator)),
+        ],
+        'keyword': [
+            (r'(address|arg|by|call|do|drop|else|end|exit|for|forever|if|'
+             r'interpret|iterate|leave|nop|numeric|off|on|options|parse|'
+             r'pull|push|queue|return|say|select|signal|to|then|trace|until|'
+             r'while)\b', Keyword.Reserved),
+        ],
+        'operator': [
+            (r'(-|//|/|\(|\)|\*\*|\*|\\<<|\\<|\\==|\\=|\\>>|\\>|\\|\|\||\||'
+             r'&&|&|%|\+|<<=|<<|<=|<>|<|==|=|><|>=|>>=|>>|>|¬<<|¬<|¬==|¬=|'
+             r'¬>>|¬>|¬|\.|,)', Operator),
+        ],
+        'string_double': [
+            (r'[^"\n]+', String),
+            (r'""', String),
+            (r'"', String, '#pop'),
+            (r'\n', Text, '#pop'),  # Stray linefeed also terminates strings.
+        ],
+        'string_single': [
+            (r'[^\'\n]+', String),
+            (r'\'\'', String),
+            (r'\'', String, '#pop'),
+            (r'\n', Text, '#pop'),  # Stray linefeed also terminates strings.
+        ],
+        'comment': [
+            (r'[^*]+', Comment.Multiline),
+            (r'\*/', Comment.Multiline, '#pop'),
+            (r'\*', Comment.Multiline),
+        ]
+    }
+
+    def _c(s):
+        return re.compile(s, re.MULTILINE)
+    _ADDRESS_COMMAND_PATTERN = _c(r'^\s*address\s+command\b')
+    _ADDRESS_PATTERN = _c(r'^\s*address\s+')
+    _DO_WHILE_PATTERN = _c(r'^\s*do\s+while\b')
+    _IF_THEN_DO_PATTERN = _c(r'^\s*if\b.+\bthen\s+do\s*$')
+    _PROCEDURE_PATTERN = _c(r'^\s*([a-z_]\w*)(\s*)(:)(\s*)(procedure)\b')
+    _ELSE_DO_PATTERN = _c(r'\belse\s+do\s*$')
+    _PARSE_ARG_PATTERN = _c(r'^\s*parse\s+(upper\s+)?(arg|value)\b')
+    PATTERNS_AND_WEIGHTS = (
+        (_ADDRESS_COMMAND_PATTERN, 0.2),
+        (_ADDRESS_PATTERN, 0.05),
+        (_DO_WHILE_PATTERN, 0.1),
+        (_ELSE_DO_PATTERN, 0.1),
+        (_IF_THEN_DO_PATTERN, 0.1),
+        (_PROCEDURE_PATTERN, 0.5),
+        (_PARSE_ARG_PATTERN, 0.2),
+    )
+
+    def analyse_text(text):
+        """
+        Check for initial comment and patterns that distinguish Rexx from other
+        C-like languages.
+        """
+        if re.search(r'/\*\**\s*rexx', text, re.IGNORECASE):
+            # Header matches MVS Rexx requirements, this is certainly a Rexx
+            # script.
+            return 1.0
+        elif text.startswith('/*'):
+            # Header matches general Rexx requirements; the source code might
+            # still be any language using C comments such as C++, C# or Java.
+            lowerText = text.lower()
+            result = sum(weight
+                         for (pattern, weight) in RexxLexer.PATTERNS_AND_WEIGHTS
+                         if pattern.search(lowerText)) + 0.01
+            return min(result, 1.0)
+
+
+class MOOCodeLexer(RegexLexer):
+    """
+    For MOOCode (the MOO scripting language).
+    """
+    name = 'MOOCode'
+    url = 'http://www.moo.mud.org/'
+    filenames = ['*.moo']
+    aliases = ['moocode', 'moo']
+    mimetypes = ['text/x-moocode']
+    version_added = '0.9'
+
+    tokens = {
+        'root': [
+            # Numbers
+            (r'(0|[1-9][0-9_]*)', Number.Integer),
+            # Strings
+            (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
+            # exceptions
+            (r'(E_PERM|E_DIV)', Name.Exception),
+            # db-refs
+            (r'((#[-0-9]+)|(\$\w+))', Name.Entity),
+            # Keywords
+            (r'\b(if|else|elseif|endif|for|endfor|fork|endfork|while'
+             r'|endwhile|break|continue|return|try'
+             r'|except|endtry|finally|in)\b', Keyword),
+            # builtins
+            (r'(random|length)', Name.Builtin),
+            # special variables
+            (r'(player|caller|this|args)', Name.Variable.Instance),
+            # skip whitespace
+            (r'\s+', Text),
+            (r'\n', Text),
+            # other operators
+            (r'([!;=,{}&|:.\[\]@()<>?]+)', Operator),
+            # function call
+            (r'(\w+)(\()', bygroups(Name.Function, Operator)),
+            # variables
+            (r'(\w+)', Text),
+        ]
+    }
+
+
+class HybrisLexer(RegexLexer):
+    """
+    For Hybris source code.
+    """
+
+    name = 'Hybris'
+    aliases = ['hybris']
+    filenames = ['*.hyb']
+    mimetypes = ['text/x-hybris', 'application/x-hybris']
+    url = 'https://github.com/evilsocket/hybris'
+    version_added = '1.4'
+
+    flags = re.MULTILINE | re.DOTALL
+
+    tokens = {
+        'root': [
+            # method names
+            (r'^(\s*(?:function|method|operator\s+)+?)'
+             r'([a-zA-Z_]\w*)'
+             r'(\s*)(\()', bygroups(Keyword, Name.Function, Text, Operator)),
+            (r'[^\S\n]+', Text),
+            (r'//.*?\n', Comment.Single),
+            (r'/\*.*?\*/', Comment.Multiline),
+            (r'@[a-zA-Z_][\w.]*', Name.Decorator),
+            (r'(break|case|catch|next|default|do|else|finally|for|foreach|of|'
+             r'unless|if|new|return|switch|me|throw|try|while)\b', Keyword),
+            (r'(extends|private|protected|public|static|throws|function|method|'
+             r'operator)\b', Keyword.Declaration),
+            (r'(true|false|null|__FILE__|__LINE__|__VERSION__|__LIB_PATH__|'
+             r'__INC_PATH__)\b', Keyword.Constant),
+            (r'(class|struct)(\s+)',
+             bygroups(Keyword.Declaration, Text), 'class'),
+            (r'(import|include)(\s+)',
+             bygroups(Keyword.Namespace, Text), 'import'),
+            (words((
+                'gc_collect', 'gc_mm_items', 'gc_mm_usage', 'gc_collect_threshold',
+                'urlencode', 'urldecode', 'base64encode', 'base64decode', 'sha1', 'crc32',
+                'sha2', 'md5', 'md5_file', 'acos', 'asin', 'atan', 'atan2', 'ceil', 'cos',
+                'cosh', 'exp', 'fabs', 'floor', 'fmod', 'log', 'log10', 'pow', 'sin',
+                'sinh', 'sqrt', 'tan', 'tanh', 'isint', 'isfloat', 'ischar', 'isstring',
+                'isarray', 'ismap', 'isalias', 'typeof', 'sizeof', 'toint', 'tostring',
+                'fromxml', 'toxml', 'binary', 'pack', 'load', 'eval', 'var_names',
+                'var_values', 'user_functions', 'dyn_functions', 'methods', 'call',
+                'call_method', 'mknod', 'mkfifo', 'mount', 'umount2', 'umount', 'ticks',
+                'usleep', 'sleep', 'time', 'strtime', 'strdate', 'dllopen', 'dlllink',
+                'dllcall', 'dllcall_argv', 'dllclose', 'env', 'exec', 'fork', 'getpid',
+                'wait', 'popen', 'pclose', 'exit', 'kill', 'pthread_create',
+                'pthread_create_argv', 'pthread_exit', 'pthread_join', 'pthread_kill',
+                'smtp_send', 'http_get', 'http_post', 'http_download', 'socket', 'bind',
+                'listen', 'accept', 'getsockname', 'getpeername', 'settimeout', 'connect',
+                'server', 'recv', 'send', 'close', 'print', 'println', 'printf', 'input',
+                'readline', 'serial_open', 'serial_fcntl', 'serial_get_attr',
+                'serial_get_ispeed', 'serial_get_ospeed', 'serial_set_attr',
+                'serial_set_ispeed', 'serial_set_ospeed', 'serial_write', 'serial_read',
+                'serial_close', 'xml_load', 'xml_parse', 'fopen', 'fseek', 'ftell',
+                'fsize', 'fread', 'fwrite', 'fgets', 'fclose', 'file', 'readdir',
+                'pcre_replace', 'size', 'pop', 'unmap', 'has', 'keys', 'values',
+                'length', 'find', 'substr', 'replace', 'split', 'trim', 'remove',
+                'contains', 'join'), suffix=r'\b'),
+             Name.Builtin),
+            (words((
+                'MethodReference', 'Runner', 'Dll', 'Thread', 'Pipe', 'Process',
+                'Runnable', 'CGI', 'ClientSocket', 'Socket', 'ServerSocket',
+                'File', 'Console', 'Directory', 'Exception'), suffix=r'\b'),
+             Keyword.Type),
+            (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
+            (r"'\\.'|'[^\\]'|'\\u[0-9a-f]{4}'", String.Char),
+            (r'(\.)([a-zA-Z_]\w*)',
+             bygroups(Operator, Name.Attribute)),
+            (r'[a-zA-Z_]\w*:', Name.Label),
+            (r'[a-zA-Z_$]\w*', Name),
+            (r'[~^*!%&\[\](){}<>|+=:;,./?\-@]+', Operator),
+            (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+            (r'0x[0-9a-f]+', Number.Hex),
+            (r'[0-9]+L?', Number.Integer),
+            (r'\n', Text),
+        ],
+        'class': [
+            (r'[a-zA-Z_]\w*', Name.Class, '#pop')
+        ],
+        'import': [
+            (r'[\w.]+\*?', Name.Namespace, '#pop')
+        ],
+    }
+
+    def analyse_text(text):
+        """public method and private method don't seem to be quite common
+        elsewhere."""
+        result = 0
+        if re.search(r'\b(?:public|private)\s+method\b', text):
+            result += 0.01
+        return result
+
+
+
+class EasytrieveLexer(RegexLexer):
+    """
+    Easytrieve Plus is a programming language for extracting, filtering and
+    converting sequential data. Furthermore it can layout data for reports.
+    It is mainly used on mainframe platforms and can access several of the
+    mainframe's native file formats. It is somewhat comparable to awk.
+    """
+    name = 'Easytrieve'
+    aliases = ['easytrieve']
+    filenames = ['*.ezt', '*.mac']
+    mimetypes = ['text/x-easytrieve']
+    url = 'https://www.broadcom.com/products/mainframe/application-development/easytrieve-report-generator'
+    version_added = '2.1'
+    flags = 0
+
+    # Note: We cannot use r'\b' at the start and end of keywords because
+    # Easytrieve Plus delimiter characters are:
+    #
+    #   * space ( )
+    #   * apostrophe (')
+    #   * period (.)
+    #   * comma (,)
+    #   * parenthesis ( and )
+    #   * colon (:)
+    #
+    # Additionally words end once a '*' appears, indicatins a comment.
+    _DELIMITERS = r' \'.,():\n'
+    _DELIMITERS_OR_COMENT = _DELIMITERS + '*'
+    _DELIMITER_PATTERN = '[' + _DELIMITERS + ']'
+    _DELIMITER_PATTERN_CAPTURE = '(' + _DELIMITER_PATTERN + ')'
+    _NON_DELIMITER_OR_COMMENT_PATTERN = '[^' + _DELIMITERS_OR_COMENT + ']'
+    _OPERATORS_PATTERN = '[.+\\-/=\\[\\](){}<>;,&%¬]'
+    _KEYWORDS = [
+        'AFTER-BREAK', 'AFTER-LINE', 'AFTER-SCREEN', 'AIM', 'AND', 'ATTR',
+        'BEFORE', 'BEFORE-BREAK', 'BEFORE-LINE', 'BEFORE-SCREEN', 'BUSHU',
+        'BY', 'CALL', 'CASE', 'CHECKPOINT', 'CHKP', 'CHKP-STATUS', 'CLEAR',
+        'CLOSE', 'COL', 'COLOR', 'COMMIT', 'CONTROL', 'COPY', 'CURSOR', 'D',
+        'DECLARE', 'DEFAULT', 'DEFINE', 'DELETE', 'DENWA', 'DISPLAY', 'DLI',
+        'DO', 'DUPLICATE', 'E', 'ELSE', 'ELSE-IF', 'END', 'END-CASE',
+        'END-DO', 'END-IF', 'END-PROC', 'ENDPAGE', 'ENDTABLE', 'ENTER', 'EOF',
+        'EQ', 'ERROR', 'EXIT', 'EXTERNAL', 'EZLIB', 'F1', 'F10', 'F11', 'F12',
+        'F13', 'F14', 'F15', 'F16', 'F17', 'F18', 'F19', 'F2', 'F20', 'F21',
+        'F22', 'F23', 'F24', 'F25', 'F26', 'F27', 'F28', 'F29', 'F3', 'F30',
+        'F31', 'F32', 'F33', 'F34', 'F35', 'F36', 'F4', 'F5', 'F6', 'F7',
+        'F8', 'F9', 'FETCH', 'FILE-STATUS', 'FILL', 'FINAL', 'FIRST',
+        'FIRST-DUP', 'FOR', 'GE', 'GET', 'GO', 'GOTO', 'GQ', 'GR', 'GT',
+        'HEADING', 'HEX', 'HIGH-VALUES', 'IDD', 'IDMS', 'IF', 'IN', 'INSERT',
+        'JUSTIFY', 'KANJI-DATE', 'KANJI-DATE-LONG', 'KANJI-TIME', 'KEY',
+        'KEY-PRESSED', 'KOKUGO', 'KUN', 'LAST-DUP', 'LE', 'LEVEL', 'LIKE',
+        'LINE', 'LINE-COUNT', 'LINE-NUMBER', 'LINK', 'LIST', 'LOW-VALUES',
+        'LQ', 'LS', 'LT', 'MACRO', 'MASK', 'MATCHED', 'MEND', 'MESSAGE',
+        'MOVE', 'MSTART', 'NE', 'NEWPAGE', 'NOMASK', 'NOPRINT', 'NOT',
+        'NOTE', 'NOVERIFY', 'NQ', 'NULL', 'OF', 'OR', 'OTHERWISE', 'PA1',
+        'PA2', 'PA3', 'PAGE-COUNT', 'PAGE-NUMBER', 'PARM-REGISTER',
+        'PATH-ID', 'PATTERN', 'PERFORM', 'POINT', 'POS', 'PRIMARY', 'PRINT',
+        'PROCEDURE', 'PROGRAM', 'PUT', 'READ', 'RECORD', 'RECORD-COUNT',
+        'RECORD-LENGTH', 'REFRESH', 'RELEASE', 'RENUM', 'REPEAT', 'REPORT',
+        'REPORT-INPUT', 'RESHOW', 'RESTART', 'RETRIEVE', 'RETURN-CODE',
+        'ROLLBACK', 'ROW', 'S', 'SCREEN', 'SEARCH', 'SECONDARY', 'SELECT',
+        'SEQUENCE', 'SIZE', 'SKIP', 'SOKAKU', 'SORT', 'SQL', 'STOP', 'SUM',
+        'SYSDATE', 'SYSDATE-LONG', 'SYSIN', 'SYSIPT', 'SYSLST', 'SYSPRINT',
+        'SYSSNAP', 'SYSTIME', 'TALLY', 'TERM-COLUMNS', 'TERM-NAME',
+        'TERM-ROWS', 'TERMINATION', 'TITLE', 'TO', 'TRANSFER', 'TRC',
+        'UNIQUE', 'UNTIL', 'UPDATE', 'UPPERCASE', 'USER', 'USERID', 'VALUE',
+        'VERIFY', 'W', 'WHEN', 'WHILE', 'WORK', 'WRITE', 'X', 'XDM', 'XRST'
+    ]
+
+    tokens = {
+        'root': [
+            (r'\*.*\n', Comment.Single),
+            (r'\n+', Whitespace),
+            # Macro argument
+            (r'&' + _NON_DELIMITER_OR_COMMENT_PATTERN + r'+\.', Name.Variable,
+             'after_macro_argument'),
+            # Macro call
+            (r'%' + _NON_DELIMITER_OR_COMMENT_PATTERN + r'+', Name.Variable),
+            (r'(FILE|MACRO|REPORT)(\s+)',
+             bygroups(Keyword.Declaration, Whitespace), 'after_declaration'),
+            (r'(JOB|PARM)' + r'(' + _DELIMITER_PATTERN + r')',
+             bygroups(Keyword.Declaration, Operator)),
+            (words(_KEYWORDS, suffix=_DELIMITER_PATTERN_CAPTURE),
+             bygroups(Keyword.Reserved, Operator)),
+            (_OPERATORS_PATTERN, Operator),
+            # Procedure declaration
+            (r'(' + _NON_DELIMITER_OR_COMMENT_PATTERN + r'+)(\s*)(\.?)(\s*)(PROC)(\s*\n)',
+             bygroups(Name.Function, Whitespace, Operator, Whitespace,
+                      Keyword.Declaration, Whitespace)),
+            (r'[0-9]+\.[0-9]*', Number.Float),
+            (r'[0-9]+', Number.Integer),
+            (r"'(''|[^'])*'", String),
+            (r'\s+', Whitespace),
+            # Everything else just belongs to a name
+            (_NON_DELIMITER_OR_COMMENT_PATTERN + r'+', Name),
+         ],
+        'after_declaration': [
+            (_NON_DELIMITER_OR_COMMENT_PATTERN + r'+', Name.Function),
+            default('#pop'),
+        ],
+        'after_macro_argument': [
+            (r'\*.*\n', Comment.Single, '#pop'),
+            (r'\s+', Whitespace, '#pop'),
+            (_OPERATORS_PATTERN, Operator, '#pop'),
+            (r"'(''|[^'])*'", String, '#pop'),
+            # Everything else just belongs to a name
+            (_NON_DELIMITER_OR_COMMENT_PATTERN + r'+', Name),
+        ],
+    }
+    _COMMENT_LINE_REGEX = re.compile(r'^\s*\*')
+    _MACRO_HEADER_REGEX = re.compile(r'^\s*MACRO')
+
+    def analyse_text(text):
+        """
+        Perform a structural analysis for basic Easytrieve constructs.
+        """
+        result = 0.0
+        lines = text.split('\n')
+        hasEndProc = False
+        hasHeaderComment = False
+        hasFile = False
+        hasJob = False
+        hasProc = False
+        hasParm = False
+        hasReport = False
+
+        def isCommentLine(line):
+            return EasytrieveLexer._COMMENT_LINE_REGEX.match(lines[0]) is not None
+
+        def isEmptyLine(line):
+            return not bool(line.strip())
+
+        # Remove possible empty lines and header comments.
+        while lines and (isEmptyLine(lines[0]) or isCommentLine(lines[0])):
+            if not isEmptyLine(lines[0]):
+                hasHeaderComment = True
+            del lines[0]
+
+        if EasytrieveLexer._MACRO_HEADER_REGEX.match(lines[0]):
+            # Looks like an Easytrieve macro.
+            result = 0.4
+            if hasHeaderComment:
+                result += 0.4
+        else:
+            # Scan the source for lines starting with indicators.
+            for line in lines:
+                words = line.split()
+                if (len(words) >= 2):
+                    firstWord = words[0]
+                    if not hasReport:
+                        if not hasJob:
+                            if not hasFile:
+                                if not hasParm:
+                                    if firstWord == 'PARM':
+                                        hasParm = True
+                                if firstWord == 'FILE':
+                                    hasFile = True
+                            if firstWord == 'JOB':
+                                hasJob = True
+                        elif firstWord == 'PROC':
+                            hasProc = True
+                        elif firstWord == 'END-PROC':
+                            hasEndProc = True
+                        elif firstWord == 'REPORT':
+                            hasReport = True
+
+            # Weight the findings.
+            if hasJob and (hasProc == hasEndProc):
+                if hasHeaderComment:
+                    result += 0.1
+                if hasParm:
+                    if hasProc:
+                        # Found PARM, JOB and PROC/END-PROC:
+                        # pretty sure this is Easytrieve.
+                        result += 0.8
+                    else:
+                        # Found PARAM and  JOB: probably this is Easytrieve
+                        result += 0.5
+                else:
+                    # Found JOB and possibly other keywords: might be Easytrieve
+                    result += 0.11
+                    if hasParm:
+                        # Note: PARAM is not a proper English word, so this is
+                        # regarded a much better indicator for Easytrieve than
+                        # the other words.
+                        result += 0.2
+                    if hasFile:
+                        result += 0.01
+                    if hasReport:
+                        result += 0.01
+        assert 0.0 <= result <= 1.0
+        return result
+
+
+class JclLexer(RegexLexer):
+    """
+    Job Control Language (JCL)
+    is a scripting language used on mainframe platforms to instruct the system
+    on how to run a batch job or start a subsystem. It is somewhat
+    comparable to MS DOS batch and Unix shell scripts.
+    """
+    name = 'JCL'
+    aliases = ['jcl']
+    filenames = ['*.jcl']
+    mimetypes = ['text/x-jcl']
+    url = 'https://en.wikipedia.org/wiki/Job_Control_Language'
+    version_added = '2.1'
+
+    flags = re.IGNORECASE
+
+    tokens = {
+        'root': [
+            (r'//\*.*\n', Comment.Single),
+            (r'//', Keyword.Pseudo, 'statement'),
+            (r'/\*', Keyword.Pseudo, 'jes2_statement'),
+            # TODO: JES3 statement
+            (r'.*\n', Other)  # Input text or inline code in any language.
+        ],
+        'statement': [
+            (r'\s*\n', Whitespace, '#pop'),
+            (r'([a-z]\w*)(\s+)(exec|job)(\s*)',
+             bygroups(Name.Label, Whitespace, Keyword.Reserved, Whitespace),
+             'option'),
+            (r'[a-z]\w*', Name.Variable, 'statement_command'),
+            (r'\s+', Whitespace, 'statement_command'),
+        ],
+        'statement_command': [
+            (r'\s+(command|cntl|dd|endctl|endif|else|include|jcllib|'
+             r'output|pend|proc|set|then|xmit)\s+', Keyword.Reserved, 'option'),
+            include('option')
+        ],
+        'jes2_statement': [
+            (r'\s*\n', Whitespace, '#pop'),
+            (r'\$', Keyword, 'option'),
+            (r'\b(jobparam|message|netacct|notify|output|priority|route|'
+             r'setup|signoff|xeq|xmit)\b', Keyword, 'option'),
+        ],
+        'option': [
+            # (r'\n', Text, 'root'),
+            (r'\*', Name.Builtin),
+            (r'[\[\](){}<>;,]', Punctuation),
+            (r'[-+*/=&%]', Operator),
+            (r'[a-z_]\w*', Name),
+            (r'\d+\.\d*', Number.Float),
+            (r'\.\d+', Number.Float),
+            (r'\d+', Number.Integer),
+            (r"'", String, 'option_string'),
+            (r'[ \t]+', Whitespace, 'option_comment'),
+            (r'\.', Punctuation),
+        ],
+        'option_string': [
+            (r"(\n)(//)", bygroups(Text, Keyword.Pseudo)),
+            (r"''", String),
+            (r"[^']", String),
+            (r"'", String, '#pop'),
+        ],
+        'option_comment': [
+            # (r'\n', Text, 'root'),
+            (r'.+', Comment.Single),
+        ]
+    }
+
+    _JOB_HEADER_PATTERN = re.compile(r'^//[a-z#$@][a-z0-9#$@]{0,7}\s+job(\s+.*)?$',
+                                     re.IGNORECASE)
+
+    def analyse_text(text):
+        """
+        Recognize JCL job by header.
+        """
+        result = 0.0
+        lines = text.split('\n')
+        if len(lines) > 0:
+            if JclLexer._JOB_HEADER_PATTERN.match(lines[0]):
+                result = 1.0
+        assert 0.0 <= result <= 1.0
+        return result
+
+
+class MiniScriptLexer(RegexLexer):
+    """
+    For MiniScript source code.
+    """
+
+    name = 'MiniScript'
+    url = 'https://miniscript.org'
+    aliases = ['miniscript', 'ms']
+    filenames = ['*.ms']
+    mimetypes = ['text/x-minicript', 'application/x-miniscript']
+    version_added = '2.6'
+
+    tokens = {
+        'root': [
+            (r'#!(.*?)$', Comment.Preproc),
+            default('base'),
+        ],
+        'base': [
+            ('//.*$', Comment.Single),
+            (r'(?i)(\d*\.\d+|\d+\.\d*)(e[+-]?\d+)?', Number),
+            (r'(?i)\d+e[+-]?\d+', Number),
+            (r'\d+', Number),
+            (r'\n', Text),
+            (r'[^\S\n]+', Text),
+            (r'"', String, 'string_double'),
+            (r'(==|!=|<=|>=|[=+\-*/%^<>.:])', Operator),
+            (r'[;,\[\]{}()]', Punctuation),
+            (words((
+                'break', 'continue', 'else', 'end', 'for', 'function', 'if',
+                'in', 'isa', 'then', 'repeat', 'return', 'while'), suffix=r'\b'),
+             Keyword),
+            (words((
+                'abs', 'acos', 'asin', 'atan', 'ceil', 'char', 'cos', 'floor',
+                'log', 'round', 'rnd', 'pi', 'sign', 'sin', 'sqrt', 'str', 'tan',
+                'hasIndex', 'indexOf', 'len', 'val', 'code', 'remove', 'lower',
+                'upper', 'replace', 'split', 'indexes', 'values', 'join', 'sum',
+                'sort', 'shuffle', 'push', 'pop', 'pull', 'range',
+                'print', 'input', 'time', 'wait', 'locals', 'globals', 'outer',
+                'yield'), suffix=r'\b'),
+             Name.Builtin),
+            (r'(true|false|null)\b', Keyword.Constant),
+            (r'(and|or|not|new)\b', Operator.Word),
+            (r'(self|super|__isa)\b', Name.Builtin.Pseudo),
+            (r'[a-zA-Z_]\w*', Name.Variable)
+        ],
+        'string_double': [
+            (r'[^"\n]+', String),
+            (r'""', String),
+            (r'"', String, '#pop'),
+            (r'\n', Text, '#pop'),  # Stray linefeed also terminates strings.
+        ]
+    }
diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/lexers/sgf.py b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/sgf.py
new file mode 100644
index 00000000..f0e56cba
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/sgf.py
@@ -0,0 +1,59 @@
+"""
+    pygments.lexers.sgf
+    ~~~~~~~~~~~~~~~~~~~
+
+    Lexer for Smart Game Format (sgf) file format.
+
+    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, bygroups
+from pygments.token import Name, Literal, String, Punctuation, Whitespace
+
+__all__ = ["SmartGameFormatLexer"]
+
+
+class SmartGameFormatLexer(RegexLexer):
+    """
+    Lexer for Smart Game Format (sgf) file format.
+
+    The format is used to store game records of board games for two players
+    (mainly Go game).
+    """
+    name = 'SmartGameFormat'
+    url = 'https://www.red-bean.com/sgf/'
+    aliases = ['sgf']
+    filenames = ['*.sgf']
+    version_added = '2.4'
+
+    tokens = {
+        'root': [
+            (r'[():;]+', Punctuation),
+            # tokens:
+            (r'(A[BW]|AE|AN|AP|AR|AS|[BW]L|BM|[BW]R|[BW]S|[BW]T|CA|CH|CP|CR|'
+             r'DD|DM|DO|DT|EL|EV|EX|FF|FG|G[BW]|GC|GM|GN|HA|HO|ID|IP|IT|IY|KM|'
+             r'KO|LB|LN|LT|L|MA|MN|M|N|OB|OM|ON|OP|OT|OV|P[BW]|PC|PL|PM|RE|RG|'
+             r'RO|RU|SO|SC|SE|SI|SL|SO|SQ|ST|SU|SZ|T[BW]|TC|TE|TM|TR|UC|US|VW|'
+             r'V|[BW]|C)',
+             Name.Builtin),
+            # number:
+            (r'(\[)([0-9.]+)(\])',
+             bygroups(Punctuation, Literal.Number, Punctuation)),
+            # date:
+            (r'(\[)([0-9]{4}-[0-9]{2}-[0-9]{2})(\])',
+             bygroups(Punctuation, Literal.Date, Punctuation)),
+            # point:
+            (r'(\[)([a-z]{2})(\])',
+             bygroups(Punctuation, String, Punctuation)),
+            # double points:
+            (r'(\[)([a-z]{2})(:)([a-z]{2})(\])',
+             bygroups(Punctuation, String, Punctuation, String, Punctuation)),
+
+            (r'(\[)([\w\s#()+,\-.:?]+)(\])',
+             bygroups(Punctuation, String, Punctuation)),
+            (r'(\[)(\s.*)(\])',
+             bygroups(Punctuation, Whitespace, Punctuation)),
+            (r'\s+', Whitespace)
+        ],
+    }
diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/lexers/shell.py b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/shell.py
new file mode 100644
index 00000000..744767a1
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/shell.py
@@ -0,0 +1,902 @@
+"""
+    pygments.lexers.shell
+    ~~~~~~~~~~~~~~~~~~~~~
+
+    Lexers for various shells.
+
+    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import Lexer, RegexLexer, do_insertions, bygroups, \
+    include, default, this, using, words, line_re
+from pygments.token import Punctuation, Whitespace, \
+    Text, Comment, Operator, Keyword, Name, String, Number, Generic
+from pygments.util import shebang_matches
+
+__all__ = ['BashLexer', 'BashSessionLexer', 'TcshLexer', 'BatchLexer',
+           'SlurmBashLexer', 'MSDOSSessionLexer', 'PowerShellLexer',
+           'PowerShellSessionLexer', 'TcshSessionLexer', 'FishShellLexer',
+           'ExeclineLexer']
+
+
+class BashLexer(RegexLexer):
+    """
+    Lexer for (ba|k|z|)sh shell scripts.
+    """
+
+    name = 'Bash'
+    aliases = ['bash', 'sh', 'ksh', 'zsh', 'shell', 'openrc']
+    filenames = ['*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass',
+                 '*.exheres-0', '*.exlib', '*.zsh',
+                 '.bashrc', 'bashrc', '.bash_*', 'bash_*', 'zshrc', '.zshrc',
+                 '.kshrc', 'kshrc',
+                 'PKGBUILD']
+    mimetypes = ['application/x-sh', 'application/x-shellscript', 'text/x-shellscript']
+    url = 'https://en.wikipedia.org/wiki/Unix_shell'
+    version_added = '0.6'
+
+    tokens = {
+        'root': [
+            include('basic'),
+            (r'`', String.Backtick, 'backticks'),
+            include('data'),
+            include('interp'),
+        ],
+        'interp': [
+            (r'\$\(\(', Keyword, 'math'),
+            (r'\$\(', Keyword, 'paren'),
+            (r'\$\{#?', String.Interpol, 'curly'),
+            (r'\$[a-zA-Z_]\w*', Name.Variable),  # user variable
+            (r'\$(?:\d+|[#$?!_*@-])', Name.Variable),      # builtin
+            (r'\$', Text),
+        ],
+        'basic': [
+            (r'\b(if|fi|else|while|in|do|done|for|then|return|function|case|'
+             r'select|break|continue|until|esac|elif)(\s*)\b',
+             bygroups(Keyword, Whitespace)),
+            (r'\b(alias|bg|bind|builtin|caller|cd|command|compgen|'
+             r'complete|declare|dirs|disown|echo|enable|eval|exec|exit|'
+             r'export|false|fc|fg|getopts|hash|help|history|jobs|kill|let|'
+             r'local|logout|popd|printf|pushd|pwd|read|readonly|set|shift|'
+             r'shopt|source|suspend|test|time|times|trap|true|type|typeset|'
+             r'ulimit|umask|unalias|unset|wait)(?=[\s)`])',
+             Name.Builtin),
+            (r'\A#!.+\n', Comment.Hashbang),
+            (r'#.*\n', Comment.Single),
+            (r'\\[\w\W]', String.Escape),
+            (r'(\b\w+)(\s*)(\+?=)', bygroups(Name.Variable, Whitespace, Operator)),
+            (r'[\[\]{}()=]', Operator),
+            (r'<<<', Operator),  # here-string
+            (r'<<-?\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
+            (r'&&|\|\|', Operator),
+        ],
+        'data': [
+            (r'(?s)\$?"(\\.|[^"\\$])*"', String.Double),
+            (r'"', String.Double, 'string'),
+            (r"(?s)\$'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
+            (r"(?s)'.*?'", String.Single),
+            (r';', Punctuation),
+            (r'&', Punctuation),
+            (r'\|', Punctuation),
+            (r'\s+', Whitespace),
+            (r'\d+\b', Number),
+            (r'[^=\s\[\]{}()$"\'`\\<&|;]+', Text),
+            (r'<', Text),
+        ],
+        'string': [
+            (r'"', String.Double, '#pop'),
+            (r'(?s)(\\\\|\\[0-7]+|\\.|[^"\\$])+', String.Double),
+            include('interp'),
+        ],
+        'curly': [
+            (r'\}', String.Interpol, '#pop'),
+            (r':-', Keyword),
+            (r'\w+', Name.Variable),
+            (r'[^}:"\'`$\\]+', Punctuation),
+            (r':', Punctuation),
+            include('root'),
+        ],
+        'paren': [
+            (r'\)', Keyword, '#pop'),
+            include('root'),
+        ],
+        'math': [
+            (r'\)\)', Keyword, '#pop'),
+            (r'\*\*|\|\||<<|>>|[-+*/%^|&<>]', Operator),
+            (r'\d+#[\da-zA-Z]+', Number),
+            (r'\d+#(?! )', Number),
+            (r'0[xX][\da-fA-F]+', Number),
+            (r'\d+', Number),
+            (r'[a-zA-Z_]\w*', Name.Variable),  # user variable
+            include('root'),
+        ],
+        'backticks': [
+            (r'`', String.Backtick, '#pop'),
+            include('root'),
+        ],
+    }
+
+    def analyse_text(text):
+        if shebang_matches(text, r'(ba|z|)sh'):
+            return 1
+        if text.startswith('$ '):
+            return 0.2
+
+
+class SlurmBashLexer(BashLexer):
+    """
+    Lexer for (ba|k|z|)sh Slurm scripts.
+    """
+
+    name = 'Slurm'
+    aliases = ['slurm', 'sbatch']
+    filenames = ['*.sl']
+    mimetypes = []
+    version_added = '2.4'
+    EXTRA_KEYWORDS = {'srun'}
+
+    def get_tokens_unprocessed(self, text):
+        for index, token, value in BashLexer.get_tokens_unprocessed(self, text):
+            if token is Text and value in self.EXTRA_KEYWORDS:
+                yield index, Name.Builtin, value
+            elif token is Comment.Single and 'SBATCH' in value:
+                yield index, Keyword.Pseudo, value
+            else:
+                yield index, token, value
+
+
+class ShellSessionBaseLexer(Lexer):
+    """
+    Base lexer for shell sessions.
+
+    .. versionadded:: 2.1
+    """
+
+    _bare_continuation = False
+    _venv = re.compile(r'^(\([^)]*\))(\s*)')
+
+    def get_tokens_unprocessed(self, text):
+        innerlexer = self._innerLexerCls(**self.options)
+
+        pos = 0
+        curcode = ''
+        insertions = []
+        backslash_continuation = False
+
+        for match in line_re.finditer(text):
+            line = match.group()
+
+            venv_match = self._venv.match(line)
+            if venv_match:
+                venv = venv_match.group(1)
+                venv_whitespace = venv_match.group(2)
+                insertions.append((len(curcode),
+                                   [(0, Generic.Prompt.VirtualEnv, venv)]))
+                if venv_whitespace:
+                    insertions.append((len(curcode),
+                                       [(0, Text, venv_whitespace)]))
+                line = line[venv_match.end():]
+
+            m = self._ps1rgx.match(line)
+            if m:
+                # To support output lexers (say diff output), the output
+                # needs to be broken by prompts whenever the output lexer
+                # changes.
+                if not insertions:
+                    pos = match.start()
+
+                insertions.append((len(curcode),
+                                   [(0, Generic.Prompt, m.group(1))]))
+                curcode += m.group(2)
+                backslash_continuation = curcode.endswith('\\\n')
+            elif backslash_continuation:
+                if line.startswith(self._ps2):
+                    insertions.append((len(curcode),
+                                       [(0, Generic.Prompt,
+                                         line[:len(self._ps2)])]))
+                    curcode += line[len(self._ps2):]
+                else:
+                    curcode += line
+                backslash_continuation = curcode.endswith('\\\n')
+            elif self._bare_continuation and line.startswith(self._ps2):
+                insertions.append((len(curcode),
+                                   [(0, Generic.Prompt,
+                                     line[:len(self._ps2)])]))
+                curcode += line[len(self._ps2):]
+            else:
+                if insertions:
+                    toks = innerlexer.get_tokens_unprocessed(curcode)
+                    for i, t, v in do_insertions(insertions, toks):
+                        yield pos+i, t, v
+                yield match.start(), Generic.Output, line
+                insertions = []
+                curcode = ''
+        if insertions:
+            for i, t, v in do_insertions(insertions,
+                                         innerlexer.get_tokens_unprocessed(curcode)):
+                yield pos+i, t, v
+
+
+class BashSessionLexer(ShellSessionBaseLexer):
+    """
+    Lexer for Bash shell sessions, i.e. command lines, including a
+    prompt, interspersed with output.
+    """
+
+    name = 'Bash Session'
+    aliases = ['console', 'shell-session']
+    filenames = ['*.sh-session', '*.shell-session']
+    mimetypes = ['application/x-shell-session', 'application/x-sh-session']
+    url = 'https://en.wikipedia.org/wiki/Unix_shell'
+    version_added = '1.1'
+    _example = "console/example.sh-session"
+
+    _innerLexerCls = BashLexer
+    _ps1rgx = re.compile(
+        r'^((?:(?:\[.*?\])|(?:\(\S+\))?(?:| |sh\S*?|\w+\S+[@:]\S+(?:\s+\S+)' \
+        r'?|\[\S+[@:][^\n]+\].+))\s*[$#%]\s*)(.*\n?)')
+    _ps2 = '> '
+
+
+class BatchLexer(RegexLexer):
+    """
+    Lexer for the DOS/Windows Batch file format.
+    """
+    name = 'Batchfile'
+    aliases = ['batch', 'bat', 'dosbatch', 'winbatch']
+    filenames = ['*.bat', '*.cmd']
+    mimetypes = ['application/x-dos-batch']
+    url = 'https://en.wikipedia.org/wiki/Batch_file'
+    version_added = '0.7'
+
+    flags = re.MULTILINE | re.IGNORECASE
+
+    _nl = r'\n\x1a'
+    _punct = r'&<>|'
+    _ws = r'\t\v\f\r ,;=\xa0'
+    _nlws = r'\s\x1a\xa0,;='
+    _space = rf'(?:(?:(?:\^[{_nl}])?[{_ws}])+)'
+    _keyword_terminator = (rf'(?=(?:\^[{_nl}]?)?[{_ws}+./:[\\\]]|[{_nl}{_punct}(])')
+    _token_terminator = rf'(?=\^?[{_ws}]|[{_punct}{_nl}])'
+    _start_label = rf'((?:(?<=^[^:])|^[^:]?)[{_ws}]*)(:)'
+    _label = rf'(?:(?:[^{_nlws}{_punct}+:^]|\^[{_nl}]?[\w\W])*)'
+    _label_compound = rf'(?:(?:[^{_nlws}{_punct}+:^)]|\^[{_nl}]?[^)])*)'
+    _number = rf'(?:-?(?:0[0-7]+|0x[\da-f]+|\d+){_token_terminator})'
+    _opword = r'(?:equ|geq|gtr|leq|lss|neq)'
+    _string = rf'(?:"[^{_nl}"]*(?:"|(?=[{_nl}])))'
+    _variable = (r'(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|'
+                 rf'[^%:{_nl}]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%{_nl}^]|'
+                 rf'\^[^%{_nl}])[^={_nl}]*=(?:[^%{_nl}^]|\^[^%{_nl}])*)?)?%))|'
+                 rf'(?:\^?![^!:{_nl}]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:'
+                 rf'[^!{_nl}^]|\^[^!{_nl}])[^={_nl}]*=(?:[^!{_nl}^]|\^[^!{_nl}])*)?)?\^?!))')
+    _core_token = rf'(?:(?:(?:\^[{_nl}]?)?[^"{_nlws}{_punct}])+)'
+    _core_token_compound = rf'(?:(?:(?:\^[{_nl}]?)?[^"{_nlws}{_punct})])+)'
+    _token = rf'(?:[{_punct}]+|{_core_token})'
+    _token_compound = rf'(?:[{_punct}]+|{_core_token_compound})'
+    _stoken = (rf'(?:[{_punct}]+|(?:{_string}|{_variable}|{_core_token})+)')
+
+    def _make_begin_state(compound, _core_token=_core_token,
+                          _core_token_compound=_core_token_compound,
+                          _keyword_terminator=_keyword_terminator,
+                          _nl=_nl, _punct=_punct, _string=_string,
+                          _space=_space, _start_label=_start_label,
+                          _stoken=_stoken, _token_terminator=_token_terminator,
+                          _variable=_variable, _ws=_ws):
+        rest = '(?:{}|{}|[^"%{}{}{}])*'.format(_string, _variable, _nl, _punct,
+                                            ')' if compound else '')
+        rest_of_line = rf'(?:(?:[^{_nl}^]|\^[{_nl}]?[\w\W])*)'
+        rest_of_line_compound = rf'(?:(?:[^{_nl}^)]|\^[{_nl}]?[^)])*)'
+        set_space = rf'((?:(?:\^[{_nl}]?)?[^\S\n])*)'
+        suffix = ''
+        if compound:
+            _keyword_terminator = rf'(?:(?=\))|{_keyword_terminator})'
+            _token_terminator = rf'(?:(?=\))|{_token_terminator})'
+            suffix = '/compound'
+        return [
+            ((r'\)', Punctuation, '#pop') if compound else
+             (rf'\)((?=\()|{_token_terminator}){rest_of_line}',
+              Comment.Single)),
+            (rf'(?={_start_label})', Text, f'follow{suffix}'),
+            (_space, using(this, state='text')),
+            include(f'redirect{suffix}'),
+            (rf'[{_nl}]+', Text),
+            (r'\(', Punctuation, 'root/compound'),
+            (r'@+', Punctuation),
+            (rf'((?:for|if|rem)(?:(?=(?:\^[{_nl}]?)?/)|(?:(?!\^)|'
+             rf'(?<=m))(?:(?=\()|{_token_terminator})))({_space}?{_core_token_compound if compound else _core_token}?(?:\^[{_nl}]?)?/(?:\^[{_nl}]?)?\?)',
+             bygroups(Keyword, using(this, state='text')),
+             f'follow{suffix}'),
+            (rf'(goto{_keyword_terminator})({rest}(?:\^[{_nl}]?)?/(?:\^[{_nl}]?)?\?{rest})',
+             bygroups(Keyword, using(this, state='text')),
+             f'follow{suffix}'),
+            (words(('assoc', 'break', 'cd', 'chdir', 'cls', 'color', 'copy',
+                    'date', 'del', 'dir', 'dpath', 'echo', 'endlocal', 'erase',
+                    'exit', 'ftype', 'keys', 'md', 'mkdir', 'mklink', 'move',
+                    'path', 'pause', 'popd', 'prompt', 'pushd', 'rd', 'ren',
+                    'rename', 'rmdir', 'setlocal', 'shift', 'start', 'time',
+                    'title', 'type', 'ver', 'verify', 'vol'),
+                   suffix=_keyword_terminator), Keyword, f'follow{suffix}'),
+            (rf'(call)({_space}?)(:)',
+             bygroups(Keyword, using(this, state='text'), Punctuation),
+             f'call{suffix}'),
+            (rf'call{_keyword_terminator}', Keyword),
+            (rf'(for{_token_terminator}(?!\^))({_space})(/f{_token_terminator})',
+             bygroups(Keyword, using(this, state='text'), Keyword),
+             ('for/f', 'for')),
+            (rf'(for{_token_terminator}(?!\^))({_space})(/l{_token_terminator})',
+             bygroups(Keyword, using(this, state='text'), Keyword),
+             ('for/l', 'for')),
+            (rf'for{_token_terminator}(?!\^)', Keyword, ('for2', 'for')),
+            (rf'(goto{_keyword_terminator})({_space}?)(:?)',
+             bygroups(Keyword, using(this, state='text'), Punctuation),
+             f'label{suffix}'),
+            (rf'(if(?:(?=\()|{_token_terminator})(?!\^))({_space}?)((?:/i{_token_terminator})?)({_space}?)((?:not{_token_terminator})?)({_space}?)',
+             bygroups(Keyword, using(this, state='text'), Keyword,
+                      using(this, state='text'), Keyword,
+                      using(this, state='text')), ('(?', 'if')),
+            (rf'rem(((?=\()|{_token_terminator}){_space}?{_stoken}?.*|{_keyword_terminator}{rest_of_line_compound if compound else rest_of_line})',
+             Comment.Single, f'follow{suffix}'),
+            (rf'(set{_keyword_terminator}){set_space}(/a)',
+             bygroups(Keyword, using(this, state='text'), Keyword),
+             f'arithmetic{suffix}'),
+            (r'(set{}){}((?:/p)?){}((?:(?:(?:\^[{}]?)?[^"{}{}^={}]|'
+             r'\^[{}]?[^"=])+)?)((?:(?:\^[{}]?)?=)?)'.format(_keyword_terminator, set_space, set_space, _nl, _nl, _punct,
+              ')' if compound else '', _nl, _nl),
+             bygroups(Keyword, using(this, state='text'), Keyword,
+                      using(this, state='text'), using(this, state='variable'),
+                      Punctuation),
+             f'follow{suffix}'),
+            default(f'follow{suffix}')
+        ]
+
+    def _make_follow_state(compound, _label=_label,
+                           _label_compound=_label_compound, _nl=_nl,
+                           _space=_space, _start_label=_start_label,
+                           _token=_token, _token_compound=_token_compound,
+                           _ws=_ws):
+        suffix = '/compound' if compound else ''
+        state = []
+        if compound:
+            state.append((r'(?=\))', Text, '#pop'))
+        state += [
+            (rf'{_start_label}([{_ws}]*)({_label_compound if compound else _label})(.*)',
+             bygroups(Text, Punctuation, Text, Name.Label, Comment.Single)),
+            include(f'redirect{suffix}'),
+            (rf'(?=[{_nl}])', Text, '#pop'),
+            (r'\|\|?|&&?', Punctuation, '#pop'),
+            include('text')
+        ]
+        return state
+
+    def _make_arithmetic_state(compound, _nl=_nl, _punct=_punct,
+                               _string=_string, _variable=_variable,
+                               _ws=_ws, _nlws=_nlws):
+        op = r'=+\-*/!~'
+        state = []
+        if compound:
+            state.append((r'(?=\))', Text, '#pop'))
+        state += [
+            (r'0[0-7]+', Number.Oct),
+            (r'0x[\da-f]+', Number.Hex),
+            (r'\d+', Number.Integer),
+            (r'[(),]+', Punctuation),
+            (rf'([{op}]|%|\^\^)+', Operator),
+            (r'({}|{}|(\^[{}]?)?[^(){}%\^"{}{}]|\^[{}]?{})+'.format(_string, _variable, _nl, op, _nlws, _punct, _nlws,
+              r'[^)]' if compound else r'[\w\W]'),
+             using(this, state='variable')),
+            (r'(?=[\x00|&])', Text, '#pop'),
+            include('follow')
+        ]
+        return state
+
+    def _make_call_state(compound, _label=_label,
+                         _label_compound=_label_compound):
+        state = []
+        if compound:
+            state.append((r'(?=\))', Text, '#pop'))
+        state.append((r'(:?)(%s)' % (_label_compound if compound else _label),
+                      bygroups(Punctuation, Name.Label), '#pop'))
+        return state
+
+    def _make_label_state(compound, _label=_label,
+                          _label_compound=_label_compound, _nl=_nl,
+                          _punct=_punct, _string=_string, _variable=_variable):
+        state = []
+        if compound:
+            state.append((r'(?=\))', Text, '#pop'))
+        state.append((r'({}?)((?:{}|{}|\^[{}]?{}|[^"%^{}{}{}])*)'.format(_label_compound if compound else _label, _string,
+                       _variable, _nl, r'[^)]' if compound else r'[\w\W]', _nl,
+                       _punct, r')' if compound else ''),
+                      bygroups(Name.Label, Comment.Single), '#pop'))
+        return state
+
+    def _make_redirect_state(compound,
+                             _core_token_compound=_core_token_compound,
+                             _nl=_nl, _punct=_punct, _stoken=_stoken,
+                             _string=_string, _space=_space,
+                             _variable=_variable, _nlws=_nlws):
+        stoken_compound = (rf'(?:[{_punct}]+|(?:{_string}|{_variable}|{_core_token_compound})+)')
+        return [
+            (rf'((?:(?<=[{_nlws}])\d)?)(>>?&|<&)([{_nlws}]*)(\d)',
+             bygroups(Number.Integer, Punctuation, Text, Number.Integer)),
+            (rf'((?:(?<=[{_nlws}])(?>?|<)({_space}?{stoken_compound if compound else _stoken})',
+             bygroups(Number.Integer, Punctuation, using(this, state='text')))
+        ]
+
+    tokens = {
+        'root': _make_begin_state(False),
+        'follow': _make_follow_state(False),
+        'arithmetic': _make_arithmetic_state(False),
+        'call': _make_call_state(False),
+        'label': _make_label_state(False),
+        'redirect': _make_redirect_state(False),
+        'root/compound': _make_begin_state(True),
+        'follow/compound': _make_follow_state(True),
+        'arithmetic/compound': _make_arithmetic_state(True),
+        'call/compound': _make_call_state(True),
+        'label/compound': _make_label_state(True),
+        'redirect/compound': _make_redirect_state(True),
+        'variable-or-escape': [
+            (_variable, Name.Variable),
+            (rf'%%|\^[{_nl}]?(\^!|[\w\W])', String.Escape)
+        ],
+        'string': [
+            (r'"', String.Double, '#pop'),
+            (_variable, Name.Variable),
+            (r'\^!|%%', String.Escape),
+            (rf'[^"%^{_nl}]+|[%^]', String.Double),
+            default('#pop')
+        ],
+        'sqstring': [
+            include('variable-or-escape'),
+            (r'[^%]+|%', String.Single)
+        ],
+        'bqstring': [
+            include('variable-or-escape'),
+            (r'[^%]+|%', String.Backtick)
+        ],
+        'text': [
+            (r'"', String.Double, 'string'),
+            include('variable-or-escape'),
+            (rf'[^"%^{_nlws}{_punct}\d)]+|.', Text)
+        ],
+        'variable': [
+            (r'"', String.Double, 'string'),
+            include('variable-or-escape'),
+            (rf'[^"%^{_nl}]+|.', Name.Variable)
+        ],
+        'for': [
+            (rf'({_space})(in)({_space})(\()',
+             bygroups(using(this, state='text'), Keyword,
+                      using(this, state='text'), Punctuation), '#pop'),
+            include('follow')
+        ],
+        'for2': [
+            (r'\)', Punctuation),
+            (rf'({_space})(do{_token_terminator})',
+             bygroups(using(this, state='text'), Keyword), '#pop'),
+            (rf'[{_nl}]+', Text),
+            include('follow')
+        ],
+        'for/f': [
+            (rf'(")((?:{_variable}|[^"])*?")([{_nlws}]*)(\))',
+             bygroups(String.Double, using(this, state='string'), Text,
+                      Punctuation)),
+            (r'"', String.Double, ('#pop', 'for2', 'string')),
+            (rf"('(?:%%|{_variable}|[\w\W])*?')([{_nlws}]*)(\))",
+             bygroups(using(this, state='sqstring'), Text, Punctuation)),
+            (rf'(`(?:%%|{_variable}|[\w\W])*?`)([{_nlws}]*)(\))',
+             bygroups(using(this, state='bqstring'), Text, Punctuation)),
+            include('for2')
+        ],
+        'for/l': [
+            (r'-?\d+', Number.Integer),
+            include('for2')
+        ],
+        'if': [
+            (rf'((?:cmdextversion|errorlevel){_token_terminator})({_space})(\d+)',
+             bygroups(Keyword, using(this, state='text'),
+                      Number.Integer), '#pop'),
+            (rf'(defined{_token_terminator})({_space})({_stoken})',
+             bygroups(Keyword, using(this, state='text'),
+                      using(this, state='variable')), '#pop'),
+            (rf'(exist{_token_terminator})({_space}{_stoken})',
+             bygroups(Keyword, using(this, state='text')), '#pop'),
+            (rf'({_number}{_space})({_opword})({_space}{_number})',
+             bygroups(using(this, state='arithmetic'), Operator.Word,
+                      using(this, state='arithmetic')), '#pop'),
+            (_stoken, using(this, state='text'), ('#pop', 'if2')),
+        ],
+        'if2': [
+            (rf'({_space}?)(==)({_space}?{_stoken})',
+             bygroups(using(this, state='text'), Operator,
+                      using(this, state='text')), '#pop'),
+            (rf'({_space})({_opword})({_space}{_stoken})',
+             bygroups(using(this, state='text'), Operator.Word,
+                      using(this, state='text')), '#pop')
+        ],
+        '(?': [
+            (_space, using(this, state='text')),
+            (r'\(', Punctuation, ('#pop', 'else?', 'root/compound')),
+            default('#pop')
+        ],
+        'else?': [
+            (_space, using(this, state='text')),
+            (rf'else{_token_terminator}', Keyword, '#pop'),
+            default('#pop')
+        ]
+    }
+
+
+class MSDOSSessionLexer(ShellSessionBaseLexer):
+    """
+    Lexer for MS DOS shell sessions, i.e. command lines, including a
+    prompt, interspersed with output.
+    """
+
+    name = 'MSDOS Session'
+    aliases = ['doscon']
+    filenames = []
+    mimetypes = []
+    url = 'https://en.wikipedia.org/wiki/MS-DOS'
+    version_added = '2.1'
+    _example = "doscon/session"
+
+    _innerLexerCls = BatchLexer
+    _ps1rgx = re.compile(r'^([^>]*>)(.*\n?)')
+    _ps2 = 'More? '
+
+
+class TcshLexer(RegexLexer):
+    """
+    Lexer for tcsh scripts.
+    """
+
+    name = 'Tcsh'
+    aliases = ['tcsh', 'csh']
+    filenames = ['*.tcsh', '*.csh']
+    mimetypes = ['application/x-csh']
+    url = 'https://www.tcsh.org'
+    version_added = '0.10'
+
+    tokens = {
+        'root': [
+            include('basic'),
+            (r'\$\(', Keyword, 'paren'),
+            (r'\$\{#?', Keyword, 'curly'),
+            (r'`', String.Backtick, 'backticks'),
+            include('data'),
+        ],
+        'basic': [
+            (r'\b(if|endif|else|while|then|foreach|case|default|'
+             r'break|continue|goto|breaksw|end|switch|endsw)\s*\b',
+             Keyword),
+            (r'\b(alias|alloc|bg|bindkey|builtins|bye|caller|cd|chdir|'
+             r'complete|dirs|echo|echotc|eval|exec|exit|fg|filetest|getxvers|'
+             r'glob|getspath|hashstat|history|hup|inlib|jobs|kill|'
+             r'limit|log|login|logout|ls-F|migrate|newgrp|nice|nohup|notify|'
+             r'onintr|popd|printenv|pushd|rehash|repeat|rootnode|popd|pushd|'
+             r'set|shift|sched|setenv|setpath|settc|setty|setxvers|shift|'
+             r'source|stop|suspend|source|suspend|telltc|time|'
+             r'umask|unalias|uncomplete|unhash|universe|unlimit|unset|unsetenv|'
+             r'ver|wait|warp|watchlog|where|which)\s*\b',
+             Name.Builtin),
+            (r'#.*', Comment),
+            (r'\\[\w\W]', String.Escape),
+            (r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Text, Operator)),
+            (r'[\[\]{}()=]+', Operator),
+            (r'<<\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
+            (r';', Punctuation),
+        ],
+        'data': [
+            (r'(?s)"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
+            (r"(?s)'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
+            (r'\s+', Text),
+            (r'[^=\s\[\]{}()$"\'`\\;#]+', Text),
+            (r'\d+(?= |\Z)', Number),
+            (r'\$#?(\w+|.)', Name.Variable),
+        ],
+        'curly': [
+            (r'\}', Keyword, '#pop'),
+            (r':-', Keyword),
+            (r'\w+', Name.Variable),
+            (r'[^}:"\'`$]+', Punctuation),
+            (r':', Punctuation),
+            include('root'),
+        ],
+        'paren': [
+            (r'\)', Keyword, '#pop'),
+            include('root'),
+        ],
+        'backticks': [
+            (r'`', String.Backtick, '#pop'),
+            include('root'),
+        ],
+    }
+
+
+class TcshSessionLexer(ShellSessionBaseLexer):
+    """
+    Lexer for Tcsh sessions, i.e. command lines, including a
+    prompt, interspersed with output.
+    """
+
+    name = 'Tcsh Session'
+    aliases = ['tcshcon']
+    filenames = []
+    mimetypes = []
+    url = 'https://www.tcsh.org'
+    version_added = '2.1'
+    _example = "tcshcon/session"
+
+    _innerLexerCls = TcshLexer
+    _ps1rgx = re.compile(r'^([^>]+>)(.*\n?)')
+    _ps2 = '? '
+
+
+class PowerShellLexer(RegexLexer):
+    """
+    For Windows PowerShell code.
+    """
+    name = 'PowerShell'
+    aliases = ['powershell', 'pwsh', 'posh', 'ps1', 'psm1']
+    filenames = ['*.ps1', '*.psm1']
+    mimetypes = ['text/x-powershell']
+    url = 'https://learn.microsoft.com/en-us/powershell'
+    version_added = '1.5'
+
+    flags = re.DOTALL | re.IGNORECASE | re.MULTILINE
+
+    keywords = (
+        'while validateset validaterange validatepattern validatelength '
+        'validatecount until trap switch return ref process param parameter in '
+        'if global: local: function foreach for finally filter end elseif else '
+        'dynamicparam do default continue cmdletbinding break begin alias \\? '
+        '% #script #private #local #global mandatory parametersetname position '
+        'valuefrompipeline valuefrompipelinebypropertyname '
+        'valuefromremainingarguments helpmessage try catch throw').split()
+
+    operators = (
+        'and as band bnot bor bxor casesensitive ccontains ceq cge cgt cle '
+        'clike clt cmatch cne cnotcontains cnotlike cnotmatch contains '
+        'creplace eq exact f file ge gt icontains ieq ige igt ile ilike ilt '
+        'imatch ine inotcontains inotlike inotmatch ireplace is isnot le like '
+        'lt match ne not notcontains notlike notmatch or regex replace '
+        'wildcard').split()
+
+    verbs = (
+        'write where watch wait use update unregister unpublish unprotect '
+        'unlock uninstall undo unblock trace test tee take sync switch '
+        'suspend submit stop step start split sort skip show set send select '
+        'search scroll save revoke resume restore restart resolve resize '
+        'reset request repair rename remove register redo receive read push '
+        'publish protect pop ping out optimize open new move mount merge '
+        'measure lock limit join invoke install initialize import hide group '
+        'grant get format foreach find export expand exit enter enable edit '
+        'dismount disconnect disable deny debug cxnew copy convertto '
+        'convertfrom convert connect confirm compress complete compare close '
+        'clear checkpoint block backup assert approve aggregate add').split()
+
+    aliases_ = (
+        'ac asnp cat cd cfs chdir clc clear clhy cli clp cls clv cnsn '
+        'compare copy cp cpi cpp curl cvpa dbp del diff dir dnsn ebp echo epal '
+        'epcsv epsn erase etsn exsn fc fhx fl foreach ft fw gal gbp gc gci gcm '
+        'gcs gdr ghy gi gjb gl gm gmo gp gps gpv group gsn gsnp gsv gu gv gwmi '
+        'h history icm iex ihy ii ipal ipcsv ipmo ipsn irm ise iwmi iwr kill lp '
+        'ls man md measure mi mount move mp mv nal ndr ni nmo npssc nsn nv ogv '
+        'oh popd ps pushd pwd r rbp rcjb rcsn rd rdr ren ri rjb rm rmdir rmo '
+        'rni rnp rp rsn rsnp rujb rv rvpa rwmi sajb sal saps sasv sbp sc select '
+        'set shcm si sl sleep sls sort sp spjb spps spsv start sujb sv swmi tee '
+        'trcm type wget where wjb write').split()
+
+    commenthelp = (
+        'component description example externalhelp forwardhelpcategory '
+        'forwardhelptargetname functionality inputs link '
+        'notes outputs parameter remotehelprunspace role synopsis').split()
+
+    tokens = {
+        'root': [
+            # we need to count pairs of parentheses for correct highlight
+            # of '$(...)' blocks in strings
+            (r'\(', Punctuation, 'child'),
+            (r'\s+', Text),
+            (r'^(\s*#[#\s]*)(\.(?:{}))([^\n]*$)'.format('|'.join(commenthelp)),
+             bygroups(Comment, String.Doc, Comment)),
+            (r'#[^\n]*?$', Comment),
+            (r'(<|<)#', Comment.Multiline, 'multline'),
+            (r'@"\n', String.Heredoc, 'heredoc-double'),
+            (r"@'\n.*?\n'@", String.Heredoc),
+            # escaped syntax
+            (r'`[\'"$@-]', Punctuation),
+            (r'"', String.Double, 'string'),
+            (r"'([^']|'')*'", String.Single),
+            (r'(\$|@@|@)((global|script|private|env):)?\w+',
+             Name.Variable),
+            (r'({})\b'.format('|'.join(keywords)), Keyword),
+            (r'-({})\b'.format('|'.join(operators)), Operator),
+            (r'({})-[a-z_]\w*\b'.format('|'.join(verbs)), Name.Builtin),
+            (r'({})\s'.format('|'.join(aliases_)), Name.Builtin),
+            (r'\[[a-z_\[][\w. `,\[\]]*\]', Name.Constant),  # .net [type]s
+            (r'-[a-z_]\w*', Name),
+            (r'\w+', Name),
+            (r'[.,;:@{}\[\]$()=+*/\\&%!~?^`|<>-]', Punctuation),
+        ],
+        'child': [
+            (r'\)', Punctuation, '#pop'),
+            include('root'),
+        ],
+        'multline': [
+            (r'[^#&.]+', Comment.Multiline),
+            (r'#(>|>)', Comment.Multiline, '#pop'),
+            (r'\.({})'.format('|'.join(commenthelp)), String.Doc),
+            (r'[#&.]', Comment.Multiline),
+        ],
+        'string': [
+            (r"`[0abfnrtv'\"$`]", String.Escape),
+            (r'[^$`"]+', String.Double),
+            (r'\$\(', Punctuation, 'child'),
+            (r'""', String.Double),
+            (r'[`$]', String.Double),
+            (r'"', String.Double, '#pop'),
+        ],
+        'heredoc-double': [
+            (r'\n"@', String.Heredoc, '#pop'),
+            (r'\$\(', Punctuation, 'child'),
+            (r'[^@\n]+"]', String.Heredoc),
+            (r".", String.Heredoc),
+        ]
+    }
+
+
+class PowerShellSessionLexer(ShellSessionBaseLexer):
+    """
+    Lexer for PowerShell sessions, i.e. command lines, including a
+    prompt, interspersed with output.
+    """
+
+    name = 'PowerShell Session'
+    aliases = ['pwsh-session', 'ps1con']
+    filenames = []
+    mimetypes = []
+    url = 'https://learn.microsoft.com/en-us/powershell'
+    version_added = '2.1'
+    _example = "pwsh-session/session"
+
+    _innerLexerCls = PowerShellLexer
+    _bare_continuation = True
+    _ps1rgx = re.compile(r'^((?:\[[^]]+\]: )?PS[^>]*> ?)(.*\n?)')
+    _ps2 = '> '
+
+
+class FishShellLexer(RegexLexer):
+    """
+    Lexer for Fish shell scripts.
+    """
+
+    name = 'Fish'
+    aliases = ['fish', 'fishshell']
+    filenames = ['*.fish', '*.load']
+    mimetypes = ['application/x-fish']
+    url = 'https://fishshell.com'
+    version_added = '2.1'
+
+    tokens = {
+        'root': [
+            include('basic'),
+            include('data'),
+            include('interp'),
+        ],
+        'interp': [
+            (r'\$\(\(', Keyword, 'math'),
+            (r'\(', Keyword, 'paren'),
+            (r'\$#?(\w+|.)', Name.Variable),
+        ],
+        'basic': [
+            (r'\b(begin|end|if|else|while|break|for|in|return|function|block|'
+             r'case|continue|switch|not|and|or|set|echo|exit|pwd|true|false|'
+             r'cd|count|test)(\s*)\b',
+             bygroups(Keyword, Text)),
+            (r'\b(alias|bg|bind|breakpoint|builtin|command|commandline|'
+             r'complete|contains|dirh|dirs|emit|eval|exec|fg|fish|fish_config|'
+             r'fish_indent|fish_pager|fish_prompt|fish_right_prompt|'
+             r'fish_update_completions|fishd|funced|funcsave|functions|help|'
+             r'history|isatty|jobs|math|mimedb|nextd|open|popd|prevd|psub|'
+             r'pushd|random|read|set_color|source|status|trap|type|ulimit|'
+             r'umask|vared|fc|getopts|hash|kill|printf|time|wait)\s*\b(?!\.)',
+             Name.Builtin),
+            (r'#.*\n', Comment),
+            (r'\\[\w\W]', String.Escape),
+            (r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Whitespace, Operator)),
+            (r'[\[\]()=]', Operator),
+            (r'<<-?\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
+        ],
+        'data': [
+            (r'(?s)\$?"(\\\\|\\[0-7]+|\\.|[^"\\$])*"', String.Double),
+            (r'"', String.Double, 'string'),
+            (r"(?s)\$'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
+            (r"(?s)'.*?'", String.Single),
+            (r';', Punctuation),
+            (r'&|\||\^|<|>', Operator),
+            (r'\s+', Text),
+            (r'\d+(?= |\Z)', Number),
+            (r'[^=\s\[\]{}()$"\'`\\<&|;]+', Text),
+        ],
+        'string': [
+            (r'"', String.Double, '#pop'),
+            (r'(?s)(\\\\|\\[0-7]+|\\.|[^"\\$])+', String.Double),
+            include('interp'),
+        ],
+        'paren': [
+            (r'\)', Keyword, '#pop'),
+            include('root'),
+        ],
+        'math': [
+            (r'\)\)', Keyword, '#pop'),
+            (r'[-+*/%^|&]|\*\*|\|\|', Operator),
+            (r'\d+#\d+', Number),
+            (r'\d+#(?! )', Number),
+            (r'\d+', Number),
+            include('root'),
+        ],
+    }
+
+class ExeclineLexer(RegexLexer):
+    """
+    Lexer for Laurent Bercot's execline language.
+    """
+
+    name = 'execline'
+    aliases = ['execline']
+    filenames = ['*.exec']
+    url = 'https://skarnet.org/software/execline'
+    version_added = '2.7'
+
+    tokens = {
+        'root': [
+            include('basic'),
+            include('data'),
+            include('interp')
+        ],
+        'interp': [
+            (r'\$\{', String.Interpol, 'curly'),
+            (r'\$[\w@#]+', Name.Variable),  # user variable
+            (r'\$', Text),
+        ],
+        'basic': [
+            (r'\b(background|backtick|cd|define|dollarat|elgetopt|'
+             r'elgetpositionals|elglob|emptyenv|envfile|exec|execlineb|'
+             r'exit|export|fdblock|fdclose|fdmove|fdreserve|fdswap|'
+             r'forbacktickx|foreground|forstdin|forx|getcwd|getpid|heredoc|'
+             r'homeof|if|ifelse|ifte|ifthenelse|importas|loopwhilex|'
+             r'multidefine|multisubstitute|pipeline|piperw|posix-cd|'
+             r'redirfd|runblock|shift|trap|tryexec|umask|unexport|wait|'
+             r'withstdinas)\b', Name.Builtin),
+            (r'\A#!.+\n', Comment.Hashbang),
+            (r'#.*\n', Comment.Single),
+            (r'[{}]', Operator)
+        ],
+        'data': [
+            (r'(?s)"(\\.|[^"\\$])*"', String.Double),
+            (r'"', String.Double, 'string'),
+            (r'\s+', Text),
+            (r'[^\s{}$"\\]+', Text)
+        ],
+        'string': [
+            (r'"', String.Double, '#pop'),
+            (r'(?s)(\\\\|\\.|[^"\\$])+', String.Double),
+            include('interp'),
+        ],
+        'curly': [
+            (r'\}', String.Interpol, '#pop'),
+            (r'[\w#@]+', Name.Variable),
+            include('root')
+        ]
+
+    }
+
+    def analyse_text(text):
+        if shebang_matches(text, r'execlineb'):
+            return 1
diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/lexers/sieve.py b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/sieve.py
new file mode 100644
index 00000000..fc48980c
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/sieve.py
@@ -0,0 +1,78 @@
+"""
+    pygments.lexers.sieve
+    ~~~~~~~~~~~~~~~~~~~~~
+
+    Lexer for Sieve file format.
+
+    https://tools.ietf.org/html/rfc5228
+    https://tools.ietf.org/html/rfc5173
+    https://tools.ietf.org/html/rfc5229
+    https://tools.ietf.org/html/rfc5230
+    https://tools.ietf.org/html/rfc5232
+    https://tools.ietf.org/html/rfc5235
+    https://tools.ietf.org/html/rfc5429
+    https://tools.ietf.org/html/rfc8580
+
+    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, bygroups
+from pygments.token import Comment, Name, Literal, String, Text, Punctuation, \
+    Keyword
+
+__all__ = ["SieveLexer"]
+
+
+class SieveLexer(RegexLexer):
+    """
+    Lexer for sieve format.
+    """
+    name = 'Sieve'
+    filenames = ['*.siv', '*.sieve']
+    aliases = ['sieve']
+    url = 'https://en.wikipedia.org/wiki/Sieve_(mail_filtering_language)'
+    version_added = '2.6'
+
+    tokens = {
+        'root': [
+            (r'\s+', Text),
+            (r'[();,{}\[\]]', Punctuation),
+            # import:
+            (r'(?i)require',
+             Keyword.Namespace),
+            # tags:
+            (r'(?i)(:)(addresses|all|contains|content|create|copy|comparator|'
+             r'count|days|detail|domain|fcc|flags|from|handle|importance|is|'
+             r'localpart|length|lowerfirst|lower|matches|message|mime|options|'
+             r'over|percent|quotewildcard|raw|regex|specialuse|subject|text|'
+             r'under|upperfirst|upper|value)',
+             bygroups(Name.Tag, Name.Tag)),
+            # tokens:
+            (r'(?i)(address|addflag|allof|anyof|body|discard|elsif|else|envelope|'
+             r'ereject|exists|false|fileinto|if|hasflag|header|keep|'
+             r'notify_method_capability|notify|not|redirect|reject|removeflag|'
+             r'setflag|size|spamtest|stop|string|true|vacation|virustest)',
+             Name.Builtin),
+            (r'(?i)set',
+             Keyword.Declaration),
+            # number:
+            (r'([0-9.]+)([kmgKMG])?',
+             bygroups(Literal.Number, Literal.Number)),
+            # comment:
+            (r'#.*$',
+             Comment.Single),
+            (r'/\*.*\*/',
+             Comment.Multiline),
+            # string:
+            (r'"[^"]*?"',
+             String),
+            # text block:
+            (r'text:',
+             Name.Tag, 'text'),
+        ],
+        'text': [
+            (r'[^.].*?\n', String),
+            (r'^\.', Punctuation, "#pop"),
+        ]
+    }
diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/lexers/slash.py b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/slash.py
new file mode 100644
index 00000000..1c439d0d
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/slash.py
@@ -0,0 +1,183 @@
+"""
+    pygments.lexers.slash
+    ~~~~~~~~~~~~~~~~~~~~~
+
+    Lexer for the Slash programming language.
+
+    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import ExtendedRegexLexer, bygroups, DelegatingLexer
+from pygments.token import Name, Number, String, Comment, Punctuation, \
+    Other, Keyword, Operator, Whitespace
+
+__all__ = ['SlashLexer']
+
+
+class SlashLanguageLexer(ExtendedRegexLexer):
+    _nkw = r'(?=[^a-zA-Z_0-9])'
+
+    def move_state(new_state):
+        return ("#pop", new_state)
+
+    def right_angle_bracket(lexer, match, ctx):
+        if len(ctx.stack) > 1 and ctx.stack[-2] == "string":
+            ctx.stack.pop()
+        yield match.start(), String.Interpol, '}'
+        ctx.pos = match.end()
+        pass
+
+    tokens = {
+        "root": [
+            (r"<%=",        Comment.Preproc,    move_state("slash")),
+            (r"<%!!",       Comment.Preproc,    move_state("slash")),
+            (r"<%#.*?%>",   Comment.Multiline),
+            (r"<%",         Comment.Preproc,    move_state("slash")),
+            (r".|\n",       Other),
+        ],
+        "string": [
+            (r"\\",         String.Escape,      move_state("string_e")),
+            (r"\"",         String,             move_state("slash")),
+            (r"#\{",        String.Interpol,    "slash"),
+            (r'.|\n',       String),
+        ],
+        "string_e": [
+            (r'n',                  String.Escape,      move_state("string")),
+            (r't',                  String.Escape,      move_state("string")),
+            (r'r',                  String.Escape,      move_state("string")),
+            (r'e',                  String.Escape,      move_state("string")),
+            (r'x[a-fA-F0-9]{2}',    String.Escape,      move_state("string")),
+            (r'.',                  String.Escape,      move_state("string")),
+        ],
+        "regexp": [
+            (r'}[a-z]*',            String.Regex,       move_state("slash")),
+            (r'\\(.|\n)',           String.Regex),
+            (r'{',                  String.Regex,       "regexp_r"),
+            (r'.|\n',               String.Regex),
+        ],
+        "regexp_r": [
+            (r'}[a-z]*',            String.Regex,       "#pop"),
+            (r'\\(.|\n)',           String.Regex),
+            (r'{',                  String.Regex,       "regexp_r"),
+        ],
+        "slash": [
+            (r"%>",                     Comment.Preproc,    move_state("root")),
+            (r"\"",                     String,             move_state("string")),
+            (r"'[a-zA-Z0-9_]+",         String),
+            (r'%r{',                    String.Regex,       move_state("regexp")),
+            (r'/\*.*?\*/',              Comment.Multiline),
+            (r"(#|//).*?\n",            Comment.Single),
+            (r'-?[0-9]+e[+-]?[0-9]+',   Number.Float),
+            (r'-?[0-9]+\.[0-9]+(e[+-]?[0-9]+)?', Number.Float),
+            (r'-?[0-9]+',               Number.Integer),
+            (r'nil'+_nkw,               Name.Builtin),
+            (r'true'+_nkw,              Name.Builtin),
+            (r'false'+_nkw,             Name.Builtin),
+            (r'self'+_nkw,              Name.Builtin),
+            (r'(class)(\s+)([A-Z][a-zA-Z0-9_\']*)',
+                bygroups(Keyword, Whitespace, Name.Class)),
+            (r'class'+_nkw,             Keyword),
+            (r'extends'+_nkw,           Keyword),
+            (r'(def)(\s+)(self)(\s*)(\.)(\s*)([a-z_][a-zA-Z0-9_\']*=?|<<|>>|==|<=>|<=|<|>=|>|\+|-(self)?|~(self)?|\*|/|%|^|&&|&|\||\[\]=?)',
+                bygroups(Keyword, Whitespace, Name.Builtin, Whitespace, Punctuation, Whitespace, Name.Function)),
+            (r'(def)(\s+)([a-z_][a-zA-Z0-9_\']*=?|<<|>>|==|<=>|<=|<|>=|>|\+|-(self)?|~(self)?|\*|/|%|^|&&|&|\||\[\]=?)',
+                bygroups(Keyword, Whitespace, Name.Function)),
+            (r'def'+_nkw,               Keyword),
+            (r'if'+_nkw,                Keyword),
+            (r'elsif'+_nkw,             Keyword),
+            (r'else'+_nkw,              Keyword),
+            (r'unless'+_nkw,            Keyword),
+            (r'for'+_nkw,               Keyword),
+            (r'in'+_nkw,                Keyword),
+            (r'while'+_nkw,             Keyword),
+            (r'until'+_nkw,             Keyword),
+            (r'and'+_nkw,               Keyword),
+            (r'or'+_nkw,                Keyword),
+            (r'not'+_nkw,               Keyword),
+            (r'lambda'+_nkw,            Keyword),
+            (r'try'+_nkw,               Keyword),
+            (r'catch'+_nkw,             Keyword),
+            (r'return'+_nkw,            Keyword),
+            (r'next'+_nkw,              Keyword),
+            (r'last'+_nkw,              Keyword),
+            (r'throw'+_nkw,             Keyword),
+            (r'use'+_nkw,               Keyword),
+            (r'switch'+_nkw,            Keyword),
+            (r'\\',                     Keyword),
+            (r'λ',                      Keyword),
+            (r'__FILE__'+_nkw,          Name.Builtin.Pseudo),
+            (r'__LINE__'+_nkw,          Name.Builtin.Pseudo),
+            (r'[A-Z][a-zA-Z0-9_\']*'+_nkw, Name.Constant),
+            (r'[a-z_][a-zA-Z0-9_\']*'+_nkw, Name),
+            (r'@[a-z_][a-zA-Z0-9_\']*'+_nkw, Name.Variable.Instance),
+            (r'@@[a-z_][a-zA-Z0-9_\']*'+_nkw, Name.Variable.Class),
+            (r'\(',                     Punctuation),
+            (r'\)',                     Punctuation),
+            (r'\[',                     Punctuation),
+            (r'\]',                     Punctuation),
+            (r'\{',                     Punctuation),
+            (r'\}',                     right_angle_bracket),
+            (r';',                      Punctuation),
+            (r',',                      Punctuation),
+            (r'<<=',                    Operator),
+            (r'>>=',                    Operator),
+            (r'<<',                     Operator),
+            (r'>>',                     Operator),
+            (r'==',                     Operator),
+            (r'!=',                     Operator),
+            (r'=>',                     Operator),
+            (r'=',                      Operator),
+            (r'<=>',                    Operator),
+            (r'<=',                     Operator),
+            (r'>=',                     Operator),
+            (r'<',                      Operator),
+            (r'>',                      Operator),
+            (r'\+\+',                   Operator),
+            (r'\+=',                    Operator),
+            (r'-=',                     Operator),
+            (r'\*\*=',                  Operator),
+            (r'\*=',                    Operator),
+            (r'\*\*',                   Operator),
+            (r'\*',                     Operator),
+            (r'/=',                     Operator),
+            (r'\+',                     Operator),
+            (r'-',                      Operator),
+            (r'/',                      Operator),
+            (r'%=',                     Operator),
+            (r'%',                      Operator),
+            (r'^=',                     Operator),
+            (r'&&=',                    Operator),
+            (r'&=',                     Operator),
+            (r'&&',                     Operator),
+            (r'&',                      Operator),
+            (r'\|\|=',                  Operator),
+            (r'\|=',                    Operator),
+            (r'\|\|',                   Operator),
+            (r'\|',                     Operator),
+            (r'!',                      Operator),
+            (r'\.\.\.',                 Operator),
+            (r'\.\.',                   Operator),
+            (r'\.',                     Operator),
+            (r'::',                     Operator),
+            (r':',                      Operator),
+            (r'(\s|\n)+',               Whitespace),
+            (r'[a-z_][a-zA-Z0-9_\']*',  Name.Variable),
+        ],
+    }
+
+
+class SlashLexer(DelegatingLexer):
+    """
+    Lexer for the Slash programming language.
+    """
+
+    name = 'Slash'
+    aliases = ['slash']
+    filenames = ['*.sla']
+    url = 'https://github.com/arturadib/Slash-A'
+    version_added = '2.4'
+
+    def __init__(self, **options):
+        from pygments.lexers.web import HtmlLexer
+        super().__init__(HtmlLexer, SlashLanguageLexer, **options)
diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/lexers/smalltalk.py b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/smalltalk.py
new file mode 100644
index 00000000..674b7b4b
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/smalltalk.py
@@ -0,0 +1,194 @@
+"""
+    pygments.lexers.smalltalk
+    ~~~~~~~~~~~~~~~~~~~~~~~~~
+
+    Lexers for Smalltalk and related languages.
+
+    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, include, bygroups, default
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+    Number, Punctuation
+
+__all__ = ['SmalltalkLexer', 'NewspeakLexer']
+
+
+class SmalltalkLexer(RegexLexer):
+    """
+    For Smalltalk syntax.
+    Contributed by Stefan Matthias Aust.
+    Rewritten by Nils Winter.
+    """
+    name = 'Smalltalk'
+    url = 'http://www.smalltalk.org/'
+    filenames = ['*.st']
+    aliases = ['smalltalk', 'squeak', 'st']
+    mimetypes = ['text/x-smalltalk']
+    version_added = '0.10'
+
+    tokens = {
+        'root': [
+            (r'(<)(\w+:)(.*?)(>)', bygroups(Text, Keyword, Text, Text)),
+            include('squeak fileout'),
+            include('whitespaces'),
+            include('method definition'),
+            (r'(\|)([\w\s]*)(\|)', bygroups(Operator, Name.Variable, Operator)),
+            include('objects'),
+            (r'\^|\:=|\_', Operator),
+            # temporaries
+            (r'[\]({}.;!]', Text),
+        ],
+        'method definition': [
+            # Not perfect can't allow whitespaces at the beginning and the
+            # without breaking everything
+            (r'([a-zA-Z]+\w*:)(\s*)(\w+)',
+             bygroups(Name.Function, Text, Name.Variable)),
+            (r'^(\b[a-zA-Z]+\w*\b)(\s*)$', bygroups(Name.Function, Text)),
+            (r'^([-+*/\\~<>=|&!?,@%]+)(\s*)(\w+)(\s*)$',
+             bygroups(Name.Function, Text, Name.Variable, Text)),
+        ],
+        'blockvariables': [
+            include('whitespaces'),
+            (r'(:)(\s*)(\w+)',
+             bygroups(Operator, Text, Name.Variable)),
+            (r'\|', Operator, '#pop'),
+            default('#pop'),  # else pop
+        ],
+        'literals': [
+            (r"'(''|[^'])*'", String, 'afterobject'),
+            (r'\$.', String.Char, 'afterobject'),
+            (r'#\(', String.Symbol, 'parenth'),
+            (r'\)', Text, 'afterobject'),
+            (r'(\d+r)?-?\d+(\.\d+)?(e-?\d+)?', Number, 'afterobject'),
+        ],
+        '_parenth_helper': [
+            include('whitespaces'),
+            (r'(\d+r)?-?\d+(\.\d+)?(e-?\d+)?', Number),
+            (r'[-+*/\\~<>=|&#!?,@%\w:]+', String.Symbol),
+            # literals
+            (r"'(''|[^'])*'", String),
+            (r'\$.', String.Char),
+            (r'#*\(', String.Symbol, 'inner_parenth'),
+        ],
+        'parenth': [
+            # This state is a bit tricky since
+            # we can't just pop this state
+            (r'\)', String.Symbol, ('root', 'afterobject')),
+            include('_parenth_helper'),
+        ],
+        'inner_parenth': [
+            (r'\)', String.Symbol, '#pop'),
+            include('_parenth_helper'),
+        ],
+        'whitespaces': [
+            # skip whitespace and comments
+            (r'\s+', Text),
+            (r'"(""|[^"])*"', Comment),
+        ],
+        'objects': [
+            (r'\[', Text, 'blockvariables'),
+            (r'\]', Text, 'afterobject'),
+            (r'\b(self|super|true|false|nil|thisContext)\b',
+             Name.Builtin.Pseudo, 'afterobject'),
+            (r'\b[A-Z]\w*(?!:)\b', Name.Class, 'afterobject'),
+            (r'\b[a-z]\w*(?!:)\b', Name.Variable, 'afterobject'),
+            (r'#("(""|[^"])*"|[-+*/\\~<>=|&!?,@%]+|[\w:]+)',
+             String.Symbol, 'afterobject'),
+            include('literals'),
+        ],
+        'afterobject': [
+            (r'! !$', Keyword, '#pop'),  # squeak chunk delimiter
+            include('whitespaces'),
+            (r'\b(ifTrue:|ifFalse:|whileTrue:|whileFalse:|timesRepeat:)',
+             Name.Builtin, '#pop'),
+            (r'\b(new\b(?!:))', Name.Builtin),
+            (r'\:=|\_', Operator, '#pop'),
+            (r'\b[a-zA-Z]+\w*:', Name.Function, '#pop'),
+            (r'\b[a-zA-Z]+\w*', Name.Function),
+            (r'\w+:?|[-+*/\\~<>=|&!?,@%]+', Name.Function, '#pop'),
+            (r'\.', Punctuation, '#pop'),
+            (r';', Punctuation),
+            (r'[\])}]', Text),
+            (r'[\[({]', Text, '#pop'),
+        ],
+        'squeak fileout': [
+            # Squeak fileout format (optional)
+            (r'^"(""|[^"])*"!', Keyword),
+            (r"^'(''|[^'])*'!", Keyword),
+            (r'^(!)(\w+)( commentStamp: )(.*?)( prior: .*?!\n)(.*?)(!)',
+                bygroups(Keyword, Name.Class, Keyword, String, Keyword, Text, Keyword)),
+            (r"^(!)(\w+(?: class)?)( methodsFor: )('(?:''|[^'])*')(.*?!)",
+                bygroups(Keyword, Name.Class, Keyword, String, Keyword)),
+            (r'^(\w+)( subclass: )(#\w+)'
+             r'(\s+instanceVariableNames: )(.*?)'
+             r'(\s+classVariableNames: )(.*?)'
+             r'(\s+poolDictionaries: )(.*?)'
+             r'(\s+category: )(.*?)(!)',
+                bygroups(Name.Class, Keyword, String.Symbol, Keyword, String, Keyword,
+                         String, Keyword, String, Keyword, String, Keyword)),
+            (r'^(\w+(?: class)?)(\s+instanceVariableNames: )(.*?)(!)',
+                bygroups(Name.Class, Keyword, String, Keyword)),
+            (r'(!\n)(\].*)(! !)$', bygroups(Keyword, Text, Keyword)),
+            (r'! !$', Keyword),
+        ],
+    }
+
+
+class NewspeakLexer(RegexLexer):
+    """
+    For Newspeak syntax.
+    """
+    name = 'Newspeak'
+    url = 'http://newspeaklanguage.org/'
+    filenames = ['*.ns2']
+    aliases = ['newspeak', ]
+    mimetypes = ['text/x-newspeak']
+    version_added = '1.1'
+
+    tokens = {
+        'root': [
+            (r'\b(Newsqueak2)\b', Keyword.Declaration),
+            (r"'[^']*'", String),
+            (r'\b(class)(\s+)(\w+)(\s*)',
+             bygroups(Keyword.Declaration, Text, Name.Class, Text)),
+            (r'\b(mixin|self|super|private|public|protected|nil|true|false)\b',
+             Keyword),
+            (r'(\w+\:)(\s*)([a-zA-Z_]\w+)',
+             bygroups(Name.Function, Text, Name.Variable)),
+            (r'(\w+)(\s*)(=)',
+             bygroups(Name.Attribute, Text, Operator)),
+            (r'<\w+>', Comment.Special),
+            include('expressionstat'),
+            include('whitespace')
+        ],
+
+        'expressionstat': [
+            (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
+            (r'\d+', Number.Integer),
+            (r':\w+', Name.Variable),
+            (r'(\w+)(::)', bygroups(Name.Variable, Operator)),
+            (r'\w+:', Name.Function),
+            (r'\w+', Name.Variable),
+            (r'\(|\)', Punctuation),
+            (r'\[|\]', Punctuation),
+            (r'\{|\}', Punctuation),
+
+            (r'(\^|\+|\/|~|\*|<|>|=|@|%|\||&|\?|!|,|-|:)', Operator),
+            (r'\.|;', Punctuation),
+            include('whitespace'),
+            include('literals'),
+        ],
+        'literals': [
+            (r'\$.', String),
+            (r"'[^']*'", String),
+            (r"#'[^']*'", String.Symbol),
+            (r"#\w+:?", String.Symbol),
+            (r"#(\+|\/|~|\*|<|>|=|@|%|\||&|\?|!|,|-)+", String.Symbol)
+        ],
+        'whitespace': [
+            (r'\s+', Text),
+            (r'"[^"]*"', Comment)
+        ],
+    }
diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/lexers/smithy.py b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/smithy.py
new file mode 100644
index 00000000..bd479aec
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/smithy.py
@@ -0,0 +1,77 @@
+"""
+    pygments.lexers.smithy
+    ~~~~~~~~~~~~~~~~~~~~~~
+
+    Lexers for the Smithy IDL.
+
+    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, bygroups, words
+from pygments.token import Text, Comment, Keyword, Name, String, \
+    Number, Whitespace, Punctuation
+
+__all__ = ['SmithyLexer']
+
+
+class SmithyLexer(RegexLexer):
+    """
+    For Smithy IDL
+    """
+    name = 'Smithy'
+    url = 'https://awslabs.github.io/smithy/'
+    filenames = ['*.smithy']
+    aliases = ['smithy']
+    version_added = '2.10'
+
+    unquoted = r'[A-Za-z0-9_\.#$-]+'
+    identifier = r"[A-Za-z0-9_\.#$-]+"
+
+    simple_shapes = (
+        'use', 'byte', 'short', 'integer', 'long', 'float', 'document',
+        'double', 'bigInteger', 'bigDecimal', 'boolean', 'blob', 'string',
+        'timestamp',
+    )
+
+    aggregate_shapes = (
+       'apply', 'list', 'map', 'set', 'structure', 'union', 'resource',
+       'operation', 'service', 'trait'
+    )
+
+    tokens = {
+        'root': [
+            (r'///.*$', Comment.Multiline),
+            (r'//.*$', Comment),
+            (r'@[0-9a-zA-Z\.#-]*', Name.Decorator),
+            (r'(=)', Name.Decorator),
+            (r'^(\$version)(:)(.+)',
+                bygroups(Keyword.Declaration, Name.Decorator, Name.Class)),
+            (r'^(namespace)(\s+' + identifier + r')\b',
+                bygroups(Keyword.Declaration, Name.Class)),
+            (words(simple_shapes,
+                   prefix=r'^', suffix=r'(\s+' + identifier + r')\b'),
+                bygroups(Keyword.Declaration, Name.Class)),
+            (words(aggregate_shapes,
+                   prefix=r'^', suffix=r'(\s+' + identifier + r')'),
+                bygroups(Keyword.Declaration, Name.Class)),
+            (r'^(metadata)(\s+)((?:\S+)|(?:\"[^"]+\"))(\s*)(=)',
+                bygroups(Keyword.Declaration, Whitespace, Name.Class,
+                         Whitespace, Name.Decorator)),
+            (r"(true|false|null)", Keyword.Constant),
+            (r"(-?(?:0|[1-9]\d*)(?:\.\d+)?(?:[eE][+-]?\d+)?)", Number),
+            (identifier + ":", Name.Label),
+            (identifier, Name.Variable.Class),
+            (r'\[', Text, "#push"),
+            (r'\]', Text, "#pop"),
+            (r'\(', Text, "#push"),
+            (r'\)', Text, "#pop"),
+            (r'\{', Text, "#push"),
+            (r'\}', Text, "#pop"),
+            (r'"{3}(\\\\|\n|\\")*"{3}', String.Doc),
+            (r'"(\\\\|\n|\\"|[^"])*"', String.Double),
+            (r"'(\\\\|\n|\\'|[^'])*'", String.Single),
+            (r'[:,]+', Punctuation),
+            (r'\s+', Whitespace),
+        ]
+    }
diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/lexers/smv.py b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/smv.py
new file mode 100644
index 00000000..bf97b52a
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/smv.py
@@ -0,0 +1,78 @@
+"""
+    pygments.lexers.smv
+    ~~~~~~~~~~~~~~~~~~~
+
+    Lexers for the SMV languages.
+
+    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, words
+from pygments.token import Comment, Keyword, Name, Number, Operator, \
+    Punctuation, Text
+
+__all__ = ['NuSMVLexer']
+
+
+class NuSMVLexer(RegexLexer):
+    """
+    Lexer for the NuSMV language.
+    """
+
+    name = 'NuSMV'
+    aliases = ['nusmv']
+    filenames = ['*.smv']
+    mimetypes = []
+    url = 'https://nusmv.fbk.eu'
+    version_added = '2.2'
+
+    tokens = {
+        'root': [
+            # Comments
+            (r'(?s)\/\-\-.*?\-\-/', Comment),
+            (r'--.*\n', Comment),
+
+            # Reserved
+            (words(('MODULE', 'DEFINE', 'MDEFINE', 'CONSTANTS', 'VAR', 'IVAR',
+                    'FROZENVAR', 'INIT', 'TRANS', 'INVAR', 'SPEC', 'CTLSPEC',
+                    'LTLSPEC', 'PSLSPEC', 'COMPUTE', 'NAME', 'INVARSPEC',
+                    'FAIRNESS', 'JUSTICE', 'COMPASSION', 'ISA', 'ASSIGN',
+                    'CONSTRAINT', 'SIMPWFF', 'CTLWFF', 'LTLWFF', 'PSLWFF',
+                    'COMPWFF', 'IN', 'MIN', 'MAX', 'MIRROR', 'PRED',
+                    'PREDICATES'), suffix=r'(?![\w$#-])'),
+             Keyword.Declaration),
+            (r'process(?![\w$#-])', Keyword),
+            (words(('array', 'of', 'boolean', 'integer', 'real', 'word'),
+                   suffix=r'(?![\w$#-])'), Keyword.Type),
+            (words(('case', 'esac'), suffix=r'(?![\w$#-])'), Keyword),
+            (words(('word1', 'bool', 'signed', 'unsigned', 'extend', 'resize',
+                    'sizeof', 'uwconst', 'swconst', 'init', 'self', 'count',
+                    'abs', 'max', 'min'), suffix=r'(?![\w$#-])'),
+             Name.Builtin),
+            (words(('EX', 'AX', 'EF', 'AF', 'EG', 'AG', 'E', 'F', 'O', 'G',
+                    'H', 'X', 'Y', 'Z', 'A', 'U', 'S', 'V', 'T', 'BU', 'EBF',
+                    'ABF', 'EBG', 'ABG', 'next', 'mod', 'union', 'in', 'xor',
+                    'xnor'), suffix=r'(?![\w$#-])'),
+                Operator.Word),
+            (words(('TRUE', 'FALSE'), suffix=r'(?![\w$#-])'), Keyword.Constant),
+
+            # Names
+            (r'[a-zA-Z_][\w$#-]*', Name.Variable),
+
+            # Operators
+            (r':=', Operator),
+            (r'[-&|+*/<>!=]', Operator),
+
+            # Literals
+            (r'\-?\d+\b', Number.Integer),
+            (r'0[su][bB]\d*_[01_]+', Number.Bin),
+            (r'0[su][oO]\d*_[0-7_]+', Number.Oct),
+            (r'0[su][dD]\d*_[\d_]+', Number.Decimal),
+            (r'0[su][hH]\d*_[\da-fA-F_]+', Number.Hex),
+
+            # Whitespace, punctuation and the rest
+            (r'\s+', Text.Whitespace),
+            (r'[()\[\]{};?:.,]', Punctuation),
+        ],
+    }
diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/lexers/snobol.py b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/snobol.py
new file mode 100644
index 00000000..bab51e9b
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/snobol.py
@@ -0,0 +1,82 @@
+"""
+    pygments.lexers.snobol
+    ~~~~~~~~~~~~~~~~~~~~~~
+
+    Lexers for the SNOBOL language.
+
+    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, bygroups
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+    Number, Punctuation
+
+__all__ = ['SnobolLexer']
+
+
+class SnobolLexer(RegexLexer):
+    """
+    Lexer for the SNOBOL4 programming language.
+
+    Recognizes the common ASCII equivalents of the original SNOBOL4 operators.
+    Does not require spaces around binary operators.
+    """
+
+    name = "Snobol"
+    aliases = ["snobol"]
+    filenames = ['*.snobol']
+    mimetypes = ['text/x-snobol']
+    url = 'https://www.regressive.org/snobol4'
+    version_added = '1.5'
+
+    tokens = {
+        # root state, start of line
+        # comments, continuation lines, and directives start in column 1
+        # as do labels
+        'root': [
+            (r'\*.*\n', Comment),
+            (r'[+.] ', Punctuation, 'statement'),
+            (r'-.*\n', Comment),
+            (r'END\s*\n', Name.Label, 'heredoc'),
+            (r'[A-Za-z$][\w$]*', Name.Label, 'statement'),
+            (r'\s+', Text, 'statement'),
+        ],
+        # statement state, line after continuation or label
+        'statement': [
+            (r'\s*\n', Text, '#pop'),
+            (r'\s+', Text),
+            (r'(?<=[^\w.])(LT|LE|EQ|NE|GE|GT|INTEGER|IDENT|DIFFER|LGT|SIZE|'
+             r'REPLACE|TRIM|DUPL|REMDR|DATE|TIME|EVAL|APPLY|OPSYN|LOAD|UNLOAD|'
+             r'LEN|SPAN|BREAK|ANY|NOTANY|TAB|RTAB|REM|POS|RPOS|FAIL|FENCE|'
+             r'ABORT|ARB|ARBNO|BAL|SUCCEED|INPUT|OUTPUT|TERMINAL)(?=[^\w.])',
+             Name.Builtin),
+            (r'[A-Za-z][\w.]*', Name),
+            # ASCII equivalents of original operators
+            # | for the EBCDIC equivalent, ! likewise
+            # \ for EBCDIC negation
+            (r'\*\*|[?$.!%*/#+\-@|&\\=]', Operator),
+            (r'"[^"]*"', String),
+            (r"'[^']*'", String),
+            # Accept SPITBOL syntax for real numbers
+            # as well as Macro SNOBOL4
+            (r'[0-9]+(?=[^.EeDd])', Number.Integer),
+            (r'[0-9]+(\.[0-9]*)?([EDed][-+]?[0-9]+)?', Number.Float),
+            # Goto
+            (r':', Punctuation, 'goto'),
+            (r'[()<>,;]', Punctuation),
+        ],
+        # Goto block
+        'goto': [
+            (r'\s*\n', Text, "#pop:2"),
+            (r'\s+', Text),
+            (r'F|S', Keyword),
+            (r'(\()([A-Za-z][\w.]*)(\))',
+             bygroups(Punctuation, Name.Label, Punctuation))
+        ],
+        # everything after the END statement is basically one
+        # big heredoc.
+        'heredoc': [
+            (r'.*\n', String.Heredoc)
+        ]
+    }
diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/lexers/solidity.py b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/solidity.py
new file mode 100644
index 00000000..3182a148
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/solidity.py
@@ -0,0 +1,87 @@
+"""
+    pygments.lexers.solidity
+    ~~~~~~~~~~~~~~~~~~~~~~~~
+
+    Lexers for Solidity.
+
+    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, bygroups, include, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+    Number, Punctuation, Whitespace
+
+__all__ = ['SolidityLexer']
+
+
+class SolidityLexer(RegexLexer):
+    """
+    For Solidity source code.
+    """
+
+    name = 'Solidity'
+    aliases = ['solidity']
+    filenames = ['*.sol']
+    mimetypes = []
+    url = 'https://soliditylang.org'
+    version_added = '2.5'
+
+    datatype = (
+        r'\b(address|bool|(?:(?:bytes|hash|int|string|uint)(?:8|16|24|32|40|48|56|64'
+        r'|72|80|88|96|104|112|120|128|136|144|152|160|168|176|184|192|200|208'
+        r'|216|224|232|240|248|256)?))\b'
+    )
+
+    tokens = {
+        'root': [
+            include('whitespace'),
+            include('comments'),
+            (r'\bpragma\s+solidity\b', Keyword, 'pragma'),
+            (r'\b(contract)(\s+)([a-zA-Z_]\w*)',
+             bygroups(Keyword, Whitespace, Name.Entity)),
+            (datatype + r'(\s+)((?:external|public|internal|private)\s+)?' +
+             r'([a-zA-Z_]\w*)',
+             bygroups(Keyword.Type, Whitespace, Keyword, Name.Variable)),
+            (r'\b(enum|event|function|struct)(\s+)([a-zA-Z_]\w*)',
+             bygroups(Keyword.Type, Whitespace, Name.Variable)),
+            (r'\b(msg|block|tx)\.([A-Za-z_][a-zA-Z0-9_]*)\b', Keyword),
+            (words((
+                'block', 'break', 'constant', 'constructor', 'continue',
+                'contract', 'do', 'else', 'external', 'false', 'for',
+                'function', 'if', 'import', 'inherited', 'internal', 'is',
+                'library', 'mapping', 'memory', 'modifier', 'msg', 'new',
+                'payable', 'private', 'public', 'require', 'return',
+                'returns', 'struct', 'suicide', 'throw', 'this', 'true',
+                'tx', 'var', 'while'), prefix=r'\b', suffix=r'\b'),
+             Keyword.Type),
+            (words(('keccak256',), prefix=r'\b', suffix=r'\b'), Name.Builtin),
+            (datatype, Keyword.Type),
+            include('constants'),
+            (r'[a-zA-Z_]\w*', Text),
+            (r'[~!%^&*+=|?:<>/-]', Operator),
+            (r'[.;{}(),\[\]]', Punctuation)
+        ],
+        'comments': [
+            (r'//(\n|[\w\W]*?[^\\]\n)', Comment.Single),
+            (r'/(\\\n)?[*][\w\W]*?[*](\\\n)?/', Comment.Multiline),
+            (r'/(\\\n)?[*][\w\W]*', Comment.Multiline)
+        ],
+        'constants': [
+            (r'("(\\"|.)*?")', String.Double),
+            (r"('(\\'|.)*?')", String.Single),
+            (r'\b0[xX][0-9a-fA-F]+\b', Number.Hex),
+            (r'\b\d+\b', Number.Decimal),
+        ],
+        'pragma': [
+            include('whitespace'),
+            include('comments'),
+            (r'(\^|>=|<)(\s*)(\d+\.\d+\.\d+)',
+             bygroups(Operator, Whitespace, Keyword)),
+            (r';', Punctuation, '#pop')
+        ],
+        'whitespace': [
+            (r'\s+', Whitespace),
+            (r'\n', Whitespace)
+        ]
+    }
diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/lexers/soong.py b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/soong.py
new file mode 100644
index 00000000..bbf204dd
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/soong.py
@@ -0,0 +1,78 @@
+"""
+    pygments.lexers.soong
+    ~~~~~~~~~~~~~~~~~~~~~
+
+    Lexers for Soong (Android.bp Blueprint) files.
+
+    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, bygroups, include
+from pygments.token import Comment, Name, Number, Operator, Punctuation, \
+        String, Whitespace
+
+__all__ = ['SoongLexer']
+
+class SoongLexer(RegexLexer):
+    name = 'Soong'
+    version_added = '2.18'
+    url = 'https://source.android.com/docs/setup/reference/androidbp'
+    aliases = ['androidbp', 'bp', 'soong']
+    filenames = ['Android.bp']
+
+    tokens = {
+        'root': [
+            # A variable assignment
+            (r'(\w*)(\s*)(\+?=)(\s*)',
+             bygroups(Name.Variable, Whitespace, Operator, Whitespace),
+             'assign-rhs'),
+
+            # A top-level module
+            (r'(\w*)(\s*)(\{)',
+             bygroups(Name.Function, Whitespace, Punctuation),
+             'in-rule'),
+
+            # Everything else
+            include('comments'),
+            (r'\s+', Whitespace),  # newlines okay
+        ],
+        'assign-rhs': [
+            include('expr'),
+            (r'\n', Whitespace, '#pop'),
+        ],
+        'in-list': [
+            include('expr'),
+            include('comments'),
+            (r'\s+', Whitespace),  # newlines okay in a list
+            (r',', Punctuation),
+            (r'\]', Punctuation, '#pop'),
+        ],
+        'in-map': [
+            # A map key
+            (r'(\w+)(:)(\s*)', bygroups(Name, Punctuation, Whitespace)),
+
+            include('expr'),
+            include('comments'),
+            (r'\s+', Whitespace),  # newlines okay in a map
+            (r',', Punctuation),
+            (r'\}', Punctuation, '#pop'),
+        ],
+        'in-rule': [
+            # Just re-use map syntax
+            include('in-map'),
+        ],
+        'comments': [
+            (r'//.*', Comment.Single),
+            (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
+        ],
+        'expr': [
+            (r'(true|false)\b', Name.Builtin),
+            (r'0x[0-9a-fA-F]+', Number.Hex),
+            (r'\d+', Number.Integer),
+            (r'".*?"', String),
+            (r'\{', Punctuation, 'in-map'),
+            (r'\[', Punctuation, 'in-list'),
+            (r'\w+', Name),
+        ],
+    }
diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/lexers/sophia.py b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/sophia.py
new file mode 100644
index 00000000..37fcec5c
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/sophia.py
@@ -0,0 +1,102 @@
+"""
+    pygments.lexers.sophia
+    ~~~~~~~~~~~~~~~~~~~~~~
+
+    Lexer for Sophia.
+
+    Derived from pygments/lexers/reason.py.
+
+    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, include, default, words
+from pygments.token import Comment, Keyword, Name, Number, Operator, \
+    Punctuation, String, Text
+
+__all__ = ['SophiaLexer']
+
+class SophiaLexer(RegexLexer):
+    """
+    A Sophia lexer.
+    """
+
+    name = 'Sophia'
+    aliases = ['sophia']
+    filenames = ['*.aes']
+    mimetypes = []
+    url = 'https://docs.aeternity.com/aesophia'
+    version_added = '2.11'
+
+    keywords = (
+        'contract', 'include', 'let', 'switch', 'type', 'record', 'datatype',
+        'if', 'elif', 'else', 'function', 'stateful', 'payable', 'public',
+        'entrypoint', 'private', 'indexed', 'namespace', 'interface', 'main',
+        'using', 'as', 'for', 'hiding',
+    )
+
+    builtins = ('state', 'put', 'abort', 'require')
+
+    word_operators = ('mod', 'band', 'bor', 'bxor', 'bnot')
+
+    primitive_types = ('int', 'address', 'bool', 'bits', 'bytes', 'string',
+                       'list', 'option', 'char', 'unit', 'map', 'event',
+                       'hash', 'signature', 'oracle', 'oracle_query')
+
+    tokens = {
+        'escape-sequence': [
+            (r'\\[\\"\'ntbr]', String.Escape),
+            (r'\\[0-9]{3}', String.Escape),
+            (r'\\x[0-9a-fA-F]{2}', String.Escape),
+        ],
+        'root': [
+            (r'\s+', Text.Whitespace),
+            (r'(true|false)\b', Keyword.Constant),
+            (r'\b([A-Z][\w\']*)(?=\s*\.)', Name.Class, 'dotted'),
+            (r'\b([A-Z][\w\']*)', Name.Function),
+            (r'//.*?\n', Comment.Single),
+            (r'\/\*(?!/)', Comment.Multiline, 'comment'),
+
+            (r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex),
+            (r'#[\da-fA-F][\da-fA-F_]*', Name.Label),
+            (r'\d[\d_]*', Number.Integer),
+
+            (words(keywords, suffix=r'\b'), Keyword),
+            (words(builtins, suffix=r'\b'), Name.Builtin),
+            (words(word_operators, prefix=r'\b', suffix=r'\b'), Operator.Word),
+            (words(primitive_types, prefix=r'\b', suffix=r'\b'), Keyword.Type),
+
+            (r'[=!<>+\\*/:&|?~@^-]', Operator.Word),
+            (r'[.;:{}(),\[\]]', Punctuation),
+
+            (r"(ak_|ok_|oq_|ct_)[\w']*", Name.Label),
+            (r"[^\W\d][\w']*", Name),
+
+            (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'",
+             String.Char),
+            (r"'.'", String.Char),
+            (r"'[a-z][\w]*", Name.Variable),
+
+            (r'"', String.Double, 'string')
+        ],
+        'comment': [
+            (r'[^/*]+', Comment.Multiline),
+            (r'\/\*', Comment.Multiline, '#push'),
+            (r'\*\/', Comment.Multiline, '#pop'),
+            (r'\*', Comment.Multiline),
+        ],
+        'string': [
+            (r'[^\\"]+', String.Double),
+            include('escape-sequence'),
+            (r'\\\n', String.Double),
+            (r'"', String.Double, '#pop'),
+        ],
+        'dotted': [
+            (r'\s+', Text),
+            (r'\.', Punctuation),
+            (r'[A-Z][\w\']*(?=\s*\.)', Name.Function),
+            (r'[A-Z][\w\']*', Name.Function, '#pop'),
+            (r'[a-z_][\w\']*', Name, '#pop'),
+            default('#pop'),
+        ],
+    }
diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/lexers/special.py b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/special.py
new file mode 100644
index 00000000..524946fc
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/special.py
@@ -0,0 +1,122 @@
+"""
+    pygments.lexers.special
+    ~~~~~~~~~~~~~~~~~~~~~~~
+
+    Special lexers.
+
+    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+import ast
+
+from pygments.lexer import Lexer, line_re
+from pygments.token import Token, Error, Text, Generic
+from pygments.util import get_choice_opt
+
+
+__all__ = ['TextLexer', 'OutputLexer', 'RawTokenLexer']
+
+
+class TextLexer(Lexer):
+    """
+    "Null" lexer, doesn't highlight anything.
+    """
+    name = 'Text only'
+    aliases = ['text']
+    filenames = ['*.txt']
+    mimetypes = ['text/plain']
+    url = ""
+    version_added = ''
+
+    priority = 0.01
+
+    def get_tokens_unprocessed(self, text):
+        yield 0, Text, text
+
+    def analyse_text(text):
+        return TextLexer.priority
+
+
+class OutputLexer(Lexer):
+    """
+    Simple lexer that highlights everything as ``Token.Generic.Output``.
+    """
+    name = 'Text output'
+    aliases = ['output']
+    url = ""
+    version_added = '2.10'
+    _example = "output/output"
+
+    def get_tokens_unprocessed(self, text):
+        yield 0, Generic.Output, text
+
+
+_ttype_cache = {}
+
+
+class RawTokenLexer(Lexer):
+    """
+    Recreate a token stream formatted with the `RawTokenFormatter`.
+
+    Additional options accepted:
+
+    `compress`
+        If set to ``"gz"`` or ``"bz2"``, decompress the token stream with
+        the given compression algorithm before lexing (default: ``""``).
+    """
+    name = 'Raw token data'
+    aliases = []
+    filenames = []
+    mimetypes = ['application/x-pygments-tokens']
+    url = 'https://pygments.org/docs/formatters/#RawTokenFormatter'
+    version_added = ''
+
+    def __init__(self, **options):
+        self.compress = get_choice_opt(options, 'compress',
+                                       ['', 'none', 'gz', 'bz2'], '')
+        Lexer.__init__(self, **options)
+
+    def get_tokens(self, text):
+        if self.compress:
+            if isinstance(text, str):
+                text = text.encode('latin1')
+            try:
+                if self.compress == 'gz':
+                    import gzip
+                    text = gzip.decompress(text)
+                elif self.compress == 'bz2':
+                    import bz2
+                    text = bz2.decompress(text)
+            except OSError:
+                yield Error, text.decode('latin1')
+        if isinstance(text, bytes):
+            text = text.decode('latin1')
+
+        # do not call Lexer.get_tokens() because stripping is not optional.
+        text = text.strip('\n') + '\n'
+        for i, t, v in self.get_tokens_unprocessed(text):
+            yield t, v
+
+    def get_tokens_unprocessed(self, text):
+        length = 0
+        for match in line_re.finditer(text):
+            try:
+                ttypestr, val = match.group().rstrip().split('\t', 1)
+                ttype = _ttype_cache.get(ttypestr)
+                if not ttype:
+                    ttype = Token
+                    ttypes = ttypestr.split('.')[1:]
+                    for ttype_ in ttypes:
+                        if not ttype_ or not ttype_[0].isupper():
+                            raise ValueError('malformed token name')
+                        ttype = getattr(ttype, ttype_)
+                    _ttype_cache[ttypestr] = ttype
+                val = ast.literal_eval(val)
+                if not isinstance(val, str):
+                    raise ValueError('expected str')
+            except (SyntaxError, ValueError):
+                val = match.group()
+                ttype = Error
+            yield length, ttype, val
+            length += len(val)
diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/lexers/spice.py b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/spice.py
new file mode 100644
index 00000000..9d2b1a1a
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/spice.py
@@ -0,0 +1,70 @@
+"""
+    pygments.lexers.spice
+    ~~~~~~~~~~~~~~~~~~~~~
+
+    Lexers for the Spice programming language.
+
+    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, bygroups, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+    Number, Punctuation, Whitespace
+
+__all__ = ['SpiceLexer']
+
+
+class SpiceLexer(RegexLexer):
+    """
+    For Spice source.
+    """
+    name = 'Spice'
+    url = 'https://www.spicelang.com'
+    filenames = ['*.spice']
+    aliases = ['spice', 'spicelang']
+    mimetypes = ['text/x-spice']
+    version_added = '2.11'
+
+    tokens = {
+        'root': [
+            (r'\n', Whitespace),
+            (r'\s+', Whitespace),
+            (r'\\\n', Text),
+            # comments
+            (r'//(.*?)\n', Comment.Single),
+            (r'/(\\\n)?[*]{2}(.|\n)*?[*](\\\n)?/', String.Doc),
+            (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
+            # keywords
+            (r'(import|as)\b', Keyword.Namespace),
+            (r'(f|p|type|struct|interface|enum|alias|operator)\b', Keyword.Declaration),
+            (words(('if', 'else', 'switch', 'case', 'default', 'for', 'foreach', 'do',
+                    'while', 'break', 'continue', 'fallthrough', 'return', 'assert',
+                    'unsafe', 'ext'), suffix=r'\b'), Keyword),
+            (words(('const', 'signed', 'unsigned', 'inline', 'public', 'heap', 'compose'),
+                   suffix=r'\b'), Keyword.Pseudo),
+            (words(('new', 'yield', 'stash', 'pick', 'sync', 'class'), suffix=r'\b'),
+                   Keyword.Reserved),
+            (r'(true|false|nil)\b', Keyword.Constant),
+            (words(('double', 'int', 'short', 'long', 'byte', 'char', 'string',
+                    'bool', 'dyn'), suffix=r'\b'), Keyword.Type),
+            (words(('printf', 'sizeof', 'alignof', 'len', 'panic'), suffix=r'\b(\()'),
+             bygroups(Name.Builtin, Punctuation)),
+            # numeric literals
+            (r'[-]?[0-9]*[.][0-9]+([eE][+-]?[0-9]+)?', Number.Double),
+            (r'0[bB][01]+[slu]?', Number.Bin),
+            (r'0[oO][0-7]+[slu]?', Number.Oct),
+            (r'0[xXhH][0-9a-fA-F]+[slu]?', Number.Hex),
+            (r'(0[dD])?[0-9]+[slu]?', Number.Integer),
+            # string literal
+            (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
+            # char literal
+            (r'\'(\\\\|\\[^\\]|[^\'\\])\'', String.Char),
+            # tokens
+            (r'<<=|>>=|<<|>>|<=|>=|\+=|-=|\*=|/=|\%=|\|=|&=|\^=|&&|\|\||&|\||'
+             r'\+\+|--|\%|\^|\~|==|!=|->|::|[.]{3}|#!|#|[+\-*/&]', Operator),
+            (r'[|<>=!()\[\]{}.,;:\?]', Punctuation),
+            # identifiers
+            (r'[^\W\d]\w*', Name.Other),
+        ]
+    }
diff --git a/Backend/venv/lib/python3.12/site-packages/pygments/lexers/sql.py b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/sql.py
new file mode 100644
index 00000000..d3e6f17f
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/pygments/lexers/sql.py
@@ -0,0 +1,1109 @@
+"""
+    pygments.lexers.sql
+    ~~~~~~~~~~~~~~~~~~~
+
+    Lexers for various SQL dialects and related interactive sessions.
+
+    Postgres specific lexers:
+
+    `PostgresLexer`
+        A SQL lexer for the PostgreSQL dialect. Differences w.r.t. the SQL
+        lexer are:
+
+        - keywords and data types list parsed from the PG docs (run the
+          `_postgres_builtins` module to update them);
+        - Content of $-strings parsed using a specific lexer, e.g. the content
+          of a PL/Python function is parsed using the Python lexer;
+        - parse PG specific constructs: E-strings, $-strings, U&-strings,
+          different operators and punctuation.
+
+    `PlPgsqlLexer`
+        A lexer for the PL/pgSQL language. Adds a few specific construct on
+        top of the PG SQL lexer (such as <
+
+
+

Custom Report Builder

+

Select metrics and generate custom analytics reports

+
+ {onClose && ( + + )} +
+ + {/* Report Name */} +
+ + setReportName(e.target.value)} + placeholder="e.g., Monthly Revenue Report" + className="w-full px-4 py-2 border border-gray-300 rounded-lg focus:ring-2 focus:ring-indigo-500 focus:border-transparent" + /> +
+ + {/* Date Range */} +
+ +
+ setDateRange({ ...dateRange, from: e.target.value })} + className="px-4 py-2 border border-gray-300 rounded-lg focus:ring-2 focus:ring-indigo-500" + /> + to + setDateRange({ ...dateRange, to: e.target.value })} + className="px-4 py-2 border border-gray-300 rounded-lg focus:ring-2 focus:ring-indigo-500" + /> +
+
+ + {/* Category Filter */} +
+ +
+ {categories.map((category) => ( + + ))} +
+
+ + {/* Metrics Selection */} +
+
+ +
+ + +
+
+
+
+ {filteredMetrics.map((metric) => { + const isSelected = selectedMetrics.includes(metric.id); + return ( + + ); + })} +
+
+
+ + {/* Actions */} +
+ + + +
+ + {loading && ( +
+ Generating report... +
+ )} +
+ ); +}; + +export default CustomReportBuilder; + diff --git a/Frontend/src/components/analytics/SimpleChart.tsx b/Frontend/src/components/analytics/SimpleChart.tsx new file mode 100644 index 00000000..f6ce07ff --- /dev/null +++ b/Frontend/src/components/analytics/SimpleChart.tsx @@ -0,0 +1,224 @@ +import React from 'react'; +import { TrendingUp } from 'lucide-react'; + +// Simple chart components using CSS/SVG (no external library dependency) +// These provide basic visualizations without requiring recharts + +interface SimpleBarChartProps { + data: Array<{ label: string; value: number; color?: string }>; + height?: number; + title?: string; +} + +export const SimpleBarChart: React.FC = ({ data, height = 200, title }) => { + const maxValue = Math.max(...data.map(d => d.value), 1); + + return ( +
+ {title &&

{title}

} +
+ {data.map((item, index) => ( +
+
{item.label}
+
+
+ + {typeof item.value === 'number' ? item.value.toLocaleString() : item.value} + +
+
+ ))} +
+
+ ); +}; + +interface SimpleLineChartProps { + data: Array<{ label: string; value: number }>; + height?: number; + title?: string; + color?: string; +} + +export const SimpleLineChart: React.FC = ({ data, height = 200, title, color = '#3b82f6' }) => { + if (!data || data.length === 0) return null; + + const maxValue = Math.max(...data.map(d => d.value), 1); + const minValue = Math.min(...data.map(d => d.value), 0); + const range = maxValue - minValue || 1; + + const points = data.map((item, index) => { + const x = (index / (data.length - 1 || 1)) * 100; + const y = 100 - ((item.value - minValue) / range) * 100; + return `${x},${y}`; + }).join(' '); + + return ( +
+ {title &&

{title}

} +
+ + + {data.map((item, index) => { + const x = (index / (data.length - 1 || 1)) * 100; + const y = 100 - ((item.value - minValue) / range) * 100; + return ( + + ); + })} + +
+ {data.map((item, index) => ( + + {item.label} + + ))} +
+
+
+ ); +}; + +interface SimplePieChartProps { + data: Array<{ label: string; value: number; color?: string }>; + size?: number; + title?: string; +} + +export const SimplePieChart: React.FC = ({ data, size = 200, title }) => { + const total = data.reduce((sum, item) => sum + item.value, 0); + if (total === 0) return null; + + const colors = [ + '#3b82f6', '#10b981', '#f59e0b', '#ef4444', '#8b5cf6', + '#ec4899', '#06b6d4', '#84cc16', '#f97316', '#6366f1' + ]; + + let currentAngle = -90; + const segments = data.map((item, index) => { + const percentage = (item.value / total) * 100; + const angle = (percentage / 100) * 360; + const startAngle = currentAngle; + currentAngle += angle; + + const x1 = 50 + 50 * Math.cos((startAngle * Math.PI) / 180); + const y1 = 50 + 50 * Math.sin((startAngle * Math.PI) / 180); + const x2 = 50 + 50 * Math.cos((currentAngle * Math.PI) / 180); + const y2 = 50 + 50 * Math.sin((currentAngle * Math.PI) / 180); + const largeArc = angle > 180 ? 1 : 0; + + return { + path: `M 50 50 L ${x1} ${y1} A 50 50 0 ${largeArc} 1 ${x2} ${y2} Z`, + color: item.color || colors[index % colors.length], + label: item.label, + value: item.value, + percentage: percentage.toFixed(1) + }; + }); + + return ( +
+ {title &&

{title}

} +
+
+ + {segments.map((segment, index) => ( + + ))} + +
+
+ {segments.map((segment, index) => ( +
+
+ {segment.label} + + {segment.percentage}% + +
+ ))} +
+
+
+ ); +}; + +interface KPICardProps { + title: string; + value: string | number; + change?: number; + icon?: React.ReactNode; + color?: string; + subtitle?: string; +} + +export const KPICard: React.FC = ({ + title, + value, + change, + icon, + color = 'blue', + subtitle +}) => { + const colorClasses = { + blue: 'from-blue-500 to-indigo-600', + green: 'from-emerald-500 to-green-600', + orange: 'from-orange-500 to-amber-600', + purple: 'from-purple-500 to-indigo-600', + red: 'from-rose-500 to-red-600', + }; + + return ( +
+
+
+

{title}

+

+ {typeof value === 'number' ? value.toLocaleString() : value} +

+ {subtitle &&

{subtitle}

} +
+ {icon && ( +
+ {icon} +
+ )} +
+ {change !== undefined && ( +
= 0 ? 'text-emerald-600' : 'text-rose-600' + }`}> + + {Math.abs(change).toFixed(1)}% + vs previous period +
+ )} +
+ ); +}; + diff --git a/Frontend/src/components/auth/AccountantRoute.tsx b/Frontend/src/components/auth/AccountantRoute.tsx index 52afa7d2..5317983f 100644 --- a/Frontend/src/components/auth/AccountantRoute.tsx +++ b/Frontend/src/components/auth/AccountantRoute.tsx @@ -47,6 +47,12 @@ const AccountantRoute: React.FC = ({ // Check if user is accountant const isAccountant = userInfo?.role === 'accountant'; if (!isAccountant) { + // Redirect to appropriate dashboard based on role + if (userInfo?.role === 'admin') { + return ; + } else if (userInfo?.role === 'staff') { + return ; + } return ; } diff --git a/Frontend/src/components/auth/AdminRoute.tsx b/Frontend/src/components/auth/AdminRoute.tsx index 66a05696..3df428e1 100644 --- a/Frontend/src/components/auth/AdminRoute.tsx +++ b/Frontend/src/components/auth/AdminRoute.tsx @@ -47,6 +47,12 @@ const AdminRoute: React.FC = ({ const isAdmin = userInfo?.role === 'admin'; if (!isAdmin) { + // Redirect to appropriate dashboard based on role + if (userInfo?.role === 'staff') { + return ; + } else if (userInfo?.role === 'accountant') { + return ; + } return ; } diff --git a/Frontend/src/components/ResetPasswordRouteHandler.tsx b/Frontend/src/components/auth/ResetPasswordRouteHandler.tsx similarity index 88% rename from Frontend/src/components/ResetPasswordRouteHandler.tsx rename to Frontend/src/components/auth/ResetPasswordRouteHandler.tsx index 78fcc259..505c087a 100644 --- a/Frontend/src/components/ResetPasswordRouteHandler.tsx +++ b/Frontend/src/components/auth/ResetPasswordRouteHandler.tsx @@ -1,6 +1,6 @@ import { useEffect } from 'react'; import { useParams, useNavigate } from 'react-router-dom'; -import { useAuthModal } from '../contexts/AuthModalContext'; +import { useAuthModal } from '../../contexts/AuthModalContext'; const ResetPasswordRouteHandler: React.FC = () => { const { token } = useParams<{ token: string }>(); diff --git a/Frontend/src/components/auth/StaffRoute.tsx b/Frontend/src/components/auth/StaffRoute.tsx index 7f8eef5b..04381ec6 100644 --- a/Frontend/src/components/auth/StaffRoute.tsx +++ b/Frontend/src/components/auth/StaffRoute.tsx @@ -47,6 +47,12 @@ const StaffRoute: React.FC = ({ const isStaff = userInfo?.role === 'staff'; if (!isStaff) { + // Redirect to appropriate dashboard based on role + if (userInfo?.role === 'admin') { + return ; + } else if (userInfo?.role === 'accountant') { + return ; + } return ; } diff --git a/Frontend/src/components/auth/index.ts b/Frontend/src/components/auth/index.ts index 262c8385..df5bec05 100644 --- a/Frontend/src/components/auth/index.ts +++ b/Frontend/src/components/auth/index.ts @@ -3,3 +3,4 @@ export { default as AdminRoute } from './AdminRoute'; export { default as StaffRoute } from './StaffRoute'; export { default as AccountantRoute } from './AccountantRoute'; export { default as CustomerRoute } from './CustomerRoute'; +export { default as ResetPasswordRouteHandler } from './ResetPasswordRouteHandler'; diff --git a/Frontend/src/components/chat/StaffChatNotification.tsx b/Frontend/src/components/chat/StaffChatNotification.tsx index bcd353b8..d7748195 100644 --- a/Frontend/src/components/chat/StaffChatNotification.tsx +++ b/Frontend/src/components/chat/StaffChatNotification.tsx @@ -92,6 +92,16 @@ const StaffChatNotification: React.FC = () => { }, autoClose: 10000 }); + } else if (data.type === 'housekeeping_task_assigned') { + const taskData = data.data; + const taskTypeLabel = taskData.task_type ? taskData.task_type.charAt(0).toUpperCase() + taskData.task_type.slice(1) : 'Housekeeping'; + + toast.success(`New ${taskTypeLabel} task assigned: Room ${taskData.room_number}`, { + onClick: () => { + navigate('/staff/advanced-rooms?tab=housekeeping'); + }, + autoClose: 10000 + }); } } catch (error) { console.error('Error parsing notification:', error); diff --git a/Frontend/src/components/common/PaymentMethodSelector.tsx b/Frontend/src/components/common/PaymentMethodSelector.tsx index d5ae7460..d0b27967 100644 --- a/Frontend/src/components/common/PaymentMethodSelector.tsx +++ b/Frontend/src/components/common/PaymentMethodSelector.tsx @@ -2,8 +2,8 @@ import React from 'react'; import { CreditCard } from 'lucide-react'; interface PaymentMethodSelectorProps { - value: 'cash' | 'stripe'; - onChange: (value: 'cash' | 'stripe') => void; + value: 'cash' | 'stripe' | 'borica' | 'paypal'; + onChange: (value: 'cash' | 'stripe' | 'borica' | 'paypal') => void; error?: string; disabled?: boolean; } @@ -110,6 +110,55 @@ const PaymentMethodSelector: React.FC<
+ + {/* Borica Payment */} +
{error && ( @@ -126,7 +175,11 @@ const PaymentMethodSelector: React.FC< 💡 Note: {' '} {value === 'cash' ? 'You will pay when checking in. Cash and card accepted at the hotel.' - : 'Your payment will be processed securely through Stripe.'} + : value === 'stripe' + ? 'Your payment will be processed securely through Stripe.' + : value === 'borica' + ? 'Your payment will be processed securely through Borica payment gateway.' + : 'Your payment will be processed securely.'}

diff --git a/Frontend/src/components/common/Preloader.tsx b/Frontend/src/components/common/Preloader.tsx new file mode 100644 index 00000000..262d639a --- /dev/null +++ b/Frontend/src/components/common/Preloader.tsx @@ -0,0 +1,87 @@ +import React, { useState } from 'react'; +import { useCompanySettings } from '../../contexts/CompanySettingsContext'; +import { Loader2 } from 'lucide-react'; + +interface PreloaderProps { + isLoading?: boolean; +} + +const Preloader: React.FC = ({ isLoading = true }) => { + const { settings } = useCompanySettings(); + const [logoError, setLogoError] = useState(false); + const baseUrl = import.meta.env.VITE_API_URL || 'http://localhost:8000'; + + // Get logo URL - handle both absolute and relative URLs + const logoUrl = settings.company_logo_url && !logoError + ? settings.company_logo_url.startsWith('http') + ? settings.company_logo_url + : `${baseUrl}${settings.company_logo_url}` + : null; + + if (!isLoading) return null; + + return ( +
+
+
+ {/* Animated background glow */} +
+ + {/* Main container */} +
+ {/* Logo or fallback */} + {logoUrl && !logoError ? ( +
+ {settings.company_name setLogoError(true)} + /> +
+ ) : ( +
+ +
+ )} + + {/* Loading bar */} +
+
+
+
+
+ + {/* Loading text */} +

+ {settings.company_name ? `Loading ${settings.company_name}...` : 'Loading...'} +

+
+ + {/* CSS for shimmer animation */} + +
+ ); +}; + +export default Preloader; + diff --git a/Frontend/src/components/layout/Header.tsx b/Frontend/src/components/layout/Header.tsx index 9bfbb189..3d03e129 100644 --- a/Frontend/src/components/layout/Header.tsx +++ b/Frontend/src/components/layout/Header.tsx @@ -13,11 +13,13 @@ import { Mail, Calendar, Star, + Users, } from 'lucide-react'; import { useClickOutside } from '../../hooks/useClickOutside'; import { useCompanySettings } from '../../contexts/CompanySettingsContext'; import { useAuthModal } from '../../contexts/AuthModalContext'; import { normalizeImageUrl } from '../../utils/imageUtils'; +import InAppNotificationBell from '../notifications/InAppNotificationBell'; interface HeaderProps { isAuthenticated?: boolean; @@ -76,7 +78,6 @@ const Header: React.FC = ({ return (
- {}
@@ -96,10 +97,8 @@ const Header: React.FC = ({
- {}
- {} - + + {userInfo?.name?.charAt(0) + .toUpperCase()} + +
+ )} + + {userInfo?.name} + + - {} - {isUserMenuOpen && ( + {isUserMenuOpen && (
Loyalty Program + setIsUserMenuOpen(false)} + className="flex items-center space-x-3 + px-4 py-2.5 text-white/90 + hover:bg-[#d4af37]/10 hover:text-[#d4af37] + transition-all duration-300 border-l-2 border-transparent + hover:border-[#d4af37]" + > + + Group Bookings + )} {userInfo?.role === 'admin' && ( @@ -356,79 +368,74 @@ const Header: React.FC = ({ Logout
- )} + )} +
)} + +
- {} - - - - {} - {isMobileMenuOpen && ( -
-
- setIsMobileMenuOpen(false)} - className="px-4 py-3 text-white/90 - hover:bg-[#d4af37]/10 hover:text-[#d4af37] - rounded-sm transition-all duration-300 - border-l-2 border-transparent - hover:border-[#d4af37] font-light tracking-wide" - > - Home - - setIsMobileMenuOpen(false)} - className="px-4 py-3 text-white/90 - hover:bg-[#d4af37]/10 hover:text-[#d4af37] - rounded-sm transition-all duration-300 - border-l-2 border-transparent - hover:border-[#d4af37] font-light tracking-wide" - > - Rooms - - setIsMobileMenuOpen(false)} - className="px-4 py-3 text-white/90 - hover:bg-[#d4af37]/10 hover:text-[#d4af37] - rounded-sm transition-all duration-300 - border-l-2 border-transparent - hover:border-[#d4af37] font-light tracking-wide" - > - About - - setIsMobileMenuOpen(false)} - className="px-4 py-3 text-white/90 - hover:bg-[#d4af37]/10 hover:text-[#d4af37] - rounded-sm transition-all duration-300 - border-l-2 border-transparent - hover:border-[#d4af37] font-light tracking-wide" - > - Contact - - -
+
+ setIsMobileMenuOpen(false)} + className="px-4 py-3 text-white/90 + hover:bg-[#d4af37]/10 hover:text-[#d4af37] + rounded-sm transition-all duration-300 + border-l-2 border-transparent + hover:border-[#d4af37] font-light tracking-wide" + > + Home + + setIsMobileMenuOpen(false)} + className="px-4 py-3 text-white/90 + hover:bg-[#d4af37]/10 hover:text-[#d4af37] + rounded-sm transition-all duration-300 + border-l-2 border-transparent + hover:border-[#d4af37] font-light tracking-wide" + > + Rooms + + setIsMobileMenuOpen(false)} + className="px-4 py-3 text-white/90 + hover:bg-[#d4af37]/10 hover:text-[#d4af37] + rounded-sm transition-all duration-300 + border-l-2 border-transparent + hover:border-[#d4af37] font-light tracking-wide" + > + About + + setIsMobileMenuOpen(false)} + className="px-4 py-3 text-white/90 + hover:bg-[#d4af37]/10 hover:text-[#d4af37] + rounded-sm transition-all duration-300 + border-l-2 border-transparent + hover:border-[#d4af37] font-light tracking-wide" + > + Contact + + +
{!isAuthenticated ? ( @@ -570,10 +577,11 @@ const Header: React.FC = ({ )} +
-
- )} + )} +
); diff --git a/Frontend/src/components/layout/SidebarAccountant.tsx b/Frontend/src/components/layout/SidebarAccountant.tsx index 55c9c64f..f8fa9540 100644 --- a/Frontend/src/components/layout/SidebarAccountant.tsx +++ b/Frontend/src/components/layout/SidebarAccountant.tsx @@ -13,6 +13,7 @@ import { Receipt } from 'lucide-react'; import useAuthStore from '../../store/useAuthStore'; +import { useResponsive } from '../../hooks'; interface SidebarAccountantProps { isCollapsed?: boolean; @@ -24,11 +25,11 @@ const SidebarAccountant: React.FC = ({ onToggle }) => { const [internalCollapsed, setInternalCollapsed] = useState(false); - const [isMobile, setIsMobile] = useState(false); const [isMobileOpen, setIsMobileOpen] = useState(false); const location = useLocation(); const navigate = useNavigate(); const { logout } = useAuthStore(); + const { isMobile, isTablet, isDesktop } = useResponsive(); const handleLogout = async () => { try { @@ -42,19 +43,12 @@ const SidebarAccountant: React.FC = ({ } }; - // Handle mobile responsiveness + // Close mobile menu when screen becomes desktop useEffect(() => { - const checkMobile = () => { - setIsMobile(window.innerWidth < 1024); - if (window.innerWidth >= 1024) { - setIsMobileOpen(false); - } - }; - - checkMobile(); - window.addEventListener('resize', checkMobile); - return () => window.removeEventListener('resize', checkMobile); - }, []); + if (isDesktop) { + setIsMobileOpen(false); + } + }, [isDesktop]); const isCollapsed = controlledCollapsed !== undefined diff --git a/Frontend/src/components/layout/SidebarAdmin.tsx b/Frontend/src/components/layout/SidebarAdmin.tsx index ebdc9bf7..cafc7646 100644 --- a/Frontend/src/components/layout/SidebarAdmin.tsx +++ b/Frontend/src/components/layout/SidebarAdmin.tsx @@ -14,25 +14,48 @@ import { Menu, X, Award, - User + User, + Workflow, + CheckSquare, + Bell, + UserCheck, + Hotel, + Tag, + Package, + Shield, + Mail, + TrendingUp, + Building2, + Crown } from 'lucide-react'; import useAuthStore from '../../store/useAuthStore'; +import { useResponsive } from '../../hooks'; interface SidebarAdminProps { isCollapsed?: boolean; onToggle?: () => void; } +interface MenuGroup { + title: string; + icon?: React.ComponentType<{ className?: string }>; + items: Array<{ + path: string; + icon: React.ComponentType<{ className?: string }>; + label: string; + }>; +} + const SidebarAdmin: React.FC = ({ isCollapsed: controlledCollapsed, onToggle }) => { const [internalCollapsed, setInternalCollapsed] = useState(false); - const [isMobile, setIsMobile] = useState(false); const [isMobileOpen, setIsMobileOpen] = useState(false); const location = useLocation(); const navigate = useNavigate(); const { logout } = useAuthStore(); + const { isMobile, isTablet, isDesktop } = useResponsive(); const handleLogout = async () => { try { @@ -46,19 +69,12 @@ const SidebarAdmin: React.FC = ({ } }; - + // Close mobile menu when screen becomes desktop useEffect(() => { - const checkMobile = () => { - setIsMobile(window.innerWidth < 1024); - if (window.innerWidth >= 1024) { - setIsMobileOpen(false); - } - }; - - checkMobile(); - window.addEventListener('resize', checkMobile); - return () => window.removeEventListener('resize', checkMobile); - }, []); + if (isDesktop) { + setIsMobileOpen(false); + } + }, [isDesktop]); const isCollapsed = controlledCollapsed !== undefined @@ -83,63 +99,161 @@ const SidebarAdmin: React.FC = ({ } }; - const menuItems = [ - { - path: '/admin/dashboard', - icon: LayoutDashboard, - label: 'Dashboard' + const menuGroups: MenuGroup[] = [ + { + title: 'Overview', + icon: LayoutDashboard, + items: [ + { + path: '/admin/dashboard', + icon: LayoutDashboard, + label: 'Dashboard' + }, + ] }, - { - path: '/admin/users', - icon: Users, - label: 'Users' + { + title: 'Operations', + icon: Building2, + items: [ + { + path: '/admin/reception', + icon: LogIn, + label: 'Reception' + }, + { + path: '/admin/advanced-rooms', + icon: Hotel, + label: 'Room Management' + }, + ] }, - { - path: '/admin/guest-profiles', - icon: User, - label: 'Guest Profiles' + { + title: 'Business', + icon: TrendingUp, + items: [ + { + path: '/admin/business', + icon: FileText, + label: 'Business Dashboard' + }, + ] }, - { - path: '/admin/loyalty', - icon: Award, - label: 'Loyalty Program' + { + title: 'Analytics & Reports', + icon: BarChart3, + items: [ + { + path: '/admin/analytics', + icon: BarChart3, + label: 'Analytics' + }, + ] }, - { - path: '/admin/business', - icon: FileText, - label: 'Business' + { + title: 'Users & Guests', + icon: Users, + items: [ + { + path: '/admin/users', + icon: Users, + label: 'Users' + }, + { + path: '/admin/guest-profiles', + icon: User, + label: 'Guest Profiles' + }, + { + path: '/admin/group-bookings', + icon: UserCheck, + label: 'Group Bookings' + }, + { + path: '/admin/loyalty', + icon: Award, + label: 'Loyalty Program' + }, + ] }, - { - path: '/admin/reception', - icon: LogIn, - label: 'Reception' + { + title: 'Products & Pricing', + icon: Tag, + items: [ + { + path: '/admin/rate-plans', + icon: Tag, + label: 'Rate Plans' + }, + { + path: '/admin/packages', + icon: Package, + label: 'Packages' + }, + ] }, - { - path: '/admin/page-content', - icon: Globe, - label: 'Page Content' + { + title: 'Marketing', + icon: Mail, + items: [ + { + path: '/admin/email-campaigns', + icon: Mail, + label: 'Email Campaigns' + }, + ] }, - { - path: '/admin/analytics', - icon: BarChart3, - label: 'Analytics' + { + title: 'Content Management', + icon: Globe, + items: [ + { + path: '/admin/page-content', + icon: Globe, + label: 'Page Content' + }, + ] }, - { - path: '/admin/settings', - icon: Settings, - label: 'Settings' + { + title: 'System', + icon: Settings, + items: [ + { + path: '/admin/security', + icon: Shield, + label: 'Security' + }, + { + path: '/admin/tasks', + icon: CheckSquare, + label: 'Tasks' + }, + { + path: '/admin/workflows', + icon: Workflow, + label: 'Workflows' + }, + { + path: '/admin/notifications', + icon: Bell, + label: 'Notifications' + }, + { + path: '/admin/settings', + icon: Settings, + label: 'Settings' + }, + ] }, ]; const isActive = (path: string) => { - if (location.pathname === path) return true; - if (path === '/admin/settings' || path === '/admin/analytics' || path === '/admin/business' || path === '/admin/reception' || path === '/admin/page-content' || path === '/admin/loyalty') { + if (path === '/admin/settings' || path === '/admin/analytics' || path === '/admin/business' || path === '/admin/reception' || path === '/admin/advanced-rooms' || path === '/admin/page-content' || path === '/admin/loyalty') { return location.pathname === path; } - if (path === '/admin/reception') { + if (path === '/admin/reception' || path === '/admin/advanced-rooms') { return location.pathname === path || location.pathname.startsWith(`${path}/`); } @@ -148,168 +262,242 @@ const SidebarAdmin: React.FC = ({ return ( <> - {} - {isMobile && ( - - )} + {/* Mobile Menu Button - Always visible on mobile screens */} + - {} - {isMobile && isMobileOpen && ( + {/* Mobile Overlay */} + {isMobileOpen && (
)} - {} diff --git a/Frontend/src/components/layout/SidebarStaff.tsx b/Frontend/src/components/layout/SidebarStaff.tsx index 5e6de804..d8fa7a9d 100644 --- a/Frontend/src/components/layout/SidebarStaff.tsx +++ b/Frontend/src/components/layout/SidebarStaff.tsx @@ -13,10 +13,12 @@ import { CreditCard, MessageCircle, Award, - Users + Users, + Wrench } from 'lucide-react'; import useAuthStore from '../../store/useAuthStore'; import { useChatNotifications } from '../../contexts/ChatNotificationContext'; +import { useResponsive } from '../../hooks'; interface SidebarStaffProps { isCollapsed?: boolean; @@ -28,12 +30,12 @@ const SidebarStaff: React.FC = ({ onToggle }) => { const [internalCollapsed, setInternalCollapsed] = useState(false); - const [isMobile, setIsMobile] = useState(false); const [isMobileOpen, setIsMobileOpen] = useState(false); const location = useLocation(); const navigate = useNavigate(); const { logout } = useAuthStore(); const { unreadCount } = useChatNotifications(); + const { isMobile, isTablet, isDesktop } = useResponsive(); const handleLogout = async () => { try { @@ -47,19 +49,12 @@ const SidebarStaff: React.FC = ({ } }; - + // Close mobile menu when screen becomes desktop useEffect(() => { - const checkMobile = () => { - setIsMobile(window.innerWidth < 1024); - if (window.innerWidth >= 1024) { - setIsMobileOpen(false); - } - }; - - checkMobile(); - window.addEventListener('resize', checkMobile); - return () => window.removeEventListener('resize', checkMobile); - }, []); + if (isDesktop) { + setIsMobileOpen(false); + } + }, [isDesktop]); const isCollapsed = controlledCollapsed !== undefined @@ -116,6 +111,11 @@ const SidebarStaff: React.FC = ({ icon: Users, label: 'Guest Profiles' }, + { + path: '/staff/advanced-rooms', + icon: Wrench, + label: 'Room Management' + }, { path: '/staff/chats', icon: MessageCircle, diff --git a/Frontend/src/components/notifications/InAppNotificationBell.tsx b/Frontend/src/components/notifications/InAppNotificationBell.tsx new file mode 100644 index 00000000..1c5f0e5d --- /dev/null +++ b/Frontend/src/components/notifications/InAppNotificationBell.tsx @@ -0,0 +1,139 @@ +import React, { useState, useEffect } from 'react'; +import { Bell, X } from 'lucide-react'; +import { toast } from 'react-toastify'; +import notificationService, { Notification } from '../../services/api/notificationService'; +import { formatDate } from '../../utils/format'; + +const InAppNotificationBell: React.FC = () => { + const [notifications, setNotifications] = useState([]); + const [unreadCount, setUnreadCount] = useState(0); + const [showDropdown, setShowDropdown] = useState(false); + const [loading, setLoading] = useState(false); + + useEffect(() => { + loadNotifications(); + // Poll for new notifications every 30 seconds + const interval = setInterval(loadNotifications, 30000); + return () => clearInterval(interval); + }, []); + + const loadNotifications = async () => { + try { + const response = await notificationService.getMyNotifications({ + status: 'delivered', + limit: 10, + }); + const notifs = response.data.data || []; + setNotifications(notifs); + setUnreadCount(notifs.filter(n => !n.read_at).length); + } catch (error) { + // Silently fail + } + }; + + const handleMarkAsRead = async (notificationId: number) => { + try { + await notificationService.markAsRead(notificationId); + setNotifications(notifications.map(n => + n.id === notificationId ? { ...n, status: 'read' as any, read_at: new Date().toISOString() } : n + )); + setUnreadCount(Math.max(0, unreadCount - 1)); + } catch (error: any) { + toast.error(error.message || 'Failed to mark as read'); + } + }; + + const handleMarkAllAsRead = async () => { + try { + setLoading(true); + const unread = notifications.filter(n => !n.read_at); + await Promise.all(unread.map(n => notificationService.markAsRead(n.id))); + setNotifications(notifications.map(n => ({ ...n, status: 'read' as any, read_at: new Date().toISOString() }))); + setUnreadCount(0); + } catch (error: any) { + toast.error(error.message || 'Failed to mark all as read'); + } finally { + setLoading(false); + } + }; + + return ( +
+ + + {showDropdown && ( + <> +
setShowDropdown(false)} + /> +
+
+

Notifications

+ {unreadCount > 0 && ( + + )} +
+
+ {notifications.length === 0 ? ( +
+ No notifications +
+ ) : ( + notifications.map((notification) => ( +
{ + if (!notification.read_at) { + handleMarkAsRead(notification.id); + } + }} + > +
+
+

+ {notification.subject || notification.notification_type.replace('_', ' ')} +

+

+ {notification.content} +

+

+ {formatDate(new Date(notification.created_at), 'short')} +

+
+ {!notification.read_at && ( +
+ )} +
+
+ )) + )} +
+
+ + )} +
+ ); +}; + +export default InAppNotificationBell; + diff --git a/Frontend/src/components/notifications/NotificationPreferences.tsx b/Frontend/src/components/notifications/NotificationPreferences.tsx new file mode 100644 index 00000000..15836892 --- /dev/null +++ b/Frontend/src/components/notifications/NotificationPreferences.tsx @@ -0,0 +1,302 @@ +import React, { useState, useEffect } from 'react'; +import { Bell, Mail, MessageSquare, Smartphone, Save } from 'lucide-react'; +import { toast } from 'react-toastify'; +import { Loading } from '../common'; +import notificationService, { NotificationPreferences } from '../../services/api/notificationService'; + +const NotificationPreferences: React.FC = () => { + const [preferences, setPreferences] = useState(null); + const [loading, setLoading] = useState(true); + const [saving, setSaving] = useState(false); + + useEffect(() => { + loadPreferences(); + }, []); + + const loadPreferences = async () => { + try { + setLoading(true); + const response = await notificationService.getPreferences(); + setPreferences(response.data.data); + } catch (error: any) { + toast.error(error.message || 'Failed to load preferences'); + } finally { + setLoading(false); + } + }; + + const handleSave = async () => { + if (!preferences) return; + + try { + setSaving(true); + await notificationService.updatePreferences(preferences); + toast.success('Preferences saved successfully'); + } catch (error: any) { + toast.error(error.message || 'Failed to save preferences'); + } finally { + setSaving(false); + } + }; + + const updatePreference = (key: keyof NotificationPreferences, value: boolean) => { + if (preferences) { + setPreferences({ ...preferences, [key]: value }); + } + }; + + if (loading) { + return ; + } + + if (!preferences) { + return
Failed to load preferences
; + } + + return ( +
+
+

Notification Preferences

+ + {/* Global Channel Preferences */} +
+

Channel Preferences

+
+ + + + +
+
+ + {/* Type-Specific Preferences */} +
+
+

Booking Confirmations

+
+ + +
+
+ +
+

Payment Receipts

+
+ + +
+
+ +
+

Reminders

+
+
+

Pre-Arrival

+
+ + +
+
+
+

Check-In

+
+ + +
+
+
+

Check-Out

+
+ + +
+
+
+
+ +
+

Marketing & Updates

+
+
+

Marketing Campaigns

+
+ + +
+
+
+

Loyalty Updates

+
+ + +
+
+
+
+
+ +
+ +
+
+
+ ); +}; + +export default NotificationPreferences; + diff --git a/Frontend/src/components/notifications/NotificationTemplatesModal.tsx b/Frontend/src/components/notifications/NotificationTemplatesModal.tsx new file mode 100644 index 00000000..7345fa88 --- /dev/null +++ b/Frontend/src/components/notifications/NotificationTemplatesModal.tsx @@ -0,0 +1,210 @@ +import React, { useState, useEffect } from 'react'; +import { X, Plus, Trash2, Edit } from 'lucide-react'; +import { toast } from 'react-toastify'; +import notificationService, { NotificationTemplate } from '../../services/api/notificationService'; + +interface NotificationTemplatesModalProps { + onClose: () => void; +} + +const NotificationTemplatesModal: React.FC = ({ onClose }) => { + const [templates, setTemplates] = useState([]); + const [loading, setLoading] = useState(true); + const [showCreate, setShowCreate] = useState(false); + const [formData, setFormData] = useState({ + name: '', + notification_type: 'booking_confirmation', + channel: 'email', + subject: '', + content: '', + }); + + useEffect(() => { + loadTemplates(); + }, []); + + const loadTemplates = async () => { + try { + setLoading(true); + const response = await notificationService.getTemplates(); + setTemplates(response.data.data || []); + } catch (error: any) { + toast.error(error.message || 'Failed to load templates'); + } finally { + setLoading(false); + } + }; + + const handleCreate = async (e: React.FormEvent) => { + e.preventDefault(); + + if (!formData.name.trim() || !formData.content.trim()) { + toast.error('Name and content are required'); + return; + } + + try { + await notificationService.createTemplate(formData); + toast.success('Template created successfully'); + setShowCreate(false); + setFormData({ + name: '', + notification_type: 'booking_confirmation', + channel: 'email', + subject: '', + content: '', + }); + loadTemplates(); + } catch (error: any) { + toast.error(error.message || 'Failed to create template'); + } + }; + + return ( +
+
+
+

Notification Templates

+
+ + +
+
+ +
+ {showCreate ? ( + +
+ + setFormData({ ...formData, name: e.target.value })} + className="w-full px-4 py-2 border border-gray-300 rounded-lg focus:ring-2 focus:ring-indigo-500 focus:border-indigo-500" + required + /> +
+
+
+ + +
+
+ + +
+
+ {(formData.channel === 'email' || formData.channel === 'push') && ( +
+ + setFormData({ ...formData, subject: e.target.value })} + className="w-full px-4 py-2 border border-gray-300 rounded-lg focus:ring-2 focus:ring-indigo-500 focus:border-indigo-500" + placeholder="Use {{variable_name}} for variables" + /> +
+ )} +
+ +

' : '\U0001d4ab', + '\\' : '\U0001d4ac', + '\\' : '\U0000211b', + '\\' : '\U0001d4ae', + '\\' : '\U0001d4af', + '\\' : '\U0001d4b0', + '\\' : '\U0001d4b1', + '\\' : '\U0001d4b2', + '\\' : '\U0001d4b3', + '\\' : '\U0001d4b4', + '\\' : '\U0001d4b5', + '\\' : '\U0001d5ba', + '\\' : '\U0001d5bb', + '\\' : '\U0001d5bc', + '\\' : '\U0001d5bd', + '\\' : '\U0001d5be', + '\\' : '\U0001d5bf', + '\\' : '\U0001d5c0', + '\\' : '\U0001d5c1', + '\\' : '\U0001d5c2', + '\\' : '\U0001d5c3', + '\\' : '\U0001d5c4', + '\\' : '\U0001d5c5', + '\\' : '\U0001d5c6', + '\\' : '\U0001d5c7', + '\\' : '\U0001d5c8', + '\\