update to python fastpi
This commit is contained in:
377
Backend/venv/lib/python3.12/site-packages/pydantic/__init__.py
Normal file
377
Backend/venv/lib/python3.12/site-packages/pydantic/__init__.py
Normal file
@@ -0,0 +1,377 @@
|
||||
import typing
|
||||
|
||||
from ._migration import getattr_migration
|
||||
from .version import VERSION
|
||||
|
||||
if typing.TYPE_CHECKING:
|
||||
# import of virtually everything is supported via `__getattr__` below,
|
||||
# but we need them here for type checking and IDE support
|
||||
import pydantic_core
|
||||
from pydantic_core.core_schema import (
|
||||
FieldSerializationInfo,
|
||||
SerializationInfo,
|
||||
SerializerFunctionWrapHandler,
|
||||
ValidationInfo,
|
||||
ValidatorFunctionWrapHandler,
|
||||
)
|
||||
|
||||
from . import dataclasses
|
||||
from ._internal._generate_schema import GenerateSchema as GenerateSchema
|
||||
from .annotated_handlers import GetCoreSchemaHandler, GetJsonSchemaHandler
|
||||
from .config import ConfigDict
|
||||
from .errors import *
|
||||
from .fields import AliasChoices, AliasPath, Field, PrivateAttr, computed_field
|
||||
from .functional_serializers import (
|
||||
PlainSerializer,
|
||||
SerializeAsAny,
|
||||
WrapSerializer,
|
||||
field_serializer,
|
||||
model_serializer,
|
||||
)
|
||||
from .functional_validators import (
|
||||
AfterValidator,
|
||||
BeforeValidator,
|
||||
InstanceOf,
|
||||
PlainValidator,
|
||||
SkipValidation,
|
||||
WrapValidator,
|
||||
field_validator,
|
||||
model_validator,
|
||||
)
|
||||
from .json_schema import WithJsonSchema
|
||||
from .main import *
|
||||
from .networks import *
|
||||
from .type_adapter import TypeAdapter
|
||||
from .types import *
|
||||
from .validate_call_decorator import validate_call
|
||||
from .warnings import PydanticDeprecatedSince20, PydanticDeprecationWarning
|
||||
|
||||
# this encourages pycharm to import `ValidationError` from here, not pydantic_core
|
||||
ValidationError = pydantic_core.ValidationError
|
||||
from .deprecated.class_validators import root_validator, validator
|
||||
from .deprecated.config import BaseConfig, Extra
|
||||
from .deprecated.tools import *
|
||||
from .root_model import RootModel
|
||||
|
||||
__version__ = VERSION
|
||||
__all__ = (
|
||||
# dataclasses
|
||||
'dataclasses',
|
||||
# functional validators
|
||||
'field_validator',
|
||||
'model_validator',
|
||||
'AfterValidator',
|
||||
'BeforeValidator',
|
||||
'PlainValidator',
|
||||
'WrapValidator',
|
||||
'SkipValidation',
|
||||
'InstanceOf',
|
||||
# JSON Schema
|
||||
'WithJsonSchema',
|
||||
# deprecated V1 functional validators, these are imported via `__getattr__` below
|
||||
'root_validator',
|
||||
'validator',
|
||||
# functional serializers
|
||||
'field_serializer',
|
||||
'model_serializer',
|
||||
'PlainSerializer',
|
||||
'SerializeAsAny',
|
||||
'WrapSerializer',
|
||||
# config
|
||||
'ConfigDict',
|
||||
# deprecated V1 config, these are imported via `__getattr__` below
|
||||
'BaseConfig',
|
||||
'Extra',
|
||||
# validate_call
|
||||
'validate_call',
|
||||
# errors
|
||||
'PydanticErrorCodes',
|
||||
'PydanticUserError',
|
||||
'PydanticSchemaGenerationError',
|
||||
'PydanticImportError',
|
||||
'PydanticUndefinedAnnotation',
|
||||
'PydanticInvalidForJsonSchema',
|
||||
# fields
|
||||
'AliasPath',
|
||||
'AliasChoices',
|
||||
'Field',
|
||||
'computed_field',
|
||||
'PrivateAttr',
|
||||
# main
|
||||
'BaseModel',
|
||||
'create_model',
|
||||
# network
|
||||
'AnyUrl',
|
||||
'AnyHttpUrl',
|
||||
'FileUrl',
|
||||
'HttpUrl',
|
||||
'UrlConstraints',
|
||||
'EmailStr',
|
||||
'NameEmail',
|
||||
'IPvAnyAddress',
|
||||
'IPvAnyInterface',
|
||||
'IPvAnyNetwork',
|
||||
'PostgresDsn',
|
||||
'CockroachDsn',
|
||||
'AmqpDsn',
|
||||
'RedisDsn',
|
||||
'MongoDsn',
|
||||
'KafkaDsn',
|
||||
'MySQLDsn',
|
||||
'MariaDBDsn',
|
||||
'validate_email',
|
||||
# root_model
|
||||
'RootModel',
|
||||
# deprecated tools, these are imported via `__getattr__` below
|
||||
'parse_obj_as',
|
||||
'schema_of',
|
||||
'schema_json_of',
|
||||
# types
|
||||
'Strict',
|
||||
'StrictStr',
|
||||
'conbytes',
|
||||
'conlist',
|
||||
'conset',
|
||||
'confrozenset',
|
||||
'constr',
|
||||
'StringConstraints',
|
||||
'ImportString',
|
||||
'conint',
|
||||
'PositiveInt',
|
||||
'NegativeInt',
|
||||
'NonNegativeInt',
|
||||
'NonPositiveInt',
|
||||
'confloat',
|
||||
'PositiveFloat',
|
||||
'NegativeFloat',
|
||||
'NonNegativeFloat',
|
||||
'NonPositiveFloat',
|
||||
'FiniteFloat',
|
||||
'condecimal',
|
||||
'condate',
|
||||
'UUID1',
|
||||
'UUID3',
|
||||
'UUID4',
|
||||
'UUID5',
|
||||
'FilePath',
|
||||
'DirectoryPath',
|
||||
'NewPath',
|
||||
'Json',
|
||||
'SecretStr',
|
||||
'SecretBytes',
|
||||
'StrictBool',
|
||||
'StrictBytes',
|
||||
'StrictInt',
|
||||
'StrictFloat',
|
||||
'PaymentCardNumber',
|
||||
'ByteSize',
|
||||
'PastDate',
|
||||
'FutureDate',
|
||||
'PastDatetime',
|
||||
'FutureDatetime',
|
||||
'AwareDatetime',
|
||||
'NaiveDatetime',
|
||||
'AllowInfNan',
|
||||
'EncoderProtocol',
|
||||
'EncodedBytes',
|
||||
'EncodedStr',
|
||||
'Base64Encoder',
|
||||
'Base64Bytes',
|
||||
'Base64Str',
|
||||
'Base64UrlBytes',
|
||||
'Base64UrlStr',
|
||||
'GetPydanticSchema',
|
||||
'Tag',
|
||||
'Discriminator',
|
||||
'JsonValue',
|
||||
# type_adapter
|
||||
'TypeAdapter',
|
||||
# version
|
||||
'__version__',
|
||||
'VERSION',
|
||||
# warnings
|
||||
'PydanticDeprecatedSince20',
|
||||
'PydanticDeprecationWarning',
|
||||
# annotated handlers
|
||||
'GetCoreSchemaHandler',
|
||||
'GetJsonSchemaHandler',
|
||||
# generate schema from ._internal
|
||||
'GenerateSchema',
|
||||
# pydantic_core
|
||||
'ValidationError',
|
||||
'ValidationInfo',
|
||||
'SerializationInfo',
|
||||
'ValidatorFunctionWrapHandler',
|
||||
'FieldSerializationInfo',
|
||||
'SerializerFunctionWrapHandler',
|
||||
)
|
||||
|
||||
# A mapping of {<member name>: (package, <module name>)} defining dynamic imports
|
||||
_dynamic_imports: 'dict[str, tuple[str, str]]' = {
|
||||
'dataclasses': (__package__, '__module__'),
|
||||
# functional validators
|
||||
'field_validator': (__package__, '.functional_validators'),
|
||||
'model_validator': (__package__, '.functional_validators'),
|
||||
'AfterValidator': (__package__, '.functional_validators'),
|
||||
'BeforeValidator': (__package__, '.functional_validators'),
|
||||
'PlainValidator': (__package__, '.functional_validators'),
|
||||
'WrapValidator': (__package__, '.functional_validators'),
|
||||
'SkipValidation': (__package__, '.functional_validators'),
|
||||
'InstanceOf': (__package__, '.functional_validators'),
|
||||
# JSON Schema
|
||||
'WithJsonSchema': (__package__, '.json_schema'),
|
||||
# functional serializers
|
||||
'field_serializer': (__package__, '.functional_serializers'),
|
||||
'model_serializer': (__package__, '.functional_serializers'),
|
||||
'PlainSerializer': (__package__, '.functional_serializers'),
|
||||
'SerializeAsAny': (__package__, '.functional_serializers'),
|
||||
'WrapSerializer': (__package__, '.functional_serializers'),
|
||||
# config
|
||||
'ConfigDict': (__package__, '.config'),
|
||||
# validate call
|
||||
'validate_call': (__package__, '.validate_call_decorator'),
|
||||
# errors
|
||||
'PydanticErrorCodes': (__package__, '.errors'),
|
||||
'PydanticUserError': (__package__, '.errors'),
|
||||
'PydanticSchemaGenerationError': (__package__, '.errors'),
|
||||
'PydanticImportError': (__package__, '.errors'),
|
||||
'PydanticUndefinedAnnotation': (__package__, '.errors'),
|
||||
'PydanticInvalidForJsonSchema': (__package__, '.errors'),
|
||||
# fields
|
||||
'AliasPath': (__package__, '.fields'),
|
||||
'AliasChoices': (__package__, '.fields'),
|
||||
'Field': (__package__, '.fields'),
|
||||
'computed_field': (__package__, '.fields'),
|
||||
'PrivateAttr': (__package__, '.fields'),
|
||||
# main
|
||||
'BaseModel': (__package__, '.main'),
|
||||
'create_model': (__package__, '.main'),
|
||||
# network
|
||||
'AnyUrl': (__package__, '.networks'),
|
||||
'AnyHttpUrl': (__package__, '.networks'),
|
||||
'FileUrl': (__package__, '.networks'),
|
||||
'HttpUrl': (__package__, '.networks'),
|
||||
'UrlConstraints': (__package__, '.networks'),
|
||||
'EmailStr': (__package__, '.networks'),
|
||||
'NameEmail': (__package__, '.networks'),
|
||||
'IPvAnyAddress': (__package__, '.networks'),
|
||||
'IPvAnyInterface': (__package__, '.networks'),
|
||||
'IPvAnyNetwork': (__package__, '.networks'),
|
||||
'PostgresDsn': (__package__, '.networks'),
|
||||
'CockroachDsn': (__package__, '.networks'),
|
||||
'AmqpDsn': (__package__, '.networks'),
|
||||
'RedisDsn': (__package__, '.networks'),
|
||||
'MongoDsn': (__package__, '.networks'),
|
||||
'KafkaDsn': (__package__, '.networks'),
|
||||
'MySQLDsn': (__package__, '.networks'),
|
||||
'MariaDBDsn': (__package__, '.networks'),
|
||||
'validate_email': (__package__, '.networks'),
|
||||
# root_model
|
||||
'RootModel': (__package__, '.root_model'),
|
||||
# types
|
||||
'Strict': (__package__, '.types'),
|
||||
'StrictStr': (__package__, '.types'),
|
||||
'conbytes': (__package__, '.types'),
|
||||
'conlist': (__package__, '.types'),
|
||||
'conset': (__package__, '.types'),
|
||||
'confrozenset': (__package__, '.types'),
|
||||
'constr': (__package__, '.types'),
|
||||
'StringConstraints': (__package__, '.types'),
|
||||
'ImportString': (__package__, '.types'),
|
||||
'conint': (__package__, '.types'),
|
||||
'PositiveInt': (__package__, '.types'),
|
||||
'NegativeInt': (__package__, '.types'),
|
||||
'NonNegativeInt': (__package__, '.types'),
|
||||
'NonPositiveInt': (__package__, '.types'),
|
||||
'confloat': (__package__, '.types'),
|
||||
'PositiveFloat': (__package__, '.types'),
|
||||
'NegativeFloat': (__package__, '.types'),
|
||||
'NonNegativeFloat': (__package__, '.types'),
|
||||
'NonPositiveFloat': (__package__, '.types'),
|
||||
'FiniteFloat': (__package__, '.types'),
|
||||
'condecimal': (__package__, '.types'),
|
||||
'condate': (__package__, '.types'),
|
||||
'UUID1': (__package__, '.types'),
|
||||
'UUID3': (__package__, '.types'),
|
||||
'UUID4': (__package__, '.types'),
|
||||
'UUID5': (__package__, '.types'),
|
||||
'FilePath': (__package__, '.types'),
|
||||
'DirectoryPath': (__package__, '.types'),
|
||||
'NewPath': (__package__, '.types'),
|
||||
'Json': (__package__, '.types'),
|
||||
'SecretStr': (__package__, '.types'),
|
||||
'SecretBytes': (__package__, '.types'),
|
||||
'StrictBool': (__package__, '.types'),
|
||||
'StrictBytes': (__package__, '.types'),
|
||||
'StrictInt': (__package__, '.types'),
|
||||
'StrictFloat': (__package__, '.types'),
|
||||
'PaymentCardNumber': (__package__, '.types'),
|
||||
'ByteSize': (__package__, '.types'),
|
||||
'PastDate': (__package__, '.types'),
|
||||
'FutureDate': (__package__, '.types'),
|
||||
'PastDatetime': (__package__, '.types'),
|
||||
'FutureDatetime': (__package__, '.types'),
|
||||
'AwareDatetime': (__package__, '.types'),
|
||||
'NaiveDatetime': (__package__, '.types'),
|
||||
'AllowInfNan': (__package__, '.types'),
|
||||
'EncoderProtocol': (__package__, '.types'),
|
||||
'EncodedBytes': (__package__, '.types'),
|
||||
'EncodedStr': (__package__, '.types'),
|
||||
'Base64Encoder': (__package__, '.types'),
|
||||
'Base64Bytes': (__package__, '.types'),
|
||||
'Base64Str': (__package__, '.types'),
|
||||
'Base64UrlBytes': (__package__, '.types'),
|
||||
'Base64UrlStr': (__package__, '.types'),
|
||||
'GetPydanticSchema': (__package__, '.types'),
|
||||
'Tag': (__package__, '.types'),
|
||||
'Discriminator': (__package__, '.types'),
|
||||
'JsonValue': (__package__, '.types'),
|
||||
# type_adapter
|
||||
'TypeAdapter': (__package__, '.type_adapter'),
|
||||
# warnings
|
||||
'PydanticDeprecatedSince20': (__package__, '.warnings'),
|
||||
'PydanticDeprecationWarning': (__package__, '.warnings'),
|
||||
# annotated handlers
|
||||
'GetCoreSchemaHandler': (__package__, '.annotated_handlers'),
|
||||
'GetJsonSchemaHandler': (__package__, '.annotated_handlers'),
|
||||
# generate schema from ._internal
|
||||
'GenerateSchema': (__package__, '._internal._generate_schema'),
|
||||
# pydantic_core stuff
|
||||
'ValidationError': ('pydantic_core', '.'),
|
||||
'ValidationInfo': ('pydantic_core', '.core_schema'),
|
||||
'SerializationInfo': ('pydantic_core', '.core_schema'),
|
||||
'ValidatorFunctionWrapHandler': ('pydantic_core', '.core_schema'),
|
||||
'FieldSerializationInfo': ('pydantic_core', '.core_schema'),
|
||||
'SerializerFunctionWrapHandler': ('pydantic_core', '.core_schema'),
|
||||
# deprecated, mostly not included in __all__
|
||||
'root_validator': (__package__, '.deprecated.class_validators'),
|
||||
'validator': (__package__, '.deprecated.class_validators'),
|
||||
'BaseConfig': (__package__, '.deprecated.config'),
|
||||
'Extra': (__package__, '.deprecated.config'),
|
||||
'parse_obj_as': (__package__, '.deprecated.tools'),
|
||||
'schema_of': (__package__, '.deprecated.tools'),
|
||||
'schema_json_of': (__package__, '.deprecated.tools'),
|
||||
'FieldValidationInfo': ('pydantic_core', '.core_schema'),
|
||||
}
|
||||
|
||||
_getattr_migration = getattr_migration(__name__)
|
||||
|
||||
|
||||
def __getattr__(attr_name: str) -> object:
|
||||
dynamic_attr = _dynamic_imports.get(attr_name)
|
||||
if dynamic_attr is None:
|
||||
return _getattr_migration(attr_name)
|
||||
|
||||
package, module_name = dynamic_attr
|
||||
|
||||
from importlib import import_module
|
||||
|
||||
if module_name == '__module__':
|
||||
return import_module(f'.{attr_name}', package=package)
|
||||
else:
|
||||
module = import_module(module_name, package=package)
|
||||
return getattr(module, attr_name)
|
||||
|
||||
|
||||
def __dir__() -> 'list[str]':
|
||||
return list(__all__)
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,318 @@
|
||||
from __future__ import annotations as _annotations
|
||||
|
||||
import warnings
|
||||
from contextlib import contextmanager
|
||||
from typing import (
|
||||
TYPE_CHECKING,
|
||||
Any,
|
||||
Callable,
|
||||
cast,
|
||||
)
|
||||
|
||||
from pydantic_core import core_schema
|
||||
from typing_extensions import (
|
||||
Literal,
|
||||
Self,
|
||||
)
|
||||
|
||||
from ..config import ConfigDict, ExtraValues, JsonDict, JsonEncoder, JsonSchemaExtraCallable
|
||||
from ..errors import PydanticUserError
|
||||
from ..warnings import PydanticDeprecatedSince20
|
||||
|
||||
if not TYPE_CHECKING:
|
||||
# See PyCharm issues https://youtrack.jetbrains.com/issue/PY-21915
|
||||
# and https://youtrack.jetbrains.com/issue/PY-51428
|
||||
DeprecationWarning = PydanticDeprecatedSince20
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .._internal._schema_generation_shared import GenerateSchema
|
||||
|
||||
DEPRECATION_MESSAGE = 'Support for class-based `config` is deprecated, use ConfigDict instead.'
|
||||
|
||||
|
||||
class ConfigWrapper:
|
||||
"""Internal wrapper for Config which exposes ConfigDict items as attributes."""
|
||||
|
||||
__slots__ = ('config_dict',)
|
||||
|
||||
config_dict: ConfigDict
|
||||
|
||||
# all annotations are copied directly from ConfigDict, and should be kept up to date, a test will fail if they
|
||||
# stop matching
|
||||
title: str | None
|
||||
str_to_lower: bool
|
||||
str_to_upper: bool
|
||||
str_strip_whitespace: bool
|
||||
str_min_length: int
|
||||
str_max_length: int | None
|
||||
extra: ExtraValues | None
|
||||
frozen: bool
|
||||
populate_by_name: bool
|
||||
use_enum_values: bool
|
||||
validate_assignment: bool
|
||||
arbitrary_types_allowed: bool
|
||||
from_attributes: bool
|
||||
# whether to use the actual key provided in the data (e.g. alias or first alias for "field required" errors) instead of field_names
|
||||
# to construct error `loc`s, default `True`
|
||||
loc_by_alias: bool
|
||||
alias_generator: Callable[[str], str] | None
|
||||
ignored_types: tuple[type, ...]
|
||||
allow_inf_nan: bool
|
||||
json_schema_extra: JsonDict | JsonSchemaExtraCallable | None
|
||||
json_encoders: dict[type[object], JsonEncoder] | None
|
||||
|
||||
# new in V2
|
||||
strict: bool
|
||||
# whether instances of models and dataclasses (including subclass instances) should re-validate, default 'never'
|
||||
revalidate_instances: Literal['always', 'never', 'subclass-instances']
|
||||
ser_json_timedelta: Literal['iso8601', 'float']
|
||||
ser_json_bytes: Literal['utf8', 'base64']
|
||||
# whether to validate default values during validation, default False
|
||||
validate_default: bool
|
||||
validate_return: bool
|
||||
protected_namespaces: tuple[str, ...]
|
||||
hide_input_in_errors: bool
|
||||
defer_build: bool
|
||||
plugin_settings: dict[str, object] | None
|
||||
schema_generator: type[GenerateSchema] | None
|
||||
json_schema_serialization_defaults_required: bool
|
||||
json_schema_mode_override: Literal['validation', 'serialization', None]
|
||||
coerce_numbers_to_str: bool
|
||||
regex_engine: Literal['rust-regex', 'python-re']
|
||||
validation_error_cause: bool
|
||||
|
||||
def __init__(self, config: ConfigDict | dict[str, Any] | type[Any] | None, *, check: bool = True):
|
||||
if check:
|
||||
self.config_dict = prepare_config(config)
|
||||
else:
|
||||
self.config_dict = cast(ConfigDict, config)
|
||||
|
||||
@classmethod
|
||||
def for_model(cls, bases: tuple[type[Any], ...], namespace: dict[str, Any], kwargs: dict[str, Any]) -> Self:
|
||||
"""Build a new `ConfigWrapper` instance for a `BaseModel`.
|
||||
|
||||
The config wrapper built based on (in descending order of priority):
|
||||
- options from `kwargs`
|
||||
- options from the `namespace`
|
||||
- options from the base classes (`bases`)
|
||||
|
||||
Args:
|
||||
bases: A tuple of base classes.
|
||||
namespace: The namespace of the class being created.
|
||||
kwargs: The kwargs passed to the class being created.
|
||||
|
||||
Returns:
|
||||
A `ConfigWrapper` instance for `BaseModel`.
|
||||
"""
|
||||
config_new = ConfigDict()
|
||||
for base in bases:
|
||||
config = getattr(base, 'model_config', None)
|
||||
if config:
|
||||
config_new.update(config.copy())
|
||||
|
||||
config_class_from_namespace = namespace.get('Config')
|
||||
config_dict_from_namespace = namespace.get('model_config')
|
||||
|
||||
if config_class_from_namespace and config_dict_from_namespace:
|
||||
raise PydanticUserError('"Config" and "model_config" cannot be used together', code='config-both')
|
||||
|
||||
config_from_namespace = config_dict_from_namespace or prepare_config(config_class_from_namespace)
|
||||
|
||||
config_new.update(config_from_namespace)
|
||||
|
||||
for k in list(kwargs.keys()):
|
||||
if k in config_keys:
|
||||
config_new[k] = kwargs.pop(k)
|
||||
|
||||
return cls(config_new)
|
||||
|
||||
# we don't show `__getattr__` to type checkers so missing attributes cause errors
|
||||
if not TYPE_CHECKING: # pragma: no branch
|
||||
|
||||
def __getattr__(self, name: str) -> Any:
|
||||
try:
|
||||
return self.config_dict[name]
|
||||
except KeyError:
|
||||
try:
|
||||
return config_defaults[name]
|
||||
except KeyError:
|
||||
raise AttributeError(f'Config has no attribute {name!r}') from None
|
||||
|
||||
def core_config(self, obj: Any) -> core_schema.CoreConfig:
|
||||
"""Create a pydantic-core config, `obj` is just used to populate `title` if not set in config.
|
||||
|
||||
Pass `obj=None` if you do not want to attempt to infer the `title`.
|
||||
|
||||
We don't use getattr here since we don't want to populate with defaults.
|
||||
|
||||
Args:
|
||||
obj: An object used to populate `title` if not set in config.
|
||||
|
||||
Returns:
|
||||
A `CoreConfig` object created from config.
|
||||
"""
|
||||
|
||||
def dict_not_none(**kwargs: Any) -> Any:
|
||||
return {k: v for k, v in kwargs.items() if v is not None}
|
||||
|
||||
core_config = core_schema.CoreConfig(
|
||||
**dict_not_none(
|
||||
title=self.config_dict.get('title') or (obj and obj.__name__),
|
||||
extra_fields_behavior=self.config_dict.get('extra'),
|
||||
allow_inf_nan=self.config_dict.get('allow_inf_nan'),
|
||||
populate_by_name=self.config_dict.get('populate_by_name'),
|
||||
str_strip_whitespace=self.config_dict.get('str_strip_whitespace'),
|
||||
str_to_lower=self.config_dict.get('str_to_lower'),
|
||||
str_to_upper=self.config_dict.get('str_to_upper'),
|
||||
strict=self.config_dict.get('strict'),
|
||||
ser_json_timedelta=self.config_dict.get('ser_json_timedelta'),
|
||||
ser_json_bytes=self.config_dict.get('ser_json_bytes'),
|
||||
from_attributes=self.config_dict.get('from_attributes'),
|
||||
loc_by_alias=self.config_dict.get('loc_by_alias'),
|
||||
revalidate_instances=self.config_dict.get('revalidate_instances'),
|
||||
validate_default=self.config_dict.get('validate_default'),
|
||||
str_max_length=self.config_dict.get('str_max_length'),
|
||||
str_min_length=self.config_dict.get('str_min_length'),
|
||||
hide_input_in_errors=self.config_dict.get('hide_input_in_errors'),
|
||||
coerce_numbers_to_str=self.config_dict.get('coerce_numbers_to_str'),
|
||||
regex_engine=self.config_dict.get('regex_engine'),
|
||||
validation_error_cause=self.config_dict.get('validation_error_cause'),
|
||||
)
|
||||
)
|
||||
return core_config
|
||||
|
||||
def __repr__(self):
|
||||
c = ', '.join(f'{k}={v!r}' for k, v in self.config_dict.items())
|
||||
return f'ConfigWrapper({c})'
|
||||
|
||||
|
||||
class ConfigWrapperStack:
|
||||
"""A stack of `ConfigWrapper` instances."""
|
||||
|
||||
def __init__(self, config_wrapper: ConfigWrapper):
|
||||
self._config_wrapper_stack: list[ConfigWrapper] = [config_wrapper]
|
||||
|
||||
@property
|
||||
def tail(self) -> ConfigWrapper:
|
||||
return self._config_wrapper_stack[-1]
|
||||
|
||||
@contextmanager
|
||||
def push(self, config_wrapper: ConfigWrapper | ConfigDict | None):
|
||||
if config_wrapper is None:
|
||||
yield
|
||||
return
|
||||
|
||||
if not isinstance(config_wrapper, ConfigWrapper):
|
||||
config_wrapper = ConfigWrapper(config_wrapper, check=False)
|
||||
|
||||
self._config_wrapper_stack.append(config_wrapper)
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
self._config_wrapper_stack.pop()
|
||||
|
||||
|
||||
config_defaults = ConfigDict(
|
||||
title=None,
|
||||
str_to_lower=False,
|
||||
str_to_upper=False,
|
||||
str_strip_whitespace=False,
|
||||
str_min_length=0,
|
||||
str_max_length=None,
|
||||
# let the model / dataclass decide how to handle it
|
||||
extra=None,
|
||||
frozen=False,
|
||||
populate_by_name=False,
|
||||
use_enum_values=False,
|
||||
validate_assignment=False,
|
||||
arbitrary_types_allowed=False,
|
||||
from_attributes=False,
|
||||
loc_by_alias=True,
|
||||
alias_generator=None,
|
||||
ignored_types=(),
|
||||
allow_inf_nan=True,
|
||||
json_schema_extra=None,
|
||||
strict=False,
|
||||
revalidate_instances='never',
|
||||
ser_json_timedelta='iso8601',
|
||||
ser_json_bytes='utf8',
|
||||
validate_default=False,
|
||||
validate_return=False,
|
||||
protected_namespaces=('model_',),
|
||||
hide_input_in_errors=False,
|
||||
json_encoders=None,
|
||||
defer_build=False,
|
||||
plugin_settings=None,
|
||||
schema_generator=None,
|
||||
json_schema_serialization_defaults_required=False,
|
||||
json_schema_mode_override=None,
|
||||
coerce_numbers_to_str=False,
|
||||
regex_engine='rust-regex',
|
||||
validation_error_cause=False,
|
||||
)
|
||||
|
||||
|
||||
def prepare_config(config: ConfigDict | dict[str, Any] | type[Any] | None) -> ConfigDict:
|
||||
"""Create a `ConfigDict` instance from an existing dict, a class (e.g. old class-based config) or None.
|
||||
|
||||
Args:
|
||||
config: The input config.
|
||||
|
||||
Returns:
|
||||
A ConfigDict object created from config.
|
||||
"""
|
||||
if config is None:
|
||||
return ConfigDict()
|
||||
|
||||
if not isinstance(config, dict):
|
||||
warnings.warn(DEPRECATION_MESSAGE, DeprecationWarning)
|
||||
config = {k: getattr(config, k) for k in dir(config) if not k.startswith('__')}
|
||||
|
||||
config_dict = cast(ConfigDict, config)
|
||||
check_deprecated(config_dict)
|
||||
return config_dict
|
||||
|
||||
|
||||
config_keys = set(ConfigDict.__annotations__.keys())
|
||||
|
||||
|
||||
V2_REMOVED_KEYS = {
|
||||
'allow_mutation',
|
||||
'error_msg_templates',
|
||||
'fields',
|
||||
'getter_dict',
|
||||
'smart_union',
|
||||
'underscore_attrs_are_private',
|
||||
'json_loads',
|
||||
'json_dumps',
|
||||
'copy_on_model_validation',
|
||||
'post_init_call',
|
||||
}
|
||||
V2_RENAMED_KEYS = {
|
||||
'allow_population_by_field_name': 'populate_by_name',
|
||||
'anystr_lower': 'str_to_lower',
|
||||
'anystr_strip_whitespace': 'str_strip_whitespace',
|
||||
'anystr_upper': 'str_to_upper',
|
||||
'keep_untouched': 'ignored_types',
|
||||
'max_anystr_length': 'str_max_length',
|
||||
'min_anystr_length': 'str_min_length',
|
||||
'orm_mode': 'from_attributes',
|
||||
'schema_extra': 'json_schema_extra',
|
||||
'validate_all': 'validate_default',
|
||||
}
|
||||
|
||||
|
||||
def check_deprecated(config_dict: ConfigDict) -> None:
|
||||
"""Check for deprecated config keys and warn the user.
|
||||
|
||||
Args:
|
||||
config_dict: The input config.
|
||||
"""
|
||||
deprecated_removed_keys = V2_REMOVED_KEYS & config_dict.keys()
|
||||
deprecated_renamed_keys = V2_RENAMED_KEYS.keys() & config_dict.keys()
|
||||
if deprecated_removed_keys or deprecated_renamed_keys:
|
||||
renamings = {k: V2_RENAMED_KEYS[k] for k in sorted(deprecated_renamed_keys)}
|
||||
renamed_bullets = [f'* {k!r} has been renamed to {v!r}' for k, v in renamings.items()]
|
||||
removed_bullets = [f'* {k!r} has been removed' for k in sorted(deprecated_removed_keys)]
|
||||
message = '\n'.join(['Valid config keys have changed in V2:'] + renamed_bullets + removed_bullets)
|
||||
warnings.warn(message, UserWarning)
|
||||
@@ -0,0 +1,92 @@
|
||||
from __future__ import annotations as _annotations
|
||||
|
||||
import typing
|
||||
from typing import Any
|
||||
|
||||
import typing_extensions
|
||||
|
||||
if typing.TYPE_CHECKING:
|
||||
from ._schema_generation_shared import (
|
||||
CoreSchemaOrField as CoreSchemaOrField,
|
||||
)
|
||||
from ._schema_generation_shared import (
|
||||
GetJsonSchemaFunction,
|
||||
)
|
||||
|
||||
|
||||
class CoreMetadata(typing_extensions.TypedDict, total=False):
|
||||
"""A `TypedDict` for holding the metadata dict of the schema.
|
||||
|
||||
Attributes:
|
||||
pydantic_js_functions: List of JSON schema functions.
|
||||
pydantic_js_prefer_positional_arguments: Whether JSON schema generator will
|
||||
prefer positional over keyword arguments for an 'arguments' schema.
|
||||
"""
|
||||
|
||||
pydantic_js_functions: list[GetJsonSchemaFunction]
|
||||
pydantic_js_annotation_functions: list[GetJsonSchemaFunction]
|
||||
|
||||
# If `pydantic_js_prefer_positional_arguments` is True, the JSON schema generator will
|
||||
# prefer positional over keyword arguments for an 'arguments' schema.
|
||||
pydantic_js_prefer_positional_arguments: bool | None
|
||||
|
||||
pydantic_typed_dict_cls: type[Any] | None # TODO: Consider moving this into the pydantic-core TypedDictSchema
|
||||
|
||||
|
||||
class CoreMetadataHandler:
|
||||
"""Because the metadata field in pydantic_core is of type `Any`, we can't assume much about its contents.
|
||||
|
||||
This class is used to interact with the metadata field on a CoreSchema object in a consistent
|
||||
way throughout pydantic.
|
||||
"""
|
||||
|
||||
__slots__ = ('_schema',)
|
||||
|
||||
def __init__(self, schema: CoreSchemaOrField):
|
||||
self._schema = schema
|
||||
|
||||
metadata = schema.get('metadata')
|
||||
if metadata is None:
|
||||
schema['metadata'] = CoreMetadata()
|
||||
elif not isinstance(metadata, dict):
|
||||
raise TypeError(f'CoreSchema metadata should be a dict; got {metadata!r}.')
|
||||
|
||||
@property
|
||||
def metadata(self) -> CoreMetadata:
|
||||
"""Retrieves the metadata dict from the schema, initializing it to a dict if it is None
|
||||
and raises an error if it is not a dict.
|
||||
"""
|
||||
metadata = self._schema.get('metadata')
|
||||
if metadata is None:
|
||||
self._schema['metadata'] = metadata = CoreMetadata()
|
||||
if not isinstance(metadata, dict):
|
||||
raise TypeError(f'CoreSchema metadata should be a dict; got {metadata!r}.')
|
||||
return metadata
|
||||
|
||||
|
||||
def build_metadata_dict(
|
||||
*, # force keyword arguments to make it easier to modify this signature in a backwards-compatible way
|
||||
js_functions: list[GetJsonSchemaFunction] | None = None,
|
||||
js_annotation_functions: list[GetJsonSchemaFunction] | None = None,
|
||||
js_prefer_positional_arguments: bool | None = None,
|
||||
typed_dict_cls: type[Any] | None = None,
|
||||
initial_metadata: Any | None = None,
|
||||
) -> Any:
|
||||
"""Builds a dict to use as the metadata field of a CoreSchema object in a manner that is consistent
|
||||
with the CoreMetadataHandler class.
|
||||
"""
|
||||
if initial_metadata is not None and not isinstance(initial_metadata, dict):
|
||||
raise TypeError(f'CoreSchema metadata should be a dict; got {initial_metadata!r}.')
|
||||
|
||||
metadata = CoreMetadata(
|
||||
pydantic_js_functions=js_functions or [],
|
||||
pydantic_js_annotation_functions=js_annotation_functions or [],
|
||||
pydantic_js_prefer_positional_arguments=js_prefer_positional_arguments,
|
||||
pydantic_typed_dict_cls=typed_dict_cls,
|
||||
)
|
||||
metadata = {k: v for k, v in metadata.items() if v is not None}
|
||||
|
||||
if initial_metadata is not None:
|
||||
metadata = {**initial_metadata, **metadata}
|
||||
|
||||
return metadata
|
||||
@@ -0,0 +1,585 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
from collections import defaultdict
|
||||
from typing import (
|
||||
Any,
|
||||
Callable,
|
||||
Hashable,
|
||||
TypeVar,
|
||||
Union,
|
||||
cast,
|
||||
)
|
||||
|
||||
from pydantic_core import CoreSchema, core_schema
|
||||
from pydantic_core import validate_core_schema as _validate_core_schema
|
||||
from typing_extensions import TypeAliasType, TypeGuard, get_args, get_origin
|
||||
|
||||
from . import _repr
|
||||
from ._typing_extra import is_generic_alias
|
||||
|
||||
AnyFunctionSchema = Union[
|
||||
core_schema.AfterValidatorFunctionSchema,
|
||||
core_schema.BeforeValidatorFunctionSchema,
|
||||
core_schema.WrapValidatorFunctionSchema,
|
||||
core_schema.PlainValidatorFunctionSchema,
|
||||
]
|
||||
|
||||
|
||||
FunctionSchemaWithInnerSchema = Union[
|
||||
core_schema.AfterValidatorFunctionSchema,
|
||||
core_schema.BeforeValidatorFunctionSchema,
|
||||
core_schema.WrapValidatorFunctionSchema,
|
||||
]
|
||||
|
||||
CoreSchemaField = Union[
|
||||
core_schema.ModelField, core_schema.DataclassField, core_schema.TypedDictField, core_schema.ComputedField
|
||||
]
|
||||
CoreSchemaOrField = Union[core_schema.CoreSchema, CoreSchemaField]
|
||||
|
||||
_CORE_SCHEMA_FIELD_TYPES = {'typed-dict-field', 'dataclass-field', 'model-field', 'computed-field'}
|
||||
_FUNCTION_WITH_INNER_SCHEMA_TYPES = {'function-before', 'function-after', 'function-wrap'}
|
||||
_LIST_LIKE_SCHEMA_WITH_ITEMS_TYPES = {'list', 'tuple-variable', 'set', 'frozenset'}
|
||||
|
||||
_DEFINITIONS_CACHE_METADATA_KEY = 'pydantic.definitions_cache'
|
||||
|
||||
NEEDS_APPLY_DISCRIMINATED_UNION_METADATA_KEY = 'pydantic.internal.needs_apply_discriminated_union'
|
||||
"""Used to mark a schema that has a discriminated union that needs to be checked for validity at the end of
|
||||
schema building because one of it's members refers to a definition that was not yet defined when the union
|
||||
was first encountered.
|
||||
"""
|
||||
TAGGED_UNION_TAG_KEY = 'pydantic.internal.tagged_union_tag'
|
||||
"""
|
||||
Used in a `Tag` schema to specify the tag used for a discriminated union.
|
||||
"""
|
||||
HAS_INVALID_SCHEMAS_METADATA_KEY = 'pydantic.internal.invalid'
|
||||
"""Used to mark a schema that is invalid because it refers to a definition that was not yet defined when the
|
||||
schema was first encountered.
|
||||
"""
|
||||
|
||||
|
||||
def is_core_schema(
|
||||
schema: CoreSchemaOrField,
|
||||
) -> TypeGuard[CoreSchema]:
|
||||
return schema['type'] not in _CORE_SCHEMA_FIELD_TYPES
|
||||
|
||||
|
||||
def is_core_schema_field(
|
||||
schema: CoreSchemaOrField,
|
||||
) -> TypeGuard[CoreSchemaField]:
|
||||
return schema['type'] in _CORE_SCHEMA_FIELD_TYPES
|
||||
|
||||
|
||||
def is_function_with_inner_schema(
|
||||
schema: CoreSchemaOrField,
|
||||
) -> TypeGuard[FunctionSchemaWithInnerSchema]:
|
||||
return schema['type'] in _FUNCTION_WITH_INNER_SCHEMA_TYPES
|
||||
|
||||
|
||||
def is_list_like_schema_with_items_schema(
|
||||
schema: CoreSchema,
|
||||
) -> TypeGuard[
|
||||
core_schema.ListSchema | core_schema.TupleVariableSchema | core_schema.SetSchema | core_schema.FrozenSetSchema
|
||||
]:
|
||||
return schema['type'] in _LIST_LIKE_SCHEMA_WITH_ITEMS_TYPES
|
||||
|
||||
|
||||
def get_type_ref(type_: type[Any], args_override: tuple[type[Any], ...] | None = None) -> str:
|
||||
"""Produces the ref to be used for this type by pydantic_core's core schemas.
|
||||
|
||||
This `args_override` argument was added for the purpose of creating valid recursive references
|
||||
when creating generic models without needing to create a concrete class.
|
||||
"""
|
||||
origin = get_origin(type_) or type_
|
||||
|
||||
args = get_args(type_) if is_generic_alias(type_) else (args_override or ())
|
||||
generic_metadata = getattr(type_, '__pydantic_generic_metadata__', None)
|
||||
if generic_metadata:
|
||||
origin = generic_metadata['origin'] or origin
|
||||
args = generic_metadata['args'] or args
|
||||
|
||||
module_name = getattr(origin, '__module__', '<No __module__>')
|
||||
if isinstance(origin, TypeAliasType):
|
||||
type_ref = f'{module_name}.{origin.__name__}:{id(origin)}'
|
||||
else:
|
||||
try:
|
||||
qualname = getattr(origin, '__qualname__', f'<No __qualname__: {origin}>')
|
||||
except Exception:
|
||||
qualname = getattr(origin, '__qualname__', '<No __qualname__>')
|
||||
type_ref = f'{module_name}.{qualname}:{id(origin)}'
|
||||
|
||||
arg_refs: list[str] = []
|
||||
for arg in args:
|
||||
if isinstance(arg, str):
|
||||
# Handle string literals as a special case; we may be able to remove this special handling if we
|
||||
# wrap them in a ForwardRef at some point.
|
||||
arg_ref = f'{arg}:str-{id(arg)}'
|
||||
else:
|
||||
arg_ref = f'{_repr.display_as_type(arg)}:{id(arg)}'
|
||||
arg_refs.append(arg_ref)
|
||||
if arg_refs:
|
||||
type_ref = f'{type_ref}[{",".join(arg_refs)}]'
|
||||
return type_ref
|
||||
|
||||
|
||||
def get_ref(s: core_schema.CoreSchema) -> None | str:
|
||||
"""Get the ref from the schema if it has one.
|
||||
This exists just for type checking to work correctly.
|
||||
"""
|
||||
return s.get('ref', None)
|
||||
|
||||
|
||||
def collect_definitions(schema: core_schema.CoreSchema) -> dict[str, core_schema.CoreSchema]:
|
||||
defs: dict[str, CoreSchema] = {}
|
||||
|
||||
def _record_valid_refs(s: core_schema.CoreSchema, recurse: Recurse) -> core_schema.CoreSchema:
|
||||
ref = get_ref(s)
|
||||
if ref:
|
||||
defs[ref] = s
|
||||
return recurse(s, _record_valid_refs)
|
||||
|
||||
walk_core_schema(schema, _record_valid_refs)
|
||||
|
||||
return defs
|
||||
|
||||
|
||||
def define_expected_missing_refs(
|
||||
schema: core_schema.CoreSchema, allowed_missing_refs: set[str]
|
||||
) -> core_schema.CoreSchema | None:
|
||||
if not allowed_missing_refs:
|
||||
# in this case, there are no missing refs to potentially substitute, so there's no need to walk the schema
|
||||
# this is a common case (will be hit for all non-generic models), so it's worth optimizing for
|
||||
return None
|
||||
|
||||
refs = collect_definitions(schema).keys()
|
||||
|
||||
expected_missing_refs = allowed_missing_refs.difference(refs)
|
||||
if expected_missing_refs:
|
||||
definitions: list[core_schema.CoreSchema] = [
|
||||
# TODO: Replace this with a (new) CoreSchema that, if present at any level, makes validation fail
|
||||
# Issue: https://github.com/pydantic/pydantic-core/issues/619
|
||||
core_schema.none_schema(ref=ref, metadata={HAS_INVALID_SCHEMAS_METADATA_KEY: True})
|
||||
for ref in expected_missing_refs
|
||||
]
|
||||
return core_schema.definitions_schema(schema, definitions)
|
||||
return None
|
||||
|
||||
|
||||
def collect_invalid_schemas(schema: core_schema.CoreSchema) -> bool:
|
||||
invalid = False
|
||||
|
||||
def _is_schema_valid(s: core_schema.CoreSchema, recurse: Recurse) -> core_schema.CoreSchema:
|
||||
nonlocal invalid
|
||||
if 'metadata' in s:
|
||||
metadata = s['metadata']
|
||||
if HAS_INVALID_SCHEMAS_METADATA_KEY in metadata:
|
||||
invalid = metadata[HAS_INVALID_SCHEMAS_METADATA_KEY]
|
||||
return s
|
||||
return recurse(s, _is_schema_valid)
|
||||
|
||||
walk_core_schema(schema, _is_schema_valid)
|
||||
return invalid
|
||||
|
||||
|
||||
T = TypeVar('T')
|
||||
|
||||
|
||||
Recurse = Callable[[core_schema.CoreSchema, 'Walk'], core_schema.CoreSchema]
|
||||
Walk = Callable[[core_schema.CoreSchema, Recurse], core_schema.CoreSchema]
|
||||
|
||||
# TODO: Should we move _WalkCoreSchema into pydantic_core proper?
|
||||
# Issue: https://github.com/pydantic/pydantic-core/issues/615
|
||||
|
||||
|
||||
class _WalkCoreSchema:
|
||||
def __init__(self):
|
||||
self._schema_type_to_method = self._build_schema_type_to_method()
|
||||
|
||||
def _build_schema_type_to_method(self) -> dict[core_schema.CoreSchemaType, Recurse]:
|
||||
mapping: dict[core_schema.CoreSchemaType, Recurse] = {}
|
||||
key: core_schema.CoreSchemaType
|
||||
for key in get_args(core_schema.CoreSchemaType):
|
||||
method_name = f"handle_{key.replace('-', '_')}_schema"
|
||||
mapping[key] = getattr(self, method_name, self._handle_other_schemas)
|
||||
return mapping
|
||||
|
||||
def walk(self, schema: core_schema.CoreSchema, f: Walk) -> core_schema.CoreSchema:
|
||||
return f(schema, self._walk)
|
||||
|
||||
def _walk(self, schema: core_schema.CoreSchema, f: Walk) -> core_schema.CoreSchema:
|
||||
schema = self._schema_type_to_method[schema['type']](schema.copy(), f)
|
||||
ser_schema: core_schema.SerSchema | None = schema.get('serialization') # type: ignore
|
||||
if ser_schema:
|
||||
schema['serialization'] = self._handle_ser_schemas(ser_schema, f)
|
||||
return schema
|
||||
|
||||
def _handle_other_schemas(self, schema: core_schema.CoreSchema, f: Walk) -> core_schema.CoreSchema:
|
||||
sub_schema = schema.get('schema', None)
|
||||
if sub_schema is not None:
|
||||
schema['schema'] = self.walk(sub_schema, f) # type: ignore
|
||||
return schema
|
||||
|
||||
def _handle_ser_schemas(self, ser_schema: core_schema.SerSchema, f: Walk) -> core_schema.SerSchema:
|
||||
schema: core_schema.CoreSchema | None = ser_schema.get('schema', None)
|
||||
if schema is not None:
|
||||
ser_schema['schema'] = self.walk(schema, f) # type: ignore
|
||||
return_schema: core_schema.CoreSchema | None = ser_schema.get('return_schema', None)
|
||||
if return_schema is not None:
|
||||
ser_schema['return_schema'] = self.walk(return_schema, f) # type: ignore
|
||||
return ser_schema
|
||||
|
||||
def handle_definitions_schema(self, schema: core_schema.DefinitionsSchema, f: Walk) -> core_schema.CoreSchema:
|
||||
new_definitions: list[core_schema.CoreSchema] = []
|
||||
for definition in schema['definitions']:
|
||||
updated_definition = self.walk(definition, f)
|
||||
if 'ref' in updated_definition:
|
||||
# If the updated definition schema doesn't have a 'ref', it shouldn't go in the definitions
|
||||
# This is most likely to happen due to replacing something with a definition reference, in
|
||||
# which case it should certainly not go in the definitions list
|
||||
new_definitions.append(updated_definition)
|
||||
new_inner_schema = self.walk(schema['schema'], f)
|
||||
|
||||
if not new_definitions and len(schema) == 3:
|
||||
# This means we'd be returning a "trivial" definitions schema that just wrapped the inner schema
|
||||
return new_inner_schema
|
||||
|
||||
new_schema = schema.copy()
|
||||
new_schema['schema'] = new_inner_schema
|
||||
new_schema['definitions'] = new_definitions
|
||||
return new_schema
|
||||
|
||||
def handle_list_schema(self, schema: core_schema.ListSchema, f: Walk) -> core_schema.CoreSchema:
|
||||
items_schema = schema.get('items_schema')
|
||||
if items_schema is not None:
|
||||
schema['items_schema'] = self.walk(items_schema, f)
|
||||
return schema
|
||||
|
||||
def handle_set_schema(self, schema: core_schema.SetSchema, f: Walk) -> core_schema.CoreSchema:
|
||||
items_schema = schema.get('items_schema')
|
||||
if items_schema is not None:
|
||||
schema['items_schema'] = self.walk(items_schema, f)
|
||||
return schema
|
||||
|
||||
def handle_frozenset_schema(self, schema: core_schema.FrozenSetSchema, f: Walk) -> core_schema.CoreSchema:
|
||||
items_schema = schema.get('items_schema')
|
||||
if items_schema is not None:
|
||||
schema['items_schema'] = self.walk(items_schema, f)
|
||||
return schema
|
||||
|
||||
def handle_generator_schema(self, schema: core_schema.GeneratorSchema, f: Walk) -> core_schema.CoreSchema:
|
||||
items_schema = schema.get('items_schema')
|
||||
if items_schema is not None:
|
||||
schema['items_schema'] = self.walk(items_schema, f)
|
||||
return schema
|
||||
|
||||
def handle_tuple_variable_schema(
|
||||
self, schema: core_schema.TupleVariableSchema | core_schema.TuplePositionalSchema, f: Walk
|
||||
) -> core_schema.CoreSchema:
|
||||
schema = cast(core_schema.TupleVariableSchema, schema)
|
||||
items_schema = schema.get('items_schema')
|
||||
if items_schema is not None:
|
||||
schema['items_schema'] = self.walk(items_schema, f)
|
||||
return schema
|
||||
|
||||
def handle_tuple_positional_schema(
|
||||
self, schema: core_schema.TupleVariableSchema | core_schema.TuplePositionalSchema, f: Walk
|
||||
) -> core_schema.CoreSchema:
|
||||
schema = cast(core_schema.TuplePositionalSchema, schema)
|
||||
schema['items_schema'] = [self.walk(v, f) for v in schema['items_schema']]
|
||||
extras_schema = schema.get('extras_schema')
|
||||
if extras_schema is not None:
|
||||
schema['extras_schema'] = self.walk(extras_schema, f)
|
||||
return schema
|
||||
|
||||
def handle_dict_schema(self, schema: core_schema.DictSchema, f: Walk) -> core_schema.CoreSchema:
|
||||
keys_schema = schema.get('keys_schema')
|
||||
if keys_schema is not None:
|
||||
schema['keys_schema'] = self.walk(keys_schema, f)
|
||||
values_schema = schema.get('values_schema')
|
||||
if values_schema:
|
||||
schema['values_schema'] = self.walk(values_schema, f)
|
||||
return schema
|
||||
|
||||
def handle_function_schema(self, schema: AnyFunctionSchema, f: Walk) -> core_schema.CoreSchema:
|
||||
if not is_function_with_inner_schema(schema):
|
||||
return schema
|
||||
schema['schema'] = self.walk(schema['schema'], f)
|
||||
return schema
|
||||
|
||||
def handle_union_schema(self, schema: core_schema.UnionSchema, f: Walk) -> core_schema.CoreSchema:
|
||||
new_choices: list[CoreSchema | tuple[CoreSchema, str]] = []
|
||||
for v in schema['choices']:
|
||||
if isinstance(v, tuple):
|
||||
new_choices.append((self.walk(v[0], f), v[1]))
|
||||
else:
|
||||
new_choices.append(self.walk(v, f))
|
||||
schema['choices'] = new_choices
|
||||
return schema
|
||||
|
||||
def handle_tagged_union_schema(self, schema: core_schema.TaggedUnionSchema, f: Walk) -> core_schema.CoreSchema:
|
||||
new_choices: dict[Hashable, core_schema.CoreSchema] = {}
|
||||
for k, v in schema['choices'].items():
|
||||
new_choices[k] = v if isinstance(v, (str, int)) else self.walk(v, f)
|
||||
schema['choices'] = new_choices
|
||||
return schema
|
||||
|
||||
def handle_chain_schema(self, schema: core_schema.ChainSchema, f: Walk) -> core_schema.CoreSchema:
|
||||
schema['steps'] = [self.walk(v, f) for v in schema['steps']]
|
||||
return schema
|
||||
|
||||
def handle_lax_or_strict_schema(self, schema: core_schema.LaxOrStrictSchema, f: Walk) -> core_schema.CoreSchema:
|
||||
schema['lax_schema'] = self.walk(schema['lax_schema'], f)
|
||||
schema['strict_schema'] = self.walk(schema['strict_schema'], f)
|
||||
return schema
|
||||
|
||||
def handle_json_or_python_schema(self, schema: core_schema.JsonOrPythonSchema, f: Walk) -> core_schema.CoreSchema:
|
||||
schema['json_schema'] = self.walk(schema['json_schema'], f)
|
||||
schema['python_schema'] = self.walk(schema['python_schema'], f)
|
||||
return schema
|
||||
|
||||
def handle_model_fields_schema(self, schema: core_schema.ModelFieldsSchema, f: Walk) -> core_schema.CoreSchema:
|
||||
extras_schema = schema.get('extras_schema')
|
||||
if extras_schema is not None:
|
||||
schema['extras_schema'] = self.walk(extras_schema, f)
|
||||
replaced_fields: dict[str, core_schema.ModelField] = {}
|
||||
replaced_computed_fields: list[core_schema.ComputedField] = []
|
||||
for computed_field in schema.get('computed_fields', ()):
|
||||
replaced_field = computed_field.copy()
|
||||
replaced_field['return_schema'] = self.walk(computed_field['return_schema'], f)
|
||||
replaced_computed_fields.append(replaced_field)
|
||||
if replaced_computed_fields:
|
||||
schema['computed_fields'] = replaced_computed_fields
|
||||
for k, v in schema['fields'].items():
|
||||
replaced_field = v.copy()
|
||||
replaced_field['schema'] = self.walk(v['schema'], f)
|
||||
replaced_fields[k] = replaced_field
|
||||
schema['fields'] = replaced_fields
|
||||
return schema
|
||||
|
||||
def handle_typed_dict_schema(self, schema: core_schema.TypedDictSchema, f: Walk) -> core_schema.CoreSchema:
|
||||
extras_schema = schema.get('extras_schema')
|
||||
if extras_schema is not None:
|
||||
schema['extras_schema'] = self.walk(extras_schema, f)
|
||||
replaced_computed_fields: list[core_schema.ComputedField] = []
|
||||
for computed_field in schema.get('computed_fields', ()):
|
||||
replaced_field = computed_field.copy()
|
||||
replaced_field['return_schema'] = self.walk(computed_field['return_schema'], f)
|
||||
replaced_computed_fields.append(replaced_field)
|
||||
if replaced_computed_fields:
|
||||
schema['computed_fields'] = replaced_computed_fields
|
||||
replaced_fields: dict[str, core_schema.TypedDictField] = {}
|
||||
for k, v in schema['fields'].items():
|
||||
replaced_field = v.copy()
|
||||
replaced_field['schema'] = self.walk(v['schema'], f)
|
||||
replaced_fields[k] = replaced_field
|
||||
schema['fields'] = replaced_fields
|
||||
return schema
|
||||
|
||||
def handle_dataclass_args_schema(self, schema: core_schema.DataclassArgsSchema, f: Walk) -> core_schema.CoreSchema:
|
||||
replaced_fields: list[core_schema.DataclassField] = []
|
||||
replaced_computed_fields: list[core_schema.ComputedField] = []
|
||||
for computed_field in schema.get('computed_fields', ()):
|
||||
replaced_field = computed_field.copy()
|
||||
replaced_field['return_schema'] = self.walk(computed_field['return_schema'], f)
|
||||
replaced_computed_fields.append(replaced_field)
|
||||
if replaced_computed_fields:
|
||||
schema['computed_fields'] = replaced_computed_fields
|
||||
for field in schema['fields']:
|
||||
replaced_field = field.copy()
|
||||
replaced_field['schema'] = self.walk(field['schema'], f)
|
||||
replaced_fields.append(replaced_field)
|
||||
schema['fields'] = replaced_fields
|
||||
return schema
|
||||
|
||||
def handle_arguments_schema(self, schema: core_schema.ArgumentsSchema, f: Walk) -> core_schema.CoreSchema:
|
||||
replaced_arguments_schema: list[core_schema.ArgumentsParameter] = []
|
||||
for param in schema['arguments_schema']:
|
||||
replaced_param = param.copy()
|
||||
replaced_param['schema'] = self.walk(param['schema'], f)
|
||||
replaced_arguments_schema.append(replaced_param)
|
||||
schema['arguments_schema'] = replaced_arguments_schema
|
||||
if 'var_args_schema' in schema:
|
||||
schema['var_args_schema'] = self.walk(schema['var_args_schema'], f)
|
||||
if 'var_kwargs_schema' in schema:
|
||||
schema['var_kwargs_schema'] = self.walk(schema['var_kwargs_schema'], f)
|
||||
return schema
|
||||
|
||||
def handle_call_schema(self, schema: core_schema.CallSchema, f: Walk) -> core_schema.CoreSchema:
|
||||
schema['arguments_schema'] = self.walk(schema['arguments_schema'], f)
|
||||
if 'return_schema' in schema:
|
||||
schema['return_schema'] = self.walk(schema['return_schema'], f)
|
||||
return schema
|
||||
|
||||
|
||||
_dispatch = _WalkCoreSchema().walk
|
||||
|
||||
|
||||
def walk_core_schema(schema: core_schema.CoreSchema, f: Walk) -> core_schema.CoreSchema:
|
||||
"""Recursively traverse a CoreSchema.
|
||||
|
||||
Args:
|
||||
schema (core_schema.CoreSchema): The CoreSchema to process, it will not be modified.
|
||||
f (Walk): A function to apply. This function takes two arguments:
|
||||
1. The current CoreSchema that is being processed
|
||||
(not the same one you passed into this function, one level down).
|
||||
2. The "next" `f` to call. This lets you for example use `f=functools.partial(some_method, some_context)`
|
||||
to pass data down the recursive calls without using globals or other mutable state.
|
||||
|
||||
Returns:
|
||||
core_schema.CoreSchema: A processed CoreSchema.
|
||||
"""
|
||||
return f(schema.copy(), _dispatch)
|
||||
|
||||
|
||||
def simplify_schema_references(schema: core_schema.CoreSchema) -> core_schema.CoreSchema: # noqa: C901
|
||||
definitions: dict[str, core_schema.CoreSchema] = {}
|
||||
ref_counts: dict[str, int] = defaultdict(int)
|
||||
involved_in_recursion: dict[str, bool] = {}
|
||||
current_recursion_ref_count: dict[str, int] = defaultdict(int)
|
||||
|
||||
def collect_refs(s: core_schema.CoreSchema, recurse: Recurse) -> core_schema.CoreSchema:
|
||||
if s['type'] == 'definitions':
|
||||
for definition in s['definitions']:
|
||||
ref = get_ref(definition)
|
||||
assert ref is not None
|
||||
if ref not in definitions:
|
||||
definitions[ref] = definition
|
||||
recurse(definition, collect_refs)
|
||||
return recurse(s['schema'], collect_refs)
|
||||
else:
|
||||
ref = get_ref(s)
|
||||
if ref is not None:
|
||||
new = recurse(s, collect_refs)
|
||||
new_ref = get_ref(new)
|
||||
if new_ref:
|
||||
definitions[new_ref] = new
|
||||
return core_schema.definition_reference_schema(schema_ref=ref)
|
||||
else:
|
||||
return recurse(s, collect_refs)
|
||||
|
||||
schema = walk_core_schema(schema, collect_refs)
|
||||
|
||||
def count_refs(s: core_schema.CoreSchema, recurse: Recurse) -> core_schema.CoreSchema:
|
||||
if s['type'] != 'definition-ref':
|
||||
return recurse(s, count_refs)
|
||||
ref = s['schema_ref']
|
||||
ref_counts[ref] += 1
|
||||
|
||||
if ref_counts[ref] >= 2:
|
||||
# If this model is involved in a recursion this should be detected
|
||||
# on its second encounter, we can safely stop the walk here.
|
||||
if current_recursion_ref_count[ref] != 0:
|
||||
involved_in_recursion[ref] = True
|
||||
return s
|
||||
|
||||
current_recursion_ref_count[ref] += 1
|
||||
recurse(definitions[ref], count_refs)
|
||||
current_recursion_ref_count[ref] -= 1
|
||||
return s
|
||||
|
||||
schema = walk_core_schema(schema, count_refs)
|
||||
|
||||
assert all(c == 0 for c in current_recursion_ref_count.values()), 'this is a bug! please report it'
|
||||
|
||||
def can_be_inlined(s: core_schema.DefinitionReferenceSchema, ref: str) -> bool:
|
||||
if ref_counts[ref] > 1:
|
||||
return False
|
||||
if involved_in_recursion.get(ref, False):
|
||||
return False
|
||||
if 'serialization' in s:
|
||||
return False
|
||||
if 'metadata' in s:
|
||||
metadata = s['metadata']
|
||||
for k in (
|
||||
'pydantic_js_functions',
|
||||
'pydantic_js_annotation_functions',
|
||||
'pydantic.internal.union_discriminator',
|
||||
):
|
||||
if k in metadata:
|
||||
# we need to keep this as a ref
|
||||
return False
|
||||
return True
|
||||
|
||||
def inline_refs(s: core_schema.CoreSchema, recurse: Recurse) -> core_schema.CoreSchema:
|
||||
if s['type'] == 'definition-ref':
|
||||
ref = s['schema_ref']
|
||||
# Check if the reference is only used once, not involved in recursion and does not have
|
||||
# any extra keys (like 'serialization')
|
||||
if can_be_inlined(s, ref):
|
||||
# Inline the reference by replacing the reference with the actual schema
|
||||
new = definitions.pop(ref)
|
||||
ref_counts[ref] -= 1 # because we just replaced it!
|
||||
# put all other keys that were on the def-ref schema into the inlined version
|
||||
# in particular this is needed for `serialization`
|
||||
if 'serialization' in s:
|
||||
new['serialization'] = s['serialization']
|
||||
s = recurse(new, inline_refs)
|
||||
return s
|
||||
else:
|
||||
return recurse(s, inline_refs)
|
||||
else:
|
||||
return recurse(s, inline_refs)
|
||||
|
||||
schema = walk_core_schema(schema, inline_refs)
|
||||
|
||||
def_values = [v for v in definitions.values() if ref_counts[v['ref']] > 0] # type: ignore
|
||||
|
||||
if def_values:
|
||||
schema = core_schema.definitions_schema(schema=schema, definitions=def_values)
|
||||
return schema
|
||||
|
||||
|
||||
def _strip_metadata(schema: CoreSchema) -> CoreSchema:
|
||||
def strip_metadata(s: CoreSchema, recurse: Recurse) -> CoreSchema:
|
||||
s = s.copy()
|
||||
s.pop('metadata', None)
|
||||
if s['type'] == 'model-fields':
|
||||
s = s.copy()
|
||||
s['fields'] = {k: v.copy() for k, v in s['fields'].items()}
|
||||
for field_name, field_schema in s['fields'].items():
|
||||
field_schema.pop('metadata', None)
|
||||
s['fields'][field_name] = field_schema
|
||||
computed_fields = s.get('computed_fields', None)
|
||||
if computed_fields:
|
||||
s['computed_fields'] = [cf.copy() for cf in computed_fields]
|
||||
for cf in computed_fields:
|
||||
cf.pop('metadata', None)
|
||||
else:
|
||||
s.pop('computed_fields', None)
|
||||
elif s['type'] == 'model':
|
||||
# remove some defaults
|
||||
if s.get('custom_init', True) is False:
|
||||
s.pop('custom_init')
|
||||
if s.get('root_model', True) is False:
|
||||
s.pop('root_model')
|
||||
if {'title'}.issuperset(s.get('config', {}).keys()):
|
||||
s.pop('config', None)
|
||||
|
||||
return recurse(s, strip_metadata)
|
||||
|
||||
return walk_core_schema(schema, strip_metadata)
|
||||
|
||||
|
||||
def pretty_print_core_schema(
|
||||
schema: CoreSchema,
|
||||
include_metadata: bool = False,
|
||||
) -> None:
|
||||
"""Pretty print a CoreSchema using rich.
|
||||
This is intended for debugging purposes.
|
||||
|
||||
Args:
|
||||
schema: The CoreSchema to print.
|
||||
include_metadata: Whether to include metadata in the output. Defaults to `False`.
|
||||
"""
|
||||
from rich import print # type: ignore # install it manually in your dev env
|
||||
|
||||
if not include_metadata:
|
||||
schema = _strip_metadata(schema)
|
||||
|
||||
return print(schema)
|
||||
|
||||
|
||||
def validate_core_schema(schema: CoreSchema) -> CoreSchema:
|
||||
if 'PYDANTIC_SKIP_VALIDATING_CORE_SCHEMAS' in os.environ:
|
||||
return schema
|
||||
return _validate_core_schema(schema)
|
||||
@@ -0,0 +1,281 @@
|
||||
"""Private logic for creating pydantic dataclasses."""
|
||||
from __future__ import annotations as _annotations
|
||||
|
||||
import dataclasses
|
||||
import inspect
|
||||
import typing
|
||||
import warnings
|
||||
from functools import partial, wraps
|
||||
from inspect import Parameter, Signature
|
||||
from typing import Any, Callable, ClassVar
|
||||
|
||||
from pydantic_core import (
|
||||
ArgsKwargs,
|
||||
PydanticUndefined,
|
||||
SchemaSerializer,
|
||||
SchemaValidator,
|
||||
core_schema,
|
||||
)
|
||||
from typing_extensions import TypeGuard
|
||||
|
||||
from ..errors import PydanticUndefinedAnnotation
|
||||
from ..fields import FieldInfo
|
||||
from ..plugin._schema_validator import create_schema_validator
|
||||
from ..warnings import PydanticDeprecatedSince20
|
||||
from . import _config, _decorators, _typing_extra
|
||||
from ._config import ConfigWrapper
|
||||
from ._fields import collect_dataclass_fields
|
||||
from ._generate_schema import GenerateSchema, generate_pydantic_signature
|
||||
from ._generics import get_standard_typevars_map
|
||||
from ._mock_val_ser import set_dataclass_mocks
|
||||
from ._schema_generation_shared import CallbackGetCoreSchemaHandler
|
||||
from ._utils import is_valid_identifier
|
||||
|
||||
if typing.TYPE_CHECKING:
|
||||
from ..config import ConfigDict
|
||||
|
||||
class StandardDataclass(typing.Protocol):
|
||||
__dataclass_fields__: ClassVar[dict[str, Any]]
|
||||
__dataclass_params__: ClassVar[Any] # in reality `dataclasses._DataclassParams`
|
||||
__post_init__: ClassVar[Callable[..., None]]
|
||||
|
||||
def __init__(self, *args: object, **kwargs: object) -> None:
|
||||
pass
|
||||
|
||||
class PydanticDataclass(StandardDataclass, typing.Protocol):
|
||||
"""A protocol containing attributes only available once a class has been decorated as a Pydantic dataclass.
|
||||
|
||||
Attributes:
|
||||
__pydantic_config__: Pydantic-specific configuration settings for the dataclass.
|
||||
__pydantic_complete__: Whether dataclass building is completed, or if there are still undefined fields.
|
||||
__pydantic_core_schema__: The pydantic-core schema used to build the SchemaValidator and SchemaSerializer.
|
||||
__pydantic_decorators__: Metadata containing the decorators defined on the dataclass.
|
||||
__pydantic_fields__: Metadata about the fields defined on the dataclass.
|
||||
__pydantic_serializer__: The pydantic-core SchemaSerializer used to dump instances of the dataclass.
|
||||
__pydantic_validator__: The pydantic-core SchemaValidator used to validate instances of the dataclass.
|
||||
"""
|
||||
|
||||
__pydantic_config__: ClassVar[ConfigDict]
|
||||
__pydantic_complete__: ClassVar[bool]
|
||||
__pydantic_core_schema__: ClassVar[core_schema.CoreSchema]
|
||||
__pydantic_decorators__: ClassVar[_decorators.DecoratorInfos]
|
||||
__pydantic_fields__: ClassVar[dict[str, FieldInfo]]
|
||||
__pydantic_serializer__: ClassVar[SchemaSerializer]
|
||||
__pydantic_validator__: ClassVar[SchemaValidator]
|
||||
|
||||
else:
|
||||
# See PyCharm issues https://youtrack.jetbrains.com/issue/PY-21915
|
||||
# and https://youtrack.jetbrains.com/issue/PY-51428
|
||||
DeprecationWarning = PydanticDeprecatedSince20
|
||||
|
||||
|
||||
def set_dataclass_fields(cls: type[StandardDataclass], types_namespace: dict[str, Any] | None = None) -> None:
|
||||
"""Collect and set `cls.__pydantic_fields__`.
|
||||
|
||||
Args:
|
||||
cls: The class.
|
||||
types_namespace: The types namespace, defaults to `None`.
|
||||
"""
|
||||
typevars_map = get_standard_typevars_map(cls)
|
||||
fields = collect_dataclass_fields(cls, types_namespace, typevars_map=typevars_map)
|
||||
|
||||
cls.__pydantic_fields__ = fields # type: ignore
|
||||
|
||||
|
||||
def complete_dataclass(
|
||||
cls: type[Any],
|
||||
config_wrapper: _config.ConfigWrapper,
|
||||
*,
|
||||
raise_errors: bool = True,
|
||||
types_namespace: dict[str, Any] | None,
|
||||
) -> bool:
|
||||
"""Finish building a pydantic dataclass.
|
||||
|
||||
This logic is called on a class which has already been wrapped in `dataclasses.dataclass()`.
|
||||
|
||||
This is somewhat analogous to `pydantic._internal._model_construction.complete_model_class`.
|
||||
|
||||
Args:
|
||||
cls: The class.
|
||||
config_wrapper: The config wrapper instance.
|
||||
raise_errors: Whether to raise errors, defaults to `True`.
|
||||
types_namespace: The types namespace.
|
||||
|
||||
Returns:
|
||||
`True` if building a pydantic dataclass is successfully completed, `False` otherwise.
|
||||
|
||||
Raises:
|
||||
PydanticUndefinedAnnotation: If `raise_errors` is `True` and there are undefined annotations.
|
||||
"""
|
||||
if hasattr(cls, '__post_init_post_parse__'):
|
||||
warnings.warn(
|
||||
'Support for `__post_init_post_parse__` has been dropped, the method will not be called', DeprecationWarning
|
||||
)
|
||||
|
||||
if types_namespace is None:
|
||||
types_namespace = _typing_extra.get_cls_types_namespace(cls)
|
||||
|
||||
set_dataclass_fields(cls, types_namespace)
|
||||
|
||||
typevars_map = get_standard_typevars_map(cls)
|
||||
gen_schema = GenerateSchema(
|
||||
config_wrapper,
|
||||
types_namespace,
|
||||
typevars_map,
|
||||
)
|
||||
|
||||
# This needs to be called before we change the __init__
|
||||
sig = generate_dataclass_signature(cls, cls.__pydantic_fields__, config_wrapper) # type: ignore
|
||||
|
||||
# dataclass.__init__ must be defined here so its `__qualname__` can be changed since functions can't be copied.
|
||||
def __init__(__dataclass_self__: PydanticDataclass, *args: Any, **kwargs: Any) -> None:
|
||||
__tracebackhide__ = True
|
||||
s = __dataclass_self__
|
||||
s.__pydantic_validator__.validate_python(ArgsKwargs(args, kwargs), self_instance=s)
|
||||
|
||||
__init__.__qualname__ = f'{cls.__qualname__}.__init__'
|
||||
|
||||
cls.__init__ = __init__ # type: ignore
|
||||
cls.__pydantic_config__ = config_wrapper.config_dict # type: ignore
|
||||
cls.__signature__ = sig # type: ignore
|
||||
get_core_schema = getattr(cls, '__get_pydantic_core_schema__', None)
|
||||
try:
|
||||
if get_core_schema:
|
||||
schema = get_core_schema(
|
||||
cls,
|
||||
CallbackGetCoreSchemaHandler(
|
||||
partial(gen_schema.generate_schema, from_dunder_get_core_schema=False),
|
||||
gen_schema,
|
||||
ref_mode='unpack',
|
||||
),
|
||||
)
|
||||
else:
|
||||
schema = gen_schema.generate_schema(cls, from_dunder_get_core_schema=False)
|
||||
except PydanticUndefinedAnnotation as e:
|
||||
if raise_errors:
|
||||
raise
|
||||
set_dataclass_mocks(cls, cls.__name__, f'`{e.name}`')
|
||||
return False
|
||||
|
||||
core_config = config_wrapper.core_config(cls)
|
||||
|
||||
try:
|
||||
schema = gen_schema.clean_schema(schema)
|
||||
except gen_schema.CollectedInvalid:
|
||||
set_dataclass_mocks(cls, cls.__name__, 'all referenced types')
|
||||
return False
|
||||
|
||||
# We are about to set all the remaining required properties expected for this cast;
|
||||
# __pydantic_decorators__ and __pydantic_fields__ should already be set
|
||||
cls = typing.cast('type[PydanticDataclass]', cls)
|
||||
# debug(schema)
|
||||
|
||||
cls.__pydantic_core_schema__ = schema
|
||||
cls.__pydantic_validator__ = validator = create_schema_validator(
|
||||
schema, cls, cls.__module__, cls.__qualname__, 'dataclass', core_config, config_wrapper.plugin_settings
|
||||
)
|
||||
cls.__pydantic_serializer__ = SchemaSerializer(schema, core_config)
|
||||
|
||||
if config_wrapper.validate_assignment:
|
||||
|
||||
@wraps(cls.__setattr__)
|
||||
def validated_setattr(instance: Any, __field: str, __value: str) -> None:
|
||||
validator.validate_assignment(instance, __field, __value)
|
||||
|
||||
cls.__setattr__ = validated_setattr.__get__(None, cls) # type: ignore
|
||||
|
||||
return True
|
||||
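# Illustrative sketch (not part of the original module): what the generated `__init__`
# and the `validate_assignment` hook above do for a pydantic dataclass. `Point` and the
# values below are made up.
def _example_complete_dataclass() -> None:
    from pydantic import ValidationError
    from pydantic.dataclasses import dataclass as pydantic_dataclass

    @pydantic_dataclass(config={'validate_assignment': True})
    class Point:
        x: int
        y: int

    p = Point(x='1', y=2)  # '1' is coerced to 1 by __pydantic_validator__
    assert p.x == 1

    try:
        p.y = 'not a number'  # validated_setattr re-validates on assignment
    except ValidationError:
        pass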
|
||||
|
||||
def process_param_defaults(param: Parameter) -> Parameter:
|
||||
"""Custom processing where the parameter default is of type FieldInfo
|
||||
|
||||
Args:
|
||||
param (Parameter): The parameter
|
||||
|
||||
Returns:
|
||||
Parameter: The custom processed parameter
|
||||
"""
|
||||
param_default = param.default
|
||||
if isinstance(param_default, FieldInfo):
|
||||
annotation = param.annotation
|
||||
# Replace the annotation if appropriate
|
||||
# inspect does "clever" things to show annotations as strings because we have
|
||||
# `from __future__ import annotations` in main, we don't want that
|
||||
if annotation == 'Any':
|
||||
annotation = Any
|
||||
|
||||
# Replace the field name with the alias if present
|
||||
name = param.name
|
||||
alias = param_default.alias
|
||||
validation_alias = param_default.validation_alias
|
||||
if validation_alias is None and isinstance(alias, str) and is_valid_identifier(alias):
|
||||
name = alias
|
||||
elif isinstance(validation_alias, str) and is_valid_identifier(validation_alias):
|
||||
name = validation_alias
|
||||
|
||||
# Replace the field default
|
||||
default = param_default.default
|
||||
if default is PydanticUndefined:
|
||||
if param_default.default_factory is PydanticUndefined:
|
||||
default = inspect.Signature.empty
|
||||
else:
|
||||
# this is used by dataclasses to indicate a factory exists:
|
||||
default = dataclasses._HAS_DEFAULT_FACTORY # type: ignore
|
||||
return param.replace(annotation=annotation, name=name, default=default)
|
||||
return param
|
||||
|
||||
|
||||
def generate_dataclass_signature(
|
||||
cls: type[StandardDataclass], fields: dict[str, FieldInfo], config_wrapper: ConfigWrapper
|
||||
) -> Signature:
|
||||
"""Generate signature for a pydantic dataclass.
|
||||
|
||||
Args:
|
||||
cls: The dataclass.
|
||||
fields: The model fields.
|
||||
config_wrapper: The config wrapper instance.
|
||||
|
||||
Returns:
|
||||
The dataclass signature.
|
||||
"""
|
||||
return generate_pydantic_signature(
|
||||
init=cls.__init__, fields=fields, config_wrapper=config_wrapper, post_process_parameter=process_param_defaults
|
||||
)
|
||||
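# Illustrative sketch (not part of the original module): the visible effect of
# `generate_dataclass_signature` / `process_param_defaults` above. `Item`, `label`
# and the defaults are made up; the exact rendering may vary between versions.
def _example_dataclass_signature() -> None:
    from pydantic import Field
    from pydantic.dataclasses import dataclass as pydantic_dataclass

    @pydantic_dataclass
    class Item:
        # the alias is a valid identifier, so it replaces the parameter name
        name: str = Field(alias='label')
        # a default_factory is surfaced the same way stdlib dataclasses render it
        count: int = Field(default_factory=lambda: 0)

    # roughly: (label: str, count: int = <factory>)
    print(inspect.signature(Item))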
|
||||
|
||||
def is_builtin_dataclass(_cls: type[Any]) -> TypeGuard[type[StandardDataclass]]:
|
||||
"""Returns True if a class is a stdlib dataclass and *not* a pydantic dataclass.
|
||||
|
||||
We check that
|
||||
- `_cls` is a dataclass
|
||||
- `_cls` does not inherit from a processed pydantic dataclass (and thus have a `__pydantic_validator__`)
|
||||
- `_cls` does not have any annotations that are not dataclass fields
|
||||
e.g.
|
||||
```py
|
||||
import dataclasses
|
||||
|
||||
import pydantic.dataclasses
|
||||
|
||||
@dataclasses.dataclass
|
||||
class A:
|
||||
x: int
|
||||
|
||||
@pydantic.dataclasses.dataclass
|
||||
class B(A):
|
||||
y: int
|
||||
```
|
||||
In this case, when we first check `B`, we make an extra check and look at the annotations ('y'),
|
||||
which won't be a superset of all the dataclass fields (only the stdlib fields i.e. 'x')
|
||||
|
||||
Args:
|
||||
_cls: The class.
|
||||
|
||||
Returns:
|
||||
`True` if the class is a stdlib dataclass, `False` otherwise.
|
||||
"""
|
||||
return (
|
||||
dataclasses.is_dataclass(_cls)
|
||||
and not hasattr(_cls, '__pydantic_validator__')
|
||||
and set(_cls.__dataclass_fields__).issuperset(set(getattr(_cls, '__annotations__', {})))
|
||||
)
|
||||
@@ -0,0 +1,804 @@
|
||||
"""Logic related to validators applied to models etc. via the `@field_validator` and `@model_validator` decorators."""
|
||||
from __future__ import annotations as _annotations
|
||||
|
||||
from collections import deque
|
||||
from dataclasses import dataclass, field
|
||||
from functools import partial, partialmethod
|
||||
from inspect import Parameter, Signature, isdatadescriptor, ismethoddescriptor, signature
|
||||
from itertools import islice
|
||||
from typing import TYPE_CHECKING, Any, Callable, ClassVar, Generic, Iterable, TypeVar, Union
|
||||
|
||||
from pydantic_core import PydanticUndefined, core_schema
|
||||
from typing_extensions import Literal, TypeAlias, is_typeddict
|
||||
|
||||
from ..errors import PydanticUserError
|
||||
from ._core_utils import get_type_ref
|
||||
from ._internal_dataclass import slots_true
|
||||
from ._typing_extra import get_function_type_hints
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ..fields import ComputedFieldInfo
|
||||
from ..functional_validators import FieldValidatorModes
|
||||
|
||||
try:
|
||||
from functools import cached_property # type: ignore
|
||||
except ImportError:
|
||||
# python 3.7
|
||||
cached_property = None
|
||||
|
||||
|
||||
@dataclass(**slots_true)
|
||||
class ValidatorDecoratorInfo:
|
||||
"""A container for data from `@validator` so that we can access it
|
||||
while building the pydantic-core schema.
|
||||
|
||||
Attributes:
|
||||
decorator_repr: A class variable representing the decorator string, '@validator'.
|
||||
fields: A tuple of field names the validator should be called on.
|
||||
mode: The proposed validator mode.
|
||||
each_item: For complex objects (sets, lists etc.) whether to validate individual
|
||||
elements rather than the whole object.
|
||||
always: Whether this method and other validators should be called even if the value is missing.
|
||||
check_fields: Whether to check that the fields actually exist on the model.
|
||||
"""
|
||||
|
||||
decorator_repr: ClassVar[str] = '@validator'
|
||||
|
||||
fields: tuple[str, ...]
|
||||
mode: Literal['before', 'after']
|
||||
each_item: bool
|
||||
always: bool
|
||||
check_fields: bool | None
|
||||
|
||||
|
||||
@dataclass(**slots_true)
|
||||
class FieldValidatorDecoratorInfo:
|
||||
"""A container for data from `@field_validator` so that we can access it
|
||||
while building the pydantic-core schema.
|
||||
|
||||
Attributes:
|
||||
decorator_repr: A class variable representing the decorator string, '@field_validator'.
|
||||
fields: A tuple of field names the validator should be called on.
|
||||
mode: The proposed validator mode.
|
||||
check_fields: Whether to check that the fields actually exist on the model.
|
||||
"""
|
||||
|
||||
decorator_repr: ClassVar[str] = '@field_validator'
|
||||
|
||||
fields: tuple[str, ...]
|
||||
mode: FieldValidatorModes
|
||||
check_fields: bool | None
|
||||
|
||||
|
||||
@dataclass(**slots_true)
|
||||
class RootValidatorDecoratorInfo:
|
||||
"""A container for data from `@root_validator` so that we can access it
|
||||
while building the pydantic-core schema.
|
||||
|
||||
Attributes:
|
||||
decorator_repr: A class variable representing the decorator string, '@root_validator'.
|
||||
mode: The proposed validator mode.
|
||||
"""
|
||||
|
||||
decorator_repr: ClassVar[str] = '@root_validator'
|
||||
mode: Literal['before', 'after']
|
||||
|
||||
|
||||
@dataclass(**slots_true)
|
||||
class FieldSerializerDecoratorInfo:
|
||||
"""A container for data from `@field_serializer` so that we can access it
|
||||
while building the pydantic-core schema.
|
||||
|
||||
Attributes:
|
||||
decorator_repr: A class variable representing the decorator string, '@field_serializer'.
|
||||
fields: A tuple of field names the serializer should be called on.
|
||||
mode: The proposed serializer mode.
|
||||
return_type: The type of the serializer's return value.
|
||||
when_used: The serialization condition. Accepts a string with values `'always'`, `'unless-none'`, `'json'`,
|
||||
and `'json-unless-none'`.
|
||||
check_fields: Whether to check that the fields actually exist on the model.
|
||||
"""
|
||||
|
||||
decorator_repr: ClassVar[str] = '@field_serializer'
|
||||
fields: tuple[str, ...]
|
||||
mode: Literal['plain', 'wrap']
|
||||
return_type: Any
|
||||
when_used: core_schema.WhenUsed
|
||||
check_fields: bool | None
|
||||
|
||||
|
||||
@dataclass(**slots_true)
|
||||
class ModelSerializerDecoratorInfo:
|
||||
"""A container for data from `@model_serializer` so that we can access it
|
||||
while building the pydantic-core schema.
|
||||
|
||||
Attributes:
|
||||
decorator_repr: A class variable representing the decorator string, '@model_serializer'.
|
||||
mode: The proposed serializer mode.
|
||||
return_type: The type of the serializer's return value.
|
||||
when_used: The serialization condition. Accepts a string with values `'always'`, `'unless-none'`, `'json'`,
|
||||
and `'json-unless-none'`.
|
||||
"""
|
||||
|
||||
decorator_repr: ClassVar[str] = '@model_serializer'
|
||||
mode: Literal['plain', 'wrap']
|
||||
return_type: Any
|
||||
when_used: core_schema.WhenUsed
|
||||
|
||||
|
||||
@dataclass(**slots_true)
|
||||
class ModelValidatorDecoratorInfo:
|
||||
"""A container for data from `@model_validator` so that we can access it
|
||||
while building the pydantic-core schema.
|
||||
|
||||
Attributes:
|
||||
decorator_repr: A class variable representing the decorator string, '@model_validator'.
mode: The proposed validator mode.
|
||||
"""
|
||||
|
||||
decorator_repr: ClassVar[str] = '@model_validator'
|
||||
mode: Literal['wrap', 'before', 'after']
|
||||
|
||||
|
||||
DecoratorInfo: TypeAlias = """Union[
|
||||
ValidatorDecoratorInfo,
|
||||
FieldValidatorDecoratorInfo,
|
||||
RootValidatorDecoratorInfo,
|
||||
FieldSerializerDecoratorInfo,
|
||||
ModelSerializerDecoratorInfo,
|
||||
ModelValidatorDecoratorInfo,
|
||||
ComputedFieldInfo,
|
||||
]"""
|
||||
|
||||
ReturnType = TypeVar('ReturnType')
|
||||
DecoratedType: TypeAlias = (
|
||||
'Union[classmethod[Any, Any, ReturnType], staticmethod[Any, ReturnType], Callable[..., ReturnType], property]'
|
||||
)
|
||||
|
||||
|
||||
@dataclass # can't use slots here since we set attributes on `__post_init__`
|
||||
class PydanticDescriptorProxy(Generic[ReturnType]):
|
||||
"""Wrap a classmethod, staticmethod, property or unbound function
|
||||
and act as a descriptor that allows us to detect decorated items
|
||||
from the class' attributes.
|
||||
|
||||
This class' __get__ returns the wrapped item's __get__ result,
|
||||
which makes it transparent for classmethods and staticmethods.
|
||||
|
||||
Attributes:
|
||||
wrapped: The decorator that has to be wrapped.
|
||||
decorator_info: The decorator info.
|
||||
shim: A wrapper function to wrap V1 style function.
|
||||
"""
|
||||
|
||||
wrapped: DecoratedType[ReturnType]
|
||||
decorator_info: DecoratorInfo
|
||||
shim: Callable[[Callable[..., Any]], Callable[..., Any]] | None = None
|
||||
|
||||
def __post_init__(self):
|
||||
for attr in 'setter', 'deleter':
|
||||
if hasattr(self.wrapped, attr):
|
||||
f = partial(self._call_wrapped_attr, name=attr)
|
||||
setattr(self, attr, f)
|
||||
|
||||
def _call_wrapped_attr(self, func: Callable[[Any], None], *, name: str) -> PydanticDescriptorProxy[ReturnType]:
|
||||
self.wrapped = getattr(self.wrapped, name)(func)
|
||||
return self
|
||||
|
||||
def __get__(self, obj: object | None, obj_type: type[object] | None = None) -> PydanticDescriptorProxy[ReturnType]:
|
||||
try:
|
||||
return self.wrapped.__get__(obj, obj_type)
|
||||
except AttributeError:
|
||||
# not a descriptor, e.g. a partial object
|
||||
return self.wrapped # type: ignore[return-value]
|
||||
|
||||
def __set_name__(self, instance: Any, name: str) -> None:
|
||||
if hasattr(self.wrapped, '__set_name__'):
|
||||
self.wrapped.__set_name__(instance, name)
|
||||
|
||||
def __getattr__(self, __name: str) -> Any:
|
||||
"""Forward checks for __isabstractmethod__ and such."""
|
||||
return getattr(self.wrapped, __name)
|
||||
|
||||
|
||||
DecoratorInfoType = TypeVar('DecoratorInfoType', bound=DecoratorInfo)
|
||||
|
||||
|
||||
@dataclass(**slots_true)
|
||||
class Decorator(Generic[DecoratorInfoType]):
|
||||
"""A generic container class to join together the decorator metadata
|
||||
(metadata from decorator itself, which we have when the
|
||||
decorator is called but not when we are building the core-schema)
|
||||
and the bound function (which we have after the class itself is created).
|
||||
|
||||
Attributes:
|
||||
cls_ref: The class ref.
|
||||
cls_var_name: The decorated function name.
|
||||
func: The decorated function.
|
||||
shim: A wrapper function to wrap V1 style function.
|
||||
info: The decorator info.
|
||||
"""
|
||||
|
||||
cls_ref: str
|
||||
cls_var_name: str
|
||||
func: Callable[..., Any]
|
||||
shim: Callable[[Any], Any] | None
|
||||
info: DecoratorInfoType
|
||||
|
||||
@staticmethod
|
||||
def build(
|
||||
cls_: Any,
|
||||
*,
|
||||
cls_var_name: str,
|
||||
shim: Callable[[Any], Any] | None,
|
||||
info: DecoratorInfoType,
|
||||
) -> Decorator[DecoratorInfoType]:
|
||||
"""Build a new decorator.
|
||||
|
||||
Args:
|
||||
cls_: The class.
|
||||
cls_var_name: The decorated function name.
|
||||
shim: A wrapper function to wrap V1 style function.
|
||||
info: The decorator info.
|
||||
|
||||
Returns:
|
||||
The new decorator instance.
|
||||
"""
|
||||
func = get_attribute_from_bases(cls_, cls_var_name)
|
||||
if shim is not None:
|
||||
func = shim(func)
|
||||
func = unwrap_wrapped_function(func, unwrap_partial=False)
|
||||
if not callable(func):
|
||||
# This branch will get hit for classmethod properties
|
||||
attribute = get_attribute_from_base_dicts(cls_, cls_var_name) # prevents the binding call to `__get__`
|
||||
if isinstance(attribute, PydanticDescriptorProxy):
|
||||
func = unwrap_wrapped_function(attribute.wrapped)
|
||||
return Decorator(
|
||||
cls_ref=get_type_ref(cls_),
|
||||
cls_var_name=cls_var_name,
|
||||
func=func,
|
||||
shim=shim,
|
||||
info=info,
|
||||
)
|
||||
|
||||
def bind_to_cls(self, cls: Any) -> Decorator[DecoratorInfoType]:
|
||||
"""Bind the decorator to a class.
|
||||
|
||||
Args:
|
||||
cls: the class.
|
||||
|
||||
Returns:
|
||||
The new decorator instance.
|
||||
"""
|
||||
return self.build(
|
||||
cls,
|
||||
cls_var_name=self.cls_var_name,
|
||||
shim=self.shim,
|
||||
info=self.info,
|
||||
)
|
||||
|
||||
|
||||
def get_bases(tp: type[Any]) -> tuple[type[Any], ...]:
|
||||
"""Get the base classes of a class or typeddict.
|
||||
|
||||
Args:
|
||||
tp: The type or class to get the bases.
|
||||
|
||||
Returns:
|
||||
The base classes.
|
||||
"""
|
||||
if is_typeddict(tp):
|
||||
return tp.__orig_bases__ # type: ignore
|
||||
try:
|
||||
return tp.__bases__
|
||||
except AttributeError:
|
||||
return ()
|
||||
|
||||
|
||||
def mro(tp: type[Any]) -> tuple[type[Any], ...]:
|
||||
"""Calculate the Method Resolution Order of bases using the C3 algorithm.
|
||||
|
||||
See https://www.python.org/download/releases/2.3/mro/
|
||||
"""
|
||||
# try to use the existing mro, for performance mainly
|
||||
# but also because it helps verify the implementation below
|
||||
if not is_typeddict(tp):
|
||||
try:
|
||||
return tp.__mro__
|
||||
except AttributeError:
|
||||
# GenericAlias and some other cases
|
||||
pass
|
||||
|
||||
bases = get_bases(tp)
|
||||
return (tp,) + mro_for_bases(bases)
|
||||
|
||||
|
||||
def mro_for_bases(bases: tuple[type[Any], ...]) -> tuple[type[Any], ...]:
|
||||
def merge_seqs(seqs: list[deque[type[Any]]]) -> Iterable[type[Any]]:
|
||||
while True:
|
||||
non_empty = [seq for seq in seqs if seq]
|
||||
if not non_empty:
|
||||
# Nothing left to process, we're done.
|
||||
return
|
||||
candidate: type[Any] | None = None
|
||||
for seq in non_empty: # Find merge candidates among seq heads.
|
||||
candidate = seq[0]
|
||||
not_head = [s for s in non_empty if candidate in islice(s, 1, None)]
|
||||
if not_head:
|
||||
# Reject the candidate.
|
||||
candidate = None
|
||||
else:
|
||||
break
|
||||
if not candidate:
|
||||
raise TypeError('Inconsistent hierarchy, no C3 MRO is possible')
|
||||
yield candidate
|
||||
for seq in non_empty:
|
||||
# Remove candidate.
|
||||
if seq[0] == candidate:
|
||||
seq.popleft()
|
||||
|
||||
seqs = [deque(mro(base)) for base in bases] + [deque(bases)]
|
||||
return tuple(merge_seqs(seqs))
|
||||
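# Illustrative sketch (not part of the original module): the helpers above reproduce
# Python's C3 linearization so pydantic can also linearize TypedDicts, which have no
# real __mro__. The classes below are made up.
def _example_c3_mro() -> None:
    class A: ...
    class B(A): ...
    class C(A): ...
    class D(B, C): ...

    # For ordinary classes the helpers agree with the interpreter's own linearization.
    assert mro(D) == D.__mro__
    assert mro_for_bases((B, C)) == (B, C, A, object)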
|
||||
|
||||
_sentinel = object()
|
||||
|
||||
|
||||
def get_attribute_from_bases(tp: type[Any] | tuple[type[Any], ...], name: str) -> Any:
|
||||
"""Get the attribute from the next class in the MRO that has it,
|
||||
aiming to simulate calling the method on the actual class.
|
||||
|
||||
The reason for iterating over the mro instead of just getting
|
||||
the attribute (which would do that for us) is to support TypedDict,
|
||||
which lacks a real __mro__, but can have a virtual one constructed
|
||||
from its bases (as done here).
|
||||
|
||||
Args:
|
||||
tp: The type or class to search for the attribute. If a tuple, this is treated as a set of base classes.
|
||||
name: The name of the attribute to retrieve.
|
||||
|
||||
Returns:
|
||||
Any: The attribute value, if found.
|
||||
|
||||
Raises:
|
||||
AttributeError: If the attribute is not found in any class in the MRO.
|
||||
"""
|
||||
if isinstance(tp, tuple):
|
||||
for base in mro_for_bases(tp):
|
||||
attribute = base.__dict__.get(name, _sentinel)
|
||||
if attribute is not _sentinel:
|
||||
attribute_get = getattr(attribute, '__get__', None)
|
||||
if attribute_get is not None:
|
||||
return attribute_get(None, tp)
|
||||
return attribute
|
||||
raise AttributeError(f'{name} not found in {tp}')
|
||||
else:
|
||||
try:
|
||||
return getattr(tp, name)
|
||||
except AttributeError:
|
||||
return get_attribute_from_bases(mro(tp), name)
|
||||
|
||||
|
||||
def get_attribute_from_base_dicts(tp: type[Any], name: str) -> Any:
|
||||
"""Get an attribute out of the `__dict__` following the MRO.
|
||||
This prevents the call to `__get__` on the descriptor, and allows
|
||||
us to get the original function for classmethod properties.
|
||||
|
||||
Args:
|
||||
tp: The type or class to search for the attribute.
|
||||
name: The name of the attribute to retrieve.
|
||||
|
||||
Returns:
|
||||
Any: The attribute value, if found.
|
||||
|
||||
Raises:
|
||||
KeyError: If the attribute is not found in any class's `__dict__` in the MRO.
|
||||
"""
|
||||
for base in reversed(mro(tp)):
|
||||
if name in base.__dict__:
|
||||
return base.__dict__[name]
|
||||
return tp.__dict__[name] # raise the error
|
||||
|
||||
|
||||
@dataclass(**slots_true)
|
||||
class DecoratorInfos:
|
||||
"""Mapping of name in the class namespace to decorator info.
|
||||
|
||||
Note that the name in the class namespace is the function or attribute name, not the field name!
|
||||
"""
|
||||
|
||||
validators: dict[str, Decorator[ValidatorDecoratorInfo]] = field(default_factory=dict)
|
||||
field_validators: dict[str, Decorator[FieldValidatorDecoratorInfo]] = field(default_factory=dict)
|
||||
root_validators: dict[str, Decorator[RootValidatorDecoratorInfo]] = field(default_factory=dict)
|
||||
field_serializers: dict[str, Decorator[FieldSerializerDecoratorInfo]] = field(default_factory=dict)
|
||||
model_serializers: dict[str, Decorator[ModelSerializerDecoratorInfo]] = field(default_factory=dict)
|
||||
model_validators: dict[str, Decorator[ModelValidatorDecoratorInfo]] = field(default_factory=dict)
|
||||
computed_fields: dict[str, Decorator[ComputedFieldInfo]] = field(default_factory=dict)
|
||||
|
||||
@staticmethod
|
||||
def build(model_dc: type[Any]) -> DecoratorInfos: # noqa: C901 (ignore complexity)
|
||||
"""We want to collect all DecFunc instances that exist as
|
||||
attributes in the namespace of the class (a BaseModel or dataclass)
|
||||
that called us
|
||||
But we want to collect these in the order of the bases
|
||||
So instead of getting them all from the leaf class (the class that called us),
|
||||
we traverse the bases from root (the oldest ancestor class) to leaf
|
||||
and collect all of the instances as we go, taking care to replace
|
||||
any duplicate ones with the last one we see to mimic how function overriding
|
||||
works with inheritance.
|
||||
If we do replace any functions we put the replacement into the position
|
||||
the replaced function was in; that is, we maintain the order.
|
||||
"""
|
||||
# reminder: dicts are ordered and replacement does not alter the order
|
||||
res = DecoratorInfos()
|
||||
for base in reversed(mro(model_dc)[1:]):
|
||||
existing: DecoratorInfos | None = base.__dict__.get('__pydantic_decorators__')
|
||||
if existing is None:
|
||||
existing = DecoratorInfos.build(base)
|
||||
res.validators.update({k: v.bind_to_cls(model_dc) for k, v in existing.validators.items()})
|
||||
res.field_validators.update({k: v.bind_to_cls(model_dc) for k, v in existing.field_validators.items()})
|
||||
res.root_validators.update({k: v.bind_to_cls(model_dc) for k, v in existing.root_validators.items()})
|
||||
res.field_serializers.update({k: v.bind_to_cls(model_dc) for k, v in existing.field_serializers.items()})
|
||||
res.model_serializers.update({k: v.bind_to_cls(model_dc) for k, v in existing.model_serializers.items()})
|
||||
res.model_validators.update({k: v.bind_to_cls(model_dc) for k, v in existing.model_validators.items()})
|
||||
res.computed_fields.update({k: v.bind_to_cls(model_dc) for k, v in existing.computed_fields.items()})
|
||||
|
||||
to_replace: list[tuple[str, Any]] = []
|
||||
|
||||
for var_name, var_value in vars(model_dc).items():
|
||||
if isinstance(var_value, PydanticDescriptorProxy):
|
||||
info = var_value.decorator_info
|
||||
if isinstance(info, ValidatorDecoratorInfo):
|
||||
res.validators[var_name] = Decorator.build(
|
||||
model_dc, cls_var_name=var_name, shim=var_value.shim, info=info
|
||||
)
|
||||
elif isinstance(info, FieldValidatorDecoratorInfo):
|
||||
res.field_validators[var_name] = Decorator.build(
|
||||
model_dc, cls_var_name=var_name, shim=var_value.shim, info=info
|
||||
)
|
||||
elif isinstance(info, RootValidatorDecoratorInfo):
|
||||
res.root_validators[var_name] = Decorator.build(
|
||||
model_dc, cls_var_name=var_name, shim=var_value.shim, info=info
|
||||
)
|
||||
elif isinstance(info, FieldSerializerDecoratorInfo):
|
||||
# check whether a serializer function is already registered for fields
|
||||
for field_serializer_decorator in res.field_serializers.values():
|
||||
# check that each field has at most one serializer function.
|
||||
# serializer functions for the same field in subclasses are allowed,
|
||||
# and are treated as overrides
|
||||
if field_serializer_decorator.cls_var_name == var_name:
|
||||
continue
|
||||
for f in info.fields:
|
||||
if f in field_serializer_decorator.info.fields:
|
||||
raise PydanticUserError(
|
||||
'Multiple field serializer functions were defined '
|
||||
f'for field {f!r}, this is not allowed.',
|
||||
code='multiple-field-serializers',
|
||||
)
|
||||
res.field_serializers[var_name] = Decorator.build(
|
||||
model_dc, cls_var_name=var_name, shim=var_value.shim, info=info
|
||||
)
|
||||
elif isinstance(info, ModelValidatorDecoratorInfo):
|
||||
res.model_validators[var_name] = Decorator.build(
|
||||
model_dc, cls_var_name=var_name, shim=var_value.shim, info=info
|
||||
)
|
||||
elif isinstance(info, ModelSerializerDecoratorInfo):
|
||||
res.model_serializers[var_name] = Decorator.build(
|
||||
model_dc, cls_var_name=var_name, shim=var_value.shim, info=info
|
||||
)
|
||||
else:
|
||||
from ..fields import ComputedFieldInfo
|
||||
|
||||
isinstance(var_value, ComputedFieldInfo)
|
||||
res.computed_fields[var_name] = Decorator.build(
|
||||
model_dc, cls_var_name=var_name, shim=None, info=info
|
||||
)
|
||||
to_replace.append((var_name, var_value.wrapped))
|
||||
if to_replace:
|
||||
# If we can save `__pydantic_decorators__` on the class we'll be able to check for it above
|
||||
# so then we don't need to re-process the type, which means we can discard our descriptor wrappers
|
||||
# and replace them with the thing they are wrapping (see the other setattr call below)
|
||||
# which allows validator class methods to also function as regular class methods
|
||||
setattr(model_dc, '__pydantic_decorators__', res)
|
||||
for name, value in to_replace:
|
||||
setattr(model_dc, name, value)
|
||||
return res
|
||||
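# Illustrative sketch (not part of the original module): what `DecoratorInfos.build`
# collects. `User` and its validator are made up; `__pydantic_decorators__` is populated
# on every BaseModel subclass during class construction.
def _example_decorator_infos() -> None:
    from pydantic import BaseModel, field_validator

    class User(BaseModel):
        name: str

        @field_validator('name')
        @classmethod
        def strip_name(cls, v: str) -> str:
            return v.strip()

    decorators = User.__pydantic_decorators__
    assert 'strip_name' in decorators.field_validators
    assert decorators.field_validators['strip_name'].info.fields == ('name',)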
|
||||
|
||||
def inspect_validator(validator: Callable[..., Any], mode: FieldValidatorModes) -> bool:
|
||||
"""Look at a field or model validator function and determine whether it takes an info argument.
|
||||
|
||||
An error is raised if the function has an invalid signature.
|
||||
|
||||
Args:
|
||||
validator: The validator function to inspect.
|
||||
mode: The proposed validator mode.
|
||||
|
||||
Returns:
|
||||
Whether the validator takes an info argument.
|
||||
"""
|
||||
try:
|
||||
sig = signature(validator)
|
||||
except ValueError:
|
||||
# builtins and some C extensions don't have signatures
|
||||
# assume that they don't take an info argument and only take a single argument
|
||||
# e.g. `str.strip` or `datetime.datetime`
|
||||
return False
|
||||
n_positional = count_positional_params(sig)
|
||||
if mode == 'wrap':
|
||||
if n_positional == 3:
|
||||
return True
|
||||
elif n_positional == 2:
|
||||
return False
|
||||
else:
|
||||
assert mode in {'before', 'after', 'plain'}, f"invalid mode: {mode!r}, expected 'before', 'after' or 'plain'"
|
||||
if n_positional == 2:
|
||||
return True
|
||||
elif n_positional == 1:
|
||||
return False
|
||||
|
||||
raise PydanticUserError(
|
||||
f'Unrecognized field_validator function signature for {validator} with `mode={mode}`:{sig}',
|
||||
code='validator-signature',
|
||||
)
|
||||
|
||||
|
||||
def inspect_field_serializer(
|
||||
serializer: Callable[..., Any], mode: Literal['plain', 'wrap'], computed_field: bool = False
|
||||
) -> tuple[bool, bool]:
|
||||
"""Look at a field serializer function and determine if it is a field serializer,
|
||||
and whether it takes an info argument.
|
||||
|
||||
An error is raised if the function has an invalid signature.
|
||||
|
||||
Args:
|
||||
serializer: The serializer function to inspect.
|
||||
mode: The serializer mode, either 'plain' or 'wrap'.
|
||||
computed_field: Whether the serializer is applied to a `computed_field`. If so, the serializer does not require an info argument.
|
||||
|
||||
Returns:
|
||||
Tuple of (is_field_serializer, info_arg).
|
||||
"""
|
||||
sig = signature(serializer)
|
||||
|
||||
first = next(iter(sig.parameters.values()), None)
|
||||
is_field_serializer = first is not None and first.name == 'self'
|
||||
|
||||
n_positional = count_positional_params(sig)
|
||||
if is_field_serializer:
|
||||
# -1 to correct for self parameter
|
||||
info_arg = _serializer_info_arg(mode, n_positional - 1)
|
||||
else:
|
||||
info_arg = _serializer_info_arg(mode, n_positional)
|
||||
|
||||
if info_arg is None:
|
||||
raise PydanticUserError(
|
||||
f'Unrecognized field_serializer function signature for {serializer} with `mode={mode}`:{sig}',
|
||||
code='field-serializer-signature',
|
||||
)
|
||||
if info_arg and computed_field:
|
||||
raise PydanticUserError(
|
||||
'field_serializer on computed_field does not use info signature', code='field-serializer-signature'
|
||||
)
|
||||
|
||||
else:
|
||||
return is_field_serializer, info_arg
|
||||
|
||||
|
||||
def inspect_annotated_serializer(serializer: Callable[..., Any], mode: Literal['plain', 'wrap']) -> bool:
|
||||
"""Look at a serializer function used via `Annotated` and determine whether it takes an info argument.
|
||||
|
||||
An error is raised if the function has an invalid signature.
|
||||
|
||||
Args:
|
||||
serializer: The serializer function to check.
|
||||
mode: The serializer mode, either 'plain' or 'wrap'.
|
||||
|
||||
Returns:
|
||||
info_arg
|
||||
"""
|
||||
sig = signature(serializer)
|
||||
info_arg = _serializer_info_arg(mode, count_positional_params(sig))
|
||||
if info_arg is None:
|
||||
raise PydanticUserError(
|
||||
f'Unrecognized field_serializer function signature for {serializer} with `mode={mode}`:{sig}',
|
||||
code='field-serializer-signature',
|
||||
)
|
||||
else:
|
||||
return info_arg
|
||||
|
||||
|
||||
def inspect_model_serializer(serializer: Callable[..., Any], mode: Literal['plain', 'wrap']) -> bool:
|
||||
"""Look at a model serializer function and determine whether it takes an info argument.
|
||||
|
||||
An error is raised if the function has an invalid signature.
|
||||
|
||||
Args:
|
||||
serializer: The serializer function to check.
|
||||
mode: The serializer mode, either 'plain' or 'wrap'.
|
||||
|
||||
Returns:
|
||||
`info_arg` - whether the function expects an info argument.
|
||||
"""
|
||||
if isinstance(serializer, (staticmethod, classmethod)) or not is_instance_method_from_sig(serializer):
|
||||
raise PydanticUserError(
|
||||
'`@model_serializer` must be applied to instance methods', code='model-serializer-instance-method'
|
||||
)
|
||||
|
||||
sig = signature(serializer)
|
||||
info_arg = _serializer_info_arg(mode, count_positional_params(sig))
|
||||
if info_arg is None:
|
||||
raise PydanticUserError(
|
||||
f'Unrecognized model_serializer function signature for {serializer} with `mode={mode}`:{sig}',
|
||||
code='model-serializer-signature',
|
||||
)
|
||||
else:
|
||||
return info_arg
|
||||
|
||||
|
||||
def _serializer_info_arg(mode: Literal['plain', 'wrap'], n_positional: int) -> bool | None:
|
||||
if mode == 'plain':
|
||||
if n_positional == 1:
|
||||
# (__input_value: Any) -> Any
|
||||
return False
|
||||
elif n_positional == 2:
|
||||
# (__model: Any, __input_value: Any) -> Any
|
||||
return True
|
||||
else:
|
||||
assert mode == 'wrap', f"invalid mode: {mode!r}, expected 'plain' or 'wrap'"
|
||||
if n_positional == 2:
|
||||
# (__input_value: Any, __serializer: SerializerFunctionWrapHandler) -> Any
|
||||
return False
|
||||
elif n_positional == 3:
|
||||
# (__input_value: Any, __serializer: SerializerFunctionWrapHandler, __info: SerializationInfo) -> Any
|
||||
return True
|
||||
|
||||
return None
|
||||
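# Illustrative sketch (not part of the original module): how the positional-parameter
# count maps onto the "does this serializer take an info argument" decision above.
def _example_serializer_info_arg() -> None:
    assert _serializer_info_arg('plain', 1) is False  # (value) -> Any
    assert _serializer_info_arg('plain', 2) is True   # (value, info) -> Any
    assert _serializer_info_arg('wrap', 2) is False   # (value, handler) -> Any
    assert _serializer_info_arg('wrap', 3) is True    # (value, handler, info) -> Any
    # any other count is unrecognized; the inspect_* callers turn None into a PydanticUserError
    assert _serializer_info_arg('plain', 0) is None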
|
||||
|
||||
AnyDecoratorCallable: TypeAlias = (
|
||||
'Union[classmethod[Any, Any, Any], staticmethod[Any, Any], partialmethod[Any], Callable[..., Any]]'
|
||||
)
|
||||
|
||||
|
||||
def is_instance_method_from_sig(function: AnyDecoratorCallable) -> bool:
|
||||
"""Whether the function is an instance method.
|
||||
|
||||
It will consider a function as instance method if the first parameter of
|
||||
function is `self`.
|
||||
|
||||
Args:
|
||||
function: The function to check.
|
||||
|
||||
Returns:
|
||||
`True` if the function is an instance method, `False` otherwise.
|
||||
"""
|
||||
sig = signature(unwrap_wrapped_function(function))
|
||||
first = next(iter(sig.parameters.values()), None)
|
||||
if first and first.name == 'self':
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def ensure_classmethod_based_on_signature(function: AnyDecoratorCallable) -> Any:
|
||||
"""Apply the `@classmethod` decorator on the function.
|
||||
|
||||
Args:
|
||||
function: The function to apply the decorator on.
|
||||
|
||||
Return:
|
||||
The `@classmethod` decorator applied function.
|
||||
"""
|
||||
if not isinstance(
|
||||
unwrap_wrapped_function(function, unwrap_class_static_method=False), classmethod
|
||||
) and _is_classmethod_from_sig(function):
|
||||
return classmethod(function) # type: ignore[arg-type]
|
||||
return function
|
||||
|
||||
|
||||
def _is_classmethod_from_sig(function: AnyDecoratorCallable) -> bool:
|
||||
sig = signature(unwrap_wrapped_function(function))
|
||||
first = next(iter(sig.parameters.values()), None)
|
||||
if first and first.name == 'cls':
|
||||
return True
|
||||
return False
|
||||
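# Illustrative sketch (not part of the original module): the signature-based probing
# above in action. The two functions below are made up.
def _example_signature_probing() -> None:
    def looks_like_classmethod(cls, value):
        return value

    def looks_like_instance_method(self, value):
        return value

    assert _is_classmethod_from_sig(looks_like_classmethod) is True
    assert is_instance_method_from_sig(looks_like_instance_method) is True
    # a bare function whose first parameter is `cls` gets wrapped in classmethod automatically
    assert isinstance(ensure_classmethod_based_on_signature(looks_like_classmethod), classmethod)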
|
||||
|
||||
def unwrap_wrapped_function(
|
||||
func: Any,
|
||||
*,
|
||||
unwrap_partial: bool = True,
|
||||
unwrap_class_static_method: bool = True,
|
||||
) -> Any:
|
||||
"""Recursively unwraps a wrapped function until the underlying function is reached.
|
||||
This handles property, functools.partial, functools.partialmethod, staticmethod and classmethod.
|
||||
|
||||
Args:
|
||||
func: The function to unwrap.
|
||||
unwrap_partial: If True (default), unwrap partial and partialmethod decorators; otherwise leave them wrapped.
|
||||
unwrap_class_static_method: If True (default), also unwrap classmethod and staticmethod
|
||||
decorators. If False, only unwrap partial and partialmethod decorators.
|
||||
|
||||
Returns:
|
||||
The underlying function of the wrapped function.
|
||||
"""
|
||||
all: set[Any] = {property}
|
||||
|
||||
if unwrap_partial:
|
||||
all.update({partial, partialmethod})
|
||||
|
||||
try:
|
||||
from functools import cached_property # type: ignore
|
||||
except ImportError:
|
||||
cached_property = type('', (), {})
|
||||
else:
|
||||
all.add(cached_property)
|
||||
|
||||
if unwrap_class_static_method:
|
||||
all.update({staticmethod, classmethod})
|
||||
|
||||
while isinstance(func, tuple(all)):
|
||||
if unwrap_class_static_method and isinstance(func, (classmethod, staticmethod)):
|
||||
func = func.__func__
|
||||
elif isinstance(func, (partial, partialmethod)):
|
||||
func = func.func
|
||||
elif isinstance(func, property):
|
||||
func = func.fget # arbitrary choice, convenient for computed fields
|
||||
else:
|
||||
# Make coverage happy as it can only get here in the last possible case
|
||||
assert isinstance(func, cached_property)
|
||||
func = func.func # type: ignore
|
||||
|
||||
return func
|
||||
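# Illustrative sketch (not part of the original module): peeling decorator layers with
# `unwrap_wrapped_function`. The layered callable below is contrived.
def _example_unwrap_wrapped_function() -> None:
    def plain(x: int) -> int:
        return x

    layered = staticmethod(partial(plain, 1))

    # fully unwrapped: staticmethod -> partial -> plain
    assert unwrap_wrapped_function(layered) is plain
    # staticmethod/classmethod wrappers can be kept if only partials should be unwrapped
    assert isinstance(unwrap_wrapped_function(layered, unwrap_class_static_method=False), staticmethod)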
|
||||
|
||||
def get_function_return_type(
|
||||
func: Any, explicit_return_type: Any, types_namespace: dict[str, Any] | None = None
|
||||
) -> Any:
|
||||
"""Get the function return type.
|
||||
|
||||
It gets the return type from the type annotation if `explicit_return_type` is `None`.
|
||||
Otherwise, it returns `explicit_return_type`.
|
||||
|
||||
Args:
|
||||
func: The function to get its return type.
|
||||
explicit_return_type: The explicit return type.
|
||||
types_namespace: The types namespace, defaults to `None`.
|
||||
|
||||
Returns:
|
||||
The function return type.
|
||||
"""
|
||||
if explicit_return_type is PydanticUndefined:
|
||||
# try to get it from the type annotation
|
||||
hints = get_function_type_hints(
|
||||
unwrap_wrapped_function(func), include_keys={'return'}, types_namespace=types_namespace
|
||||
)
|
||||
return hints.get('return', PydanticUndefined)
|
||||
else:
|
||||
return explicit_return_type
|
||||
|
||||
|
||||
def count_positional_params(sig: Signature) -> int:
|
||||
return sum(1 for param in sig.parameters.values() if can_be_positional(param))
|
||||
|
||||
|
||||
def can_be_positional(param: Parameter) -> bool:
|
||||
return param.kind in (Parameter.POSITIONAL_ONLY, Parameter.POSITIONAL_OR_KEYWORD)
|
||||
|
||||
|
||||
def ensure_property(f: Any) -> Any:
|
||||
"""Ensure that a function is a `property` or `cached_property`, or is a valid descriptor.
|
||||
|
||||
Args:
|
||||
f: The function to check.
|
||||
|
||||
Returns:
|
||||
The function, or a `property` or `cached_property` instance wrapping the function.
|
||||
"""
|
||||
if ismethoddescriptor(f) or isdatadescriptor(f):
|
||||
return f
|
||||
else:
|
||||
return property(f)
|
||||
@@ -0,0 +1,181 @@
|
||||
"""Logic for V1 validators, e.g. `@validator` and `@root_validator`."""
|
||||
from __future__ import annotations as _annotations
|
||||
|
||||
from inspect import Parameter, signature
|
||||
from typing import Any, Dict, Tuple, Union, cast
|
||||
|
||||
from pydantic_core import core_schema
|
||||
from typing_extensions import Protocol
|
||||
|
||||
from ..errors import PydanticUserError
|
||||
from ._decorators import can_be_positional
|
||||
|
||||
|
||||
class V1OnlyValueValidator(Protocol):
|
||||
"""A simple validator, supported for V1 validators and V2 validators."""
|
||||
|
||||
def __call__(self, __value: Any) -> Any:
|
||||
...
|
||||
|
||||
|
||||
class V1ValidatorWithValues(Protocol):
|
||||
"""A validator with `values` argument, supported for V1 validators and V2 validators."""
|
||||
|
||||
def __call__(self, __value: Any, values: dict[str, Any]) -> Any:
|
||||
...
|
||||
|
||||
|
||||
class V1ValidatorWithValuesKwOnly(Protocol):
|
||||
"""A validator with keyword only `values` argument, supported for V1 validators and V2 validators."""
|
||||
|
||||
def __call__(self, __value: Any, *, values: dict[str, Any]) -> Any:
|
||||
...
|
||||
|
||||
|
||||
class V1ValidatorWithKwargs(Protocol):
|
||||
"""A validator with `kwargs` argument, supported for V1 validators and V2 validators."""
|
||||
|
||||
def __call__(self, __value: Any, **kwargs: Any) -> Any:
|
||||
...
|
||||
|
||||
|
||||
class V1ValidatorWithValuesAndKwargs(Protocol):
|
||||
"""A validator with `values` and `kwargs` arguments, supported for V1 validators and V2 validators."""
|
||||
|
||||
def __call__(self, __value: Any, values: dict[str, Any], **kwargs: Any) -> Any:
|
||||
...
|
||||
|
||||
|
||||
V1Validator = Union[
|
||||
V1ValidatorWithValues, V1ValidatorWithValuesKwOnly, V1ValidatorWithKwargs, V1ValidatorWithValuesAndKwargs
|
||||
]
|
||||
|
||||
|
||||
def can_be_keyword(param: Parameter) -> bool:
|
||||
return param.kind in (Parameter.POSITIONAL_OR_KEYWORD, Parameter.KEYWORD_ONLY)
|
||||
|
||||
|
||||
def make_generic_v1_field_validator(validator: V1Validator) -> core_schema.WithInfoValidatorFunction:
|
||||
"""Wrap a V1 style field validator for V2 compatibility.
|
||||
|
||||
Args:
|
||||
validator: The V1 style field validator.
|
||||
|
||||
Returns:
|
||||
A wrapped V2 style field validator.
|
||||
|
||||
Raises:
|
||||
PydanticUserError: If the signature is not supported or the parameters are
|
||||
not available in Pydantic V2.
|
||||
"""
|
||||
sig = signature(validator)
|
||||
|
||||
needs_values_kw = False
|
||||
|
||||
for param_num, (param_name, parameter) in enumerate(sig.parameters.items()):
|
||||
if can_be_keyword(parameter) and param_name in ('field', 'config'):
|
||||
raise PydanticUserError(
|
||||
'The `field` and `config` parameters are not available in Pydantic V2, '
|
||||
'please use the `info` parameter instead.',
|
||||
code='validator-field-config-info',
|
||||
)
|
||||
if parameter.kind is Parameter.VAR_KEYWORD:
|
||||
needs_values_kw = True
|
||||
elif can_be_keyword(parameter) and param_name == 'values':
|
||||
needs_values_kw = True
|
||||
elif can_be_positional(parameter) and param_num == 0:
|
||||
# value
|
||||
continue
|
||||
elif parameter.default is Parameter.empty: # ignore params with defaults e.g. bound by functools.partial
|
||||
raise PydanticUserError(
|
||||
f'Unsupported signature for V1 style validator {validator}: {sig} is not supported.',
|
||||
code='validator-v1-signature',
|
||||
)
|
||||
|
||||
if needs_values_kw:
|
||||
# (v, **kwargs), (v, values, **kwargs), (v, *, values, **kwargs) or (v, *, values)
|
||||
val1 = cast(V1ValidatorWithValues, validator)
|
||||
|
||||
def wrapper1(value: Any, info: core_schema.ValidationInfo) -> Any:
|
||||
return val1(value, values=info.data)
|
||||
|
||||
return wrapper1
|
||||
else:
|
||||
val2 = cast(V1OnlyValueValidator, validator)
|
||||
|
||||
def wrapper2(value: Any, _: core_schema.ValidationInfo) -> Any:
|
||||
return val2(value)
|
||||
|
||||
return wrapper2
|
||||
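# Illustrative sketch (not part of the original module): wrapping a V1-style validator so
# it can be called with pydantic-core's (value, info) convention. `v1_style` and `FakeInfo`
# (a stand-in for core_schema.ValidationInfo exposing only `.data`) are made up.
def _example_wrap_v1_validator() -> None:
    def v1_style(value: str, values: dict[str, Any]) -> str:
        return value.upper() if values.get('shout') else value

    v2_style = make_generic_v1_field_validator(v1_style)

    class FakeInfo:
        data = {'shout': True}

    assert v2_style('hi', FakeInfo()) == 'HI'  # type: ignore[arg-type]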
|
||||
|
||||
RootValidatorValues = Dict[str, Any]
|
||||
# technically tuple[model_dict, model_extra, fields_set] | tuple[dataclass_dict, init_vars]
|
||||
RootValidatorFieldsTuple = Tuple[Any, ...]
|
||||
|
||||
|
||||
class V1RootValidatorFunction(Protocol):
|
||||
"""A simple root validator, supported for V1 validators and V2 validators."""
|
||||
|
||||
def __call__(self, __values: RootValidatorValues) -> RootValidatorValues:
|
||||
...
|
||||
|
||||
|
||||
class V2CoreBeforeRootValidator(Protocol):
|
||||
"""V2 validator with mode='before'."""
|
||||
|
||||
def __call__(self, __values: RootValidatorValues, __info: core_schema.ValidationInfo) -> RootValidatorValues:
|
||||
...
|
||||
|
||||
|
||||
class V2CoreAfterRootValidator(Protocol):
|
||||
"""V2 validator with mode='after'."""
|
||||
|
||||
def __call__(
|
||||
self, __fields_tuple: RootValidatorFieldsTuple, __info: core_schema.ValidationInfo
|
||||
) -> RootValidatorFieldsTuple:
|
||||
...
|
||||
|
||||
|
||||
def make_v1_generic_root_validator(
|
||||
validator: V1RootValidatorFunction, pre: bool
|
||||
) -> V2CoreBeforeRootValidator | V2CoreAfterRootValidator:
|
||||
"""Wrap a V1 style root validator for V2 compatibility.
|
||||
|
||||
Args:
|
||||
validator: The V1 style field validator.
|
||||
pre: Whether the validator is a pre validator.
|
||||
|
||||
Returns:
|
||||
A wrapped V2 style validator.
|
||||
"""
|
||||
if pre is True:
|
||||
# mode='before' for pydantic-core
|
||||
def _wrapper1(values: RootValidatorValues, _: core_schema.ValidationInfo) -> RootValidatorValues:
|
||||
return validator(values)
|
||||
|
||||
return _wrapper1
|
||||
|
||||
# mode='after' for pydantic-core
|
||||
def _wrapper2(fields_tuple: RootValidatorFieldsTuple, _: core_schema.ValidationInfo) -> RootValidatorFieldsTuple:
|
||||
if len(fields_tuple) == 2:
|
||||
# dataclass, this is easy
|
||||
values, init_vars = fields_tuple
|
||||
values = validator(values)
|
||||
return values, init_vars
|
||||
else:
|
||||
# ugly hack: to match v1 behaviour, we merge values and model_extra, then split them up based on fields
|
||||
# afterwards
|
||||
model_dict, model_extra, fields_set = fields_tuple
|
||||
if model_extra:
|
||||
fields = set(model_dict.keys())
|
||||
model_dict.update(model_extra)
|
||||
model_dict_new = validator(model_dict)
|
||||
for k in list(model_dict_new.keys()):
|
||||
if k not in fields:
|
||||
model_extra[k] = model_dict_new.pop(k)
|
||||
else:
|
||||
model_dict_new = validator(model_dict)
|
||||
return model_dict_new, model_extra, fields_set
|
||||
|
||||
return _wrapper2
|
||||
@@ -0,0 +1,509 @@
|
||||
from __future__ import annotations as _annotations
|
||||
|
||||
from typing import TYPE_CHECKING, Any, Hashable, Sequence
|
||||
|
||||
from pydantic_core import CoreSchema, core_schema
|
||||
|
||||
from ..errors import PydanticUserError
|
||||
from . import _core_utils
|
||||
from ._core_utils import (
|
||||
NEEDS_APPLY_DISCRIMINATED_UNION_METADATA_KEY,
|
||||
CoreSchemaField,
|
||||
collect_definitions,
|
||||
simplify_schema_references,
|
||||
)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ..types import Discriminator
|
||||
|
||||
CORE_SCHEMA_METADATA_DISCRIMINATOR_PLACEHOLDER_KEY = 'pydantic.internal.union_discriminator'
|
||||
|
||||
|
||||
class MissingDefinitionForUnionRef(Exception):
|
||||
"""Raised when applying a discriminated union discriminator to a schema
|
||||
requires a definition that is not yet defined
|
||||
"""
|
||||
|
||||
def __init__(self, ref: str) -> None:
|
||||
self.ref = ref
|
||||
super().__init__(f'Missing definition for ref {self.ref!r}')
|
||||
|
||||
|
||||
def set_discriminator(schema: CoreSchema, discriminator: Any) -> None:
|
||||
schema.setdefault('metadata', {})
|
||||
metadata = schema.get('metadata')
|
||||
assert metadata is not None
|
||||
metadata[CORE_SCHEMA_METADATA_DISCRIMINATOR_PLACEHOLDER_KEY] = discriminator
|
||||
|
||||
|
||||
def apply_discriminators(schema: core_schema.CoreSchema) -> core_schema.CoreSchema:
|
||||
definitions: dict[str, CoreSchema] | None = None
|
||||
|
||||
def inner(s: core_schema.CoreSchema, recurse: _core_utils.Recurse) -> core_schema.CoreSchema:
|
||||
nonlocal definitions
|
||||
if 'metadata' in s:
|
||||
if s['metadata'].get(NEEDS_APPLY_DISCRIMINATED_UNION_METADATA_KEY, True) is False:
|
||||
return s
|
||||
|
||||
s = recurse(s, inner)
|
||||
if s['type'] == 'tagged-union':
|
||||
return s
|
||||
|
||||
metadata = s.get('metadata', {})
|
||||
discriminator = metadata.get(CORE_SCHEMA_METADATA_DISCRIMINATOR_PLACEHOLDER_KEY, None)
|
||||
if discriminator is not None:
|
||||
if definitions is None:
|
||||
definitions = collect_definitions(schema)
|
||||
s = apply_discriminator(s, discriminator, definitions)
|
||||
return s
|
||||
|
||||
return simplify_schema_references(_core_utils.walk_core_schema(schema, inner))
|
||||
|
||||
|
||||
def apply_discriminator(
|
||||
schema: core_schema.CoreSchema,
|
||||
discriminator: str | Discriminator,
|
||||
definitions: dict[str, core_schema.CoreSchema] | None = None,
|
||||
) -> core_schema.CoreSchema:
|
||||
"""Applies the discriminator and returns a new core schema.
|
||||
|
||||
Args:
|
||||
schema: The input schema.
|
||||
discriminator: The name of the field which will serve as the discriminator.
|
||||
definitions: A mapping of schema ref to schema.
|
||||
|
||||
Returns:
|
||||
The new core schema.
|
||||
|
||||
Raises:
|
||||
TypeError:
|
||||
- If `discriminator` is used with invalid union variant.
|
||||
- If `discriminator` is used with `Union` type with one variant.
|
||||
- If `discriminator` value mapped to multiple choices.
|
||||
MissingDefinitionForUnionRef:
|
||||
If the definition for ref is missing.
|
||||
PydanticUserError:
|
||||
- If a model in union doesn't have a discriminator field.
|
||||
- If discriminator field has a non-string alias.
|
||||
- If discriminator fields have different aliases.
|
||||
- If discriminator field not of type `Literal`.
|
||||
"""
|
||||
from ..types import Discriminator
|
||||
|
||||
if isinstance(discriminator, Discriminator):
|
||||
if isinstance(discriminator.discriminator, str):
|
||||
discriminator = discriminator.discriminator
|
||||
else:
|
||||
return discriminator._convert_schema(schema)
|
||||
|
||||
return _ApplyInferredDiscriminator(discriminator, definitions or {}).apply(schema)
|
||||
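# Illustrative sketch (not part of the original module): the user-facing effect of the
# discriminator machinery above, via the public API. `Cat`, `Dog`, `Owner` and the
# `pet_type` field are made up.
def _example_discriminated_union() -> None:
    from typing import Literal, Union

    from typing_extensions import Annotated

    from pydantic import BaseModel, Field

    class Cat(BaseModel):
        pet_type: Literal['cat']

    class Dog(BaseModel):
        pet_type: Literal['dog']

    class Owner(BaseModel):
        # Field(discriminator=...) is what routes this union through the machinery above
        # during schema building, turning it into a tagged-union schema.
        pet: Annotated[Union[Cat, Dog], Field(discriminator='pet_type')]

    owner = Owner.model_validate({'pet': {'pet_type': 'dog'}})
    assert isinstance(owner.pet, Dog)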
|
||||
|
||||
class _ApplyInferredDiscriminator:
|
||||
"""This class is used to convert an input schema containing a union schema into one where that union is
|
||||
replaced with a tagged-union, with all the associated debugging and performance benefits.
|
||||
|
||||
This is done by:
|
||||
* Validating that the input schema is compatible with the provided discriminator
|
||||
* Introspecting the schema to determine which discriminator values should map to which union choices
|
||||
* Handling various edge cases such as 'definitions', 'default', 'nullable' schemas, and more
|
||||
|
||||
I have chosen to implement the conversion algorithm in this class, rather than a function,
|
||||
to make it easier to maintain state while recursively walking the provided CoreSchema.
|
||||
"""
|
||||
|
||||
def __init__(self, discriminator: str, definitions: dict[str, core_schema.CoreSchema]):
|
||||
# `discriminator` should be the name of the field which will serve as the discriminator.
|
||||
# It must be the python name of the field, and *not* the field's alias. Note that as of now,
|
||||
# all members of a discriminated union _must_ use a field with the same name as the discriminator.
|
||||
# This may change if/when we expose a way to manually specify the TaggedUnionSchema's choices.
|
||||
self.discriminator = discriminator
|
||||
|
||||
# `definitions` should contain a mapping of schema ref to schema for all schemas which might
|
||||
# be referenced by some choice
|
||||
self.definitions = definitions
|
||||
|
||||
# `_discriminator_alias` will hold the value, if present, of the alias for the discriminator
|
||||
#
|
||||
# Note: following the v1 implementation, we currently disallow the use of different aliases
|
||||
# for different choices. This is not a limitation of pydantic_core, but if we try to handle
|
||||
# this, the inference logic gets complicated very quickly, and could result in confusing
|
||||
# debugging challenges for users making subtle mistakes.
|
||||
#
|
||||
# Rather than trying to do the most powerful inference possible, I think we should eventually
|
||||
# expose a way to more-manually control the way the TaggedUnionSchema is constructed through
|
||||
# the use of a new type which would be placed as an Annotation on the Union type. This would
|
||||
# provide the full flexibility/power of pydantic_core's TaggedUnionSchema where necessary for
|
||||
# more complex cases, without over-complicating the inference logic for the common cases.
|
||||
self._discriminator_alias: str | None = None
|
||||
|
||||
# `_should_be_nullable` indicates whether the converted union has `None` as an allowed value.
|
||||
# If `None` is an acceptable value of the (possibly-wrapped) union, we ignore it while
|
||||
# constructing the TaggedUnionSchema, but set the `_should_be_nullable` attribute to True.
|
||||
# Once we have constructed the TaggedUnionSchema, if `_should_be_nullable` is True, we ensure
|
||||
# that the final schema gets wrapped as a NullableSchema. This has the same semantics on the
|
||||
# python side, but resolves the issue that `None` cannot correspond to any discriminator values.
|
||||
self._should_be_nullable = False
|
||||
|
||||
# `_is_nullable` is used to track if the final produced schema will definitely be nullable;
|
||||
# we set it to True if the input schema is wrapped in a nullable schema that we know will be preserved
|
||||
# as an indication that, even if None is discovered as one of the union choices, we will not need to wrap
|
||||
# the final value in another nullable schema.
|
||||
#
|
||||
# This is more complicated than just checking for the final outermost schema having type 'nullable' thanks
|
||||
# to the possible presence of other wrapper schemas such as DefinitionsSchema, WithDefaultSchema, etc.
|
||||
self._is_nullable = False
|
||||
|
||||
# `_choices_to_handle` serves as a stack of choices to add to the tagged union. Initially, choices
|
||||
# from the union in the wrapped schema will be appended to this list, and the recursive choice-handling
|
||||
# algorithm may add more choices to this stack as (nested) unions are encountered.
|
||||
self._choices_to_handle: list[core_schema.CoreSchema] = []
|
||||
|
||||
# `_tagged_union_choices` is built during the call to `apply`, and will hold the choices to be included
|
||||
# in the output TaggedUnionSchema that will replace the union from the input schema
|
||||
self._tagged_union_choices: dict[Hashable, core_schema.CoreSchema] = {}
|
||||
|
||||
# `_used` is changed to True after applying the discriminator to prevent accidental re-use
|
||||
self._used = False
|
||||
|
||||
def apply(self, schema: core_schema.CoreSchema) -> core_schema.CoreSchema:
|
||||
"""Return a new CoreSchema based on `schema` that uses a tagged-union with the discriminator provided
|
||||
to this class.
|
||||
|
||||
Args:
|
||||
schema: The input schema.
|
||||
|
||||
Returns:
|
||||
The new core schema.
|
||||
|
||||
Raises:
|
||||
TypeError:
|
||||
- If `discriminator` is used with an invalid union variant.
|
||||
- If `discriminator` is used with a `Union` type that has only one variant.
|
||||
- If a `discriminator` value maps to multiple choices.
|
||||
ValueError:
|
||||
If the definition for ref is missing.
|
||||
PydanticUserError:
|
||||
- If a model in the union doesn't have a discriminator field.
|
||||
- If discriminator field has a non-string alias.
|
||||
- If discriminator fields have different aliases.
|
||||
- If the discriminator field is not of type `Literal`.
|
||||
"""
|
||||
self.definitions.update(collect_definitions(schema))
|
||||
assert not self._used
|
||||
schema = self._apply_to_root(schema)
|
||||
if self._should_be_nullable and not self._is_nullable:
|
||||
schema = core_schema.nullable_schema(schema)
|
||||
self._used = True
|
||||
new_defs = collect_definitions(schema)
|
||||
missing_defs = self.definitions.keys() - new_defs.keys()
|
||||
if missing_defs:
|
||||
schema = core_schema.definitions_schema(schema, [self.definitions[ref] for ref in missing_defs])
|
||||
return schema
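# Illustrative sketch (not part of this module): the discriminator application above is
# normally driven through the public API by tagging a union field. The model and field
# names below are hypothetical.
#
#     from typing import Literal, Union
#     from pydantic import BaseModel, Field
#
#     class Cat(BaseModel):
#         pet_type: Literal['cat']
#
#     class Dog(BaseModel):
#         pet_type: Literal['dog']
#
#     class Owner(BaseModel):
#         pet: Union[Cat, Dog] = Field(discriminator='pet_type')
#
#     Owner(pet={'pet_type': 'dog'})  # only the 'dog' choice of the tagged union is validated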
|
||||
|
||||
def _apply_to_root(self, schema: core_schema.CoreSchema) -> core_schema.CoreSchema:
|
||||
"""This method handles the outer-most stage of recursion over the input schema:
|
||||
unwrapping nullable or definitions schemas, and calling the `_handle_choice`
|
||||
method iteratively on the choices extracted (recursively) from the possibly-wrapped union.
|
||||
"""
|
||||
if schema['type'] == 'nullable':
|
||||
self._is_nullable = True
|
||||
wrapped = self._apply_to_root(schema['schema'])
|
||||
nullable_wrapper = schema.copy()
|
||||
nullable_wrapper['schema'] = wrapped
|
||||
return nullable_wrapper
|
||||
|
||||
if schema['type'] == 'definitions':
|
||||
wrapped = self._apply_to_root(schema['schema'])
|
||||
definitions_wrapper = schema.copy()
|
||||
definitions_wrapper['schema'] = wrapped
|
||||
return definitions_wrapper
|
||||
|
||||
if schema['type'] != 'union':
|
||||
# If the schema is not a union, it probably means it just had a single member and
|
||||
# was flattened by pydantic_core.
|
||||
# However, it still may make sense to apply the discriminator to this schema,
|
||||
# as a way to get discriminated-union-style error messages, so we allow this here.
|
||||
schema = core_schema.union_schema([schema])
|
||||
|
||||
# Reverse the choices list before extending the stack so that they get handled in the order they occur
|
||||
choices_schemas = [v[0] if isinstance(v, tuple) else v for v in schema['choices'][::-1]]
|
||||
self._choices_to_handle.extend(choices_schemas)
|
||||
while self._choices_to_handle:
|
||||
choice = self._choices_to_handle.pop()
|
||||
self._handle_choice(choice)
|
||||
|
||||
if self._discriminator_alias is not None and self._discriminator_alias != self.discriminator:
|
||||
# * We need to annotate `discriminator` as a union here to handle both branches of this conditional
|
||||
# * We need to annotate `discriminator` as list[list[str | int]] and not list[list[str]] due to the
|
||||
# invariance of list, and because list[list[str | int]] is the type of the discriminator argument
|
||||
# to tagged_union_schema below
|
||||
# * See the docstring of pydantic_core.core_schema.tagged_union_schema for more details about how to
|
||||
# interpret the value of the discriminator argument to tagged_union_schema. (The list[list[str]] here
|
||||
# is the appropriate way to provide a list of fallback attributes to check for a discriminator value.)
|
||||
discriminator: str | list[list[str | int]] = [[self.discriminator], [self._discriminator_alias]]
|
||||
else:
|
||||
discriminator = self.discriminator
|
||||
return core_schema.tagged_union_schema(
|
||||
choices=self._tagged_union_choices,
|
||||
discriminator=discriminator,
|
||||
custom_error_type=schema.get('custom_error_type'),
|
||||
custom_error_message=schema.get('custom_error_message'),
|
||||
custom_error_context=schema.get('custom_error_context'),
|
||||
strict=False,
|
||||
from_attributes=True,
|
||||
ref=schema.get('ref'),
|
||||
metadata=schema.get('metadata'),
|
||||
serialization=schema.get('serialization'),
|
||||
)
|
||||
|
||||
def _handle_choice(self, choice: core_schema.CoreSchema) -> None:
|
||||
"""This method handles the "middle" stage of recursion over the input schema.
|
||||
Specifically, it is responsible for handling each choice of the outermost union
|
||||
(and any "coalesced" choices obtained from inner unions).
|
||||
|
||||
Here, "handling" entails:
|
||||
* Coalescing nested unions and compatible tagged-unions
|
||||
* Tracking the presence of 'none' and 'nullable' schemas occurring as choices
|
||||
* Validating that each allowed discriminator value maps to a unique choice
|
||||
* Updating the _tagged_union_choices mapping that will ultimately be used to build the TaggedUnionSchema.
|
||||
"""
|
||||
if choice['type'] == 'none':
|
||||
self._should_be_nullable = True
|
||||
elif choice['type'] == 'definitions':
|
||||
self._handle_choice(choice['schema'])
|
||||
elif choice['type'] == 'nullable':
|
||||
self._should_be_nullable = True
|
||||
self._handle_choice(choice['schema']) # unwrap the nullable schema
|
||||
elif choice['type'] == 'union':
|
||||
# Reverse the choices list before extending the stack so that they get handled in the order they occur
|
||||
choices_schemas = [v[0] if isinstance(v, tuple) else v for v in choice['choices'][::-1]]
|
||||
self._choices_to_handle.extend(choices_schemas)
|
||||
elif choice['type'] == 'definition-ref':
|
||||
if choice['schema_ref'] not in self.definitions:
|
||||
raise MissingDefinitionForUnionRef(choice['schema_ref'])
|
||||
self._handle_choice(self.definitions[choice['schema_ref']])
|
||||
elif choice['type'] not in {
|
||||
'model',
|
||||
'typed-dict',
|
||||
'tagged-union',
|
||||
'lax-or-strict',
|
||||
'dataclass',
|
||||
'dataclass-args',
|
||||
} and not _core_utils.is_function_with_inner_schema(choice):
|
||||
# We should eventually handle 'definition-ref' as well
|
||||
raise TypeError(
|
||||
f'{choice["type"]!r} is not a valid discriminated union variant;'
|
||||
' should be a `BaseModel` or `dataclass`'
|
||||
)
|
||||
else:
|
||||
if choice['type'] == 'tagged-union' and self._is_discriminator_shared(choice):
|
||||
# In this case, this inner tagged-union is compatible with the outer tagged-union,
|
||||
# and its choices can be coalesced into the outer TaggedUnionSchema.
|
||||
subchoices = [x for x in choice['choices'].values() if not isinstance(x, (str, int))]
|
||||
# Reverse the choices list before extending the stack so that they get handled in the order they occur
|
||||
self._choices_to_handle.extend(subchoices[::-1])
|
||||
return
|
||||
|
||||
inferred_discriminator_values = self._infer_discriminator_values_for_choice(choice, source_name=None)
|
||||
self._set_unique_choice_for_values(choice, inferred_discriminator_values)
|
||||
|
||||
def _is_discriminator_shared(self, choice: core_schema.TaggedUnionSchema) -> bool:
|
||||
"""This method returns a boolean indicating whether the discriminator for the `choice`
|
||||
is the same as that being used for the outermost tagged union. This is used to
|
||||
determine whether this TaggedUnionSchema choice should be "coalesced" into the top level,
|
||||
or whether it should be treated as a separate (nested) choice.
|
||||
"""
|
||||
inner_discriminator = choice['discriminator']
|
||||
return inner_discriminator == self.discriminator or (
|
||||
isinstance(inner_discriminator, list)
|
||||
and (self.discriminator in inner_discriminator or [self.discriminator] in inner_discriminator)
|
||||
)
|
||||
|
||||
def _infer_discriminator_values_for_choice( # noqa C901
|
||||
self, choice: core_schema.CoreSchema, source_name: str | None
|
||||
) -> list[str | int]:
|
||||
"""This function recurses over `choice`, extracting all discriminator values that should map to this choice.
|
||||
|
||||
`source_name` is accepted for the purpose of producing useful error messages.
|
||||
"""
|
||||
if choice['type'] == 'definitions':
|
||||
return self._infer_discriminator_values_for_choice(choice['schema'], source_name=source_name)
|
||||
elif choice['type'] == 'function-plain':
|
||||
raise TypeError(
|
||||
f'{choice["type"]!r} is not a valid discriminated union variant;'
|
||||
' should be a `BaseModel` or `dataclass`'
|
||||
)
|
||||
elif _core_utils.is_function_with_inner_schema(choice):
|
||||
return self._infer_discriminator_values_for_choice(choice['schema'], source_name=source_name)
|
||||
elif choice['type'] == 'lax-or-strict':
|
||||
return sorted(
|
||||
set(
|
||||
self._infer_discriminator_values_for_choice(choice['lax_schema'], source_name=None)
|
||||
+ self._infer_discriminator_values_for_choice(choice['strict_schema'], source_name=None)
|
||||
)
|
||||
)
|
||||
|
||||
elif choice['type'] == 'tagged-union':
|
||||
values: list[str | int] = []
|
||||
# Ignore str/int "choices" since these are just references to other choices
|
||||
subchoices = [x for x in choice['choices'].values() if not isinstance(x, (str, int))]
|
||||
for subchoice in subchoices:
|
||||
subchoice_values = self._infer_discriminator_values_for_choice(subchoice, source_name=None)
|
||||
values.extend(subchoice_values)
|
||||
return values
|
||||
|
||||
elif choice['type'] == 'union':
|
||||
values = []
|
||||
for subchoice in choice['choices']:
|
||||
subchoice_schema = subchoice[0] if isinstance(subchoice, tuple) else subchoice
|
||||
subchoice_values = self._infer_discriminator_values_for_choice(subchoice_schema, source_name=None)
|
||||
values.extend(subchoice_values)
|
||||
return values
|
||||
|
||||
elif choice['type'] == 'nullable':
|
||||
self._should_be_nullable = True
|
||||
return self._infer_discriminator_values_for_choice(choice['schema'], source_name=None)
|
||||
|
||||
elif choice['type'] == 'model':
|
||||
return self._infer_discriminator_values_for_choice(choice['schema'], source_name=choice['cls'].__name__)
|
||||
|
||||
elif choice['type'] == 'dataclass':
|
||||
return self._infer_discriminator_values_for_choice(choice['schema'], source_name=choice['cls'].__name__)
|
||||
|
||||
elif choice['type'] == 'model-fields':
|
||||
return self._infer_discriminator_values_for_model_choice(choice, source_name=source_name)
|
||||
|
||||
elif choice['type'] == 'dataclass-args':
|
||||
return self._infer_discriminator_values_for_dataclass_choice(choice, source_name=source_name)
|
||||
|
||||
elif choice['type'] == 'typed-dict':
|
||||
return self._infer_discriminator_values_for_typed_dict_choice(choice, source_name=source_name)
|
||||
|
||||
elif choice['type'] == 'definition-ref':
|
||||
schema_ref = choice['schema_ref']
|
||||
if schema_ref not in self.definitions:
|
||||
raise MissingDefinitionForUnionRef(schema_ref)
|
||||
return self._infer_discriminator_values_for_choice(self.definitions[schema_ref], source_name=source_name)
|
||||
else:
|
||||
raise TypeError(
|
||||
f'{choice["type"]!r} is not a valid discriminated union variant;'
|
||||
' should be a `BaseModel` or `dataclass`'
|
||||
)
|
||||
|
||||
def _infer_discriminator_values_for_typed_dict_choice(
|
||||
self, choice: core_schema.TypedDictSchema, source_name: str | None = None
|
||||
) -> list[str | int]:
|
||||
"""This method just extracts the _infer_discriminator_values_for_choice logic specific to TypedDictSchema
|
||||
for the sake of readability.
|
||||
"""
|
||||
source = 'TypedDict' if source_name is None else f'TypedDict {source_name!r}'
|
||||
field = choice['fields'].get(self.discriminator)
|
||||
if field is None:
|
||||
raise PydanticUserError(
|
||||
f'{source} needs a discriminator field for key {self.discriminator!r}', code='discriminator-no-field'
|
||||
)
|
||||
return self._infer_discriminator_values_for_field(field, source)
|
||||
|
||||
def _infer_discriminator_values_for_model_choice(
|
||||
self, choice: core_schema.ModelFieldsSchema, source_name: str | None = None
|
||||
) -> list[str | int]:
|
||||
source = 'ModelFields' if source_name is None else f'Model {source_name!r}'
|
||||
field = choice['fields'].get(self.discriminator)
|
||||
if field is None:
|
||||
raise PydanticUserError(
|
||||
f'{source} needs a discriminator field for key {self.discriminator!r}', code='discriminator-no-field'
|
||||
)
|
||||
return self._infer_discriminator_values_for_field(field, source)
|
||||
|
||||
def _infer_discriminator_values_for_dataclass_choice(
|
||||
self, choice: core_schema.DataclassArgsSchema, source_name: str | None = None
|
||||
) -> list[str | int]:
|
||||
source = 'DataclassArgs' if source_name is None else f'Dataclass {source_name!r}'
|
||||
for field in choice['fields']:
|
||||
if field['name'] == self.discriminator:
|
||||
break
|
||||
else:
|
||||
raise PydanticUserError(
|
||||
f'{source} needs a discriminator field for key {self.discriminator!r}', code='discriminator-no-field'
|
||||
)
|
||||
return self._infer_discriminator_values_for_field(field, source)
|
||||
|
||||
def _infer_discriminator_values_for_field(self, field: CoreSchemaField, source: str) -> list[str | int]:
|
||||
if field['type'] == 'computed-field':
|
||||
# This should never occur as a discriminator, as it is only relevant to serialization
|
||||
return []
|
||||
alias = field.get('validation_alias', self.discriminator)
|
||||
if not isinstance(alias, str):
|
||||
raise PydanticUserError(
|
||||
f'Alias {alias!r} is not supported in a discriminated union', code='discriminator-alias-type'
|
||||
)
|
||||
if self._discriminator_alias is None:
|
||||
self._discriminator_alias = alias
|
||||
elif self._discriminator_alias != alias:
|
||||
raise PydanticUserError(
|
||||
f'Aliases for discriminator {self.discriminator!r} must be the same '
|
||||
f'(got {alias}, {self._discriminator_alias})',
|
||||
code='discriminator-alias',
|
||||
)
|
||||
return self._infer_discriminator_values_for_inner_schema(field['schema'], source)
|
||||
|
||||
def _infer_discriminator_values_for_inner_schema(
|
||||
self, schema: core_schema.CoreSchema, source: str
|
||||
) -> list[str | int]:
|
||||
"""When inferring discriminator values for a field, we typically extract the expected values from a literal
|
||||
schema. This function does that, but also handles nested unions and defaults.
|
||||
"""
|
||||
if schema['type'] == 'literal':
|
||||
return schema['expected']
|
||||
|
||||
elif schema['type'] == 'union':
|
||||
# Generally when multiple values are allowed they should be placed in a single `Literal`, but
|
||||
# we add this case to handle the situation where a field is annotated as a `Union` of `Literal`s.
|
||||
# For example, this lets us handle `Union[Literal['key'], Union[Literal['Key'], Literal['KEY']]]`
|
||||
values: list[Any] = []
|
||||
for choice in schema['choices']:
|
||||
choice_schema = choice[0] if isinstance(choice, tuple) else choice
|
||||
choice_values = self._infer_discriminator_values_for_inner_schema(choice_schema, source)
|
||||
values.extend(choice_values)
|
||||
return values
|
||||
|
||||
elif schema['type'] == 'default':
|
||||
# This will happen if the field has a default value; we ignore it while extracting the discriminator values
|
||||
return self._infer_discriminator_values_for_inner_schema(schema['schema'], source)
|
||||
|
||||
elif schema['type'] == 'function-after':
|
||||
# After validators don't affect the discriminator values
|
||||
return self._infer_discriminator_values_for_inner_schema(schema['schema'], source)
|
||||
|
||||
elif schema['type'] in {'function-before', 'function-wrap', 'function-plain'}:
|
||||
validator_type = repr(schema['type'].split('-')[1])
|
||||
raise PydanticUserError(
|
||||
f'Cannot use a mode={validator_type} validator in the'
|
||||
f' discriminator field {self.discriminator!r} of {source}',
|
||||
code='discriminator-validator',
|
||||
)
|
||||
|
||||
else:
|
||||
raise PydanticUserError(
|
||||
f'{source} needs field {self.discriminator!r} to be of type `Literal`',
|
||||
code='discriminator-needs-literal',
|
||||
)
|
||||
|
||||
def _set_unique_choice_for_values(self, choice: core_schema.CoreSchema, values: Sequence[str | int]) -> None:
|
||||
"""This method updates `self.tagged_union_choices` so that all provided (discriminator) `values` map to the
|
||||
provided `choice`, validating that none of these values already map to another (different) choice.
|
||||
"""
|
||||
for discriminator_value in values:
|
||||
if discriminator_value in self._tagged_union_choices:
|
||||
# It is okay if `value` is already in tagged_union_choices as long as it maps to the same value.
|
||||
# Because tagged_union_choices may map values to other values, we need to walk the choices dict
|
||||
# until we get to a "real" choice, and confirm that is equal to the one assigned.
|
||||
existing_choice = self._tagged_union_choices[discriminator_value]
|
||||
if existing_choice != choice:
|
||||
raise TypeError(
|
||||
f'Value {discriminator_value!r} for discriminator '
|
||||
f'{self.discriminator!r} mapped to multiple choices'
|
||||
)
|
||||
else:
|
||||
self._tagged_union_choices[discriminator_value] = choice
|
||||
@@ -0,0 +1,306 @@
|
||||
"""Private logic related to fields (the `Field()` function and `FieldInfo` class), and arguments to `Annotated`."""
|
||||
from __future__ import annotations as _annotations
|
||||
|
||||
import dataclasses
|
||||
import sys
|
||||
import warnings
|
||||
from copy import copy
|
||||
from functools import lru_cache
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
from pydantic_core import PydanticUndefined
|
||||
|
||||
from . import _typing_extra
|
||||
from ._config import ConfigWrapper
|
||||
from ._repr import Representation
|
||||
from ._typing_extra import get_cls_type_hints_lenient, get_type_hints, is_classvar, is_finalvar
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from annotated_types import BaseMetadata
|
||||
|
||||
from ..fields import FieldInfo
|
||||
from ..main import BaseModel
|
||||
from ._dataclasses import StandardDataclass
|
||||
from ._decorators import DecoratorInfos
|
||||
|
||||
|
||||
def get_type_hints_infer_globalns(
|
||||
obj: Any,
|
||||
localns: dict[str, Any] | None = None,
|
||||
include_extras: bool = False,
|
||||
) -> dict[str, Any]:
|
||||
"""Gets type hints for an object by inferring the global namespace.
|
||||
|
||||
It uses `typing.get_type_hints`; the only thing we do here is fetch the
|
||||
global namespace from `obj.__module__` if it is not `None`.
|
||||
|
||||
Args:
|
||||
obj: The object to get its type hints.
|
||||
localns: The local namespaces.
|
||||
include_extras: Whether to recursively include annotation metadata.
|
||||
|
||||
Returns:
|
||||
The object type hints.
|
||||
"""
|
||||
module_name = getattr(obj, '__module__', None)
|
||||
globalns: dict[str, Any] | None = None
|
||||
if module_name:
|
||||
try:
|
||||
globalns = sys.modules[module_name].__dict__
|
||||
except KeyError:
|
||||
# happens occasionally, see https://github.com/pydantic/pydantic/issues/2363
|
||||
pass
|
||||
return get_type_hints(obj, globalns=globalns, localns=localns, include_extras=include_extras)
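# Minimal usage sketch (assumption: called on a class whose annotations resolve against its
# module globals; the class name is hypothetical).
#
#     class Example:
#         x: 'int'  # string annotation, resolved via Example's module namespace
#
#     get_type_hints_infer_globalns(Example)  # -> {'x': <class 'int'>}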
|
||||
|
||||
|
||||
class PydanticMetadata(Representation):
|
||||
"""Base class for annotation markers like `Strict`."""
|
||||
|
||||
__slots__ = ()
|
||||
|
||||
|
||||
def pydantic_general_metadata(**metadata: Any) -> BaseMetadata:
|
||||
"""Create a new `_PydanticGeneralMetadata` class with the given metadata.
|
||||
|
||||
Args:
|
||||
**metadata: The metadata to add.
|
||||
|
||||
Returns:
|
||||
The new `_PydanticGeneralMetadata` instance.
|
||||
"""
|
||||
return _general_metadata_cls()(metadata) # type: ignore
|
||||
|
||||
|
||||
@lru_cache(maxsize=None)
|
||||
def _general_metadata_cls() -> type[BaseMetadata]:
|
||||
"""Do it this way to avoid importing `annotated_types` at import time."""
|
||||
from annotated_types import BaseMetadata
|
||||
|
||||
class _PydanticGeneralMetadata(PydanticMetadata, BaseMetadata):
|
||||
"""Pydantic general metadata like `max_digits`."""
|
||||
|
||||
def __init__(self, metadata: Any):
|
||||
self.__dict__ = metadata
|
||||
|
||||
return _PydanticGeneralMetadata # type: ignore
|
||||
|
||||
|
||||
def collect_model_fields( # noqa: C901
|
||||
cls: type[BaseModel],
|
||||
bases: tuple[type[Any], ...],
|
||||
config_wrapper: ConfigWrapper,
|
||||
types_namespace: dict[str, Any] | None,
|
||||
*,
|
||||
typevars_map: dict[Any, Any] | None = None,
|
||||
) -> tuple[dict[str, FieldInfo], set[str]]:
|
||||
"""Collect the fields of a nascent pydantic model.
|
||||
|
||||
Also collect the names of any ClassVars present in the type hints.
|
||||
|
||||
The returned value is a tuple of two items: the fields dict, and the set of ClassVar names.
|
||||
|
||||
Args:
|
||||
cls: BaseModel or dataclass.
|
||||
bases: Parents of the class, generally `cls.__bases__`.
|
||||
config_wrapper: The config wrapper instance.
|
||||
types_namespace: Optional extra namespace to look for types in.
|
||||
typevars_map: A dictionary mapping type variables to their concrete types.
|
||||
|
||||
Returns:
|
||||
A tuple containing the fields dict and the set of ClassVar names.
|
||||
|
||||
Raises:
|
||||
NameError:
|
||||
- If there is a conflict between a field name and protected namespaces.
|
||||
- If there is a field other than `root` in `RootModel`.
|
||||
- If a field shadows an attribute in the parent model.
|
||||
"""
|
||||
from ..fields import FieldInfo
|
||||
|
||||
type_hints = get_cls_type_hints_lenient(cls, types_namespace)
|
||||
|
||||
# https://docs.python.org/3/howto/annotations.html#accessing-the-annotations-dict-of-an-object-in-python-3-9-and-older
|
||||
# annotations is only used for finding fields in parent classes
|
||||
annotations = cls.__dict__.get('__annotations__', {})
|
||||
fields: dict[str, FieldInfo] = {}
|
||||
|
||||
class_vars: set[str] = set()
|
||||
for ann_name, ann_type in type_hints.items():
|
||||
if ann_name == 'model_config':
|
||||
# We never want to treat `model_config` as a field
|
||||
# Note: we may need to change this logic if/when we introduce a `BareModel` class with no
|
||||
# protected namespaces (where `model_config` might be allowed as a field name)
|
||||
continue
|
||||
for protected_namespace in config_wrapper.protected_namespaces:
|
||||
if ann_name.startswith(protected_namespace):
|
||||
for b in bases:
|
||||
if hasattr(b, ann_name):
|
||||
from ..main import BaseModel
|
||||
|
||||
if not (issubclass(b, BaseModel) and ann_name in b.model_fields):
|
||||
raise NameError(
|
||||
f'Field "{ann_name}" conflicts with member {getattr(b, ann_name)}'
|
||||
f' of protected namespace "{protected_namespace}".'
|
||||
)
|
||||
else:
|
||||
valid_namespaces = tuple(
|
||||
x for x in config_wrapper.protected_namespaces if not ann_name.startswith(x)
|
||||
)
|
||||
warnings.warn(
|
||||
f'Field "{ann_name}" has conflict with protected namespace "{protected_namespace}".'
|
||||
'\n\nYou may be able to resolve this warning by setting'
|
||||
f" `model_config['protected_namespaces'] = {valid_namespaces}`.",
|
||||
UserWarning,
|
||||
)
|
||||
if is_classvar(ann_type):
|
||||
class_vars.add(ann_name)
|
||||
continue
|
||||
if _is_finalvar_with_default_val(ann_type, getattr(cls, ann_name, PydanticUndefined)):
|
||||
class_vars.add(ann_name)
|
||||
continue
|
||||
if not is_valid_field_name(ann_name):
|
||||
continue
|
||||
if cls.__pydantic_root_model__ and ann_name != 'root':
|
||||
raise NameError(
|
||||
f"Unexpected field with name {ann_name!r}; only 'root' is allowed as a field of a `RootModel`"
|
||||
)
|
||||
|
||||
# when building a generic model with `MyModel[int]`, the generic_origin check makes sure we don't get
|
||||
# "... shadows an attribute" errors
|
||||
generic_origin = getattr(cls, '__pydantic_generic_metadata__', {}).get('origin')
|
||||
for base in bases:
|
||||
dataclass_fields = {
|
||||
field.name for field in (dataclasses.fields(base) if dataclasses.is_dataclass(base) else ())
|
||||
}
|
||||
if hasattr(base, ann_name):
|
||||
if base is generic_origin:
|
||||
# Don't error when "shadowing" of attributes in parametrized generics
|
||||
continue
|
||||
|
||||
if ann_name in dataclass_fields:
|
||||
# Don't error when inheriting stdlib dataclasses whose fields are "shadowed" by defaults being set
|
||||
# on the class instance.
|
||||
continue
|
||||
warnings.warn(
|
||||
f'Field name "{ann_name}" shadows an attribute in parent "{base.__qualname__}"; ',
|
||||
UserWarning,
|
||||
)
|
||||
|
||||
try:
|
||||
default = getattr(cls, ann_name, PydanticUndefined)
|
||||
if default is PydanticUndefined:
|
||||
raise AttributeError
|
||||
except AttributeError:
|
||||
if ann_name in annotations:
|
||||
field_info = FieldInfo.from_annotation(ann_type)
|
||||
else:
|
||||
# if field has no default value and is not in __annotations__ this means that it is
|
||||
# defined in a base class and we can take it from there
|
||||
model_fields_lookup: dict[str, FieldInfo] = {}
|
||||
for x in cls.__bases__[::-1]:
|
||||
model_fields_lookup.update(getattr(x, 'model_fields', {}))
|
||||
if ann_name in model_fields_lookup:
|
||||
# The field was present on one of the (possibly multiple) base classes
|
||||
# copy the field to make sure typevar substitutions don't cause issues with the base classes
|
||||
field_info = copy(model_fields_lookup[ann_name])
|
||||
else:
|
||||
# The field was not found on any base classes; this seems to be caused by fields not getting
|
||||
# generated thanks to models not being fully defined while initializing recursive models.
|
||||
# Nothing stops us from just creating a new FieldInfo for this type hint, so we do this.
|
||||
field_info = FieldInfo.from_annotation(ann_type)
|
||||
else:
|
||||
field_info = FieldInfo.from_annotated_attribute(ann_type, default)
|
||||
# attributes which are fields are removed from the class namespace:
|
||||
# 1. To match the behaviour of annotation-only fields
|
||||
# 2. To avoid false positives in the NameError check above
|
||||
try:
|
||||
delattr(cls, ann_name)
|
||||
except AttributeError:
|
||||
pass # indicates the attribute was on a parent class
|
||||
|
||||
# Use cls.__dict__['__pydantic_decorators__'] instead of cls.__pydantic_decorators__
|
||||
# to make sure the decorators have already been built for this exact class
|
||||
decorators: DecoratorInfos = cls.__dict__['__pydantic_decorators__']
|
||||
if ann_name in decorators.computed_fields:
|
||||
raise ValueError("you can't override a field with a computed field")
|
||||
fields[ann_name] = field_info
|
||||
|
||||
if typevars_map:
|
||||
for field in fields.values():
|
||||
field.apply_typevars_map(typevars_map, types_namespace)
|
||||
|
||||
return fields, class_vars
|
||||
|
||||
|
||||
def _is_finalvar_with_default_val(type_: type[Any], val: Any) -> bool:
|
||||
from ..fields import FieldInfo
|
||||
|
||||
if not is_finalvar(type_):
|
||||
return False
|
||||
elif val is PydanticUndefined:
|
||||
return False
|
||||
elif isinstance(val, FieldInfo) and (val.default is PydanticUndefined and val.default_factory is None):
|
||||
return False
|
||||
else:
|
||||
return True
|
||||
|
||||
|
||||
def collect_dataclass_fields(
|
||||
cls: type[StandardDataclass], types_namespace: dict[str, Any] | None, *, typevars_map: dict[Any, Any] | None = None
|
||||
) -> dict[str, FieldInfo]:
|
||||
"""Collect the fields of a dataclass.
|
||||
|
||||
Args:
|
||||
cls: dataclass.
|
||||
types_namespace: Optional extra namespace to look for types in.
|
||||
typevars_map: A dictionary mapping type variables to their concrete types.
|
||||
|
||||
Returns:
|
||||
The dataclass fields.
|
||||
"""
|
||||
from ..fields import FieldInfo
|
||||
|
||||
fields: dict[str, FieldInfo] = {}
|
||||
dataclass_fields: dict[str, dataclasses.Field] = cls.__dataclass_fields__
|
||||
cls_localns = dict(vars(cls)) # this matches get_cls_type_hints_lenient, but all tests pass with `= None` instead
|
||||
|
||||
for ann_name, dataclass_field in dataclass_fields.items():
|
||||
ann_type = _typing_extra.eval_type_lenient(dataclass_field.type, types_namespace, cls_localns)
|
||||
if is_classvar(ann_type):
|
||||
continue
|
||||
|
||||
if (
|
||||
not dataclass_field.init
|
||||
and dataclass_field.default == dataclasses.MISSING
|
||||
and dataclass_field.default_factory == dataclasses.MISSING
|
||||
):
|
||||
# TODO: We should probably do something with this so that validate_assignment behaves properly
|
||||
# Issue: https://github.com/pydantic/pydantic/issues/5470
|
||||
continue
|
||||
|
||||
if isinstance(dataclass_field.default, FieldInfo):
|
||||
if dataclass_field.default.init_var:
|
||||
# TODO: same note as above
|
||||
continue
|
||||
field_info = FieldInfo.from_annotated_attribute(ann_type, dataclass_field.default)
|
||||
else:
|
||||
field_info = FieldInfo.from_annotated_attribute(ann_type, dataclass_field)
|
||||
fields[ann_name] = field_info
|
||||
|
||||
if field_info.default is not PydanticUndefined and isinstance(getattr(cls, ann_name, field_info), FieldInfo):
|
||||
# We need this to fix the default when the "default" from __dataclass_fields__ is a pydantic.FieldInfo
|
||||
setattr(cls, ann_name, field_info.default)
|
||||
|
||||
if typevars_map:
|
||||
for field in fields.values():
|
||||
field.apply_typevars_map(typevars_map, types_namespace)
|
||||
|
||||
return fields
|
||||
|
||||
|
||||
def is_valid_field_name(name: str) -> bool:
|
||||
return not name.startswith('_')
|
||||
|
||||
|
||||
def is_valid_privateattr_name(name: str) -> bool:
|
||||
return name.startswith('_') and not name.startswith('__')
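# Quick sketch of how these two predicates partition attribute names (illustrative only):
#
#     is_valid_field_name('name')              # True  -> collected as a model field
#     is_valid_field_name('_token')            # False -> not a field
#     is_valid_privateattr_name('_token')      # True  -> eligible as a private attribute
#     is_valid_privateattr_name('__dunder__')  # False -> double leading underscore is excluded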
|
||||
@@ -0,0 +1,23 @@
|
||||
from __future__ import annotations as _annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from typing import Union
|
||||
|
||||
|
||||
@dataclass
|
||||
class PydanticRecursiveRef:
|
||||
type_ref: str
|
||||
|
||||
__name__ = 'PydanticRecursiveRef'
|
||||
__hash__ = object.__hash__
|
||||
|
||||
def __call__(self) -> None:
|
||||
"""Defining __call__ is necessary for the `typing` module to let you use an instance of
|
||||
this class as the result of resolving a standard ForwardRef.
|
||||
"""
|
||||
|
||||
def __or__(self, other):
|
||||
return Union[self, other] # type: ignore
|
||||
|
||||
def __ror__(self, other):
|
||||
return Union[other, self] # type: ignore
|
||||
File diff suppressed because it is too large
@@ -0,0 +1,521 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import sys
|
||||
import types
|
||||
import typing
|
||||
from collections import ChainMap
|
||||
from contextlib import contextmanager
|
||||
from contextvars import ContextVar
|
||||
from types import prepare_class
|
||||
from typing import TYPE_CHECKING, Any, Iterator, List, Mapping, MutableMapping, Tuple, TypeVar
|
||||
from weakref import WeakValueDictionary
|
||||
|
||||
import typing_extensions
|
||||
|
||||
from ._core_utils import get_type_ref
|
||||
from ._forward_ref import PydanticRecursiveRef
|
||||
from ._typing_extra import TypeVarType, typing_base
|
||||
from ._utils import all_identical, is_model_class
|
||||
|
||||
if sys.version_info >= (3, 10):
|
||||
from typing import _UnionGenericAlias # type: ignore[attr-defined]
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ..main import BaseModel
|
||||
|
||||
GenericTypesCacheKey = Tuple[Any, Any, Tuple[Any, ...]]
|
||||
|
||||
# Note: We want to remove LimitedDict, but to do this, we'd need to improve the handling of generics caching.
|
||||
# Right now, to handle recursive generics, some types must remain cached for brief periods without references.
|
||||
# By chaining the WeakValuesDict with a LimitedDict, we have a way to retain caching for all types with references,
|
||||
# while also retaining a limited number of types even without references. This is generally enough to build
|
||||
# specific recursive generic models without losing required items out of the cache.
|
||||
|
||||
KT = TypeVar('KT')
|
||||
VT = TypeVar('VT')
|
||||
_LIMITED_DICT_SIZE = 100
|
||||
if TYPE_CHECKING:
|
||||
|
||||
class LimitedDict(dict, MutableMapping[KT, VT]):
|
||||
def __init__(self, size_limit: int = _LIMITED_DICT_SIZE):
|
||||
...
|
||||
|
||||
else:
|
||||
|
||||
class LimitedDict(dict):
|
||||
"""Limit the size/length of a dict used for caching to avoid unlimited increase in memory usage.
|
||||
|
||||
Since the dict is ordered, and we always remove elements from the beginning, this is effectively a FIFO cache.
|
||||
"""
|
||||
|
||||
def __init__(self, size_limit: int = _LIMITED_DICT_SIZE):
|
||||
self.size_limit = size_limit
|
||||
super().__init__()
|
||||
|
||||
def __setitem__(self, __key: Any, __value: Any) -> None:
|
||||
super().__setitem__(__key, __value)
|
||||
if len(self) > self.size_limit:
|
||||
excess = len(self) - self.size_limit + self.size_limit // 10
|
||||
to_remove = list(self.keys())[:excess]
|
||||
for key in to_remove:
|
||||
del self[key]
|
||||
|
||||
def __class_getitem__(cls, *args: Any) -> Any:
|
||||
# to avoid errors with 3.7
|
||||
return cls
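# Rough sketch of the FIFO eviction behaviour (illustrative, not executed here):
#
#     cache = LimitedDict(size_limit=3)
#     for i in range(5):
#         cache[i] = i
#     # the oldest keys are dropped first once the limit is exceeded
#     assert 0 not in cache and 1 not in cache
#     assert list(cache) == [2, 3, 4]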
|
||||
|
||||
|
||||
# weak dictionaries allow the dynamically created parametrized versions of generic models to get collected
|
||||
# once they are no longer referenced by the caller.
|
||||
if sys.version_info >= (3, 9): # Typing for weak dictionaries available at 3.9
|
||||
GenericTypesCache = WeakValueDictionary[GenericTypesCacheKey, 'type[BaseModel]']
|
||||
else:
|
||||
GenericTypesCache = WeakValueDictionary
|
||||
|
||||
if TYPE_CHECKING:
|
||||
|
||||
class DeepChainMap(ChainMap[KT, VT]): # type: ignore
|
||||
...
|
||||
|
||||
else:
|
||||
|
||||
class DeepChainMap(ChainMap):
|
||||
"""Variant of ChainMap that allows direct updates to inner scopes.
|
||||
|
||||
Taken from https://docs.python.org/3/library/collections.html#collections.ChainMap,
|
||||
with some light modifications for this use case.
|
||||
"""
|
||||
|
||||
def clear(self) -> None:
|
||||
for mapping in self.maps:
|
||||
mapping.clear()
|
||||
|
||||
def __setitem__(self, key: KT, value: VT) -> None:
|
||||
for mapping in self.maps:
|
||||
mapping[key] = value
|
||||
|
||||
def __delitem__(self, key: KT) -> None:
|
||||
hit = False
|
||||
for mapping in self.maps:
|
||||
if key in mapping:
|
||||
del mapping[key]
|
||||
hit = True
|
||||
if not hit:
|
||||
raise KeyError(key)
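# Small sketch of how DeepChainMap differs from collections.ChainMap (illustrative only):
#
#     inner, outer = {'a': 1}, {'b': 2}
#     dcm = DeepChainMap(inner, outer)
#     dcm['c'] = 3              # writes go to *every* mapping, not just the first
#     assert inner['c'] == 3 and outer['c'] == 3
#     del dcm['a']              # deletes from every mapping that contains the key
#     assert 'a' not in inner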
|
||||
|
||||
|
||||
# Despite the fact that LimitedDict _seems_ no longer necessary, I'm very nervous to actually remove it
|
||||
# and discover later on that we need to re-add all this infrastructure...
|
||||
# _GENERIC_TYPES_CACHE = DeepChainMap(GenericTypesCache(), LimitedDict())
|
||||
|
||||
_GENERIC_TYPES_CACHE = GenericTypesCache()
|
||||
|
||||
|
||||
class PydanticGenericMetadata(typing_extensions.TypedDict):
|
||||
origin: type[BaseModel] | None # analogous to typing._GenericAlias.__origin__
|
||||
args: tuple[Any, ...] # analogous to typing._GenericAlias.__args__
|
||||
parameters: tuple[type[Any], ...] # analogous to typing.Generic.__parameters__
|
||||
|
||||
|
||||
def create_generic_submodel(
|
||||
model_name: str, origin: type[BaseModel], args: tuple[Any, ...], params: tuple[Any, ...]
|
||||
) -> type[BaseModel]:
|
||||
"""Dynamically create a submodel of a provided (generic) BaseModel.
|
||||
|
||||
This is used when producing concrete parametrizations of generic models. This function
|
||||
only *creates* the new subclass; the schema/validators/serialization must be updated to
|
||||
reflect a concrete parametrization elsewhere.
|
||||
|
||||
Args:
|
||||
model_name: The name of the newly created model.
|
||||
origin: The base class for the new model to inherit from.
|
||||
args: A tuple of generic metadata arguments.
|
||||
params: A tuple of generic metadata parameters.
|
||||
|
||||
Returns:
|
||||
The created submodel.
|
||||
"""
|
||||
namespace: dict[str, Any] = {'__module__': origin.__module__}
|
||||
bases = (origin,)
|
||||
meta, ns, kwds = prepare_class(model_name, bases)
|
||||
namespace.update(ns)
|
||||
created_model = meta(
|
||||
model_name,
|
||||
bases,
|
||||
namespace,
|
||||
__pydantic_generic_metadata__={
|
||||
'origin': origin,
|
||||
'args': args,
|
||||
'parameters': params,
|
||||
},
|
||||
__pydantic_reset_parent_namespace__=False,
|
||||
**kwds,
|
||||
)
|
||||
|
||||
model_module, called_globally = _get_caller_frame_info(depth=3)
|
||||
if called_globally: # create global reference and therefore allow pickling
|
||||
object_by_reference = None
|
||||
reference_name = model_name
|
||||
reference_module_globals = sys.modules[created_model.__module__].__dict__
|
||||
while object_by_reference is not created_model:
|
||||
object_by_reference = reference_module_globals.setdefault(reference_name, created_model)
|
||||
reference_name += '_'
|
||||
|
||||
return created_model
|
||||
|
||||
|
||||
def _get_caller_frame_info(depth: int = 2) -> tuple[str | None, bool]:
|
||||
"""Used inside a function to check whether it was called globally.
|
||||
|
||||
Args:
|
||||
depth: The depth to get the frame.
|
||||
|
||||
Returns:
|
||||
A tuple containing `module_name` and `called_globally`.
|
||||
|
||||
Raises:
|
||||
RuntimeError: If this function is not called from inside another function.
|
||||
"""
|
||||
try:
|
||||
previous_caller_frame = sys._getframe(depth)
|
||||
except ValueError as e:
|
||||
raise RuntimeError('This function must be used inside another function') from e
|
||||
except AttributeError: # sys module does not have _getframe function, so there's nothing we can do about it
|
||||
return None, False
|
||||
frame_globals = previous_caller_frame.f_globals
|
||||
return frame_globals.get('__name__'), previous_caller_frame.f_locals is frame_globals
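# Usage sketch (the wrapping functions below are hypothetical): with the default depth=2,
# the frame inspected is the caller of the function that invokes this helper.
#
#     def factory():
#         return _get_caller_frame_info()
#
#     factory()             # called at module level -> ('<module name>', True)
#
#     def indirect():
#         return factory()  # called inside a function -> ('<module name>', False)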
|
||||
|
||||
|
||||
DictValues: type[Any] = {}.values().__class__
|
||||
|
||||
|
||||
def iter_contained_typevars(v: Any) -> Iterator[TypeVarType]:
|
||||
"""Recursively iterate through all subtypes and type args of `v` and yield any typevars that are found.
|
||||
|
||||
This is intended as an alternative to directly accessing the `__parameters__` attribute of a GenericAlias,
|
||||
since __parameters__ of (nested) generic BaseModel subclasses won't show up in that list.
|
||||
"""
|
||||
if isinstance(v, TypeVar):
|
||||
yield v
|
||||
elif is_model_class(v):
|
||||
yield from v.__pydantic_generic_metadata__['parameters']
|
||||
elif isinstance(v, (DictValues, list)):
|
||||
for var in v:
|
||||
yield from iter_contained_typevars(var)
|
||||
else:
|
||||
args = get_args(v)
|
||||
for arg in args:
|
||||
yield from iter_contained_typevars(arg)
|
||||
|
||||
|
||||
def get_args(v: Any) -> Any:
|
||||
pydantic_generic_metadata: PydanticGenericMetadata | None = getattr(v, '__pydantic_generic_metadata__', None)
|
||||
if pydantic_generic_metadata:
|
||||
return pydantic_generic_metadata.get('args')
|
||||
return typing_extensions.get_args(v)
|
||||
|
||||
|
||||
def get_origin(v: Any) -> Any:
|
||||
pydantic_generic_metadata: PydanticGenericMetadata | None = getattr(v, '__pydantic_generic_metadata__', None)
|
||||
if pydantic_generic_metadata:
|
||||
return pydantic_generic_metadata.get('origin')
|
||||
return typing_extensions.get_origin(v)
|
||||
|
||||
|
||||
def get_standard_typevars_map(cls: type[Any]) -> dict[TypeVarType, Any] | None:
|
||||
"""Package a generic type's typevars and parametrization (if present) into a dictionary compatible with the
|
||||
`replace_types` function. Specifically, this works with standard typing generics and typing._GenericAlias.
|
||||
"""
|
||||
origin = get_origin(cls)
|
||||
if origin is None:
|
||||
return None
|
||||
if not hasattr(origin, '__parameters__'):
|
||||
return None
|
||||
|
||||
# In this case, we know that cls is a _GenericAlias, and origin is the generic type
|
||||
# So it is safe to access cls.__args__ and origin.__parameters__
|
||||
args: tuple[Any, ...] = cls.__args__ # type: ignore
|
||||
parameters: tuple[TypeVarType, ...] = origin.__parameters__
|
||||
return dict(zip(parameters, args))
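# Minimal sketch with a standard (non-pydantic) generic class (names are illustrative):
#
#     from typing import Generic, List, TypeVar
#
#     T = TypeVar('T')
#
#     class Wrapper(Generic[T]):
#         ...
#
#     get_standard_typevars_map(Wrapper[int])  # {~T: <class 'int'>}
#     get_standard_typevars_map(List[int])     # None -- builtin list has no __parameters__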
|
||||
|
||||
|
||||
def get_model_typevars_map(cls: type[BaseModel]) -> dict[TypeVarType, Any] | None:
|
||||
"""Package a generic BaseModel's typevars and concrete parametrization (if present) into a dictionary compatible
|
||||
with the `replace_types` function.
|
||||
|
||||
Since BaseModel.__class_getitem__ does not produce a typing._GenericAlias, and the BaseModel generic info is
|
||||
stored in the __pydantic_generic_metadata__ attribute, we need special handling here.
|
||||
"""
|
||||
# TODO: This could be unified with `get_standard_typevars_map` if we stored the generic metadata
|
||||
# in the __origin__, __args__, and __parameters__ attributes of the model.
|
||||
generic_metadata = cls.__pydantic_generic_metadata__
|
||||
origin = generic_metadata['origin']
|
||||
args = generic_metadata['args']
|
||||
return dict(zip(iter_contained_typevars(origin), args))
|
||||
|
||||
|
||||
def replace_types(type_: Any, type_map: Mapping[Any, Any] | None) -> Any:
|
||||
"""Return type with all occurrences of `type_map` keys recursively replaced with their values.
|
||||
|
||||
Args:
|
||||
type_: The class or generic alias.
|
||||
type_map: Mapping from `TypeVar` instance to concrete types.
|
||||
|
||||
Returns:
|
||||
A new type representing the basic structure of `type_` with all
|
||||
`typevar_map` keys recursively replaced.
|
||||
|
||||
Example:
|
||||
```py
|
||||
from typing import List, Tuple, Union
|
||||
|
||||
from pydantic._internal._generics import replace_types
|
||||
|
||||
replace_types(Tuple[str, Union[List[str], float]], {str: int})
|
||||
#> Tuple[int, Union[List[int], float]]
|
||||
```
|
||||
"""
|
||||
if not type_map:
|
||||
return type_
|
||||
|
||||
type_args = get_args(type_)
|
||||
origin_type = get_origin(type_)
|
||||
|
||||
if origin_type is typing_extensions.Annotated:
|
||||
annotated_type, *annotations = type_args
|
||||
annotated = replace_types(annotated_type, type_map)
|
||||
for annotation in annotations:
|
||||
annotated = typing_extensions.Annotated[annotated, annotation]
|
||||
return annotated
|
||||
|
||||
# Having type args is a good indicator that this is a typing module
|
||||
# class instantiation or a generic alias of some sort.
|
||||
if type_args:
|
||||
resolved_type_args = tuple(replace_types(arg, type_map) for arg in type_args)
|
||||
if all_identical(type_args, resolved_type_args):
|
||||
# If all arguments are the same, there is no need to modify the
|
||||
# type or create a new object at all
|
||||
return type_
|
||||
if (
|
||||
origin_type is not None
|
||||
and isinstance(type_, typing_base)
|
||||
and not isinstance(origin_type, typing_base)
|
||||
and getattr(type_, '_name', None) is not None
|
||||
):
|
||||
# In python < 3.9 generic aliases don't exist so any of these like `list`,
|
||||
# `type` or `collections.abc.Callable` need to be translated.
|
||||
# See: https://www.python.org/dev/peps/pep-0585
|
||||
origin_type = getattr(typing, type_._name)
|
||||
assert origin_type is not None
|
||||
# PEP-604 syntax (Ex.: list | str) is represented with a types.UnionType object that does not have __getitem__.
|
||||
# We also cannot use isinstance() since we have to compare types.
|
||||
if sys.version_info >= (3, 10) and origin_type is types.UnionType:
|
||||
return _UnionGenericAlias(origin_type, resolved_type_args)
|
||||
# NotRequired[T] and Required[T] don't support tuple type resolved_type_args, hence the condition below
|
||||
return origin_type[resolved_type_args[0] if len(resolved_type_args) == 1 else resolved_type_args]
|
||||
|
||||
# We handle pydantic generic models separately as they don't have the same
|
||||
# semantics as "typing" classes or generic aliases
|
||||
|
||||
if not origin_type and is_model_class(type_):
|
||||
parameters = type_.__pydantic_generic_metadata__['parameters']
|
||||
if not parameters:
|
||||
return type_
|
||||
resolved_type_args = tuple(replace_types(t, type_map) for t in parameters)
|
||||
if all_identical(parameters, resolved_type_args):
|
||||
return type_
|
||||
return type_[resolved_type_args]
|
||||
|
||||
# Handle special case for typehints that can have lists as arguments.
|
||||
# `typing.Callable[[int, str], int]` is an example for this.
|
||||
if isinstance(type_, (List, list)):
|
||||
resolved_list = list(replace_types(element, type_map) for element in type_)
|
||||
if all_identical(type_, resolved_list):
|
||||
return type_
|
||||
return resolved_list
|
||||
|
||||
# If all else fails, we try to resolve the type directly and otherwise just
|
||||
# return the input with no modifications.
|
||||
return type_map.get(type_, type_)
|
||||
|
||||
|
||||
def has_instance_in_type(type_: Any, isinstance_target: Any) -> bool:
|
||||
"""Checks if the type, or any of its arbitrary nested args, satisfy
|
||||
`isinstance(<type>, isinstance_target)`.
|
||||
"""
|
||||
if isinstance(type_, isinstance_target):
|
||||
return True
|
||||
|
||||
type_args = get_args(type_)
|
||||
origin_type = get_origin(type_)
|
||||
|
||||
if origin_type is typing_extensions.Annotated:
|
||||
annotated_type, *annotations = type_args
|
||||
return has_instance_in_type(annotated_type, isinstance_target)
|
||||
|
||||
# Having type args is a good indicator that this is a typing module
|
||||
# class instantiation or a generic alias of some sort.
|
||||
if any(has_instance_in_type(a, isinstance_target) for a in type_args):
|
||||
return True
|
||||
|
||||
# Handle special case for typehints that can have lists as arguments.
|
||||
# `typing.Callable[[int, str], int]` is an example for this.
|
||||
if isinstance(type_, (List, list)) and not isinstance(type_, typing_extensions.ParamSpec):
|
||||
if any(has_instance_in_type(element, isinstance_target) for element in type_):
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
|
||||
def check_parameters_count(cls: type[BaseModel], parameters: tuple[Any, ...]) -> None:
|
||||
"""Check the generic model parameters count is equal.
|
||||
|
||||
Args:
|
||||
cls: The generic model.
|
||||
parameters: A tuple of passed parameters to the generic model.
|
||||
|
||||
Raises:
|
||||
TypeError: If the passed parameters count is not equal to generic model parameters count.
|
||||
"""
|
||||
actual = len(parameters)
|
||||
expected = len(cls.__pydantic_generic_metadata__['parameters'])
|
||||
if actual != expected:
|
||||
description = 'many' if actual > expected else 'few'
|
||||
raise TypeError(f'Too {description} parameters for {cls}; actual {actual}, expected {expected}')
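# Sketch of how this surfaces through parametrization of a generic model (the model name
# is hypothetical; the last line raises):
#
#     from typing import Generic, TypeVar
#     from pydantic import BaseModel
#
#     T = TypeVar('T')
#
#     class Box(BaseModel, Generic[T]):
#         value: T
#
#     Box[int]       # ok: one parameter expected, one given
#     Box[int, str]  # TypeError: Too many parameters for ...; actual 2, expected 1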
|
||||
|
||||
|
||||
_generic_recursion_cache: ContextVar[set[str] | None] = ContextVar('_generic_recursion_cache', default=None)
|
||||
|
||||
|
||||
@contextmanager
|
||||
def generic_recursion_self_type(
|
||||
origin: type[BaseModel], args: tuple[Any, ...]
|
||||
) -> Iterator[PydanticRecursiveRef | None]:
|
||||
"""This contextmanager should be placed around the recursive calls used to build a generic type,
|
||||
and accept as arguments the generic origin type and the type arguments being passed to it.
|
||||
|
||||
If the same origin and arguments are observed twice, it implies that a self-reference placeholder
|
||||
can be used while building the core schema, and will produce a schema_ref that will be valid in the
|
||||
final parent schema.
|
||||
"""
|
||||
previously_seen_type_refs = _generic_recursion_cache.get()
|
||||
if previously_seen_type_refs is None:
|
||||
previously_seen_type_refs = set()
|
||||
token = _generic_recursion_cache.set(previously_seen_type_refs)
|
||||
else:
|
||||
token = None
|
||||
|
||||
try:
|
||||
type_ref = get_type_ref(origin, args_override=args)
|
||||
if type_ref in previously_seen_type_refs:
|
||||
self_type = PydanticRecursiveRef(type_ref=type_ref)
|
||||
yield self_type
|
||||
else:
|
||||
previously_seen_type_refs.add(type_ref)
|
||||
yield None
|
||||
finally:
|
||||
if token:
|
||||
_generic_recursion_cache.reset(token)
|
||||
|
||||
|
||||
def recursively_defined_type_refs() -> set[str]:
|
||||
visited = _generic_recursion_cache.get()
|
||||
if not visited:
|
||||
return set() # not in a generic recursion, so there are no types
|
||||
|
||||
return visited.copy() # don't allow modifications
|
||||
|
||||
|
||||
def get_cached_generic_type_early(parent: type[BaseModel], typevar_values: Any) -> type[BaseModel] | None:
|
||||
"""The use of a two-stage cache lookup approach was necessary to have the highest performance possible for
|
||||
repeated calls to `__class_getitem__` on generic types (which may happen in tighter loops during runtime),
|
||||
while still ensuring that certain alternative parametrizations ultimately resolve to the same type.
|
||||
|
||||
As a concrete example, this approach was necessary to make Model[List[T]][int] equal to Model[List[int]].
|
||||
The approach could be modified to not use two different cache keys at different points, but the
|
||||
_early_cache_key is optimized to be as quick to compute as possible (for repeated-access speed), and the
|
||||
_late_cache_key is optimized to be as "correct" as possible, so that two types that will ultimately be the
|
||||
same after resolving the type arguments will always produce cache hits.
|
||||
|
||||
If we wanted to move to only using a single cache key per type, we would either need to always use the
|
||||
slower/more computationally intensive logic associated with _late_cache_key, or would need to accept
|
||||
that Model[List[T]][int] is a different type than Model[List[int]]. Because we rely on subclass relationships
|
||||
during validation, I think it is worthwhile to ensure that types that are functionally equivalent are actually
|
||||
equal.
|
||||
"""
|
||||
return _GENERIC_TYPES_CACHE.get(_early_cache_key(parent, typevar_values))
|
||||
|
||||
|
||||
def get_cached_generic_type_late(
|
||||
parent: type[BaseModel], typevar_values: Any, origin: type[BaseModel], args: tuple[Any, ...]
|
||||
) -> type[BaseModel] | None:
|
||||
"""See the docstring of `get_cached_generic_type_early` for more information about the two-stage cache lookup."""
|
||||
cached = _GENERIC_TYPES_CACHE.get(_late_cache_key(origin, args, typevar_values))
|
||||
if cached is not None:
|
||||
set_cached_generic_type(parent, typevar_values, cached, origin, args)
|
||||
return cached
|
||||
|
||||
|
||||
def set_cached_generic_type(
|
||||
parent: type[BaseModel],
|
||||
typevar_values: tuple[Any, ...],
|
||||
type_: type[BaseModel],
|
||||
origin: type[BaseModel] | None = None,
|
||||
args: tuple[Any, ...] | None = None,
|
||||
) -> None:
|
||||
"""See the docstring of `get_cached_generic_type_early` for more information about why items are cached with
|
||||
two different keys.
|
||||
"""
|
||||
_GENERIC_TYPES_CACHE[_early_cache_key(parent, typevar_values)] = type_
|
||||
if len(typevar_values) == 1:
|
||||
_GENERIC_TYPES_CACHE[_early_cache_key(parent, typevar_values[0])] = type_
|
||||
if origin and args:
|
||||
_GENERIC_TYPES_CACHE[_late_cache_key(origin, args, typevar_values)] = type_
|
||||
|
||||
|
||||
def _union_orderings_key(typevar_values: Any) -> Any:
|
||||
"""This is intended to help differentiate between Union types with the same arguments in different order.
|
||||
|
||||
Thanks to caching internal to the `typing` module, it is not possible to distinguish between
|
||||
List[Union[int, float]] and List[Union[float, int]] (and similarly for other "parent" origins besides List)
|
||||
because `typing` considers Union[int, float] to be equal to Union[float, int].
|
||||
|
||||
However, you _can_ distinguish between (top-level) Union[int, float] vs. Union[float, int].
|
||||
Because we parse items as the first Union type that is successful, we get slightly more consistent behavior
|
||||
if we make an effort to distinguish the ordering of items in a union. It would be best if we could _always_
|
||||
get the exact-correct order of items in the union, but that would require a change to the `typing` module itself.
|
||||
(See https://github.com/python/cpython/issues/86483 for reference.)
|
||||
"""
|
||||
if isinstance(typevar_values, tuple):
|
||||
args_data = []
|
||||
for value in typevar_values:
|
||||
args_data.append(_union_orderings_key(value))
|
||||
return tuple(args_data)
|
||||
elif typing_extensions.get_origin(typevar_values) is typing.Union:
|
||||
return get_args(typevar_values)
|
||||
else:
|
||||
return ()
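# Behaviour sketch (illustrative): only *top-level* Union ordering is captured; everything
# else collapses to an empty tuple, and tuples of arguments are mapped element-wise.
#
#     from typing import List, Union
#
#     _union_orderings_key(Union[int, float])         # (int, float)
#     _union_orderings_key(Union[float, int])         # (float, int)
#     _union_orderings_key((int, Union[str, bytes]))  # ((), (str, bytes))
#     _union_orderings_key(List[int])                 # ()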
|
||||
|
||||
|
||||
def _early_cache_key(cls: type[BaseModel], typevar_values: Any) -> GenericTypesCacheKey:
|
||||
"""This is intended for minimal computational overhead during lookups of cached types.
|
||||
|
||||
Note that this is overly simplistic, and it's possible that two different cls/typevar_values
|
||||
inputs would ultimately result in the same type being created in BaseModel.__class_getitem__.
|
||||
To handle this, we have a fallback _late_cache_key that is checked later if the _early_cache_key
|
||||
lookup fails, and should result in a cache hit _precisely_ when the inputs to __class_getitem__
|
||||
would result in the same type.
|
||||
"""
|
||||
return cls, typevar_values, _union_orderings_key(typevar_values)
|
||||
|
||||
|
||||
def _late_cache_key(origin: type[BaseModel], args: tuple[Any, ...], typevar_values: Any) -> GenericTypesCacheKey:
|
||||
"""This is intended for use later in the process of creating a new type, when we have more information
|
||||
about the exact args that will be passed. If it turns out that a different set of inputs to
|
||||
__class_getitem__ resulted in the same inputs to the generic type creation process, we can still
|
||||
return the cached type, and update the cache with the _early_cache_key as well.
|
||||
"""
|
||||
# The _union_orderings_key is placed at the start here to ensure there cannot be a collision with an
|
||||
# _early_cache_key, as that function will always produce a BaseModel subclass as the first item in the key,
|
||||
# whereas this function will always produce a tuple as the first item in the key.
|
||||
return _union_orderings_key(typevar_values), origin, args
|
||||
@@ -0,0 +1,10 @@
|
||||
import sys
|
||||
from typing import Any, Dict
|
||||
|
||||
dataclass_kwargs: Dict[str, Any]
|
||||
|
||||
# `slots` is available on Python >= 3.10
|
||||
if sys.version_info >= (3, 10):
|
||||
slots_true = {'slots': True}
|
||||
else:
|
||||
slots_true = {}
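# Usage sketch: splat the dict into stdlib dataclass decorators so `slots=True` is applied
# only where supported (illustrative class name):
#
#     import dataclasses
#
#     @dataclasses.dataclass(**slots_true)
#     class Point:
#         x: int
#         y: int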
|
||||
@@ -0,0 +1,410 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from collections import defaultdict
|
||||
from copy import copy
|
||||
from functools import partial
|
||||
from typing import TYPE_CHECKING, Any, Callable, Iterable
|
||||
|
||||
from pydantic_core import CoreSchema, PydanticCustomError, to_jsonable_python
|
||||
from pydantic_core import core_schema as cs
|
||||
|
||||
from ._fields import PydanticMetadata
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ..annotated_handlers import GetJsonSchemaHandler
|
||||
|
||||
|
||||
STRICT = {'strict'}
|
||||
SEQUENCE_CONSTRAINTS = {'min_length', 'max_length'}
|
||||
INEQUALITY = {'le', 'ge', 'lt', 'gt'}
|
||||
NUMERIC_CONSTRAINTS = {'multiple_of', 'allow_inf_nan', *INEQUALITY}
|
||||
|
||||
STR_CONSTRAINTS = {*SEQUENCE_CONSTRAINTS, *STRICT, 'strip_whitespace', 'to_lower', 'to_upper', 'pattern'}
|
||||
BYTES_CONSTRAINTS = {*SEQUENCE_CONSTRAINTS, *STRICT}
|
||||
|
||||
LIST_CONSTRAINTS = {*SEQUENCE_CONSTRAINTS, *STRICT}
|
||||
TUPLE_CONSTRAINTS = {*SEQUENCE_CONSTRAINTS, *STRICT}
|
||||
SET_CONSTRAINTS = {*SEQUENCE_CONSTRAINTS, *STRICT}
|
||||
DICT_CONSTRAINTS = {*SEQUENCE_CONSTRAINTS, *STRICT}
|
||||
GENERATOR_CONSTRAINTS = {*SEQUENCE_CONSTRAINTS, *STRICT}
|
||||
|
||||
FLOAT_CONSTRAINTS = {*NUMERIC_CONSTRAINTS, *STRICT}
|
||||
INT_CONSTRAINTS = {*NUMERIC_CONSTRAINTS, *STRICT}
|
||||
BOOL_CONSTRAINTS = STRICT
|
||||
UUID_CONSTRAINTS = STRICT
|
||||
|
||||
DATE_TIME_CONSTRAINTS = {*NUMERIC_CONSTRAINTS, *STRICT}
|
||||
TIMEDELTA_CONSTRAINTS = {*NUMERIC_CONSTRAINTS, *STRICT}
|
||||
TIME_CONSTRAINTS = {*NUMERIC_CONSTRAINTS, *STRICT}
|
||||
LAX_OR_STRICT_CONSTRAINTS = STRICT
|
||||
|
||||
UNION_CONSTRAINTS = {'union_mode'}
|
||||
URL_CONSTRAINTS = {
|
||||
'max_length',
|
||||
'allowed_schemes',
|
||||
'host_required',
|
||||
'default_host',
|
||||
'default_port',
|
||||
'default_path',
|
||||
}
|
||||
|
||||
TEXT_SCHEMA_TYPES = ('str', 'bytes', 'url', 'multi-host-url')
|
||||
SEQUENCE_SCHEMA_TYPES = ('list', 'tuple', 'set', 'frozenset', 'generator', *TEXT_SCHEMA_TYPES)
|
||||
NUMERIC_SCHEMA_TYPES = ('float', 'int', 'date', 'time', 'timedelta', 'datetime')
|
||||
|
||||
CONSTRAINTS_TO_ALLOWED_SCHEMAS: dict[str, set[str]] = defaultdict(set)
|
||||
for constraint in STR_CONSTRAINTS:
|
||||
CONSTRAINTS_TO_ALLOWED_SCHEMAS[constraint].update(TEXT_SCHEMA_TYPES)
|
||||
for constraint in BYTES_CONSTRAINTS:
|
||||
CONSTRAINTS_TO_ALLOWED_SCHEMAS[constraint].update(('bytes',))
|
||||
for constraint in LIST_CONSTRAINTS:
|
||||
CONSTRAINTS_TO_ALLOWED_SCHEMAS[constraint].update(('list',))
|
||||
for constraint in TUPLE_CONSTRAINTS:
|
||||
CONSTRAINTS_TO_ALLOWED_SCHEMAS[constraint].update(('tuple',))
|
||||
for constraint in SET_CONSTRAINTS:
|
||||
CONSTRAINTS_TO_ALLOWED_SCHEMAS[constraint].update(('set', 'frozenset'))
|
||||
for constraint in DICT_CONSTRAINTS:
|
||||
CONSTRAINTS_TO_ALLOWED_SCHEMAS[constraint].update(('dict',))
|
||||
for constraint in GENERATOR_CONSTRAINTS:
|
||||
CONSTRAINTS_TO_ALLOWED_SCHEMAS[constraint].update(('generator',))
|
||||
for constraint in FLOAT_CONSTRAINTS:
|
||||
CONSTRAINTS_TO_ALLOWED_SCHEMAS[constraint].update(('float',))
|
||||
for constraint in INT_CONSTRAINTS:
|
||||
CONSTRAINTS_TO_ALLOWED_SCHEMAS[constraint].update(('int',))
|
||||
for constraint in DATE_TIME_CONSTRAINTS:
|
||||
CONSTRAINTS_TO_ALLOWED_SCHEMAS[constraint].update(('date', 'time', 'datetime'))
|
||||
for constraint in TIMEDELTA_CONSTRAINTS:
|
||||
CONSTRAINTS_TO_ALLOWED_SCHEMAS[constraint].update(('timedelta',))
|
||||
for constraint in TIME_CONSTRAINTS:
|
||||
CONSTRAINTS_TO_ALLOWED_SCHEMAS[constraint].update(('time',))
|
||||
for schema_type in (*TEXT_SCHEMA_TYPES, *SEQUENCE_SCHEMA_TYPES, *NUMERIC_SCHEMA_TYPES, 'typed-dict', 'model'):
|
||||
CONSTRAINTS_TO_ALLOWED_SCHEMAS['strict'].add(schema_type)
|
||||
for constraint in UNION_CONSTRAINTS:
|
||||
CONSTRAINTS_TO_ALLOWED_SCHEMAS[constraint].update(('union',))
|
||||
for constraint in URL_CONSTRAINTS:
|
||||
CONSTRAINTS_TO_ALLOWED_SCHEMAS[constraint].update(('url', 'multi-host-url'))
|
||||
for constraint in BOOL_CONSTRAINTS:
|
||||
CONSTRAINTS_TO_ALLOWED_SCHEMAS[constraint].update(('bool',))
|
||||
for constraint in UUID_CONSTRAINTS:
|
||||
CONSTRAINTS_TO_ALLOWED_SCHEMAS[constraint].update(('uuid',))
|
||||
for constraint in LAX_OR_STRICT_CONSTRAINTS:
|
||||
CONSTRAINTS_TO_ALLOWED_SCHEMAS[constraint].update(('lax-or-strict',))
|
||||
|
||||
|
||||
def add_js_update_schema(s: cs.CoreSchema, f: Callable[[], dict[str, Any]]) -> None:
|
||||
def update_js_schema(s: cs.CoreSchema, handler: GetJsonSchemaHandler) -> dict[str, Any]:
|
||||
js_schema = handler(s)
|
||||
js_schema.update(f())
|
||||
return js_schema
|
||||
|
||||
if 'metadata' in s:
|
||||
metadata = s['metadata']
|
||||
if 'pydantic_js_functions' in s:
|
||||
metadata['pydantic_js_functions'].append(update_js_schema)
|
||||
else:
|
||||
metadata['pydantic_js_functions'] = [update_js_schema]
|
||||
else:
|
||||
s['metadata'] = {'pydantic_js_functions': [update_js_schema]}
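# Illustrative sketch (not part of the original module): `add_js_update_schema` appends a
# callback that patches the JSON schema generated for a core schema, e.g.
#
#     s = cs.int_schema()
#     add_js_update_schema(s, lambda: {'examples': [1, 2, 3]})
#     # s['metadata']['pydantic_js_functions'] now holds a single update callback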
|
||||
|
||||
|
||||
def as_jsonable_value(v: Any) -> Any:
|
||||
if type(v) not in (int, str, float, bytes, bool, type(None)):
|
||||
return to_jsonable_python(v)
|
||||
return v
|
||||
|
||||
|
||||
def expand_grouped_metadata(annotations: Iterable[Any]) -> Iterable[Any]:
|
||||
"""Expand the annotations.
|
||||
|
||||
Args:
|
||||
annotations: An iterable of annotations.
|
||||
|
||||
Returns:
|
||||
An iterable of expanded annotations.
|
||||
|
||||
Example:
|
||||
```py
|
||||
from annotated_types import Ge, Len
|
||||
|
||||
from pydantic._internal._known_annotated_metadata import expand_grouped_metadata
|
||||
|
||||
print(list(expand_grouped_metadata([Ge(4), Len(5)])))
|
||||
#> [Ge(ge=4), MinLen(min_length=5)]
|
||||
```
|
||||
"""
|
||||
import annotated_types as at
|
||||
|
||||
from pydantic.fields import FieldInfo # circular import
|
||||
|
||||
for annotation in annotations:
|
||||
if isinstance(annotation, at.GroupedMetadata):
|
||||
yield from annotation
|
||||
elif isinstance(annotation, FieldInfo):
|
||||
yield from annotation.metadata
|
||||
# this is a bit problematic in that it results in duplicate metadata
|
||||
# all of our "consumers" can handle it, but it is not ideal
|
||||
# we probably should split up FieldInfo into:
|
||||
# - annotated types metadata
|
||||
# - individual metadata known only to Pydantic
|
||||
annotation = copy(annotation)
|
||||
annotation.metadata = []
|
||||
yield annotation
|
||||
else:
|
||||
yield annotation
|
||||
|
||||
|
||||
def apply_known_metadata(annotation: Any, schema: CoreSchema) -> CoreSchema | None: # noqa: C901
|
||||
"""Apply `annotation` to `schema` if it is an annotation we know about (Gt, Le, etc.).
|
||||
Otherwise return `None`.
|
||||
|
||||
This does not handle all known annotations. If / when it does, it can always
|
||||
return a CoreSchema and return the unmodified schema if the annotation should be ignored.
|
||||
|
||||
Assumes that GroupedMetadata has already been expanded via `expand_grouped_metadata`.
|
||||
|
||||
Args:
|
||||
annotation: The annotation.
|
||||
schema: The schema.
|
||||
|
||||
Returns:
|
||||
An updated schema with annotation if it is an annotation we know about, `None` otherwise.
|
||||
|
||||
Raises:
|
||||
PydanticCustomError: If `Predicate` fails.
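Example (added illustration; the printed output is indicative):
```py
import annotated_types as at
from pydantic_core import core_schema as cs

from pydantic._internal._known_annotated_metadata import apply_known_metadata

print(apply_known_metadata(at.Gt(0), cs.int_schema()))
#> {'type': 'int', 'gt': 0}
```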
|
||||
"""
|
||||
import annotated_types as at
|
||||
|
||||
from . import _validators
|
||||
|
||||
schema = schema.copy()
|
||||
schema_update, other_metadata = collect_known_metadata([annotation])
|
||||
schema_type = schema['type']
|
||||
for constraint, value in schema_update.items():
|
||||
if constraint not in CONSTRAINTS_TO_ALLOWED_SCHEMAS:
|
||||
raise ValueError(f'Unknown constraint {constraint}')
|
||||
allowed_schemas = CONSTRAINTS_TO_ALLOWED_SCHEMAS[constraint]
|
||||
|
||||
if schema_type in allowed_schemas:
|
||||
if constraint == 'union_mode' and schema_type == 'union':
|
||||
schema['mode'] = value # type: ignore # schema is UnionSchema
|
||||
else:
|
||||
schema[constraint] = value
|
||||
continue
|
||||
|
||||
if constraint == 'allow_inf_nan' and value is False:
|
||||
return cs.no_info_after_validator_function(
|
||||
_validators.forbid_inf_nan_check,
|
||||
schema,
|
||||
)
|
||||
elif constraint == 'pattern':
|
||||
# insert a str schema to make sure the regex engine matches
|
||||
return cs.chain_schema(
|
||||
[
|
||||
schema,
|
||||
cs.str_schema(pattern=value),
|
||||
]
|
||||
)
|
||||
elif constraint == 'gt':
|
||||
s = cs.no_info_after_validator_function(
|
||||
partial(_validators.greater_than_validator, gt=value),
|
||||
schema,
|
||||
)
|
||||
add_js_update_schema(s, lambda: {'gt': as_jsonable_value(value)})
|
||||
return s
|
||||
elif constraint == 'ge':
|
||||
return cs.no_info_after_validator_function(
|
||||
partial(_validators.greater_than_or_equal_validator, ge=value),
|
||||
schema,
|
||||
)
|
||||
elif constraint == 'lt':
|
||||
return cs.no_info_after_validator_function(
|
||||
partial(_validators.less_than_validator, lt=value),
|
||||
schema,
|
||||
)
|
||||
elif constraint == 'le':
|
||||
return cs.no_info_after_validator_function(
|
||||
partial(_validators.less_than_or_equal_validator, le=value),
|
||||
schema,
|
||||
)
|
||||
elif constraint == 'multiple_of':
|
||||
return cs.no_info_after_validator_function(
|
||||
partial(_validators.multiple_of_validator, multiple_of=value),
|
||||
schema,
|
||||
)
|
||||
elif constraint == 'min_length':
|
||||
s = cs.no_info_after_validator_function(
|
||||
partial(_validators.min_length_validator, min_length=value),
|
||||
schema,
|
||||
)
|
||||
add_js_update_schema(s, lambda: {'minLength': (as_jsonable_value(value))})
|
||||
return s
|
||||
elif constraint == 'max_length':
|
||||
s = cs.no_info_after_validator_function(
|
||||
partial(_validators.max_length_validator, max_length=value),
|
||||
schema,
|
||||
)
|
||||
add_js_update_schema(s, lambda: {'maxLength': (as_jsonable_value(value))})
|
||||
return s
|
||||
elif constraint == 'strip_whitespace':
|
||||
return cs.chain_schema(
|
||||
[
|
||||
schema,
|
||||
cs.str_schema(strip_whitespace=True),
|
||||
]
|
||||
)
|
||||
elif constraint == 'to_lower':
|
||||
return cs.chain_schema(
|
||||
[
|
||||
schema,
|
||||
cs.str_schema(to_lower=True),
|
||||
]
|
||||
)
|
||||
elif constraint == 'to_upper':
|
||||
return cs.chain_schema(
|
||||
[
|
||||
schema,
|
||||
cs.str_schema(to_upper=True),
|
||||
]
|
||||
)
|
||||
elif constraint == 'min_length':
|
||||
return cs.no_info_after_validator_function(
|
||||
partial(_validators.min_length_validator, min_length=annotation.min_length),
|
||||
schema,
|
||||
)
|
||||
elif constraint == 'max_length':
|
||||
return cs.no_info_after_validator_function(
|
||||
partial(_validators.max_length_validator, max_length=annotation.max_length),
|
||||
schema,
|
||||
)
|
||||
else:
|
||||
raise RuntimeError(f'Unable to apply constraint {constraint} to schema {schema_type}')
|
||||
|
||||
for annotation in other_metadata:
|
||||
if isinstance(annotation, at.Gt):
|
||||
return cs.no_info_after_validator_function(
|
||||
partial(_validators.greater_than_validator, gt=annotation.gt),
|
||||
schema,
|
||||
)
|
||||
elif isinstance(annotation, at.Ge):
|
||||
return cs.no_info_after_validator_function(
|
||||
partial(_validators.greater_than_or_equal_validator, ge=annotation.ge),
|
||||
schema,
|
||||
)
|
||||
elif isinstance(annotation, at.Lt):
|
||||
return cs.no_info_after_validator_function(
|
||||
partial(_validators.less_than_validator, lt=annotation.lt),
|
||||
schema,
|
||||
)
|
||||
elif isinstance(annotation, at.Le):
|
||||
return cs.no_info_after_validator_function(
|
||||
partial(_validators.less_than_or_equal_validator, le=annotation.le),
|
||||
schema,
|
||||
)
|
||||
elif isinstance(annotation, at.MultipleOf):
|
||||
return cs.no_info_after_validator_function(
|
||||
partial(_validators.multiple_of_validator, multiple_of=annotation.multiple_of),
|
||||
schema,
|
||||
)
|
||||
elif isinstance(annotation, at.MinLen):
|
||||
return cs.no_info_after_validator_function(
|
||||
partial(_validators.min_length_validator, min_length=annotation.min_length),
|
||||
schema,
|
||||
)
|
||||
elif isinstance(annotation, at.MaxLen):
|
||||
return cs.no_info_after_validator_function(
|
||||
partial(_validators.max_length_validator, max_length=annotation.max_length),
|
||||
schema,
|
||||
)
|
||||
elif isinstance(annotation, at.Predicate):
|
||||
predicate_name = f'{annotation.func.__qualname__} ' if hasattr(annotation.func, '__qualname__') else ''
|
||||
|
||||
def val_func(v: Any) -> Any:
|
||||
# annotation.func may also raise an exception, let it pass through
|
||||
if not annotation.func(v):
|
||||
raise PydanticCustomError(
|
||||
'predicate_failed',
|
||||
f'Predicate {predicate_name}failed', # type: ignore
|
||||
)
|
||||
return v
|
||||
|
||||
return cs.no_info_after_validator_function(val_func, schema)
|
||||
# ignore any other unknown metadata
|
||||
return None
|
||||
|
||||
return schema
|
||||
|
||||
|
||||
def collect_known_metadata(annotations: Iterable[Any]) -> tuple[dict[str, Any], list[Any]]:
|
||||
"""Split `annotations` into known metadata and unknown annotations.
|
||||
|
||||
Args:
|
||||
annotations: An iterable of annotations.
|
||||
|
||||
Returns:
|
||||
A tuple containing a dict of known metadata and a list of unknown annotations.
|
||||
|
||||
Example:
|
||||
```py
|
||||
from annotated_types import Gt, Len
|
||||
|
||||
from pydantic._internal._known_annotated_metadata import collect_known_metadata
|
||||
|
||||
print(collect_known_metadata([Gt(1), Len(42), ...]))
|
||||
#> ({'gt': 1, 'min_length': 42}, [Ellipsis])
|
||||
```
|
||||
"""
|
||||
import annotated_types as at
|
||||
|
||||
annotations = expand_grouped_metadata(annotations)
|
||||
|
||||
res: dict[str, Any] = {}
|
||||
remaining: list[Any] = []
|
||||
for annotation in annotations:
|
||||
# isinstance(annotation, PydanticMetadata) also covers ._fields:_PydanticGeneralMetadata
|
||||
if isinstance(annotation, PydanticMetadata):
|
||||
res.update(annotation.__dict__)
|
||||
# we don't use dataclasses.asdict because that recursively calls asdict on the field values
|
||||
elif isinstance(annotation, at.MinLen):
|
||||
res.update({'min_length': annotation.min_length})
|
||||
elif isinstance(annotation, at.MaxLen):
|
||||
res.update({'max_length': annotation.max_length})
|
||||
elif isinstance(annotation, at.Gt):
|
||||
res.update({'gt': annotation.gt})
|
||||
elif isinstance(annotation, at.Ge):
|
||||
res.update({'ge': annotation.ge})
|
||||
elif isinstance(annotation, at.Lt):
|
||||
res.update({'lt': annotation.lt})
|
||||
elif isinstance(annotation, at.Le):
|
||||
res.update({'le': annotation.le})
|
||||
elif isinstance(annotation, at.MultipleOf):
|
||||
res.update({'multiple_of': annotation.multiple_of})
|
||||
elif isinstance(annotation, type) and issubclass(annotation, PydanticMetadata):
|
||||
# also support PydanticMetadata classes being used without initialisation,
|
||||
# e.g. `Annotated[int, Strict]` as well as `Annotated[int, Strict()]`
|
||||
res.update({k: v for k, v in vars(annotation).items() if not k.startswith('_')})
|
||||
else:
|
||||
remaining.append(annotation)
|
||||
# Nones can sneak in but pydantic-core will reject them
|
||||
# it'd be nice to clean things up so we don't put in None (we probably don't _need_ to, it was just easier)
|
||||
# but this is simple enough to kick that can down the road
|
||||
res = {k: v for k, v in res.items() if v is not None}
|
||||
return res, remaining
|
||||
|
||||
|
||||
def check_metadata(metadata: dict[str, Any], allowed: Iterable[str], source_type: Any) -> None:
|
||||
"""A small utility function to validate that the given metadata can be applied to the target.
|
||||
More than saving lines of code, this gives us a consistent error message for all of our internal implementations.
|
||||
|
||||
Args:
|
||||
metadata: A dict of metadata.
|
||||
allowed: An iterable of allowed metadata.
|
||||
source_type: The source type.
|
||||
|
||||
Raises:
|
||||
TypeError: If there is metadata that can't be applied to the source type.
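Example (added illustration using the constraint sets defined in this module):
```py
from pydantic._internal._known_annotated_metadata import INT_CONSTRAINTS, check_metadata

check_metadata({'gt': 0}, INT_CONSTRAINTS, int)  # fine: 'gt' is allowed for ints
# check_metadata({'pattern': '^a+$'}, INT_CONSTRAINTS, int) would raise TypeError
```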
|
||||
"""
|
||||
unknown = metadata.keys() - set(allowed)
|
||||
if unknown:
|
||||
raise TypeError(
|
||||
f'The following constraints cannot be applied to {source_type!r}: {", ".join([f"{k!r}" for k in unknown])}'
|
||||
)
|
||||
@@ -0,0 +1,140 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING, Callable, Generic, TypeVar
|
||||
|
||||
from pydantic_core import SchemaSerializer, SchemaValidator
|
||||
from typing_extensions import Literal
|
||||
|
||||
from ..errors import PydanticErrorCodes, PydanticUserError
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ..dataclasses import PydanticDataclass
|
||||
from ..main import BaseModel
|
||||
|
||||
|
||||
ValSer = TypeVar('ValSer', SchemaValidator, SchemaSerializer)
|
||||
|
||||
|
||||
class MockValSer(Generic[ValSer]):
|
||||
"""Mocker for `pydantic_core.SchemaValidator` or `pydantic_core.SchemaSerializer` which optionally attempts to
|
||||
rebuild the thing it's mocking when one of its methods is accessed and raises an error if that fails.
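Example (added illustration; `Bar` is deliberately left undefined):
```py
from pydantic import BaseModel

class Foo(BaseModel):
    x: 'Bar'

# `Foo.__pydantic_validator__` is a `MockValSer`; instantiating `Foo` before `Bar`
# is defined and `Foo.model_rebuild()` succeeds raises `PydanticUserError`
# (code 'class-not-fully-defined').
```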
|
||||
"""
|
||||
|
||||
__slots__ = '_error_message', '_code', '_val_or_ser', '_attempt_rebuild'
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
error_message: str,
|
||||
*,
|
||||
code: PydanticErrorCodes,
|
||||
val_or_ser: Literal['validator', 'serializer'],
|
||||
attempt_rebuild: Callable[[], ValSer | None] | None = None,
|
||||
) -> None:
|
||||
self._error_message = error_message
|
||||
self._val_or_ser = SchemaValidator if val_or_ser == 'validator' else SchemaSerializer
|
||||
self._code: PydanticErrorCodes = code
|
||||
self._attempt_rebuild = attempt_rebuild
|
||||
|
||||
def __getattr__(self, item: str) -> None:
|
||||
__tracebackhide__ = True
|
||||
if self._attempt_rebuild:
|
||||
val_ser = self._attempt_rebuild()
|
||||
if val_ser is not None:
|
||||
return getattr(val_ser, item)
|
||||
|
||||
# raise an AttributeError if `item` doesn't exist
|
||||
getattr(self._val_or_ser, item)
|
||||
raise PydanticUserError(self._error_message, code=self._code)
|
||||
|
||||
def rebuild(self) -> ValSer | None:
|
||||
if self._attempt_rebuild:
|
||||
val_ser = self._attempt_rebuild()
|
||||
if val_ser is not None:
|
||||
return val_ser
|
||||
else:
|
||||
raise PydanticUserError(self._error_message, code=self._code)
|
||||
return None
|
||||
|
||||
|
||||
def set_model_mocks(cls: type[BaseModel], cls_name: str, undefined_name: str = 'all referenced types') -> None:
|
||||
"""Set `__pydantic_validator__` and `__pydantic_serializer__` to `MockValSer`s on a model.
|
||||
|
||||
Args:
|
||||
cls: The model class to set the mocks on
|
||||
cls_name: Name of the model class, used in error messages
|
||||
undefined_name: Name of the undefined thing, used in error messages
|
||||
"""
|
||||
undefined_type_error_message = (
|
||||
f'`{cls_name}` is not fully defined; you should define {undefined_name},'
|
||||
f' then call `{cls_name}.model_rebuild()`.'
|
||||
)
|
||||
|
||||
def attempt_rebuild_validator() -> SchemaValidator | None:
|
||||
if cls.model_rebuild(raise_errors=False, _parent_namespace_depth=5) is not False:
|
||||
return cls.__pydantic_validator__
|
||||
else:
|
||||
return None
|
||||
|
||||
cls.__pydantic_validator__ = MockValSer( # type: ignore[assignment]
|
||||
undefined_type_error_message,
|
||||
code='class-not-fully-defined',
|
||||
val_or_ser='validator',
|
||||
attempt_rebuild=attempt_rebuild_validator,
|
||||
)
|
||||
|
||||
def attempt_rebuild_serializer() -> SchemaSerializer | None:
|
||||
if cls.model_rebuild(raise_errors=False, _parent_namespace_depth=5) is not False:
|
||||
return cls.__pydantic_serializer__
|
||||
else:
|
||||
return None
|
||||
|
||||
cls.__pydantic_serializer__ = MockValSer( # type: ignore[assignment]
|
||||
undefined_type_error_message,
|
||||
code='class-not-fully-defined',
|
||||
val_or_ser='serializer',
|
||||
attempt_rebuild=attempt_rebuild_serializer,
|
||||
)
|
||||
|
||||
|
||||
def set_dataclass_mocks(
|
||||
cls: type[PydanticDataclass], cls_name: str, undefined_name: str = 'all referenced types'
|
||||
) -> None:
|
||||
"""Set `__pydantic_validator__` and `__pydantic_serializer__` to `MockValSer`s on a dataclass.
|
||||
|
||||
Args:
|
||||
cls: The dataclass to set the mocks on
|
||||
cls_name: Name of the model class, used in error messages
|
||||
undefined_name: Name of the undefined thing, used in error messages
|
||||
"""
|
||||
from ..dataclasses import rebuild_dataclass
|
||||
|
||||
undefined_type_error_message = (
|
||||
f'`{cls_name}` is not fully defined; you should define {undefined_name},'
|
||||
f' then call `pydantic.dataclasses.rebuild_dataclass({cls_name})`.'
|
||||
)
|
||||
|
||||
def attempt_rebuild_validator() -> SchemaValidator | None:
|
||||
if rebuild_dataclass(cls, raise_errors=False, _parent_namespace_depth=5) is not False:
|
||||
return cls.__pydantic_validator__
|
||||
else:
|
||||
return None
|
||||
|
||||
cls.__pydantic_validator__ = MockValSer( # type: ignore[assignment]
|
||||
undefined_type_error_message,
|
||||
code='class-not-fully-defined',
|
||||
val_or_ser='validator',
|
||||
attempt_rebuild=attempt_rebuild_validator,
|
||||
)
|
||||
|
||||
def attempt_rebuild_serializer() -> SchemaSerializer | None:
|
||||
if rebuild_dataclass(cls, raise_errors=False, _parent_namespace_depth=5) is not False:
|
||||
return cls.__pydantic_serializer__
|
||||
else:
|
||||
return None
|
||||
|
||||
cls.__pydantic_serializer__ = MockValSer( # type: ignore[assignment]
|
||||
undefined_type_error_message,
|
||||
code='class-not-fully-defined',
|
||||
val_or_ser='serializer',
|
||||
attempt_rebuild=attempt_rebuild_serializer,
|
||||
)
|
||||
@@ -0,0 +1,626 @@
|
||||
"""Private logic for creating models."""
|
||||
from __future__ import annotations as _annotations
|
||||
|
||||
import typing
|
||||
import warnings
|
||||
import weakref
|
||||
from abc import ABCMeta
|
||||
from functools import partial
|
||||
from types import FunctionType
|
||||
from typing import Any, Callable, Generic, Mapping
|
||||
|
||||
import typing_extensions
|
||||
from pydantic_core import PydanticUndefined, SchemaSerializer
|
||||
from typing_extensions import dataclass_transform, deprecated
|
||||
|
||||
from ..errors import PydanticUndefinedAnnotation, PydanticUserError
|
||||
from ..plugin._schema_validator import create_schema_validator
|
||||
from ..warnings import GenericBeforeBaseModelWarning, PydanticDeprecatedSince20
|
||||
from ._config import ConfigWrapper
|
||||
from ._decorators import DecoratorInfos, PydanticDescriptorProxy, get_attribute_from_bases
|
||||
from ._fields import collect_model_fields, is_valid_field_name, is_valid_privateattr_name
|
||||
from ._generate_schema import GenerateSchema, generate_pydantic_signature
|
||||
from ._generics import PydanticGenericMetadata, get_model_typevars_map
|
||||
from ._mock_val_ser import MockValSer, set_model_mocks
|
||||
from ._schema_generation_shared import CallbackGetCoreSchemaHandler
|
||||
from ._typing_extra import get_cls_types_namespace, is_annotated, is_classvar, parent_frame_namespace
|
||||
from ._utils import ClassAttribute
|
||||
from ._validate_call import ValidateCallWrapper
|
||||
|
||||
if typing.TYPE_CHECKING:
|
||||
from inspect import Signature
|
||||
|
||||
from ..fields import Field as PydanticModelField
|
||||
from ..fields import FieldInfo, ModelPrivateAttr
|
||||
from ..main import BaseModel
|
||||
else:
|
||||
# See PyCharm issues https://youtrack.jetbrains.com/issue/PY-21915
|
||||
# and https://youtrack.jetbrains.com/issue/PY-51428
|
||||
DeprecationWarning = PydanticDeprecatedSince20
|
||||
PydanticModelField = object()
|
||||
|
||||
object_setattr = object.__setattr__
|
||||
|
||||
|
||||
class _ModelNamespaceDict(dict):
|
||||
"""A dictionary subclass that intercepts attribute setting on model classes and
|
||||
warns about overriding of decorators.
|
||||
"""
|
||||
|
||||
def __setitem__(self, k: str, v: object) -> None:
|
||||
existing: Any = self.get(k, None)
|
||||
if existing and v is not existing and isinstance(existing, PydanticDescriptorProxy):
|
||||
warnings.warn(f'`{k}` overrides an existing Pydantic `{existing.decorator_info.decorator_repr}` decorator')
|
||||
|
||||
return super().__setitem__(k, v)
|
||||
|
||||
|
||||
@dataclass_transform(kw_only_default=True, field_specifiers=(PydanticModelField,))
|
||||
class ModelMetaclass(ABCMeta):
|
||||
def __new__(
|
||||
mcs,
|
||||
cls_name: str,
|
||||
bases: tuple[type[Any], ...],
|
||||
namespace: dict[str, Any],
|
||||
__pydantic_generic_metadata__: PydanticGenericMetadata | None = None,
|
||||
__pydantic_reset_parent_namespace__: bool = True,
|
||||
_create_model_module: str | None = None,
|
||||
**kwargs: Any,
|
||||
) -> type:
|
||||
"""Metaclass for creating Pydantic models.
|
||||
|
||||
Args:
|
||||
cls_name: The name of the class to be created.
|
||||
bases: The base classes of the class to be created.
|
||||
namespace: The attribute dictionary of the class to be created.
|
||||
__pydantic_generic_metadata__: Metadata for generic models.
|
||||
__pydantic_reset_parent_namespace__: Reset parent namespace.
|
||||
_create_model_module: The module of the class to be created, if created by `create_model`.
|
||||
**kwargs: Catch-all for any other keyword arguments.
|
||||
|
||||
Returns:
|
||||
The new class created by the metaclass.
|
||||
"""
|
||||
# Note `ModelMetaclass` refers to `BaseModel`, but is also used to *create* `BaseModel`, so we rely on the fact
|
||||
# that `BaseModel` itself won't have any bases, but any subclass of it will, to determine whether the `__new__`
|
||||
# call we're in the middle of is for the `BaseModel` class.
|
||||
if bases:
|
||||
base_field_names, class_vars, base_private_attributes = mcs._collect_bases_data(bases)
|
||||
|
||||
config_wrapper = ConfigWrapper.for_model(bases, namespace, kwargs)
|
||||
namespace['model_config'] = config_wrapper.config_dict
|
||||
private_attributes = inspect_namespace(
|
||||
namespace, config_wrapper.ignored_types, class_vars, base_field_names
|
||||
)
|
||||
if private_attributes:
|
||||
original_model_post_init = get_model_post_init(namespace, bases)
|
||||
if original_model_post_init is not None:
|
||||
# if there are private_attributes and a model_post_init function, we handle both
|
||||
|
||||
def wrapped_model_post_init(self: BaseModel, __context: Any) -> None:
|
||||
"""We need to both initialize private attributes and call the user-defined model_post_init
|
||||
method.
|
||||
"""
|
||||
init_private_attributes(self, __context)
|
||||
original_model_post_init(self, __context)
|
||||
|
||||
namespace['model_post_init'] = wrapped_model_post_init
|
||||
else:
|
||||
namespace['model_post_init'] = init_private_attributes
|
||||
|
||||
namespace['__class_vars__'] = class_vars
|
||||
namespace['__private_attributes__'] = {**base_private_attributes, **private_attributes}
|
||||
|
||||
if config_wrapper.frozen:
|
||||
set_default_hash_func(namespace, bases)
|
||||
|
||||
cls: type[BaseModel] = super().__new__(mcs, cls_name, bases, namespace, **kwargs) # type: ignore
|
||||
|
||||
from ..main import BaseModel
|
||||
|
||||
mro = cls.__mro__
|
||||
if Generic in mro and mro.index(Generic) < mro.index(BaseModel):
|
||||
warnings.warn(
|
||||
GenericBeforeBaseModelWarning(
|
||||
'Classes should inherit from `BaseModel` before generic classes (e.g. `typing.Generic[T]`) '
|
||||
'for pydantic generics to work properly.'
|
||||
),
|
||||
stacklevel=2,
|
||||
)
|
||||
|
||||
cls.__pydantic_custom_init__ = not getattr(cls.__init__, '__pydantic_base_init__', False)
|
||||
cls.__pydantic_post_init__ = None if cls.model_post_init is BaseModel.model_post_init else 'model_post_init'
|
||||
|
||||
cls.__pydantic_decorators__ = DecoratorInfos.build(cls)
|
||||
|
||||
# Use the getattr below to grab the __parameters__ from the `typing.Generic` parent class
|
||||
if __pydantic_generic_metadata__:
|
||||
cls.__pydantic_generic_metadata__ = __pydantic_generic_metadata__
|
||||
else:
|
||||
parent_parameters = getattr(cls, '__pydantic_generic_metadata__', {}).get('parameters', ())
|
||||
parameters = getattr(cls, '__parameters__', None) or parent_parameters
|
||||
if parameters and parent_parameters and not all(x in parameters for x in parent_parameters):
|
||||
combined_parameters = parent_parameters + tuple(x for x in parameters if x not in parent_parameters)
|
||||
parameters_str = ', '.join([str(x) for x in combined_parameters])
|
||||
generic_type_label = f'typing.Generic[{parameters_str}]'
|
||||
error_message = (
|
||||
f'All parameters must be present on typing.Generic;'
|
||||
f' you should inherit from {generic_type_label}.'
|
||||
)
|
||||
if Generic not in bases: # pragma: no cover
|
||||
# We raise an error here not because it is desirable, but because some cases are mishandled.
|
||||
# It would be nice to remove this error and still have things behave as expected, it's just
|
||||
# challenging because we are using a custom `__class_getitem__` to parametrize generic models,
|
||||
# and not returning a typing._GenericAlias from it.
|
||||
bases_str = ', '.join([x.__name__ for x in bases] + [generic_type_label])
|
||||
error_message += (
|
||||
f' Note: `typing.Generic` must go last: `class {cls.__name__}({bases_str}): ...`)'
|
||||
)
|
||||
raise TypeError(error_message)
|
||||
|
||||
cls.__pydantic_generic_metadata__ = {
|
||||
'origin': None,
|
||||
'args': (),
|
||||
'parameters': parameters,
|
||||
}
|
||||
|
||||
cls.__pydantic_complete__ = False # Ensure this specific class gets completed
|
||||
|
||||
# preserve `__set_name__` protocol defined in https://peps.python.org/pep-0487
|
||||
# for attributes not in `new_namespace` (e.g. private attributes)
|
||||
for name, obj in private_attributes.items():
|
||||
obj.__set_name__(cls, name)
|
||||
|
||||
if __pydantic_reset_parent_namespace__:
|
||||
cls.__pydantic_parent_namespace__ = build_lenient_weakvaluedict(parent_frame_namespace())
|
||||
parent_namespace = getattr(cls, '__pydantic_parent_namespace__', None)
|
||||
if isinstance(parent_namespace, dict):
|
||||
parent_namespace = unpack_lenient_weakvaluedict(parent_namespace)
|
||||
|
||||
types_namespace = get_cls_types_namespace(cls, parent_namespace)
|
||||
set_model_fields(cls, bases, config_wrapper, types_namespace)
|
||||
complete_model_class(
|
||||
cls,
|
||||
cls_name,
|
||||
config_wrapper,
|
||||
raise_errors=False,
|
||||
types_namespace=types_namespace,
|
||||
create_model_module=_create_model_module,
|
||||
)
|
||||
# using super(cls, cls) on the next line ensures we only call the parent class's __pydantic_init_subclass__
|
||||
# I believe the `type: ignore` is only necessary because mypy doesn't realize that this code branch is
|
||||
# only hit for _proper_ subclasses of BaseModel
|
||||
super(cls, cls).__pydantic_init_subclass__(**kwargs) # type: ignore[misc]
|
||||
return cls
|
||||
else:
|
||||
# this is the BaseModel class itself being created, no logic required
|
||||
return super().__new__(mcs, cls_name, bases, namespace, **kwargs)
|
||||
|
||||
if not typing.TYPE_CHECKING: # pragma: no branch
|
||||
# We put `__getattr__` in a non-TYPE_CHECKING block because otherwise, mypy allows arbitrary attribute access
|
||||
|
||||
def __getattr__(self, item: str) -> Any:
|
||||
"""This is necessary to keep attribute access working for class attribute access."""
|
||||
private_attributes = self.__dict__.get('__private_attributes__')
|
||||
if private_attributes and item in private_attributes:
|
||||
return private_attributes[item]
|
||||
if item == '__pydantic_core_schema__':
|
||||
# This means the class didn't get a schema generated for it, likely because there was an undefined reference
|
||||
maybe_mock_validator = getattr(self, '__pydantic_validator__', None)
|
||||
if isinstance(maybe_mock_validator, MockValSer):
|
||||
rebuilt_validator = maybe_mock_validator.rebuild()
|
||||
if rebuilt_validator is not None:
|
||||
# In this case, a validator was built, and so `__pydantic_core_schema__` should now be set
|
||||
return getattr(self, '__pydantic_core_schema__')
|
||||
raise AttributeError(item)
|
||||
|
||||
@classmethod
|
||||
def __prepare__(cls, *args: Any, **kwargs: Any) -> Mapping[str, object]:
|
||||
return _ModelNamespaceDict()
|
||||
|
||||
def __instancecheck__(self, instance: Any) -> bool:
|
||||
"""Avoid calling ABC _abc_subclasscheck unless we're pretty sure.
|
||||
|
||||
See #3829 and python/cpython#92810
|
||||
"""
|
||||
return hasattr(instance, '__pydantic_validator__') and super().__instancecheck__(instance)
|
||||
|
||||
@staticmethod
|
||||
def _collect_bases_data(bases: tuple[type[Any], ...]) -> tuple[set[str], set[str], dict[str, ModelPrivateAttr]]:
|
||||
from ..main import BaseModel
|
||||
|
||||
field_names: set[str] = set()
|
||||
class_vars: set[str] = set()
|
||||
private_attributes: dict[str, ModelPrivateAttr] = {}
|
||||
for base in bases:
|
||||
if issubclass(base, BaseModel) and base is not BaseModel:
|
||||
# model_fields might not be defined yet in the case of generics, so we use getattr here:
|
||||
field_names.update(getattr(base, 'model_fields', {}).keys())
|
||||
class_vars.update(base.__class_vars__)
|
||||
private_attributes.update(base.__private_attributes__)
|
||||
return field_names, class_vars, private_attributes
|
||||
|
||||
@property
|
||||
@deprecated(
|
||||
'The `__fields__` attribute is deprecated, use `model_fields` instead.', category=PydanticDeprecatedSince20
|
||||
)
|
||||
def __fields__(self) -> dict[str, FieldInfo]:
|
||||
warnings.warn('The `__fields__` attribute is deprecated, use `model_fields` instead.', DeprecationWarning)
|
||||
return self.model_fields # type: ignore
|
||||
|
||||
|
||||
def init_private_attributes(self: BaseModel, __context: Any) -> None:
|
||||
"""This function is meant to behave like a BaseModel method to initialise private attributes.
|
||||
|
||||
It takes context as an argument since that's what pydantic-core passes when calling it.
|
||||
|
||||
Args:
|
||||
self: The BaseModel instance.
|
||||
__context: The context.
|
||||
"""
|
||||
if getattr(self, '__pydantic_private__', None) is None:
|
||||
pydantic_private = {}
|
||||
for name, private_attr in self.__private_attributes__.items():
|
||||
default = private_attr.get_default()
|
||||
if default is not PydanticUndefined:
|
||||
pydantic_private[name] = default
|
||||
object_setattr(self, '__pydantic_private__', pydantic_private)
|
||||
|
||||
|
||||
def get_model_post_init(namespace: dict[str, Any], bases: tuple[type[Any], ...]) -> Callable[..., Any] | None:
|
||||
"""Get the `model_post_init` method from the namespace or the class bases, or `None` if not defined."""
|
||||
if 'model_post_init' in namespace:
|
||||
return namespace['model_post_init']
|
||||
|
||||
from ..main import BaseModel
|
||||
|
||||
model_post_init = get_attribute_from_bases(bases, 'model_post_init')
|
||||
if model_post_init is not BaseModel.model_post_init:
|
||||
return model_post_init
|
||||
|
||||
|
||||
def inspect_namespace( # noqa C901
|
||||
namespace: dict[str, Any],
|
||||
ignored_types: tuple[type[Any], ...],
|
||||
base_class_vars: set[str],
|
||||
base_class_fields: set[str],
|
||||
) -> dict[str, ModelPrivateAttr]:
|
||||
"""Iterate over the namespace and:
|
||||
* gather private attributes
|
||||
* check for items which look like fields but are not (e.g. have no annotation) and warn.
|
||||
|
||||
Args:
|
||||
namespace: The attribute dictionary of the class to be created.
|
||||
ignored_types: A tuple of types to ignore.
|
||||
base_class_vars: A set of base class class variables.
|
||||
base_class_fields: A set of base class fields.
|
||||
|
||||
Returns:
|
||||
A dict containing private attribute info.
|
||||
|
||||
Raises:
|
||||
TypeError: If there is a `__root__` field in model.
|
||||
NameError: If private attribute name is invalid.
|
||||
PydanticUserError:
|
||||
- If a field does not have a type annotation.
|
||||
- If a field on base class was overridden by a non-annotated attribute.
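Example (added illustration of the model-level behaviour these checks produce):
```py
from pydantic import BaseModel

class M(BaseModel):
    _token: str = 'secret'  # leading underscore: collected as a private attribute

# A bare `count = 0` (no annotation) in the class body would instead raise
# PydanticUserError with code 'model-field-missing-annotation'.
```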
|
||||
"""
|
||||
from ..fields import FieldInfo, ModelPrivateAttr, PrivateAttr
|
||||
|
||||
all_ignored_types = ignored_types + default_ignored_types()
|
||||
|
||||
private_attributes: dict[str, ModelPrivateAttr] = {}
|
||||
raw_annotations = namespace.get('__annotations__', {})
|
||||
|
||||
if '__root__' in raw_annotations or '__root__' in namespace:
|
||||
raise TypeError("To define root models, use `pydantic.RootModel` rather than a field called '__root__'")
|
||||
|
||||
ignored_names: set[str] = set()
|
||||
for var_name, value in list(namespace.items()):
|
||||
if var_name == 'model_config':
|
||||
continue
|
||||
elif (
|
||||
isinstance(value, type)
|
||||
and value.__module__ == namespace['__module__']
|
||||
and value.__qualname__.startswith(namespace['__qualname__'])
|
||||
):
|
||||
# `value` is a nested type defined in this namespace; don't error
|
||||
continue
|
||||
elif isinstance(value, all_ignored_types) or value.__class__.__module__ == 'functools':
|
||||
ignored_names.add(var_name)
|
||||
continue
|
||||
elif isinstance(value, ModelPrivateAttr):
|
||||
if var_name.startswith('__'):
|
||||
raise NameError(
|
||||
'Private attributes must not use dunder names;'
|
||||
f' use a single underscore prefix instead of {var_name!r}.'
|
||||
)
|
||||
elif is_valid_field_name(var_name):
|
||||
raise NameError(
|
||||
'Private attributes must not use valid field names;'
|
||||
f' use sunder names, e.g. {"_" + var_name!r} instead of {var_name!r}.'
|
||||
)
|
||||
private_attributes[var_name] = value
|
||||
del namespace[var_name]
|
||||
elif isinstance(value, FieldInfo) and not is_valid_field_name(var_name):
|
||||
suggested_name = var_name.lstrip('_') or 'my_field' # don't suggest '' for all-underscore name
|
||||
raise NameError(
|
||||
f'Fields must not use names with leading underscores;'
|
||||
f' e.g., use {suggested_name!r} instead of {var_name!r}.'
|
||||
)
|
||||
|
||||
elif var_name.startswith('__'):
|
||||
continue
|
||||
elif is_valid_privateattr_name(var_name):
|
||||
if var_name not in raw_annotations or not is_classvar(raw_annotations[var_name]):
|
||||
private_attributes[var_name] = PrivateAttr(default=value)
|
||||
del namespace[var_name]
|
||||
elif var_name in base_class_vars:
|
||||
continue
|
||||
elif var_name not in raw_annotations:
|
||||
if var_name in base_class_fields:
|
||||
raise PydanticUserError(
|
||||
f'Field {var_name!r} defined on a base class was overridden by a non-annotated attribute. '
|
||||
f'All field definitions, including overrides, require a type annotation.',
|
||||
code='model-field-overridden',
|
||||
)
|
||||
elif isinstance(value, FieldInfo):
|
||||
raise PydanticUserError(
|
||||
f'Field {var_name!r} requires a type annotation', code='model-field-missing-annotation'
|
||||
)
|
||||
else:
|
||||
raise PydanticUserError(
|
||||
f'A non-annotated attribute was detected: `{var_name} = {value!r}`. All model fields require a '
|
||||
f'type annotation; if `{var_name}` is not meant to be a field, you may be able to resolve this '
|
||||
f"error by annotating it as a `ClassVar` or updating `model_config['ignored_types']`.",
|
||||
code='model-field-missing-annotation',
|
||||
)
|
||||
|
||||
for ann_name, ann_type in raw_annotations.items():
|
||||
if (
|
||||
is_valid_privateattr_name(ann_name)
|
||||
and ann_name not in private_attributes
|
||||
and ann_name not in ignored_names
|
||||
and not is_classvar(ann_type)
|
||||
and ann_type not in all_ignored_types
|
||||
and getattr(ann_type, '__module__', None) != 'functools'
|
||||
):
|
||||
if is_annotated(ann_type):
|
||||
_, *metadata = typing_extensions.get_args(ann_type)
|
||||
private_attr = next((v for v in metadata if isinstance(v, ModelPrivateAttr)), None)
|
||||
if private_attr is not None:
|
||||
private_attributes[ann_name] = private_attr
|
||||
continue
|
||||
private_attributes[ann_name] = PrivateAttr()
|
||||
|
||||
return private_attributes
|
||||
|
||||
|
||||
def set_default_hash_func(namespace: dict[str, Any], bases: tuple[type[Any], ...]) -> None:
|
||||
if '__hash__' in namespace:
|
||||
return
|
||||
|
||||
base_hash_func = get_attribute_from_bases(bases, '__hash__')
|
||||
if base_hash_func in {None, object.__hash__}:
|
||||
# If `__hash__` is None _or_ `object.__hash__`, we generate a hash function.
|
||||
# It will be `None` if not overridden from BaseModel, but may be `object.__hash__` if there is another
|
||||
# parent class earlier in the bases which doesn't override `__hash__` (e.g. `typing.Generic`).
|
||||
def hash_func(self: Any) -> int:
|
||||
return hash(self.__class__) + hash(tuple(self.__dict__.values()))
|
||||
|
||||
namespace['__hash__'] = hash_func
|
||||
|
||||
|
||||
def set_model_fields(
|
||||
cls: type[BaseModel], bases: tuple[type[Any], ...], config_wrapper: ConfigWrapper, types_namespace: dict[str, Any]
|
||||
) -> None:
|
||||
"""Collect and set `cls.model_fields` and `cls.__class_vars__`.
|
||||
|
||||
Args:
|
||||
cls: BaseModel or dataclass.
|
||||
bases: Parents of the class, generally `cls.__bases__`.
|
||||
config_wrapper: The config wrapper instance.
|
||||
types_namespace: Optional extra namespace to look for types in.
|
||||
"""
|
||||
typevars_map = get_model_typevars_map(cls)
|
||||
fields, class_vars = collect_model_fields(cls, bases, config_wrapper, types_namespace, typevars_map=typevars_map)
|
||||
|
||||
cls.model_fields = fields
|
||||
cls.__class_vars__.update(class_vars)
|
||||
|
||||
for k in class_vars:
|
||||
# Class vars should not be private attributes
|
||||
# We remove them _here_ and not earlier because we rely on inspecting the class to determine its classvars,
|
||||
# but private attributes are determined by inspecting the namespace _prior_ to class creation.
|
||||
# In the case that a classvar with a leading-'_' is defined via a ForwardRef (e.g., when using
|
||||
# `__future__.annotations`), we want to remove the private attribute which was detected _before_ we knew it
|
||||
# evaluated to a classvar
|
||||
|
||||
value = cls.__private_attributes__.pop(k, None)
|
||||
if value is not None and value.default is not PydanticUndefined:
|
||||
setattr(cls, k, value.default)
|
||||
|
||||
|
||||
def complete_model_class(
|
||||
cls: type[BaseModel],
|
||||
cls_name: str,
|
||||
config_wrapper: ConfigWrapper,
|
||||
*,
|
||||
raise_errors: bool = True,
|
||||
types_namespace: dict[str, Any] | None,
|
||||
create_model_module: str | None = None,
|
||||
) -> bool:
|
||||
"""Finish building a model class.
|
||||
|
||||
This logic must be called after class has been created since validation functions must be bound
|
||||
and `get_type_hints` requires a class object.
|
||||
|
||||
Args:
|
||||
cls: BaseModel or dataclass.
|
||||
cls_name: The model or dataclass name.
|
||||
config_wrapper: The config wrapper instance.
|
||||
raise_errors: Whether to raise errors.
|
||||
types_namespace: Optional extra namespace to look for types in.
|
||||
create_model_module: The module of the class to be created, if created by `create_model`.
|
||||
|
||||
Returns:
|
||||
`True` if the model is successfully completed, else `False`.
|
||||
|
||||
Raises:
|
||||
PydanticUndefinedAnnotation: If `PydanticUndefinedAnnotation` occurs in `__get_pydantic_core_schema__`
|
||||
and `raise_errors=True`.
|
||||
"""
|
||||
typevars_map = get_model_typevars_map(cls)
|
||||
gen_schema = GenerateSchema(
|
||||
config_wrapper,
|
||||
types_namespace,
|
||||
typevars_map,
|
||||
)
|
||||
|
||||
handler = CallbackGetCoreSchemaHandler(
|
||||
partial(gen_schema.generate_schema, from_dunder_get_core_schema=False),
|
||||
gen_schema,
|
||||
ref_mode='unpack',
|
||||
)
|
||||
|
||||
if config_wrapper.defer_build:
|
||||
set_model_mocks(cls, cls_name)
|
||||
return False
|
||||
|
||||
try:
|
||||
schema = cls.__get_pydantic_core_schema__(cls, handler)
|
||||
except PydanticUndefinedAnnotation as e:
|
||||
if raise_errors:
|
||||
raise
|
||||
set_model_mocks(cls, cls_name, f'`{e.name}`')
|
||||
return False
|
||||
|
||||
core_config = config_wrapper.core_config(cls)
|
||||
|
||||
try:
|
||||
schema = gen_schema.clean_schema(schema)
|
||||
except gen_schema.CollectedInvalid:
|
||||
set_model_mocks(cls, cls_name)
|
||||
return False
|
||||
|
||||
# debug(schema)
|
||||
cls.__pydantic_core_schema__ = schema
|
||||
|
||||
cls.__pydantic_validator__ = create_schema_validator(
|
||||
schema,
|
||||
cls,
|
||||
create_model_module or cls.__module__,
|
||||
cls.__qualname__,
|
||||
'create_model' if create_model_module else 'BaseModel',
|
||||
core_config,
|
||||
config_wrapper.plugin_settings,
|
||||
)
|
||||
cls.__pydantic_serializer__ = SchemaSerializer(schema, core_config)
|
||||
cls.__pydantic_complete__ = True
|
||||
|
||||
# set __signature__ attr only for model class, but not for its instances
|
||||
cls.__signature__ = ClassAttribute(
|
||||
'__signature__', generate_model_signature(cls.__init__, cls.model_fields, config_wrapper)
|
||||
)
|
||||
return True
|
||||
|
||||
|
||||
def generate_model_signature(
|
||||
init: Callable[..., None], fields: dict[str, FieldInfo], config_wrapper: ConfigWrapper
|
||||
) -> Signature:
|
||||
"""Generate signature for model based on its fields.
|
||||
|
||||
Args:
|
||||
init: The class init.
|
||||
fields: The model fields.
|
||||
config_wrapper: The config wrapper instance.
|
||||
|
||||
Returns:
|
||||
The model signature.
|
||||
"""
|
||||
return generate_pydantic_signature(init, fields, config_wrapper)
|
||||
|
||||
|
||||
class _PydanticWeakRef:
|
||||
"""Wrapper for `weakref.ref` that enables `pickle` serialization.
|
||||
|
||||
Cloudpickle fails to serialize `weakref.ref` objects due to an arcane error related
|
||||
to abstract base classes (`abc.ABC`). This class works around the issue by wrapping
|
||||
`weakref.ref` instead of subclassing it.
|
||||
|
||||
See https://github.com/pydantic/pydantic/issues/6763 for context.
|
||||
|
||||
Semantics:
|
||||
- If not pickled, behaves the same as a `weakref.ref`.
|
||||
- If pickled along with the referenced object, the same `weakref.ref` behavior
|
||||
will be maintained between them after unpickling.
|
||||
- If pickled without the referenced object, after unpickling the underlying
|
||||
reference will be cleared (`__call__` will always return `None`).
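Example (added illustration; assumes CPython reference counting, so the referent is
collected as soon as `obj` is deleted):
```py
import pickle

from pydantic._internal._model_construction import _PydanticWeakRef

class Target:
    pass

obj = Target()
ref = _PydanticWeakRef(obj)
assert ref() is obj

del obj  # the underlying weakref dies with its referent
restored = pickle.loads(pickle.dumps(ref))
assert restored() is None
```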
|
||||
"""
|
||||
|
||||
def __init__(self, obj: Any):
|
||||
if obj is None:
|
||||
# The object will be `None` upon deserialization if the serialized weakref
|
||||
# had lost its underlying object.
|
||||
self._wr = None
|
||||
else:
|
||||
self._wr = weakref.ref(obj)
|
||||
|
||||
def __call__(self) -> Any:
|
||||
if self._wr is None:
|
||||
return None
|
||||
else:
|
||||
return self._wr()
|
||||
|
||||
def __reduce__(self) -> tuple[Callable, tuple[weakref.ReferenceType | None]]:
|
||||
return _PydanticWeakRef, (self(),)
|
||||
|
||||
|
||||
def build_lenient_weakvaluedict(d: dict[str, Any] | None) -> dict[str, Any] | None:
|
||||
"""Takes an input dictionary, and produces a new value that (invertibly) replaces the values with weakrefs.
|
||||
|
||||
We can't just use a WeakValueDictionary because many types (including int, str, etc.) can't be stored as values
|
||||
in a WeakValueDictionary.
|
||||
|
||||
The `unpack_lenient_weakvaluedict` function can be used to reverse this operation.
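Example (added illustration; `Service` is a hypothetical user class whose instances
support weak references):
```py
from pydantic._internal._model_construction import (
    build_lenient_weakvaluedict,
    unpack_lenient_weakvaluedict,
)

class Service:
    pass

svc = Service()
ns = {'answer': 42, 'svc': svc}

packed = build_lenient_weakvaluedict(ns)  # 42 is kept as-is, svc is wrapped in a weakref
assert unpack_lenient_weakvaluedict(packed) == {'answer': 42, 'svc': svc}
```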
|
||||
"""
|
||||
if d is None:
|
||||
return None
|
||||
result = {}
|
||||
for k, v in d.items():
|
||||
try:
|
||||
proxy = _PydanticWeakRef(v)
|
||||
except TypeError:
|
||||
proxy = v
|
||||
result[k] = proxy
|
||||
return result
|
||||
|
||||
|
||||
def unpack_lenient_weakvaluedict(d: dict[str, Any] | None) -> dict[str, Any] | None:
|
||||
"""Inverts the transform performed by `build_lenient_weakvaluedict`."""
|
||||
if d is None:
|
||||
return None
|
||||
|
||||
result = {}
|
||||
for k, v in d.items():
|
||||
if isinstance(v, _PydanticWeakRef):
|
||||
v = v()
|
||||
if v is not None:
|
||||
result[k] = v
|
||||
else:
|
||||
result[k] = v
|
||||
return result
|
||||
|
||||
|
||||
def default_ignored_types() -> tuple[type[Any], ...]:
|
||||
from ..fields import ComputedFieldInfo
|
||||
|
||||
return (
|
||||
FunctionType,
|
||||
property,
|
||||
classmethod,
|
||||
staticmethod,
|
||||
PydanticDescriptorProxy,
|
||||
ComputedFieldInfo,
|
||||
ValidateCallWrapper,
|
||||
)
|
||||
@@ -0,0 +1,115 @@
|
||||
"""Tools to provide pretty/human-readable display of objects."""
|
||||
from __future__ import annotations as _annotations
|
||||
|
||||
import types
|
||||
import typing
|
||||
from typing import Any
|
||||
|
||||
import typing_extensions
|
||||
|
||||
from . import _typing_extra
|
||||
|
||||
if typing.TYPE_CHECKING:
|
||||
ReprArgs: typing_extensions.TypeAlias = 'typing.Iterable[tuple[str | None, Any]]'
|
||||
RichReprResult: typing_extensions.TypeAlias = (
|
||||
'typing.Iterable[Any | tuple[Any] | tuple[str, Any] | tuple[str, Any, Any]]'
|
||||
)
|
||||
|
||||
|
||||
class PlainRepr(str):
|
||||
"""String class where repr doesn't include quotes. Useful with Representation when you want to return a string
|
||||
representation of something that is valid (or pseudo-valid) python.
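Example (added illustration):
```py
from pydantic._internal._repr import PlainRepr

print(repr(PlainRepr('list[int]')), repr('list[int]'))
#> list[int] 'list[int]'
```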
|
||||
"""
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return str(self)
|
||||
|
||||
|
||||
class Representation:
|
||||
# Mixin to provide `__str__`, `__repr__`, and `__pretty__` and `__rich_repr__` methods.
|
||||
# `__pretty__` is used by [devtools](https://python-devtools.helpmanual.io/).
|
||||
# `__rich_repr__` is used by [rich](https://rich.readthedocs.io/en/stable/pretty.html).
|
||||
# (this is not a docstring to avoid adding a docstring to classes which inherit from Representation)
|
||||
|
||||
# we don't want to use a type annotation here as it can break get_type_hints
|
||||
__slots__ = tuple() # type: typing.Collection[str]
|
||||
|
||||
def __repr_args__(self) -> ReprArgs:
|
||||
"""Returns the attributes to show in __str__, __repr__, and __pretty__ this is generally overridden.
|
||||
|
||||
Can either return:
|
||||
* name - value pairs, e.g.: `[('foo_name', 'foo'), ('bar_name', ['b', 'a', 'r'])]`
|
||||
* or, just values, e.g.: `[(None, 'foo'), (None, ['b', 'a', 'r'])]`
|
||||
"""
|
||||
attrs_names = self.__slots__
|
||||
if not attrs_names and hasattr(self, '__dict__'):
|
||||
attrs_names = self.__dict__.keys()
|
||||
attrs = ((s, getattr(self, s)) for s in attrs_names)
|
||||
return [(a, v) for a, v in attrs if v is not None]
|
||||
|
||||
def __repr_name__(self) -> str:
|
||||
"""Name of the instance's class, used in __repr__."""
|
||||
return self.__class__.__name__
|
||||
|
||||
def __repr_str__(self, join_str: str) -> str:
|
||||
return join_str.join(repr(v) if a is None else f'{a}={v!r}' for a, v in self.__repr_args__())
|
||||
|
||||
def __pretty__(self, fmt: typing.Callable[[Any], Any], **kwargs: Any) -> typing.Generator[Any, None, None]:
|
||||
"""Used by devtools (https://python-devtools.helpmanual.io/) to pretty print objects."""
|
||||
yield self.__repr_name__() + '('
|
||||
yield 1
|
||||
for name, value in self.__repr_args__():
|
||||
if name is not None:
|
||||
yield name + '='
|
||||
yield fmt(value)
|
||||
yield ','
|
||||
yield 0
|
||||
yield -1
|
||||
yield ')'
|
||||
|
||||
def __rich_repr__(self) -> RichReprResult:
|
||||
"""Used by Rich (https://rich.readthedocs.io/en/stable/pretty.html) to pretty print objects."""
|
||||
for name, field_repr in self.__repr_args__():
|
||||
if name is None:
|
||||
yield field_repr
|
||||
else:
|
||||
yield name, field_repr
|
||||
|
||||
def __str__(self) -> str:
|
||||
return self.__repr_str__(' ')
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return f'{self.__repr_name__()}({self.__repr_str__(", ")})'
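# Illustrative sketch (not part of the original module): a minimal subclass showing the
# repr behaviour provided by `Representation`:
#
#     class Point(Representation):
#         def __init__(self, x: int, y: int):
#             self.x = x
#             self.y = y
#
#     print(Point(1, 2))        #> x=1 y=2
#     print(repr(Point(1, 2)))  #> Point(x=1, y=2)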
|
||||
|
||||
|
||||
def display_as_type(obj: Any) -> str:
|
||||
"""Pretty representation of a type, should be as close as possible to the original type definition string.
|
||||
|
||||
Takes some logic from `typing._type_repr`.
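Example (added illustration):
```py
from typing import Union

from pydantic._internal._repr import display_as_type

print(display_as_type(Union[int, str]))
#> Union[int, str]
```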
|
||||
"""
|
||||
if isinstance(obj, types.FunctionType):
|
||||
return obj.__name__
|
||||
elif obj is ...:
|
||||
return '...'
|
||||
elif isinstance(obj, Representation):
|
||||
return repr(obj)
|
||||
|
||||
if not isinstance(obj, (_typing_extra.typing_base, _typing_extra.WithArgsTypes, type)):
|
||||
obj = obj.__class__
|
||||
|
||||
if _typing_extra.origin_is_union(typing_extensions.get_origin(obj)):
|
||||
args = ', '.join(map(display_as_type, typing_extensions.get_args(obj)))
|
||||
return f'Union[{args}]'
|
||||
elif isinstance(obj, _typing_extra.WithArgsTypes):
|
||||
if typing_extensions.get_origin(obj) == typing_extensions.Literal:
|
||||
args = ', '.join(map(repr, typing_extensions.get_args(obj)))
|
||||
else:
|
||||
args = ', '.join(map(display_as_type, typing_extensions.get_args(obj)))
|
||||
try:
|
||||
return f'{obj.__qualname__}[{args}]'
|
||||
except AttributeError:
|
||||
return str(obj) # handles TypeAliasType in 3.12
|
||||
elif isinstance(obj, type):
|
||||
return obj.__qualname__
|
||||
else:
|
||||
return repr(obj).replace('typing.', '').replace('typing_extensions.', '')
|
||||
@@ -0,0 +1,124 @@
|
||||
"""Types and utility functions used by various other internal tools."""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING, Any, Callable
|
||||
|
||||
from pydantic_core import core_schema
|
||||
from typing_extensions import Literal
|
||||
|
||||
from ..annotated_handlers import GetCoreSchemaHandler, GetJsonSchemaHandler
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ..json_schema import GenerateJsonSchema, JsonSchemaValue
|
||||
from ._core_utils import CoreSchemaOrField
|
||||
from ._generate_schema import GenerateSchema
|
||||
|
||||
GetJsonSchemaFunction = Callable[[CoreSchemaOrField, GetJsonSchemaHandler], JsonSchemaValue]
|
||||
HandlerOverride = Callable[[CoreSchemaOrField], JsonSchemaValue]
|
||||
|
||||
|
||||
class GenerateJsonSchemaHandler(GetJsonSchemaHandler):
|
||||
"""JsonSchemaHandler implementation that doesn't do ref unwrapping by default.
|
||||
|
||||
This is used for any Annotated metadata so that we don't end up with conflicting
|
||||
modifications to the definition schema.
|
||||
|
||||
Used internally by Pydantic, please do not rely on this implementation.
|
||||
See `GetJsonSchemaHandler` for the handler API.
|
||||
"""
|
||||
|
||||
def __init__(self, generate_json_schema: GenerateJsonSchema, handler_override: HandlerOverride | None) -> None:
|
||||
self.generate_json_schema = generate_json_schema
|
||||
self.handler = handler_override or generate_json_schema.generate_inner
|
||||
self.mode = generate_json_schema.mode
|
||||
|
||||
def __call__(self, __core_schema: CoreSchemaOrField) -> JsonSchemaValue:
|
||||
return self.handler(__core_schema)
|
||||
|
||||
def resolve_ref_schema(self, maybe_ref_json_schema: JsonSchemaValue) -> JsonSchemaValue:
|
||||
"""Resolves `$ref` in the json schema.
|
||||
|
||||
This returns the input json schema if there is no `$ref` in json schema.
|
||||
|
||||
Args:
|
||||
maybe_ref_json_schema: The input json schema that may contain `$ref`.
|
||||
|
||||
Returns:
|
||||
Resolved json schema.
|
||||
|
||||
Raises:
|
||||
LookupError: If it can't find the definition for `$ref`.
|
||||
"""
|
||||
if '$ref' not in maybe_ref_json_schema:
|
||||
return maybe_ref_json_schema
|
||||
ref = maybe_ref_json_schema['$ref']
|
||||
json_schema = self.generate_json_schema.get_schema_from_definitions(ref)
|
||||
if json_schema is None:
|
||||
raise LookupError(
|
||||
f'Could not find a ref for {ref}.'
|
||||
' Maybe you tried to call resolve_ref_schema from within a recursive model?'
|
||||
)
|
||||
return json_schema
|
||||
|
||||
|
||||
class CallbackGetCoreSchemaHandler(GetCoreSchemaHandler):
|
||||
"""Wrapper to use an arbitrary function as a `GetCoreSchemaHandler`.
|
||||
|
||||
Used internally by Pydantic, please do not rely on this implementation.
|
||||
See `GetCoreSchemaHandler` for the handler API.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
handler: Callable[[Any], core_schema.CoreSchema],
|
||||
generate_schema: GenerateSchema,
|
||||
ref_mode: Literal['to-def', 'unpack'] = 'to-def',
|
||||
) -> None:
|
||||
self._handler = handler
|
||||
self._generate_schema = generate_schema
|
||||
self._ref_mode = ref_mode
|
||||
|
||||
def __call__(self, __source_type: Any) -> core_schema.CoreSchema:
|
||||
schema = self._handler(__source_type)
|
||||
ref = schema.get('ref')
|
||||
if self._ref_mode == 'to-def':
|
||||
if ref is not None:
|
||||
self._generate_schema.defs.definitions[ref] = schema
|
||||
return core_schema.definition_reference_schema(ref)
|
||||
return schema
|
||||
else:  # ref_mode == 'unpack'
|
||||
return self.resolve_ref_schema(schema)
|
||||
|
||||
def _get_types_namespace(self) -> dict[str, Any] | None:
|
||||
return self._generate_schema._types_namespace
|
||||
|
||||
def generate_schema(self, __source_type: Any) -> core_schema.CoreSchema:
|
||||
return self._generate_schema.generate_schema(__source_type)
|
||||
|
||||
@property
|
||||
def field_name(self) -> str | None:
|
||||
return self._generate_schema.field_name_stack.get()
|
||||
|
||||
def resolve_ref_schema(self, maybe_ref_schema: core_schema.CoreSchema) -> core_schema.CoreSchema:
|
||||
"""Resolves reference in the core schema.
|
||||
|
||||
Args:
|
||||
maybe_ref_schema: The input core schema that may contain a reference.
|
||||
|
||||
Returns:
|
||||
Resolved core schema.
|
||||
|
||||
Raises:
|
||||
LookupError: If it can't find the definition for reference.
|
||||
"""
|
||||
if maybe_ref_schema['type'] == 'definition-ref':
|
||||
ref = maybe_ref_schema['schema_ref']
|
||||
if ref not in self._generate_schema.defs.definitions:
|
||||
raise LookupError(
|
||||
f'Could not find a ref for {ref}.'
|
||||
' Maybe you tried to call resolve_ref_schema from within a recursive model?'
|
||||
)
|
||||
return self._generate_schema.defs.definitions[ref]
|
||||
elif maybe_ref_schema['type'] == 'definitions':
|
||||
return self.resolve_ref_schema(maybe_ref_schema['schema'])
|
||||
return maybe_ref_schema
|
||||
@@ -0,0 +1,714 @@
|
||||
"""Logic for generating pydantic-core schemas for standard library types.
|
||||
|
||||
Import of this module is deferred since it contains imports of many standard library modules.
|
||||
"""
|
||||
from __future__ import annotations as _annotations
|
||||
|
||||
import collections
|
||||
import collections.abc
|
||||
import dataclasses
|
||||
import decimal
|
||||
import inspect
|
||||
import os
|
||||
import typing
|
||||
from enum import Enum
|
||||
from functools import partial
|
||||
from ipaddress import IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network
|
||||
from typing import Any, Callable, Iterable, TypeVar
|
||||
|
||||
import typing_extensions
|
||||
from pydantic_core import (
|
||||
CoreSchema,
|
||||
MultiHostUrl,
|
||||
PydanticCustomError,
|
||||
PydanticOmit,
|
||||
Url,
|
||||
core_schema,
|
||||
)
|
||||
from typing_extensions import get_args, get_origin
|
||||
|
||||
from pydantic.errors import PydanticSchemaGenerationError
|
||||
from pydantic.fields import FieldInfo
|
||||
from pydantic.types import Strict
|
||||
|
||||
from ..config import ConfigDict
|
||||
from ..json_schema import JsonSchemaValue, update_json_schema
|
||||
from . import _known_annotated_metadata, _typing_extra, _validators
|
||||
from ._core_utils import get_type_ref
|
||||
from ._internal_dataclass import slots_true
|
||||
from ._schema_generation_shared import GetCoreSchemaHandler, GetJsonSchemaHandler
|
||||
|
||||
if typing.TYPE_CHECKING:
|
||||
from ._generate_schema import GenerateSchema
|
||||
|
||||
StdSchemaFunction = Callable[[GenerateSchema, type[Any]], core_schema.CoreSchema]
|
||||
|
||||
|
||||
@dataclasses.dataclass(**slots_true)
|
||||
class SchemaTransformer:
|
||||
get_core_schema: Callable[[Any, GetCoreSchemaHandler], CoreSchema]
|
||||
get_json_schema: Callable[[CoreSchema, GetJsonSchemaHandler], JsonSchemaValue]
|
||||
|
||||
def __get_pydantic_core_schema__(self, source_type: Any, handler: GetCoreSchemaHandler) -> CoreSchema:
|
||||
return self.get_core_schema(source_type, handler)
|
||||
|
||||
def __get_pydantic_json_schema__(self, schema: CoreSchema, handler: GetJsonSchemaHandler) -> JsonSchemaValue:
|
||||
return self.get_json_schema(schema, handler)
|
||||
|
||||
|
||||
def get_enum_core_schema(enum_type: type[Enum], config: ConfigDict) -> CoreSchema:
|
||||
cases: list[Any] = list(enum_type.__members__.values())
|
||||
|
||||
enum_ref = get_type_ref(enum_type)
|
||||
description = None if not enum_type.__doc__ else inspect.cleandoc(enum_type.__doc__)
|
||||
if description == 'An enumeration.': # This is the default value provided by enum.EnumMeta.__new__; don't use it
|
||||
description = None
|
||||
updates = {'title': enum_type.__name__, 'description': description}
|
||||
updates = {k: v for k, v in updates.items() if v is not None}
|
||||
|
||||
def get_json_schema(_, handler: GetJsonSchemaHandler) -> JsonSchemaValue:
|
||||
json_schema = handler(core_schema.literal_schema([x.value for x in cases], ref=enum_ref))
|
||||
original_schema = handler.resolve_ref_schema(json_schema)
|
||||
update_json_schema(original_schema, updates)
|
||||
return json_schema
|
||||
|
||||
if not cases:
|
||||
# Use an isinstance check for enums with no cases.
|
||||
# The most important use case for this is creating TypeVar bounds for generics that should
|
||||
# be restricted to enums. This is more consistent than it might seem at first, since you can only
|
||||
# subclass enum.Enum (or subclasses of enum.Enum) if all parent classes have no cases.
|
||||
# We use the get_json_schema function when an Enum subclass has been declared with no cases
|
||||
# so that we can still generate a valid json schema.
|
||||
return core_schema.is_instance_schema(enum_type, metadata={'pydantic_js_functions': [get_json_schema]})
|
||||
|
||||
use_enum_values = config.get('use_enum_values', False)
|
||||
|
||||
if len(cases) == 1:
|
||||
expected = repr(cases[0].value)
|
||||
else:
|
||||
expected = ', '.join([repr(case.value) for case in cases[:-1]]) + f' or {cases[-1].value!r}'
|
||||
|
||||
def to_enum(__input_value: Any) -> Enum:
|
||||
try:
|
||||
enum_field = enum_type(__input_value)
|
||||
if use_enum_values:
|
||||
return enum_field.value
|
||||
return enum_field
|
||||
except ValueError:
|
||||
# The type: ignore on the next line is to ignore the requirement of LiteralString
|
||||
raise PydanticCustomError('enum', f'Input should be {expected}', {'expected': expected}) # type: ignore
|
||||
|
||||
strict_python_schema = core_schema.is_instance_schema(enum_type)
|
||||
if use_enum_values:
|
||||
strict_python_schema = core_schema.chain_schema(
|
||||
[strict_python_schema, core_schema.no_info_plain_validator_function(lambda x: x.value)]
|
||||
)
|
||||
|
||||
to_enum_validator = core_schema.no_info_plain_validator_function(to_enum)
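    # Lax mode validates the raw value against the enum's base type first (int/str/float, where there
    # is one) and then looks it up via `to_enum`; strict mode only accepts existing enum members from
    # Python, while JSON input is still coerced from the base type.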
|
||||
if issubclass(enum_type, int):
|
||||
# this handles `IntEnum`, and also `Foobar(int, Enum)`
|
||||
updates['type'] = 'integer'
|
||||
lax = core_schema.chain_schema([core_schema.int_schema(), to_enum_validator])
|
||||
# Disallow float from JSON due to strict mode
|
||||
strict = core_schema.json_or_python_schema(
|
||||
json_schema=core_schema.no_info_after_validator_function(to_enum, core_schema.int_schema()),
|
||||
python_schema=strict_python_schema,
|
||||
)
|
||||
elif issubclass(enum_type, str):
|
||||
        # this handles `StrEnum` (3.11+), and also `Foobar(str, Enum)`
|
||||
updates['type'] = 'string'
|
||||
lax = core_schema.chain_schema([core_schema.str_schema(), to_enum_validator])
|
||||
strict = core_schema.json_or_python_schema(
|
||||
json_schema=core_schema.no_info_after_validator_function(to_enum, core_schema.str_schema()),
|
||||
python_schema=strict_python_schema,
|
||||
)
|
||||
elif issubclass(enum_type, float):
|
||||
updates['type'] = 'numeric'
|
||||
lax = core_schema.chain_schema([core_schema.float_schema(), to_enum_validator])
|
||||
strict = core_schema.json_or_python_schema(
|
||||
json_schema=core_schema.no_info_after_validator_function(to_enum, core_schema.float_schema()),
|
||||
python_schema=strict_python_schema,
|
||||
)
|
||||
else:
|
||||
lax = to_enum_validator
|
||||
strict = core_schema.json_or_python_schema(json_schema=to_enum_validator, python_schema=strict_python_schema)
|
||||
return core_schema.lax_or_strict_schema(
|
||||
lax_schema=lax, strict_schema=strict, ref=enum_ref, metadata={'pydantic_js_functions': [get_json_schema]}
|
||||
)
|
||||
|
||||
|
||||
@dataclasses.dataclass(**slots_true)
|
||||
class InnerSchemaValidator:
|
||||
"""Use a fixed CoreSchema, avoiding interference from outward annotations."""
|
||||
|
||||
core_schema: CoreSchema
|
||||
js_schema: JsonSchemaValue | None = None
|
||||
js_core_schema: CoreSchema | None = None
|
||||
js_schema_update: JsonSchemaValue | None = None
|
||||
|
||||
def __get_pydantic_json_schema__(self, _schema: CoreSchema, handler: GetJsonSchemaHandler) -> JsonSchemaValue:
|
||||
if self.js_schema is not None:
|
||||
return self.js_schema
|
||||
js_schema = handler(self.js_core_schema or self.core_schema)
|
||||
if self.js_schema_update is not None:
|
||||
js_schema.update(self.js_schema_update)
|
||||
return js_schema
|
||||
|
||||
def __get_pydantic_core_schema__(self, _source_type: Any, _handler: GetCoreSchemaHandler) -> CoreSchema:
|
||||
return self.core_schema
|
||||
|
||||
|
||||
def decimal_prepare_pydantic_annotations(
|
||||
source: Any, annotations: Iterable[Any], config: ConfigDict
|
||||
) -> tuple[Any, list[Any]] | None:
|
||||
if source is not decimal.Decimal:
|
||||
return None
|
||||
|
||||
metadata, remaining_annotations = _known_annotated_metadata.collect_known_metadata(annotations)
|
||||
|
||||
config_allow_inf_nan = config.get('allow_inf_nan')
|
||||
if config_allow_inf_nan is not None:
|
||||
metadata.setdefault('allow_inf_nan', config_allow_inf_nan)
|
||||
|
||||
_known_annotated_metadata.check_metadata(
|
||||
metadata, {*_known_annotated_metadata.FLOAT_CONSTRAINTS, 'max_digits', 'decimal_places'}, decimal.Decimal
|
||||
)
|
||||
return source, [InnerSchemaValidator(core_schema.decimal_schema(**metadata)), *remaining_annotations]
|
||||
|
||||
|
||||
def datetime_prepare_pydantic_annotations(
|
||||
source_type: Any, annotations: Iterable[Any], _config: ConfigDict
|
||||
) -> tuple[Any, list[Any]] | None:
|
||||
import datetime
|
||||
|
||||
metadata, remaining_annotations = _known_annotated_metadata.collect_known_metadata(annotations)
|
||||
if source_type is datetime.date:
|
||||
sv = InnerSchemaValidator(core_schema.date_schema(**metadata))
|
||||
elif source_type is datetime.datetime:
|
||||
sv = InnerSchemaValidator(core_schema.datetime_schema(**metadata))
|
||||
elif source_type is datetime.time:
|
||||
sv = InnerSchemaValidator(core_schema.time_schema(**metadata))
|
||||
elif source_type is datetime.timedelta:
|
||||
sv = InnerSchemaValidator(core_schema.timedelta_schema(**metadata))
|
||||
else:
|
||||
return None
|
||||
# check now that we know the source type is correct
|
||||
_known_annotated_metadata.check_metadata(metadata, _known_annotated_metadata.DATE_TIME_CONSTRAINTS, source_type)
|
||||
return (source_type, [sv, *remaining_annotations])
|
||||
|
||||
|
||||
def uuid_prepare_pydantic_annotations(
|
||||
source_type: Any, annotations: Iterable[Any], _config: ConfigDict
|
||||
) -> tuple[Any, list[Any]] | None:
|
||||
# UUIDs have no constraints - they are fixed length, constructing a UUID instance checks the length
|
||||
|
||||
from uuid import UUID
|
||||
|
||||
if source_type is not UUID:
|
||||
return None
|
||||
|
||||
return (source_type, [InnerSchemaValidator(core_schema.uuid_schema()), *annotations])
|
||||
|
||||
|
||||
def path_schema_prepare_pydantic_annotations(
|
||||
source_type: Any, annotations: Iterable[Any], _config: ConfigDict
|
||||
) -> tuple[Any, list[Any]] | None:
|
||||
import pathlib
|
||||
|
||||
if source_type not in {
|
||||
os.PathLike,
|
||||
pathlib.Path,
|
||||
pathlib.PurePath,
|
||||
pathlib.PosixPath,
|
||||
pathlib.PurePosixPath,
|
||||
pathlib.PureWindowsPath,
|
||||
}:
|
||||
return None
|
||||
|
||||
metadata, remaining_annotations = _known_annotated_metadata.collect_known_metadata(annotations)
|
||||
_known_annotated_metadata.check_metadata(metadata, _known_annotated_metadata.STR_CONSTRAINTS, source_type)
|
||||
|
||||
construct_path = pathlib.PurePath if source_type is os.PathLike else source_type
|
||||
|
||||
def path_validator(input_value: str) -> os.PathLike[Any]:
|
||||
try:
|
||||
return construct_path(input_value)
|
||||
except TypeError as e:
|
||||
raise PydanticCustomError('path_type', 'Input is not a valid path') from e
|
||||
|
||||
constrained_str_schema = core_schema.str_schema(**metadata)
|
||||
|
||||
instance_schema = core_schema.json_or_python_schema(
|
||||
json_schema=core_schema.no_info_after_validator_function(path_validator, constrained_str_schema),
|
||||
python_schema=core_schema.is_instance_schema(source_type),
|
||||
)
|
||||
|
||||
strict: bool | None = None
|
||||
for annotation in annotations:
|
||||
if isinstance(annotation, Strict):
|
||||
strict = annotation.strict
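    # Lax mode accepts both existing path instances and strings (parsed via `path_validator`);
    # strict mode only accepts instances from Python, while JSON input is still validated as a
    # constrained string and converted.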
|
||||
|
||||
schema = core_schema.lax_or_strict_schema(
|
||||
lax_schema=core_schema.union_schema(
|
||||
[
|
||||
instance_schema,
|
||||
core_schema.no_info_after_validator_function(path_validator, constrained_str_schema),
|
||||
],
|
||||
custom_error_type='path_type',
|
||||
custom_error_message='Input is not a valid path',
|
||||
strict=True,
|
||||
),
|
||||
strict_schema=instance_schema,
|
||||
serialization=core_schema.to_string_ser_schema(),
|
||||
strict=strict,
|
||||
)
|
||||
|
||||
return (
|
||||
source_type,
|
||||
[
|
||||
InnerSchemaValidator(schema, js_core_schema=constrained_str_schema, js_schema_update={'format': 'path'}),
|
||||
*remaining_annotations,
|
||||
],
|
||||
)
|
||||
|
||||
|
||||
def dequeue_validator(
|
||||
input_value: Any, handler: core_schema.ValidatorFunctionWrapHandler, maxlen: None | int
|
||||
) -> collections.deque[Any]:
|
||||
if isinstance(input_value, collections.deque):
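        # both the incoming deque's own maxlen and any max_length from annotation metadata may apply;
        # keep the smaller of the two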
|
||||
maxlens = [v for v in (input_value.maxlen, maxlen) if v is not None]
|
||||
if maxlens:
|
||||
maxlen = min(maxlens)
|
||||
return collections.deque(handler(input_value), maxlen=maxlen)
|
||||
else:
|
||||
return collections.deque(handler(input_value), maxlen=maxlen)
|
||||
|
||||
|
||||
@dataclasses.dataclass(**slots_true)
|
||||
class SequenceValidator:
|
||||
mapped_origin: type[Any]
|
||||
item_source_type: type[Any]
|
||||
min_length: int | None = None
|
||||
max_length: int | None = None
|
||||
strict: bool = False
|
||||
|
||||
def serialize_sequence_via_list(
|
||||
self, v: Any, handler: core_schema.SerializerFunctionWrapHandler, info: core_schema.SerializationInfo
|
||||
) -> Any:
|
||||
items: list[Any] = []
|
||||
for index, item in enumerate(v):
|
||||
try:
|
||||
v = handler(item, index)
|
||||
except PydanticOmit:
|
||||
pass
|
||||
else:
|
||||
items.append(v)
|
||||
|
||||
if info.mode_is_json():
|
||||
return items
|
||||
else:
|
||||
return self.mapped_origin(items)
|
||||
|
||||
def __get_pydantic_core_schema__(self, source_type: Any, handler: GetCoreSchemaHandler) -> CoreSchema:
|
||||
if self.item_source_type is Any:
|
||||
items_schema = None
|
||||
else:
|
||||
items_schema = handler.generate_schema(self.item_source_type)
|
||||
|
||||
metadata = {'min_length': self.min_length, 'max_length': self.max_length, 'strict': self.strict}
|
||||
|
||||
if self.mapped_origin in (list, set, frozenset):
|
||||
if self.mapped_origin is list:
|
||||
constrained_schema = core_schema.list_schema(items_schema, **metadata)
|
||||
elif self.mapped_origin is set:
|
||||
constrained_schema = core_schema.set_schema(items_schema, **metadata)
|
||||
else:
|
||||
assert self.mapped_origin is frozenset # safety check in case we forget to add a case
|
||||
constrained_schema = core_schema.frozenset_schema(items_schema, **metadata)
|
||||
|
||||
schema = constrained_schema
|
||||
else:
|
||||
# safety check in case we forget to add a case
|
||||
assert self.mapped_origin in (collections.deque, collections.Counter)
|
||||
|
||||
if self.mapped_origin is collections.deque:
|
||||
                # if we have a MaxLen annotation, we might as well set it as the default maxlen on the deque
                # this lets us re-use existing metadata annotations to let users set the maxlen on a deque
                # that e.g. comes from JSON
|
||||
coerce_instance_wrap = partial(
|
||||
core_schema.no_info_wrap_validator_function,
|
||||
partial(dequeue_validator, maxlen=metadata.get('max_length', None)),
|
||||
)
|
||||
else:
|
||||
coerce_instance_wrap = partial(core_schema.no_info_after_validator_function, self.mapped_origin)
|
||||
|
||||
constrained_schema = core_schema.list_schema(items_schema, **metadata)
|
||||
|
||||
check_instance = core_schema.json_or_python_schema(
|
||||
json_schema=core_schema.list_schema(),
|
||||
python_schema=core_schema.is_instance_schema(self.mapped_origin),
|
||||
)
|
||||
|
||||
serialization = core_schema.wrap_serializer_function_ser_schema(
|
||||
self.serialize_sequence_via_list, schema=items_schema or core_schema.any_schema(), info_arg=True
|
||||
)
|
||||
|
||||
strict = core_schema.chain_schema([check_instance, coerce_instance_wrap(constrained_schema)])
|
||||
|
||||
if metadata.get('strict', False):
|
||||
schema = strict
|
||||
else:
|
||||
lax = coerce_instance_wrap(constrained_schema)
|
||||
schema = core_schema.lax_or_strict_schema(lax_schema=lax, strict_schema=strict)
|
||||
schema['serialization'] = serialization
|
||||
|
||||
return schema
|
||||
|
||||
|
||||
SEQUENCE_ORIGIN_MAP: dict[Any, Any] = {
|
||||
typing.Deque: collections.deque,
|
||||
collections.deque: collections.deque,
|
||||
list: list,
|
||||
typing.List: list,
|
||||
set: set,
|
||||
typing.AbstractSet: set,
|
||||
typing.Set: set,
|
||||
frozenset: frozenset,
|
||||
typing.FrozenSet: frozenset,
|
||||
typing.Sequence: list,
|
||||
typing.MutableSequence: list,
|
||||
typing.MutableSet: set,
|
||||
# this doesn't handle subclasses of these
|
||||
# parametrized typing.Set creates one of these
|
||||
collections.abc.MutableSet: set,
|
||||
collections.abc.Set: frozenset,
|
||||
}
|
||||
|
||||
|
||||
def identity(s: CoreSchema) -> CoreSchema:
|
||||
return s
|
||||
|
||||
|
||||
def sequence_like_prepare_pydantic_annotations(
|
||||
source_type: Any, annotations: Iterable[Any], _config: ConfigDict
|
||||
) -> tuple[Any, list[Any]] | None:
|
||||
origin: Any = get_origin(source_type)
|
||||
|
||||
mapped_origin = SEQUENCE_ORIGIN_MAP.get(origin, None) if origin else SEQUENCE_ORIGIN_MAP.get(source_type, None)
|
||||
if mapped_origin is None:
|
||||
return None
|
||||
|
||||
args = get_args(source_type)
|
||||
|
||||
if not args:
|
||||
args = (Any,)
|
||||
elif len(args) != 1:
|
||||
raise ValueError('Expected sequence to have exactly 1 generic parameter')
|
||||
|
||||
item_source_type = args[0]
|
||||
|
||||
metadata, remaining_annotations = _known_annotated_metadata.collect_known_metadata(annotations)
|
||||
_known_annotated_metadata.check_metadata(metadata, _known_annotated_metadata.SEQUENCE_CONSTRAINTS, source_type)
|
||||
|
||||
return (source_type, [SequenceValidator(mapped_origin, item_source_type, **metadata), *remaining_annotations])
|
||||
|
||||
|
||||
MAPPING_ORIGIN_MAP: dict[Any, Any] = {
|
||||
typing.DefaultDict: collections.defaultdict,
|
||||
collections.defaultdict: collections.defaultdict,
|
||||
collections.OrderedDict: collections.OrderedDict,
|
||||
typing_extensions.OrderedDict: collections.OrderedDict,
|
||||
dict: dict,
|
||||
typing.Dict: dict,
|
||||
collections.Counter: collections.Counter,
|
||||
typing.Counter: collections.Counter,
|
||||
# this doesn't handle subclasses of these
|
||||
typing.Mapping: dict,
|
||||
typing.MutableMapping: dict,
|
||||
# parametrized typing.{Mutable}Mapping creates one of these
|
||||
collections.abc.MutableMapping: dict,
|
||||
collections.abc.Mapping: dict,
|
||||
}
|
||||
|
||||
|
||||
def defaultdict_validator(
|
||||
input_value: Any, handler: core_schema.ValidatorFunctionWrapHandler, default_default_factory: Callable[[], Any]
|
||||
) -> collections.defaultdict[Any, Any]:
|
||||
if isinstance(input_value, collections.defaultdict):
|
||||
default_factory = input_value.default_factory
|
||||
return collections.defaultdict(default_factory, handler(input_value))
|
||||
else:
|
||||
return collections.defaultdict(default_default_factory, handler(input_value))
|
||||
|
||||
|
||||
def get_defaultdict_default_default_factory(values_source_type: Any) -> Callable[[], Any]:
|
||||
def infer_default() -> Callable[[], Any]:
|
||||
allowed_default_types: dict[Any, Any] = {
|
||||
typing.Tuple: tuple,
|
||||
tuple: tuple,
|
||||
collections.abc.Sequence: tuple,
|
||||
collections.abc.MutableSequence: list,
|
||||
typing.List: list,
|
||||
list: list,
|
||||
typing.Sequence: list,
|
||||
typing.Set: set,
|
||||
set: set,
|
||||
typing.MutableSet: set,
|
||||
collections.abc.MutableSet: set,
|
||||
collections.abc.Set: frozenset,
|
||||
typing.MutableMapping: dict,
|
||||
typing.Mapping: dict,
|
||||
collections.abc.Mapping: dict,
|
||||
collections.abc.MutableMapping: dict,
|
||||
float: float,
|
||||
int: int,
|
||||
str: str,
|
||||
bool: bool,
|
||||
}
|
||||
values_type_origin = get_origin(values_source_type) or values_source_type
|
||||
instructions = 'set using `DefaultDict[..., Annotated[..., Field(default_factory=...)]]`'
|
||||
if isinstance(values_type_origin, TypeVar):
|
||||
|
||||
def type_var_default_factory() -> None:
|
||||
raise RuntimeError(
|
||||
'Generic defaultdict cannot be used without a concrete value type or an'
|
||||
' explicit default factory, ' + instructions
|
||||
)
|
||||
|
||||
return type_var_default_factory
|
||||
elif values_type_origin not in allowed_default_types:
|
||||
# a somewhat subjective set of types that have reasonable default values
|
||||
allowed_msg = ', '.join([t.__name__ for t in set(allowed_default_types.values())])
|
||||
raise PydanticSchemaGenerationError(
|
||||
f'Unable to infer a default factory for keys of type {values_source_type}.'
|
||||
f' Only {allowed_msg} are supported, other types require an explicit default factory'
|
||||
' ' + instructions
|
||||
)
|
||||
return allowed_default_types[values_type_origin]
|
||||
|
||||
# Assume Annotated[..., Field(...)]
|
||||
if _typing_extra.is_annotated(values_source_type):
|
||||
field_info = next((v for v in get_args(values_source_type) if isinstance(v, FieldInfo)), None)
|
||||
else:
|
||||
field_info = None
|
||||
if field_info and field_info.default_factory:
|
||||
default_default_factory = field_info.default_factory
|
||||
else:
|
||||
default_default_factory = infer_default()
|
||||
return default_default_factory
|
||||
|
||||
|
||||
@dataclasses.dataclass(**slots_true)
|
||||
class MappingValidator:
|
||||
mapped_origin: type[Any]
|
||||
keys_source_type: type[Any]
|
||||
values_source_type: type[Any]
|
||||
min_length: int | None = None
|
||||
max_length: int | None = None
|
||||
strict: bool = False
|
||||
|
||||
def serialize_mapping_via_dict(self, v: Any, handler: core_schema.SerializerFunctionWrapHandler) -> Any:
|
||||
return handler(v)
|
||||
|
||||
def __get_pydantic_core_schema__(self, source_type: Any, handler: GetCoreSchemaHandler) -> CoreSchema:
|
||||
if self.keys_source_type is Any:
|
||||
keys_schema = None
|
||||
else:
|
||||
keys_schema = handler.generate_schema(self.keys_source_type)
|
||||
if self.values_source_type is Any:
|
||||
values_schema = None
|
||||
else:
|
||||
values_schema = handler.generate_schema(self.values_source_type)
|
||||
|
||||
metadata = {'min_length': self.min_length, 'max_length': self.max_length, 'strict': self.strict}
|
||||
|
||||
if self.mapped_origin is dict:
|
||||
schema = core_schema.dict_schema(keys_schema, values_schema, **metadata)
|
||||
else:
|
||||
constrained_schema = core_schema.dict_schema(keys_schema, values_schema, **metadata)
|
||||
check_instance = core_schema.json_or_python_schema(
|
||||
json_schema=core_schema.dict_schema(),
|
||||
python_schema=core_schema.is_instance_schema(self.mapped_origin),
|
||||
)
|
||||
|
||||
if self.mapped_origin is collections.defaultdict:
|
||||
default_default_factory = get_defaultdict_default_default_factory(self.values_source_type)
|
||||
coerce_instance_wrap = partial(
|
||||
core_schema.no_info_wrap_validator_function,
|
||||
partial(defaultdict_validator, default_default_factory=default_default_factory),
|
||||
)
|
||||
else:
|
||||
coerce_instance_wrap = partial(core_schema.no_info_after_validator_function, self.mapped_origin)
|
||||
|
||||
serialization = core_schema.wrap_serializer_function_ser_schema(
|
||||
self.serialize_mapping_via_dict,
|
||||
schema=core_schema.dict_schema(
|
||||
keys_schema or core_schema.any_schema(), values_schema or core_schema.any_schema()
|
||||
),
|
||||
info_arg=False,
|
||||
)
|
||||
|
||||
strict = core_schema.chain_schema([check_instance, coerce_instance_wrap(constrained_schema)])
|
||||
|
||||
if metadata.get('strict', False):
|
||||
schema = strict
|
||||
else:
|
||||
lax = coerce_instance_wrap(constrained_schema)
|
||||
schema = core_schema.lax_or_strict_schema(lax_schema=lax, strict_schema=strict)
|
||||
schema['serialization'] = serialization
|
||||
|
||||
return schema
|
||||
|
||||
|
||||
def mapping_like_prepare_pydantic_annotations(
|
||||
source_type: Any, annotations: Iterable[Any], _config: ConfigDict
|
||||
) -> tuple[Any, list[Any]] | None:
|
||||
origin: Any = get_origin(source_type)
|
||||
|
||||
mapped_origin = MAPPING_ORIGIN_MAP.get(origin, None) if origin else MAPPING_ORIGIN_MAP.get(source_type, None)
|
||||
if mapped_origin is None:
|
||||
return None
|
||||
|
||||
args = get_args(source_type)
|
||||
|
||||
if not args:
|
||||
args = (Any, Any)
|
||||
elif mapped_origin is collections.Counter:
|
||||
# a single generic
|
||||
if len(args) != 1:
|
||||
raise ValueError('Expected Counter to have exactly 1 generic parameter')
|
||||
        args = (args[0], int)  # Counter values are always ints
|
||||
elif len(args) != 2:
|
||||
raise ValueError('Expected mapping to have exactly 2 generic parameters')
|
||||
|
||||
keys_source_type, values_source_type = args
|
||||
|
||||
metadata, remaining_annotations = _known_annotated_metadata.collect_known_metadata(annotations)
|
||||
_known_annotated_metadata.check_metadata(metadata, _known_annotated_metadata.SEQUENCE_CONSTRAINTS, source_type)
|
||||
|
||||
return (
|
||||
source_type,
|
||||
[
|
||||
MappingValidator(mapped_origin, keys_source_type, values_source_type, **metadata),
|
||||
*remaining_annotations,
|
||||
],
|
||||
)
|
||||
|
||||
|
||||
def ip_prepare_pydantic_annotations(
|
||||
source_type: Any, annotations: Iterable[Any], _config: ConfigDict
|
||||
) -> tuple[Any, list[Any]] | None:
|
||||
def make_strict_ip_schema(tp: type[Any]) -> CoreSchema:
|
||||
return core_schema.json_or_python_schema(
|
||||
json_schema=core_schema.no_info_after_validator_function(tp, core_schema.str_schema()),
|
||||
python_schema=core_schema.is_instance_schema(tp),
|
||||
)
|
||||
|
||||
if source_type is IPv4Address:
|
||||
return source_type, [
|
||||
SchemaTransformer(
|
||||
lambda _1, _2: core_schema.lax_or_strict_schema(
|
||||
lax_schema=core_schema.no_info_plain_validator_function(_validators.ip_v4_address_validator),
|
||||
strict_schema=make_strict_ip_schema(IPv4Address),
|
||||
serialization=core_schema.to_string_ser_schema(),
|
||||
),
|
||||
lambda _1, _2: {'type': 'string', 'format': 'ipv4'},
|
||||
),
|
||||
*annotations,
|
||||
]
|
||||
if source_type is IPv4Network:
|
||||
return source_type, [
|
||||
SchemaTransformer(
|
||||
lambda _1, _2: core_schema.lax_or_strict_schema(
|
||||
lax_schema=core_schema.no_info_plain_validator_function(_validators.ip_v4_network_validator),
|
||||
strict_schema=make_strict_ip_schema(IPv4Network),
|
||||
serialization=core_schema.to_string_ser_schema(),
|
||||
),
|
||||
lambda _1, _2: {'type': 'string', 'format': 'ipv4network'},
|
||||
),
|
||||
*annotations,
|
||||
]
|
||||
if source_type is IPv4Interface:
|
||||
return source_type, [
|
||||
SchemaTransformer(
|
||||
lambda _1, _2: core_schema.lax_or_strict_schema(
|
||||
lax_schema=core_schema.no_info_plain_validator_function(_validators.ip_v4_interface_validator),
|
||||
strict_schema=make_strict_ip_schema(IPv4Interface),
|
||||
serialization=core_schema.to_string_ser_schema(),
|
||||
),
|
||||
lambda _1, _2: {'type': 'string', 'format': 'ipv4interface'},
|
||||
),
|
||||
*annotations,
|
||||
]
|
||||
|
||||
if source_type is IPv6Address:
|
||||
return source_type, [
|
||||
SchemaTransformer(
|
||||
lambda _1, _2: core_schema.lax_or_strict_schema(
|
||||
lax_schema=core_schema.no_info_plain_validator_function(_validators.ip_v6_address_validator),
|
||||
strict_schema=make_strict_ip_schema(IPv6Address),
|
||||
serialization=core_schema.to_string_ser_schema(),
|
||||
),
|
||||
lambda _1, _2: {'type': 'string', 'format': 'ipv6'},
|
||||
),
|
||||
*annotations,
|
||||
]
|
||||
if source_type is IPv6Network:
|
||||
return source_type, [
|
||||
SchemaTransformer(
|
||||
lambda _1, _2: core_schema.lax_or_strict_schema(
|
||||
lax_schema=core_schema.no_info_plain_validator_function(_validators.ip_v6_network_validator),
|
||||
strict_schema=make_strict_ip_schema(IPv6Network),
|
||||
serialization=core_schema.to_string_ser_schema(),
|
||||
),
|
||||
lambda _1, _2: {'type': 'string', 'format': 'ipv6network'},
|
||||
),
|
||||
*annotations,
|
||||
]
|
||||
if source_type is IPv6Interface:
|
||||
return source_type, [
|
||||
SchemaTransformer(
|
||||
lambda _1, _2: core_schema.lax_or_strict_schema(
|
||||
lax_schema=core_schema.no_info_plain_validator_function(_validators.ip_v6_interface_validator),
|
||||
strict_schema=make_strict_ip_schema(IPv6Interface),
|
||||
serialization=core_schema.to_string_ser_schema(),
|
||||
),
|
||||
lambda _1, _2: {'type': 'string', 'format': 'ipv6interface'},
|
||||
),
|
||||
*annotations,
|
||||
]
|
||||
|
||||
return None
|
||||
|
||||
|
||||
def url_prepare_pydantic_annotations(
|
||||
source_type: Any, annotations: Iterable[Any], _config: ConfigDict
|
||||
) -> tuple[Any, list[Any]] | None:
|
||||
if source_type is Url:
|
||||
return source_type, [
|
||||
SchemaTransformer(
|
||||
lambda _1, _2: core_schema.url_schema(),
|
||||
lambda cs, handler: handler(cs),
|
||||
),
|
||||
*annotations,
|
||||
]
|
||||
if source_type is MultiHostUrl:
|
||||
return source_type, [
|
||||
SchemaTransformer(
|
||||
lambda _1, _2: core_schema.multi_host_url_schema(),
|
||||
lambda cs, handler: handler(cs),
|
||||
),
|
||||
*annotations,
|
||||
]
|
||||
|
||||
|
||||
PREPARE_METHODS: tuple[Callable[[Any, Iterable[Any], ConfigDict], tuple[Any, list[Any]] | None], ...] = (
|
||||
decimal_prepare_pydantic_annotations,
|
||||
sequence_like_prepare_pydantic_annotations,
|
||||
datetime_prepare_pydantic_annotations,
|
||||
uuid_prepare_pydantic_annotations,
|
||||
path_schema_prepare_pydantic_annotations,
|
||||
mapping_like_prepare_pydantic_annotations,
|
||||
ip_prepare_pydantic_annotations,
|
||||
url_prepare_pydantic_annotations,
|
||||
)
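# Each prepare function above either claims the source type, returning (source_type, [validator, *annotations]),
# or returns None, leaving the type for the next candidate.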
|
||||
@@ -0,0 +1,446 @@
|
||||
"""Logic for interacting with type annotations, mostly extensions, shims and hacks to wrap python's typing module."""
|
||||
from __future__ import annotations as _annotations
|
||||
|
||||
import dataclasses
|
||||
import sys
|
||||
import types
|
||||
import typing
|
||||
from collections.abc import Callable
|
||||
from functools import partial
|
||||
from types import GetSetDescriptorType
|
||||
from typing import TYPE_CHECKING, Any, ForwardRef
|
||||
|
||||
from typing_extensions import Annotated, Final, Literal, TypeAliasType, TypeGuard, get_args, get_origin
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ._dataclasses import StandardDataclass
|
||||
|
||||
try:
|
||||
from typing import _TypingBase # type: ignore[attr-defined]
|
||||
except ImportError:
|
||||
from typing import _Final as _TypingBase # type: ignore[attr-defined]
|
||||
|
||||
typing_base = _TypingBase
|
||||
|
||||
|
||||
if sys.version_info < (3, 9):
|
||||
# python < 3.9 does not have GenericAlias (list[int], tuple[str, ...] and so on)
|
||||
TypingGenericAlias = ()
|
||||
else:
|
||||
from typing import GenericAlias as TypingGenericAlias # type: ignore
|
||||
|
||||
|
||||
if sys.version_info < (3, 11):
|
||||
from typing_extensions import NotRequired, Required
|
||||
else:
|
||||
from typing import NotRequired, Required # noqa: F401
|
||||
|
||||
|
||||
if sys.version_info < (3, 10):
|
||||
|
||||
def origin_is_union(tp: type[Any] | None) -> bool:
|
||||
return tp is typing.Union
|
||||
|
||||
WithArgsTypes = (TypingGenericAlias,)
|
||||
|
||||
else:
|
||||
|
||||
def origin_is_union(tp: type[Any] | None) -> bool:
|
||||
return tp is typing.Union or tp is types.UnionType
|
||||
|
||||
WithArgsTypes = typing._GenericAlias, types.GenericAlias, types.UnionType # type: ignore[attr-defined]
|
||||
|
||||
|
||||
if sys.version_info < (3, 10):
|
||||
NoneType = type(None)
|
||||
EllipsisType = type(Ellipsis)
|
||||
else:
|
||||
from types import NoneType as NoneType
|
||||
|
||||
|
||||
LITERAL_TYPES: set[Any] = {Literal}
|
||||
if hasattr(typing, 'Literal'):
|
||||
LITERAL_TYPES.add(typing.Literal) # type: ignore
|
||||
|
||||
NONE_TYPES: tuple[Any, ...] = (None, NoneType, *(tp[None] for tp in LITERAL_TYPES))
|
||||
|
||||
|
||||
TypeVarType = Any # since mypy doesn't allow the use of TypeVar as a type
|
||||
|
||||
|
||||
def is_none_type(type_: Any) -> bool:
|
||||
return type_ in NONE_TYPES
|
||||
|
||||
|
||||
def is_callable_type(type_: type[Any]) -> bool:
|
||||
return type_ is Callable or get_origin(type_) is Callable
|
||||
|
||||
|
||||
def is_literal_type(type_: type[Any]) -> bool:
|
||||
return Literal is not None and get_origin(type_) in LITERAL_TYPES
|
||||
|
||||
|
||||
def literal_values(type_: type[Any]) -> tuple[Any, ...]:
|
||||
return get_args(type_)
|
||||
|
||||
|
||||
def all_literal_values(type_: type[Any]) -> list[Any]:
|
||||
"""This method is used to retrieve all Literal values as
|
||||
Literal can be used recursively (see https://www.python.org/dev/peps/pep-0586)
|
||||
e.g. `Literal[Literal[Literal[1, 2, 3], "foo"], 5, None]`.
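    (which flattens to `[1, 2, 3, 'foo', 5, None]`).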
|
||||
"""
|
||||
if not is_literal_type(type_):
|
||||
return [type_]
|
||||
|
||||
values = literal_values(type_)
|
||||
return list(x for value in values for x in all_literal_values(value))
|
||||
|
||||
|
||||
def is_annotated(ann_type: Any) -> bool:
|
||||
from ._utils import lenient_issubclass
|
||||
|
||||
origin = get_origin(ann_type)
|
||||
return origin is not None and lenient_issubclass(origin, Annotated)
|
||||
|
||||
|
||||
def is_namedtuple(type_: type[Any]) -> bool:
|
||||
"""Check if a given class is a named tuple.
|
||||
It can be either a `typing.NamedTuple` or `collections.namedtuple`.
|
||||
"""
|
||||
from ._utils import lenient_issubclass
|
||||
|
||||
return lenient_issubclass(type_, tuple) and hasattr(type_, '_fields')
|
||||
|
||||
|
||||
test_new_type = typing.NewType('test_new_type', str)
|
||||
|
||||
|
||||
def is_new_type(type_: type[Any]) -> bool:
|
||||
"""Check whether type_ was created using typing.NewType.
|
||||
|
||||
    Can't use isinstance because it fails on Python < 3.10.
|
||||
"""
|
||||
return isinstance(type_, test_new_type.__class__) and hasattr(type_, '__supertype__') # type: ignore[arg-type]
|
||||
|
||||
|
||||
def _check_classvar(v: type[Any] | None) -> bool:
|
||||
if v is None:
|
||||
return False
|
||||
|
||||
return v.__class__ == typing.ClassVar.__class__ and getattr(v, '_name', None) == 'ClassVar'
|
||||
|
||||
|
||||
def is_classvar(ann_type: type[Any]) -> bool:
|
||||
if _check_classvar(ann_type) or _check_classvar(get_origin(ann_type)):
|
||||
return True
|
||||
|
||||
# this is an ugly workaround for class vars that contain forward references and are therefore themselves
|
||||
# forward references, see #3679
|
||||
if ann_type.__class__ == typing.ForwardRef and ann_type.__forward_arg__.startswith('ClassVar['): # type: ignore
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
|
||||
def _check_finalvar(v: type[Any] | None) -> bool:
|
||||
"""Check if a given type is a `typing.Final` type."""
|
||||
if v is None:
|
||||
return False
|
||||
|
||||
return v.__class__ == Final.__class__ and (sys.version_info < (3, 8) or getattr(v, '_name', None) == 'Final')
|
||||
|
||||
|
||||
def is_finalvar(ann_type: Any) -> bool:
|
||||
return _check_finalvar(ann_type) or _check_finalvar(get_origin(ann_type))
|
||||
|
||||
|
||||
def parent_frame_namespace(*, parent_depth: int = 2) -> dict[str, Any] | None:
|
||||
"""We allow use of items in parent namespace to get around the issue with `get_type_hints` only looking in the
|
||||
global module namespace. See https://github.com/pydantic/pydantic/issues/2678#issuecomment-1008139014 -> Scope
|
||||
and suggestion at the end of the next comment by @gvanrossum.
|
||||
|
||||
WARNING 1: it matters exactly where this is called. By default, this function will build a namespace from the
|
||||
parent of where it is called.
|
||||
|
||||
WARNING 2: this only looks in the parent namespace, not other parents since (AFAIK) there's no way to collect a
|
||||
dict of exactly what's in scope. Using `f_back` would work sometimes but would be very wrong and confusing in many
|
||||
other cases. See https://discuss.python.org/t/is-there-a-way-to-access-parent-nested-namespaces/20659.
|
||||
"""
|
||||
frame = sys._getframe(parent_depth)
|
||||
# if f_back is None, it's the global module namespace and we don't need to include it here
|
||||
if frame.f_back is None:
|
||||
return None
|
||||
else:
|
||||
return frame.f_locals
|
||||
|
||||
|
||||
def add_module_globals(obj: Any, globalns: dict[str, Any] | None = None) -> dict[str, Any]:
|
||||
module_name = getattr(obj, '__module__', None)
|
||||
if module_name:
|
||||
try:
|
||||
module_globalns = sys.modules[module_name].__dict__
|
||||
except KeyError:
|
||||
# happens occasionally, see https://github.com/pydantic/pydantic/issues/2363
|
||||
pass
|
||||
else:
|
||||
if globalns:
|
||||
return {**module_globalns, **globalns}
|
||||
else:
|
||||
# copy module globals to make sure it can't be updated later
|
||||
return module_globalns.copy()
|
||||
|
||||
return globalns or {}
|
||||
|
||||
|
||||
def get_cls_types_namespace(cls: type[Any], parent_namespace: dict[str, Any] | None = None) -> dict[str, Any]:
|
||||
ns = add_module_globals(cls, parent_namespace)
|
||||
ns[cls.__name__] = cls
|
||||
return ns
|
||||
|
||||
|
||||
def get_cls_type_hints_lenient(obj: Any, globalns: dict[str, Any] | None = None) -> dict[str, Any]:
|
||||
"""Collect annotations from a class, including those from parent classes.
|
||||
|
||||
Unlike `typing.get_type_hints`, this function will not error if a forward reference is not resolvable.
|
||||
"""
|
||||
hints = {}
|
||||
for base in reversed(obj.__mro__):
|
||||
ann = base.__dict__.get('__annotations__')
|
||||
localns = dict(vars(base))
|
||||
if ann is not None and ann is not GetSetDescriptorType:
|
||||
for name, value in ann.items():
|
||||
hints[name] = eval_type_lenient(value, globalns, localns)
|
||||
return hints
|
||||
|
||||
|
||||
def eval_type_lenient(value: Any, globalns: dict[str, Any] | None, localns: dict[str, Any] | None) -> Any:
|
||||
"""Behaves like typing._eval_type, except it won't raise an error if a forward reference can't be resolved."""
|
||||
if value is None:
|
||||
value = NoneType
|
||||
elif isinstance(value, str):
|
||||
value = _make_forward_ref(value, is_argument=False, is_class=True)
|
||||
|
||||
try:
|
||||
return typing._eval_type(value, globalns, localns) # type: ignore
|
||||
except NameError:
|
||||
# the point of this function is to be tolerant to this case
|
||||
return value
|
||||
|
||||
|
||||
def get_function_type_hints(
|
||||
function: Callable[..., Any], *, include_keys: set[str] | None = None, types_namespace: dict[str, Any] | None = None
|
||||
) -> dict[str, Any]:
|
||||
"""Like `typing.get_type_hints`, but doesn't convert `X` to `Optional[X]` if the default value is `None`, also
|
||||
copes with `partial`.
|
||||
"""
|
||||
if isinstance(function, partial):
|
||||
annotations = function.func.__annotations__
|
||||
else:
|
||||
annotations = function.__annotations__
|
||||
|
||||
globalns = add_module_globals(function)
|
||||
type_hints = {}
|
||||
for name, value in annotations.items():
|
||||
if include_keys is not None and name not in include_keys:
|
||||
continue
|
||||
if value is None:
|
||||
value = NoneType
|
||||
elif isinstance(value, str):
|
||||
value = _make_forward_ref(value)
|
||||
|
||||
type_hints[name] = typing._eval_type(value, globalns, types_namespace) # type: ignore
|
||||
|
||||
return type_hints
|
||||
|
||||
|
||||
if sys.version_info < (3, 9, 8) or (3, 10) <= sys.version_info < (3, 10, 1):
|
||||
|
||||
def _make_forward_ref(
|
||||
arg: Any,
|
||||
is_argument: bool = True,
|
||||
*,
|
||||
is_class: bool = False,
|
||||
) -> typing.ForwardRef:
|
||||
"""Wrapper for ForwardRef that accounts for the `is_class` argument missing in older versions.
|
||||
The `module` argument is omitted as it breaks <3.9.8, =3.10.0 and isn't used in the calls below.
|
||||
|
||||
See https://github.com/python/cpython/pull/28560 for some background.
|
||||
The backport happened on 3.9.8, see:
|
||||
https://github.com/pydantic/pydantic/discussions/6244#discussioncomment-6275458,
|
||||
and on 3.10.1 for the 3.10 branch, see:
|
||||
https://github.com/pydantic/pydantic/issues/6912
|
||||
|
||||
Implemented as EAFP with memory.
|
||||
"""
|
||||
return typing.ForwardRef(arg, is_argument)
|
||||
|
||||
else:
|
||||
_make_forward_ref = typing.ForwardRef
|
||||
|
||||
|
||||
if sys.version_info >= (3, 10):
|
||||
get_type_hints = typing.get_type_hints
|
||||
|
||||
else:
|
||||
"""
|
||||
    For older versions of python, we have a custom implementation of `get_type_hints` which is as close as possible to
|
||||
the implementation in CPython 3.10.8.
|
||||
"""
|
||||
|
||||
@typing.no_type_check
|
||||
def get_type_hints( # noqa: C901
|
||||
obj: Any,
|
||||
globalns: dict[str, Any] | None = None,
|
||||
localns: dict[str, Any] | None = None,
|
||||
include_extras: bool = False,
|
||||
) -> dict[str, Any]: # pragma: no cover
|
||||
"""Taken verbatim from python 3.10.8 unchanged, except:
|
||||
* type annotations of the function definition above.
|
||||
* prefixing `typing.` where appropriate
|
||||
* Use `_make_forward_ref` instead of `typing.ForwardRef` to handle the `is_class` argument.
|
||||
|
||||
https://github.com/python/cpython/blob/aaaf5174241496afca7ce4d4584570190ff972fe/Lib/typing.py#L1773-L1875
|
||||
|
||||
DO NOT CHANGE THIS METHOD UNLESS ABSOLUTELY NECESSARY.
|
||||
======================================================
|
||||
|
||||
Return type hints for an object.
|
||||
|
||||
This is often the same as obj.__annotations__, but it handles
|
||||
forward references encoded as string literals, adds Optional[t] if a
|
||||
default value equal to None is set and recursively replaces all
|
||||
'Annotated[T, ...]' with 'T' (unless 'include_extras=True').
|
||||
|
||||
The argument may be a module, class, method, or function. The annotations
|
||||
are returned as a dictionary. For classes, annotations include also
|
||||
inherited members.
|
||||
|
||||
TypeError is raised if the argument is not of a type that can contain
|
||||
annotations, and an empty dictionary is returned if no annotations are
|
||||
present.
|
||||
|
||||
BEWARE -- the behavior of globalns and localns is counterintuitive
|
||||
(unless you are familiar with how eval() and exec() work). The
|
||||
search order is locals first, then globals.
|
||||
|
||||
- If no dict arguments are passed, an attempt is made to use the
|
||||
globals from obj (or the respective module's globals for classes),
|
||||
and these are also used as the locals. If the object does not appear
|
||||
to have globals, an empty dictionary is used. For classes, the search
|
||||
order is globals first then locals.
|
||||
|
||||
- If one dict argument is passed, it is used for both globals and
|
||||
locals.
|
||||
|
||||
- If two dict arguments are passed, they specify globals and
|
||||
locals, respectively.
|
||||
"""
|
||||
if getattr(obj, '__no_type_check__', None):
|
||||
return {}
|
||||
# Classes require a special treatment.
|
||||
if isinstance(obj, type):
|
||||
hints = {}
|
||||
for base in reversed(obj.__mro__):
|
||||
if globalns is None:
|
||||
base_globals = getattr(sys.modules.get(base.__module__, None), '__dict__', {})
|
||||
else:
|
||||
base_globals = globalns
|
||||
ann = base.__dict__.get('__annotations__', {})
|
||||
if isinstance(ann, types.GetSetDescriptorType):
|
||||
ann = {}
|
||||
base_locals = dict(vars(base)) if localns is None else localns
|
||||
if localns is None and globalns is None:
|
||||
# This is surprising, but required. Before Python 3.10,
|
||||
# get_type_hints only evaluated the globalns of
|
||||
# a class. To maintain backwards compatibility, we reverse
|
||||
# the globalns and localns order so that eval() looks into
|
||||
# *base_globals* first rather than *base_locals*.
|
||||
# This only affects ForwardRefs.
|
||||
base_globals, base_locals = base_locals, base_globals
|
||||
for name, value in ann.items():
|
||||
if value is None:
|
||||
value = type(None)
|
||||
if isinstance(value, str):
|
||||
value = _make_forward_ref(value, is_argument=False, is_class=True)
|
||||
|
||||
value = typing._eval_type(value, base_globals, base_locals) # type: ignore
|
||||
hints[name] = value
|
||||
return (
|
||||
hints if include_extras else {k: typing._strip_annotations(t) for k, t in hints.items()} # type: ignore
|
||||
)
|
||||
|
||||
if globalns is None:
|
||||
if isinstance(obj, types.ModuleType):
|
||||
globalns = obj.__dict__
|
||||
else:
|
||||
nsobj = obj
|
||||
# Find globalns for the unwrapped object.
|
||||
while hasattr(nsobj, '__wrapped__'):
|
||||
nsobj = nsobj.__wrapped__
|
||||
globalns = getattr(nsobj, '__globals__', {})
|
||||
if localns is None:
|
||||
localns = globalns
|
||||
elif localns is None:
|
||||
localns = globalns
|
||||
hints = getattr(obj, '__annotations__', None)
|
||||
if hints is None:
|
||||
# Return empty annotations for something that _could_ have them.
|
||||
if isinstance(obj, typing._allowed_types): # type: ignore
|
||||
return {}
|
||||
else:
|
||||
raise TypeError(f'{obj!r} is not a module, class, method, ' 'or function.')
|
||||
defaults = typing._get_defaults(obj) # type: ignore
|
||||
hints = dict(hints)
|
||||
for name, value in hints.items():
|
||||
if value is None:
|
||||
value = type(None)
|
||||
if isinstance(value, str):
|
||||
# class-level forward refs were handled above, this must be either
|
||||
# a module-level annotation or a function argument annotation
|
||||
|
||||
value = _make_forward_ref(
|
||||
value,
|
||||
is_argument=not isinstance(obj, types.ModuleType),
|
||||
is_class=False,
|
||||
)
|
||||
value = typing._eval_type(value, globalns, localns) # type: ignore
|
||||
if name in defaults and defaults[name] is None:
|
||||
value = typing.Optional[value]
|
||||
hints[name] = value
|
||||
return hints if include_extras else {k: typing._strip_annotations(t) for k, t in hints.items()} # type: ignore
|
||||
|
||||
|
||||
if sys.version_info < (3, 9):
|
||||
|
||||
def evaluate_fwd_ref(
|
||||
ref: ForwardRef, globalns: dict[str, Any] | None = None, localns: dict[str, Any] | None = None
|
||||
) -> Any:
|
||||
return ref._evaluate(globalns=globalns, localns=localns)
|
||||
|
||||
else:
|
||||
|
||||
def evaluate_fwd_ref(
|
||||
ref: ForwardRef, globalns: dict[str, Any] | None = None, localns: dict[str, Any] | None = None
|
||||
) -> Any:
|
||||
return ref._evaluate(globalns=globalns, localns=localns, recursive_guard=frozenset())
|
||||
|
||||
|
||||
def is_dataclass(_cls: type[Any]) -> TypeGuard[type[StandardDataclass]]:
|
||||
# The dataclasses.is_dataclass function doesn't seem to provide TypeGuard functionality,
|
||||
# so I created this convenience function
|
||||
return dataclasses.is_dataclass(_cls)
|
||||
|
||||
|
||||
def origin_is_type_alias_type(origin: Any) -> TypeGuard[TypeAliasType]:
|
||||
return isinstance(origin, TypeAliasType)
|
||||
|
||||
|
||||
if sys.version_info >= (3, 10):
|
||||
|
||||
def is_generic_alias(type_: type[Any]) -> bool:
|
||||
return isinstance(type_, (types.GenericAlias, typing._GenericAlias)) # type: ignore[attr-defined]
|
||||
|
||||
else:
|
||||
|
||||
def is_generic_alias(type_: type[Any]) -> bool:
|
||||
return isinstance(type_, typing._GenericAlias) # type: ignore
|
||||
@@ -0,0 +1,335 @@
|
||||
"""Bucket of reusable internal utilities.
|
||||
|
||||
This should be reduced as much as possible: functions used in only one place should be moved to that place.
|
||||
"""
|
||||
from __future__ import annotations as _annotations
|
||||
|
||||
import keyword
|
||||
import typing
|
||||
import weakref
|
||||
from collections import OrderedDict, defaultdict, deque
|
||||
from copy import deepcopy
|
||||
from itertools import zip_longest
|
||||
from types import BuiltinFunctionType, CodeType, FunctionType, GeneratorType, LambdaType, ModuleType
|
||||
from typing import Any, TypeVar
|
||||
|
||||
from typing_extensions import TypeAlias, TypeGuard
|
||||
|
||||
from . import _repr, _typing_extra
|
||||
|
||||
if typing.TYPE_CHECKING:
|
||||
MappingIntStrAny: TypeAlias = 'typing.Mapping[int, Any] | typing.Mapping[str, Any]'
|
||||
AbstractSetIntStr: TypeAlias = 'typing.AbstractSet[int] | typing.AbstractSet[str]'
|
||||
from ..main import BaseModel
|
||||
|
||||
|
||||
# these are types that are returned unchanged by deepcopy
|
||||
IMMUTABLE_NON_COLLECTIONS_TYPES: set[type[Any]] = {
|
||||
int,
|
||||
float,
|
||||
complex,
|
||||
str,
|
||||
bool,
|
||||
bytes,
|
||||
type,
|
||||
_typing_extra.NoneType,
|
||||
FunctionType,
|
||||
BuiltinFunctionType,
|
||||
LambdaType,
|
||||
weakref.ref,
|
||||
CodeType,
|
||||
    # note: including ModuleType will differ from the behaviour of deepcopy by not producing an error.
    # It might not be a good idea in general, but considering that this function is used only internally
    # against default values of fields, this allows a field to actually have a module as its default value
|
||||
ModuleType,
|
||||
NotImplemented.__class__,
|
||||
Ellipsis.__class__,
|
||||
}
|
||||
|
||||
# these are types that if empty, might be copied with simple copy() instead of deepcopy()
|
||||
BUILTIN_COLLECTIONS: set[type[Any]] = {
|
||||
list,
|
||||
set,
|
||||
tuple,
|
||||
frozenset,
|
||||
dict,
|
||||
OrderedDict,
|
||||
defaultdict,
|
||||
deque,
|
||||
}
|
||||
|
||||
|
||||
def sequence_like(v: Any) -> bool:
|
||||
return isinstance(v, (list, tuple, set, frozenset, GeneratorType, deque))
|
||||
|
||||
|
||||
def lenient_isinstance(o: Any, class_or_tuple: type[Any] | tuple[type[Any], ...] | None) -> bool: # pragma: no cover
|
||||
try:
|
||||
return isinstance(o, class_or_tuple) # type: ignore[arg-type]
|
||||
except TypeError:
|
||||
return False
|
||||
|
||||
|
||||
def lenient_issubclass(cls: Any, class_or_tuple: Any) -> bool: # pragma: no cover
|
||||
try:
|
||||
return isinstance(cls, type) and issubclass(cls, class_or_tuple)
|
||||
except TypeError:
|
||||
if isinstance(cls, _typing_extra.WithArgsTypes):
|
||||
return False
|
||||
raise # pragma: no cover
|
||||
|
||||
|
||||
def is_model_class(cls: Any) -> TypeGuard[type[BaseModel]]:
|
||||
"""Returns true if cls is a _proper_ subclass of BaseModel, and provides proper type-checking,
|
||||
unlike raw calls to lenient_issubclass.
|
||||
"""
|
||||
from ..main import BaseModel
|
||||
|
||||
return lenient_issubclass(cls, BaseModel) and cls is not BaseModel
|
||||
|
||||
|
||||
def is_valid_identifier(identifier: str) -> bool:
|
||||
"""Checks that a string is a valid identifier and not a Python keyword.
|
||||
:param identifier: The identifier to test.
|
||||
:return: True if the identifier is valid.
|
||||
"""
|
||||
return identifier.isidentifier() and not keyword.iskeyword(identifier)
|
||||
|
||||
|
||||
KeyType = TypeVar('KeyType')
|
||||
|
||||
|
||||
def deep_update(mapping: dict[KeyType, Any], *updating_mappings: dict[KeyType, Any]) -> dict[KeyType, Any]:
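    # Recursively merge each mapping in `updating_mappings` into a copy of `mapping`, merging nested
    # dicts key by key; e.g. deep_update({'a': {'x': 1}}, {'a': {'y': 2}}) == {'a': {'x': 1, 'y': 2}}
    # (illustrative values).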
|
||||
updated_mapping = mapping.copy()
|
||||
for updating_mapping in updating_mappings:
|
||||
for k, v in updating_mapping.items():
|
||||
if k in updated_mapping and isinstance(updated_mapping[k], dict) and isinstance(v, dict):
|
||||
updated_mapping[k] = deep_update(updated_mapping[k], v)
|
||||
else:
|
||||
updated_mapping[k] = v
|
||||
return updated_mapping
|
||||
|
||||
|
||||
def update_not_none(mapping: dict[Any, Any], **update: Any) -> None:
|
||||
mapping.update({k: v for k, v in update.items() if v is not None})
|
||||
|
||||
|
||||
T = TypeVar('T')
|
||||
|
||||
|
||||
def unique_list(
|
||||
input_list: list[T] | tuple[T, ...],
|
||||
*,
|
||||
name_factory: typing.Callable[[T], str] = str,
|
||||
) -> list[T]:
|
||||
"""Make a list unique while maintaining order.
|
||||
We update the list if another one with the same name is set
|
||||
(e.g. model validator overridden in subclass).
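
    For example (illustrative values): `unique_list(['a', 'b', 'a'])` returns `['a', 'b']`,
    with the later `'a'` replacing the earlier occurrence in place.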
|
||||
"""
|
||||
result: list[T] = []
|
||||
result_names: list[str] = []
|
||||
for v in input_list:
|
||||
v_name = name_factory(v)
|
||||
if v_name not in result_names:
|
||||
result_names.append(v_name)
|
||||
result.append(v)
|
||||
else:
|
||||
result[result_names.index(v_name)] = v
|
||||
|
||||
return result
|
||||
|
||||
|
||||
class ValueItems(_repr.Representation):
|
||||
"""Class for more convenient calculation of excluded or included fields on values."""
|
||||
|
||||
__slots__ = ('_items', '_type')
|
||||
|
||||
def __init__(self, value: Any, items: AbstractSetIntStr | MappingIntStrAny) -> None:
|
||||
items = self._coerce_items(items)
|
||||
|
||||
if isinstance(value, (list, tuple)):
|
||||
items = self._normalize_indexes(items, len(value)) # type: ignore
|
||||
|
||||
self._items: MappingIntStrAny = items # type: ignore
|
||||
|
||||
def is_excluded(self, item: Any) -> bool:
|
||||
"""Check if item is fully excluded.
|
||||
|
||||
:param item: key or index of a value
|
||||
"""
|
||||
return self.is_true(self._items.get(item))
|
||||
|
||||
def is_included(self, item: Any) -> bool:
|
||||
"""Check if value is contained in self._items.
|
||||
|
||||
:param item: key or index of value
|
||||
"""
|
||||
return item in self._items
|
||||
|
||||
def for_element(self, e: int | str) -> AbstractSetIntStr | MappingIntStrAny | None:
|
||||
""":param e: key or index of element on value
|
||||
:return: raw values for element if self._items is dict and contain needed element
|
||||
"""
|
||||
item = self._items.get(e) # type: ignore
|
||||
return item if not self.is_true(item) else None
|
||||
|
||||
def _normalize_indexes(self, items: MappingIntStrAny, v_length: int) -> dict[int | str, Any]:
|
||||
""":param items: dict or set of indexes which will be normalized
|
||||
        :param v_length: length of the sequence whose indexes will be normalized
|
||||
|
||||
>>> self._normalize_indexes({0: True, -2: True, -1: True}, 4)
|
||||
{0: True, 2: True, 3: True}
|
||||
>>> self._normalize_indexes({'__all__': True}, 4)
|
||||
{0: True, 1: True, 2: True, 3: True}
|
||||
"""
|
||||
normalized_items: dict[int | str, Any] = {}
|
||||
all_items = None
|
||||
for i, v in items.items():
|
||||
if not (isinstance(v, typing.Mapping) or isinstance(v, typing.AbstractSet) or self.is_true(v)):
|
||||
raise TypeError(f'Unexpected type of exclude value for index "{i}" {v.__class__}')
|
||||
if i == '__all__':
|
||||
all_items = self._coerce_value(v)
|
||||
continue
|
||||
if not isinstance(i, int):
|
||||
raise TypeError(
|
||||
'Excluding fields from a sequence of sub-models or dicts must be performed index-wise: '
|
||||
'expected integer keys or keyword "__all__"'
|
||||
)
|
||||
normalized_i = v_length + i if i < 0 else i
|
||||
normalized_items[normalized_i] = self.merge(v, normalized_items.get(normalized_i))
|
||||
|
||||
if not all_items:
|
||||
return normalized_items
|
||||
if self.is_true(all_items):
|
||||
for i in range(v_length):
|
||||
normalized_items.setdefault(i, ...)
|
||||
return normalized_items
|
||||
for i in range(v_length):
|
||||
normalized_item = normalized_items.setdefault(i, {})
|
||||
if not self.is_true(normalized_item):
|
||||
normalized_items[i] = self.merge(all_items, normalized_item)
|
||||
return normalized_items
|
||||
|
||||
@classmethod
|
||||
def merge(cls, base: Any, override: Any, intersect: bool = False) -> Any:
|
||||
"""Merge a `base` item with an `override` item.
|
||||
|
||||
Both `base` and `override` are converted to dictionaries if possible.
|
||||
Sets are converted to dictionaries with the sets entries as keys and
|
||||
Ellipsis as values.
|
||||
|
||||
Each key-value pair existing in `base` is merged with `override`,
|
||||
while the rest of the key-value pairs are updated recursively with this function.
|
||||
|
||||
Merging takes place based on the "union" of keys if `intersect` is
|
||||
set to `False` (default) and on the intersection of keys if
|
||||
`intersect` is set to `True`.
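
        For example (illustrative values): `merge({'a': ...}, {'a': {'b': ...}})` returns
        `{'a': {'b': ...}}`, while `merge({'a': ...}, None)` returns the base unchanged.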
|
||||
"""
|
||||
override = cls._coerce_value(override)
|
||||
base = cls._coerce_value(base)
|
||||
if override is None:
|
||||
return base
|
||||
if cls.is_true(base) or base is None:
|
||||
return override
|
||||
if cls.is_true(override):
|
||||
return base if intersect else override
|
||||
|
||||
# intersection or union of keys while preserving ordering:
|
||||
if intersect:
|
||||
merge_keys = [k for k in base if k in override] + [k for k in override if k in base]
|
||||
else:
|
||||
merge_keys = list(base) + [k for k in override if k not in base]
|
||||
|
||||
merged: dict[int | str, Any] = {}
|
||||
for k in merge_keys:
|
||||
merged_item = cls.merge(base.get(k), override.get(k), intersect=intersect)
|
||||
if merged_item is not None:
|
||||
merged[k] = merged_item
|
||||
|
||||
return merged
|
||||
|
||||
@staticmethod
|
||||
def _coerce_items(items: AbstractSetIntStr | MappingIntStrAny) -> MappingIntStrAny:
|
||||
if isinstance(items, typing.Mapping):
|
||||
pass
|
||||
elif isinstance(items, typing.AbstractSet):
|
||||
items = dict.fromkeys(items, ...) # type: ignore
|
||||
else:
|
||||
class_name = getattr(items, '__class__', '???')
|
||||
raise TypeError(f'Unexpected type of exclude value {class_name}')
|
||||
return items # type: ignore
|
||||
|
||||
@classmethod
|
||||
def _coerce_value(cls, value: Any) -> Any:
|
||||
if value is None or cls.is_true(value):
|
||||
return value
|
||||
return cls._coerce_items(value)
|
||||
|
||||
@staticmethod
|
||||
def is_true(v: Any) -> bool:
|
||||
return v is True or v is ...
|
||||
|
||||
def __repr_args__(self) -> _repr.ReprArgs:
|
||||
return [(None, self._items)]
|
||||
|
||||
|
||||
if typing.TYPE_CHECKING:
|
||||
|
||||
def ClassAttribute(name: str, value: T) -> T:
|
||||
...
|
||||
|
||||
else:
|
||||
|
||||
class ClassAttribute:
|
||||
"""Hide class attribute from its instances."""
|
||||
|
||||
__slots__ = 'name', 'value'
|
||||
|
||||
def __init__(self, name: str, value: Any) -> None:
|
||||
self.name = name
|
||||
self.value = value
|
||||
|
||||
def __get__(self, instance: Any, owner: type[Any]) -> None:
|
||||
if instance is None:
|
||||
return self.value
|
||||
raise AttributeError(f'{self.name!r} attribute of {owner.__name__!r} is class-only')
|
||||
|
||||
|
||||
Obj = TypeVar('Obj')
|
||||
|
||||
|
||||
def smart_deepcopy(obj: Obj) -> Obj:
|
||||
"""Return type as is for immutable built-in types
|
||||
Use obj.copy() for built-in empty collections
|
||||
Use copy.deepcopy() for non-empty collections and unknown objects.
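
    For example (illustrative values): `smart_deepcopy(42)` returns the same int object,
    `smart_deepcopy([])` returns a fresh empty list via `list.copy()`, and
    `smart_deepcopy([{'a': 1}])` falls back to `copy.deepcopy`.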
|
||||
"""
|
||||
obj_type = obj.__class__
|
||||
if obj_type in IMMUTABLE_NON_COLLECTIONS_TYPES:
|
||||
return obj # fastest case: obj is immutable and not collection therefore will not be copied anyway
|
||||
try:
|
||||
if not obj and obj_type in BUILTIN_COLLECTIONS:
|
||||
# faster way for empty collections, no need to copy its members
|
||||
return obj if obj_type is tuple else obj.copy() # tuple doesn't have copy method # type: ignore
|
||||
except (TypeError, ValueError, RuntimeError):
|
||||
# do we really dare to catch ALL errors? Seems a bit risky
|
||||
pass
|
||||
|
||||
return deepcopy(obj) # slowest way when we actually might need a deepcopy
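
A quick sketch of the three code paths described in the docstring; the import path is the internal module this function lives in and may change between releases.

from pydantic._internal._utils import smart_deepcopy  # internal helper; path may change between releases

n = 10**6                     # immutable non-collection: returned unchanged
assert smart_deepcopy(n) is n

empty: list[int] = []         # empty built-in collection: cheap shallow .copy()
assert smart_deepcopy(empty) == [] and smart_deepcopy(empty) is not empty

nested = [[1], [2]]           # non-empty collection: full copy.deepcopy
copied = smart_deepcopy(nested)
assert copied == nested and copied[0] is not nested[0]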
|
||||
|
||||
|
||||
_EMPTY = object()
|
||||
|
||||
|
||||
def all_identical(left: typing.Iterable[Any], right: typing.Iterable[Any]) -> bool:
|
||||
"""Check that the items of `left` are the same objects as those in `right`.
|
||||
|
||||
>>> a, b = object(), object()
|
||||
>>> all_identical([a, b, a], [a, b, a])
|
||||
True
|
||||
>>> all_identical([a, b, [a]], [a, b, [a]]) # new list object, while "equal" is not "identical"
|
||||
False
|
||||
"""
|
||||
for left_item, right_item in zip_longest(left, right, fillvalue=_EMPTY):
|
||||
if left_item is not right_item:
|
||||
return False
|
||||
return True
|
||||
@@ -0,0 +1,149 @@
|
||||
from __future__ import annotations as _annotations
|
||||
|
||||
import inspect
|
||||
from dataclasses import dataclass
|
||||
from functools import partial
|
||||
from typing import Any, Awaitable, Callable
|
||||
|
||||
import pydantic_core
|
||||
|
||||
from ..config import ConfigDict
|
||||
from ..plugin._schema_validator import create_schema_validator
|
||||
from . import _generate_schema, _typing_extra
|
||||
from ._config import ConfigWrapper
|
||||
|
||||
|
||||
@dataclass
|
||||
class CallMarker:
|
||||
function: Callable[..., Any]
|
||||
validate_return: bool
|
||||
|
||||
|
||||
class ValidateCallWrapper:
|
||||
"""This is a wrapper around a function that validates the arguments passed to it, and optionally the return value.
|
||||
|
||||
It's partially inspired by `wraps` which in turn uses `partial`, but extended to be a descriptor so
|
||||
these functions can be applied to instance methods, class methods, static methods, as well as normal functions.
|
||||
"""
|
||||
|
||||
__slots__ = (
|
||||
'raw_function',
|
||||
'_config',
|
||||
'_validate_return',
|
||||
'__pydantic_core_schema__',
|
||||
'__pydantic_validator__',
|
||||
'__signature__',
|
||||
'__name__',
|
||||
'__qualname__',
|
||||
'__annotations__',
|
||||
'__dict__', # required for __module__
|
||||
)
|
||||
|
||||
def __init__(self, function: Callable[..., Any], config: ConfigDict | None, validate_return: bool):
|
||||
self.raw_function = function
|
||||
self._config = config
|
||||
self._validate_return = validate_return
|
||||
self.__signature__ = inspect.signature(function)
|
||||
if isinstance(function, partial):
|
||||
func = function.func
|
||||
schema_type = func
|
||||
self.__name__ = f'partial({func.__name__})'
|
||||
self.__qualname__ = f'partial({func.__qualname__})'
|
||||
self.__annotations__ = func.__annotations__
|
||||
self.__module__ = func.__module__
|
||||
self.__doc__ = func.__doc__
|
||||
else:
|
||||
schema_type = function
|
||||
self.__name__ = function.__name__
|
||||
self.__qualname__ = function.__qualname__
|
||||
self.__annotations__ = function.__annotations__
|
||||
self.__module__ = function.__module__
|
||||
self.__doc__ = function.__doc__
|
||||
|
||||
namespace = _typing_extra.add_module_globals(function, None)
|
||||
config_wrapper = ConfigWrapper(config)
|
||||
gen_schema = _generate_schema.GenerateSchema(config_wrapper, namespace)
|
||||
schema = gen_schema.clean_schema(gen_schema.generate_schema(function))
|
||||
self.__pydantic_core_schema__ = schema
|
||||
core_config = config_wrapper.core_config(self)
|
||||
|
||||
self.__pydantic_validator__ = create_schema_validator(
|
||||
schema,
|
||||
schema_type,
|
||||
self.__module__,
|
||||
self.__qualname__,
|
||||
'validate_call',
|
||||
core_config,
|
||||
config_wrapper.plugin_settings,
|
||||
)
|
||||
|
||||
if self._validate_return:
|
||||
return_type = (
|
||||
self.__signature__.return_annotation
|
||||
if self.__signature__.return_annotation is not self.__signature__.empty
|
||||
else Any
|
||||
)
|
||||
gen_schema = _generate_schema.GenerateSchema(config_wrapper, namespace)
|
||||
schema = gen_schema.clean_schema(gen_schema.generate_schema(return_type))
|
||||
self.__return_pydantic_core_schema__ = schema
|
||||
validator = create_schema_validator(
|
||||
schema,
|
||||
schema_type,
|
||||
self.__module__,
|
||||
self.__qualname__,
|
||||
'validate_call',
|
||||
core_config,
|
||||
config_wrapper.plugin_settings,
|
||||
)
|
||||
if inspect.iscoroutinefunction(self.raw_function):
|
||||
|
||||
async def return_val_wrapper(aw: Awaitable[Any]) -> Any:
    return validator.validate_python(await aw)
|
||||
|
||||
self.__return_pydantic_validator__ = return_val_wrapper
|
||||
else:
|
||||
self.__return_pydantic_validator__ = validator.validate_python
|
||||
else:
|
||||
self.__return_pydantic_core_schema__ = None
|
||||
self.__return_pydantic_validator__ = None
|
||||
|
||||
self._name: str | None = None # set by __get__, used to set the instance attribute when decorating methods
|
||||
|
||||
def __call__(self, *args: Any, **kwargs: Any) -> Any:
|
||||
res = self.__pydantic_validator__.validate_python(pydantic_core.ArgsKwargs(args, kwargs))
|
||||
if self.__return_pydantic_validator__:
|
||||
return self.__return_pydantic_validator__(res)
|
||||
return res
|
||||
|
||||
def __get__(self, obj: Any, objtype: type[Any] | None = None) -> ValidateCallWrapper:
|
||||
"""Bind the raw function and return another ValidateCallWrapper wrapping that."""
|
||||
if obj is None:
|
||||
try:
|
||||
# Handle the case where a method is accessed as a class attribute
|
||||
return objtype.__getattribute__(objtype, self._name) # type: ignore
|
||||
except AttributeError:
|
||||
# This will happen the first time the attribute is accessed
|
||||
pass
|
||||
|
||||
bound_function = self.raw_function.__get__(obj, objtype)
|
||||
result = self.__class__(bound_function, self._config, self._validate_return)
|
||||
|
||||
# skip binding to instance when obj or objtype has __slots__ attribute
|
||||
if hasattr(obj, '__slots__') or hasattr(objtype, '__slots__'):
|
||||
return result
|
||||
|
||||
if self._name is not None:
|
||||
if obj is not None:
|
||||
object.__setattr__(obj, self._name, result)
|
||||
else:
|
||||
object.__setattr__(objtype, self._name, result)
|
||||
return result
|
||||
|
||||
def __set_name__(self, owner: Any, name: str) -> None:
|
||||
self._name = name
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return f'ValidateCallWrapper({self.raw_function})'
|
||||
|
||||
def __eq__(self, other):
|
||||
return self.raw_function == other.raw_function
|
||||
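
`ValidateCallWrapper` is the machinery behind the public `validate_call` decorator. A minimal usage sketch, with the expected outcomes noted as comments:

from pydantic import ValidationError, validate_call

@validate_call(validate_return=True)
def repeat(text: str, count: int) -> str:
    return text * count

assert repeat('ab', 3) == 'ababab'
assert repeat('ab', '3') == 'ababab'   # '3' is coerced to int before the call

try:
    repeat('ab', 'not a number')
except ValidationError as exc:
    print(exc.error_count(), 'validation error(s)')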
@@ -0,0 +1,278 @@
|
||||
"""Validator functions for standard library types.
|
||||
|
||||
Import of this module is deferred since it contains imports of many standard library modules.
|
||||
"""
|
||||
|
||||
from __future__ import annotations as _annotations
|
||||
|
||||
import math
|
||||
import re
|
||||
import typing
|
||||
from ipaddress import IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network
|
||||
from typing import Any
|
||||
|
||||
from pydantic_core import PydanticCustomError, core_schema
|
||||
from pydantic_core._pydantic_core import PydanticKnownError
|
||||
|
||||
|
||||
def sequence_validator(
|
||||
__input_value: typing.Sequence[Any],
|
||||
validator: core_schema.ValidatorFunctionWrapHandler,
|
||||
) -> typing.Sequence[Any]:
|
||||
"""Validator for `Sequence` types, isinstance(v, Sequence) has already been called."""
|
||||
value_type = type(__input_value)
|
||||
|
||||
# We don't accept any plain string as a sequence
|
||||
# Relevant issue: https://github.com/pydantic/pydantic/issues/5595
|
||||
if issubclass(value_type, (str, bytes)):
|
||||
raise PydanticCustomError(
|
||||
'sequence_str',
|
||||
"'{type_name}' instances are not allowed as a Sequence value",
|
||||
{'type_name': value_type.__name__},
|
||||
)
|
||||
|
||||
v_list = validator(__input_value)
|
||||
|
||||
# the rest of the logic is just re-creating the original type from `v_list`
|
||||
if value_type == list:
|
||||
return v_list
|
||||
elif issubclass(value_type, range):
|
||||
# return the list as we probably can't re-create the range
|
||||
return v_list
|
||||
else:
|
||||
# best guess at how to re-create the original type, more custom construction logic might be required
|
||||
return value_type(v_list) # type: ignore[call-arg]
|
||||
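
Assuming this wrap validator is wired into `Sequence` handling as its docstring states, the observable behaviour looks roughly like this (a sketch, not part of the module):

from typing import Sequence
from pydantic import TypeAdapter

ta = TypeAdapter(Sequence[int])
print(ta.validate_python((1, '2', 3)))    # (1, 2, 3) – the original tuple type is re-created
# plain strings are rejected even though `str` is a `Sequence`:
# ta.validate_python('123')  -> ValidationError carrying the 'sequence_str' custom error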
|
||||
|
||||
def import_string(value: Any) -> Any:
|
||||
if isinstance(value, str):
|
||||
try:
|
||||
return _import_string_logic(value)
|
||||
except ImportError as e:
|
||||
raise PydanticCustomError('import_error', 'Invalid python path: {error}', {'error': str(e)}) from e
|
||||
else:
|
||||
# otherwise we just return the value and let the next validator do the rest of the work
|
||||
return value
|
||||
|
||||
|
||||
def _import_string_logic(dotted_path: str) -> Any:
|
||||
"""Inspired by uvicorn — dotted paths should include a colon before the final item if that item is not a module.
|
||||
(This is necessary to distinguish between a submodule and an attribute when there is a conflict.).
|
||||
|
||||
If the dotted path does not include a colon and the final item is not a valid module, importing as an attribute
|
||||
rather than a submodule will be attempted automatically.
|
||||
|
||||
So, for example, the following values of `dotted_path` result in the following returned values:
|
||||
* 'collections': <module 'collections'>
|
||||
* 'collections.abc': <module 'collections.abc'>
|
||||
* 'collections.abc:Mapping': <class 'collections.abc.Mapping'>
|
||||
* `collections.abc.Mapping`: <class 'collections.abc.Mapping'> (though this is a bit slower than the previous line)
|
||||
|
||||
An error will be raised under any of the following scenarios:
|
||||
* `dotted_path` contains more than one colon (e.g., 'collections:abc:Mapping')
|
||||
* the substring of `dotted_path` before the colon is not a valid module in the environment (e.g., '123:Mapping')
|
||||
* the substring of `dotted_path` after the colon is not an attribute of the module (e.g., 'collections:abc123')
|
||||
"""
|
||||
from importlib import import_module
|
||||
|
||||
components = dotted_path.strip().split(':')
|
||||
if len(components) > 2:
|
||||
raise ImportError(f"Import strings should have at most one ':'; received {dotted_path!r}")
|
||||
|
||||
module_path = components[0]
|
||||
if not module_path:
|
||||
raise ImportError(f'Import strings should have a nonempty module name; received {dotted_path!r}')
|
||||
|
||||
try:
|
||||
module = import_module(module_path)
|
||||
except ModuleNotFoundError as e:
|
||||
if '.' in module_path:
|
||||
# Check if it would be valid if the final item was separated from its module with a `:`
|
||||
maybe_module_path, maybe_attribute = dotted_path.strip().rsplit('.', 1)
|
||||
try:
|
||||
return _import_string_logic(f'{maybe_module_path}:{maybe_attribute}')
|
||||
except ImportError:
|
||||
pass
|
||||
raise ImportError(f'No module named {module_path!r}') from e
|
||||
raise e
|
||||
|
||||
if len(components) > 1:
|
||||
attribute = components[1]
|
||||
try:
|
||||
return getattr(module, attribute)
|
||||
except AttributeError as e:
|
||||
raise ImportError(f'cannot import name {attribute!r} from {module_path!r}') from e
|
||||
else:
|
||||
return module
|
||||
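
`import_string` and `_import_string_logic` back the public `ImportString` type, so both the dotted and the colon-separated forms described above are accepted. A small sketch:

from math import cos
from pydantic import BaseModel, ImportString

class Settings(BaseModel):
    callback: ImportString

assert Settings(callback='math.cos').callback is cos
assert Settings(callback='math:cos').callback is cos   # explicit module:attribute form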
|
||||
|
||||
def pattern_either_validator(__input_value: Any) -> typing.Pattern[Any]:
|
||||
if isinstance(__input_value, typing.Pattern):
|
||||
return __input_value
|
||||
elif isinstance(__input_value, (str, bytes)):
|
||||
# todo strict mode
|
||||
return compile_pattern(__input_value) # type: ignore
|
||||
else:
|
||||
raise PydanticCustomError('pattern_type', 'Input should be a valid pattern')
|
||||
|
||||
|
||||
def pattern_str_validator(__input_value: Any) -> typing.Pattern[str]:
|
||||
if isinstance(__input_value, typing.Pattern):
|
||||
if isinstance(__input_value.pattern, str):
|
||||
return __input_value
|
||||
else:
|
||||
raise PydanticCustomError('pattern_str_type', 'Input should be a string pattern')
|
||||
elif isinstance(__input_value, str):
|
||||
return compile_pattern(__input_value)
|
||||
elif isinstance(__input_value, bytes):
|
||||
raise PydanticCustomError('pattern_str_type', 'Input should be a string pattern')
|
||||
else:
|
||||
raise PydanticCustomError('pattern_type', 'Input should be a valid pattern')
|
||||
|
||||
|
||||
def pattern_bytes_validator(__input_value: Any) -> typing.Pattern[bytes]:
|
||||
if isinstance(__input_value, typing.Pattern):
|
||||
if isinstance(__input_value.pattern, bytes):
|
||||
return __input_value
|
||||
else:
|
||||
raise PydanticCustomError('pattern_bytes_type', 'Input should be a bytes pattern')
|
||||
elif isinstance(__input_value, bytes):
|
||||
return compile_pattern(__input_value)
|
||||
elif isinstance(__input_value, str):
|
||||
raise PydanticCustomError('pattern_bytes_type', 'Input should be a bytes pattern')
|
||||
else:
|
||||
raise PydanticCustomError('pattern_type', 'Input should be a valid pattern')
|
||||
|
||||
|
||||
PatternType = typing.TypeVar('PatternType', str, bytes)
|
||||
|
||||
|
||||
def compile_pattern(pattern: PatternType) -> typing.Pattern[PatternType]:
|
||||
try:
|
||||
return re.compile(pattern)
|
||||
except re.error:
|
||||
raise PydanticCustomError('pattern_regex', 'Input should be a valid regular expression')
|
||||
|
||||
|
||||
def ip_v4_address_validator(__input_value: Any) -> IPv4Address:
|
||||
if isinstance(__input_value, IPv4Address):
|
||||
return __input_value
|
||||
|
||||
try:
|
||||
return IPv4Address(__input_value)
|
||||
except ValueError:
|
||||
raise PydanticCustomError('ip_v4_address', 'Input is not a valid IPv4 address')
|
||||
|
||||
|
||||
def ip_v6_address_validator(__input_value: Any) -> IPv6Address:
|
||||
if isinstance(__input_value, IPv6Address):
|
||||
return __input_value
|
||||
|
||||
try:
|
||||
return IPv6Address(__input_value)
|
||||
except ValueError:
|
||||
raise PydanticCustomError('ip_v6_address', 'Input is not a valid IPv6 address')
|
||||
|
||||
|
||||
def ip_v4_network_validator(__input_value: Any) -> IPv4Network:
|
||||
"""Assume IPv4Network initialised with a default `strict` argument.
|
||||
|
||||
See more:
|
||||
https://docs.python.org/library/ipaddress.html#ipaddress.IPv4Network
|
||||
"""
|
||||
if isinstance(__input_value, IPv4Network):
|
||||
return __input_value
|
||||
|
||||
try:
|
||||
return IPv4Network(__input_value)
|
||||
except ValueError:
|
||||
raise PydanticCustomError('ip_v4_network', 'Input is not a valid IPv4 network')
|
||||
|
||||
|
||||
def ip_v6_network_validator(__input_value: Any) -> IPv6Network:
|
||||
"""Assume IPv6Network initialised with a default `strict` argument.
|
||||
|
||||
See more:
|
||||
https://docs.python.org/library/ipaddress.html#ipaddress.IPv6Network
|
||||
"""
|
||||
if isinstance(__input_value, IPv6Network):
|
||||
return __input_value
|
||||
|
||||
try:
|
||||
return IPv6Network(__input_value)
|
||||
except ValueError:
|
||||
raise PydanticCustomError('ip_v6_network', 'Input is not a valid IPv6 network')
|
||||
|
||||
|
||||
def ip_v4_interface_validator(__input_value: Any) -> IPv4Interface:
|
||||
if isinstance(__input_value, IPv4Interface):
|
||||
return __input_value
|
||||
|
||||
try:
|
||||
return IPv4Interface(__input_value)
|
||||
except ValueError:
|
||||
raise PydanticCustomError('ip_v4_interface', 'Input is not a valid IPv4 interface')
|
||||
|
||||
|
||||
def ip_v6_interface_validator(__input_value: Any) -> IPv6Interface:
|
||||
if isinstance(__input_value, IPv6Interface):
|
||||
return __input_value
|
||||
|
||||
try:
|
||||
return IPv6Interface(__input_value)
|
||||
except ValueError:
|
||||
raise PydanticCustomError('ip_v6_interface', 'Input is not a valid IPv6 interface')
|
||||
|
||||
|
||||
def greater_than_validator(x: Any, gt: Any) -> Any:
|
||||
if not (x > gt):
|
||||
raise PydanticKnownError('greater_than', {'gt': gt})
|
||||
return x
|
||||
|
||||
|
||||
def greater_than_or_equal_validator(x: Any, ge: Any) -> Any:
|
||||
if not (x >= ge):
|
||||
raise PydanticKnownError('greater_than_equal', {'ge': ge})
|
||||
return x
|
||||
|
||||
|
||||
def less_than_validator(x: Any, lt: Any) -> Any:
|
||||
if not (x < lt):
|
||||
raise PydanticKnownError('less_than', {'lt': lt})
|
||||
return x
|
||||
|
||||
|
||||
def less_than_or_equal_validator(x: Any, le: Any) -> Any:
|
||||
if not (x <= le):
|
||||
raise PydanticKnownError('less_than_equal', {'le': le})
|
||||
return x
|
||||
|
||||
|
||||
def multiple_of_validator(x: Any, multiple_of: Any) -> Any:
|
||||
if not (x % multiple_of == 0):
|
||||
raise PydanticKnownError('multiple_of', {'multiple_of': multiple_of})
|
||||
return x
|
||||
|
||||
|
||||
def min_length_validator(x: Any, min_length: Any) -> Any:
|
||||
if not (len(x) >= min_length):
|
||||
raise PydanticKnownError(
|
||||
'too_short',
|
||||
{'field_type': 'Value', 'min_length': min_length, 'actual_length': len(x)},
|
||||
)
|
||||
return x
|
||||
|
||||
|
||||
def max_length_validator(x: Any, max_length: Any) -> Any:
|
||||
if len(x) > max_length:
|
||||
raise PydanticKnownError(
|
||||
'too_long',
|
||||
{'field_type': 'Value', 'max_length': max_length, 'actual_length': len(x)},
|
||||
)
|
||||
return x
|
||||
|
||||
|
||||
def forbid_inf_nan_check(x: Any) -> Any:
|
||||
if not math.isfinite(x):
|
||||
raise PydanticKnownError('finite_number')
|
||||
return x
|
||||
308
Backend/venv/lib/python3.12/site-packages/pydantic/_migration.py
Normal file
@@ -0,0 +1,308 @@
|
||||
import sys
|
||||
from typing import Any, Callable, Dict
|
||||
|
||||
from .version import version_short
|
||||
|
||||
MOVED_IN_V2 = {
|
||||
'pydantic.utils:version_info': 'pydantic.version:version_info',
|
||||
'pydantic.error_wrappers:ValidationError': 'pydantic:ValidationError',
|
||||
'pydantic.utils:to_camel': 'pydantic.alias_generators:to_pascal',
|
||||
'pydantic.utils:to_lower_camel': 'pydantic.alias_generators:to_camel',
|
||||
'pydantic:PyObject': 'pydantic.types:ImportString',
|
||||
'pydantic.types:PyObject': 'pydantic.types:ImportString',
|
||||
'pydantic.generics:GenericModel': 'pydantic.BaseModel',
|
||||
}
|
||||
|
||||
DEPRECATED_MOVED_IN_V2 = {
|
||||
'pydantic.tools:schema_of': 'pydantic.deprecated.tools:schema_of',
|
||||
'pydantic.tools:parse_obj_as': 'pydantic.deprecated.tools:parse_obj_as',
|
||||
'pydantic.tools:schema_json_of': 'pydantic.deprecated.tools:schema_json_of',
|
||||
'pydantic.json:pydantic_encoder': 'pydantic.deprecated.json:pydantic_encoder',
|
||||
'pydantic:validate_arguments': 'pydantic.deprecated.decorator:validate_arguments',
|
||||
'pydantic.json:custom_pydantic_encoder': 'pydantic.deprecated.json:custom_pydantic_encoder',
|
||||
'pydantic.json:timedelta_isoformat': 'pydantic.deprecated.json:timedelta_isoformat',
|
||||
'pydantic.decorator:validate_arguments': 'pydantic.deprecated.decorator:validate_arguments',
|
||||
'pydantic.class_validators:validator': 'pydantic.deprecated.class_validators:validator',
|
||||
'pydantic.class_validators:root_validator': 'pydantic.deprecated.class_validators:root_validator',
|
||||
'pydantic.config:BaseConfig': 'pydantic.deprecated.config:BaseConfig',
|
||||
'pydantic.config:Extra': 'pydantic.deprecated.config:Extra',
|
||||
}
|
||||
|
||||
REDIRECT_TO_V1 = {
|
||||
f'pydantic.utils:{obj}': f'pydantic.v1.utils:{obj}'
|
||||
for obj in (
|
||||
'deep_update',
|
||||
'GetterDict',
|
||||
'lenient_issubclass',
|
||||
'lenient_isinstance',
|
||||
'is_valid_field',
|
||||
'update_not_none',
|
||||
'import_string',
|
||||
'Representation',
|
||||
'ROOT_KEY',
|
||||
'smart_deepcopy',
|
||||
'sequence_like',
|
||||
)
|
||||
}
|
||||
|
||||
|
||||
REMOVED_IN_V2 = {
|
||||
'pydantic:ConstrainedBytes',
|
||||
'pydantic:ConstrainedDate',
|
||||
'pydantic:ConstrainedDecimal',
|
||||
'pydantic:ConstrainedFloat',
|
||||
'pydantic:ConstrainedFrozenSet',
|
||||
'pydantic:ConstrainedInt',
|
||||
'pydantic:ConstrainedList',
|
||||
'pydantic:ConstrainedSet',
|
||||
'pydantic:ConstrainedStr',
|
||||
'pydantic:JsonWrapper',
|
||||
'pydantic:NoneBytes',
|
||||
'pydantic:NoneStr',
|
||||
'pydantic:NoneStrBytes',
|
||||
'pydantic:Protocol',
|
||||
'pydantic:Required',
|
||||
'pydantic:StrBytes',
|
||||
'pydantic:compiled',
|
||||
'pydantic.config:get_config',
|
||||
'pydantic.config:inherit_config',
|
||||
'pydantic.config:prepare_config',
|
||||
'pydantic:create_model_from_namedtuple',
|
||||
'pydantic:create_model_from_typeddict',
|
||||
'pydantic.dataclasses:create_pydantic_model_from_dataclass',
|
||||
'pydantic.dataclasses:make_dataclass_validator',
|
||||
'pydantic.dataclasses:set_validation',
|
||||
'pydantic.datetime_parse:parse_date',
|
||||
'pydantic.datetime_parse:parse_time',
|
||||
'pydantic.datetime_parse:parse_datetime',
|
||||
'pydantic.datetime_parse:parse_duration',
|
||||
'pydantic.error_wrappers:ErrorWrapper',
|
||||
'pydantic.errors:AnyStrMaxLengthError',
|
||||
'pydantic.errors:AnyStrMinLengthError',
|
||||
'pydantic.errors:ArbitraryTypeError',
|
||||
'pydantic.errors:BoolError',
|
||||
'pydantic.errors:BytesError',
|
||||
'pydantic.errors:CallableError',
|
||||
'pydantic.errors:ClassError',
|
||||
'pydantic.errors:ColorError',
|
||||
'pydantic.errors:ConfigError',
|
||||
'pydantic.errors:DataclassTypeError',
|
||||
'pydantic.errors:DateError',
|
||||
'pydantic.errors:DateNotInTheFutureError',
|
||||
'pydantic.errors:DateNotInThePastError',
|
||||
'pydantic.errors:DateTimeError',
|
||||
'pydantic.errors:DecimalError',
|
||||
'pydantic.errors:DecimalIsNotFiniteError',
|
||||
'pydantic.errors:DecimalMaxDigitsError',
|
||||
'pydantic.errors:DecimalMaxPlacesError',
|
||||
'pydantic.errors:DecimalWholeDigitsError',
|
||||
'pydantic.errors:DictError',
|
||||
'pydantic.errors:DurationError',
|
||||
'pydantic.errors:EmailError',
|
||||
'pydantic.errors:EnumError',
|
||||
'pydantic.errors:EnumMemberError',
|
||||
'pydantic.errors:ExtraError',
|
||||
'pydantic.errors:FloatError',
|
||||
'pydantic.errors:FrozenSetError',
|
||||
'pydantic.errors:FrozenSetMaxLengthError',
|
||||
'pydantic.errors:FrozenSetMinLengthError',
|
||||
'pydantic.errors:HashableError',
|
||||
'pydantic.errors:IPv4AddressError',
|
||||
'pydantic.errors:IPv4InterfaceError',
|
||||
'pydantic.errors:IPv4NetworkError',
|
||||
'pydantic.errors:IPv6AddressError',
|
||||
'pydantic.errors:IPv6InterfaceError',
|
||||
'pydantic.errors:IPv6NetworkError',
|
||||
'pydantic.errors:IPvAnyAddressError',
|
||||
'pydantic.errors:IPvAnyInterfaceError',
|
||||
'pydantic.errors:IPvAnyNetworkError',
|
||||
'pydantic.errors:IntEnumError',
|
||||
'pydantic.errors:IntegerError',
|
||||
'pydantic.errors:InvalidByteSize',
|
||||
'pydantic.errors:InvalidByteSizeUnit',
|
||||
'pydantic.errors:InvalidDiscriminator',
|
||||
'pydantic.errors:InvalidLengthForBrand',
|
||||
'pydantic.errors:JsonError',
|
||||
'pydantic.errors:JsonTypeError',
|
||||
'pydantic.errors:ListError',
|
||||
'pydantic.errors:ListMaxLengthError',
|
||||
'pydantic.errors:ListMinLengthError',
|
||||
'pydantic.errors:ListUniqueItemsError',
|
||||
'pydantic.errors:LuhnValidationError',
|
||||
'pydantic.errors:MissingDiscriminator',
|
||||
'pydantic.errors:MissingError',
|
||||
'pydantic.errors:NoneIsAllowedError',
|
||||
'pydantic.errors:NoneIsNotAllowedError',
|
||||
'pydantic.errors:NotDigitError',
|
||||
'pydantic.errors:NotNoneError',
|
||||
'pydantic.errors:NumberNotGeError',
|
||||
'pydantic.errors:NumberNotGtError',
|
||||
'pydantic.errors:NumberNotLeError',
|
||||
'pydantic.errors:NumberNotLtError',
|
||||
'pydantic.errors:NumberNotMultipleError',
|
||||
'pydantic.errors:PathError',
|
||||
'pydantic.errors:PathNotADirectoryError',
|
||||
'pydantic.errors:PathNotAFileError',
|
||||
'pydantic.errors:PathNotExistsError',
|
||||
'pydantic.errors:PatternError',
|
||||
'pydantic.errors:PyObjectError',
|
||||
'pydantic.errors:PydanticTypeError',
|
||||
'pydantic.errors:PydanticValueError',
|
||||
'pydantic.errors:SequenceError',
|
||||
'pydantic.errors:SetError',
|
||||
'pydantic.errors:SetMaxLengthError',
|
||||
'pydantic.errors:SetMinLengthError',
|
||||
'pydantic.errors:StrError',
|
||||
'pydantic.errors:StrRegexError',
|
||||
'pydantic.errors:StrictBoolError',
|
||||
'pydantic.errors:SubclassError',
|
||||
'pydantic.errors:TimeError',
|
||||
'pydantic.errors:TupleError',
|
||||
'pydantic.errors:TupleLengthError',
|
||||
'pydantic.errors:UUIDError',
|
||||
'pydantic.errors:UUIDVersionError',
|
||||
'pydantic.errors:UrlError',
|
||||
'pydantic.errors:UrlExtraError',
|
||||
'pydantic.errors:UrlHostError',
|
||||
'pydantic.errors:UrlHostTldError',
|
||||
'pydantic.errors:UrlPortError',
|
||||
'pydantic.errors:UrlSchemeError',
|
||||
'pydantic.errors:UrlSchemePermittedError',
|
||||
'pydantic.errors:UrlUserInfoError',
|
||||
'pydantic.errors:WrongConstantError',
|
||||
'pydantic.main:validate_model',
|
||||
'pydantic.networks:stricturl',
|
||||
'pydantic:parse_file_as',
|
||||
'pydantic:parse_raw_as',
|
||||
'pydantic:stricturl',
|
||||
'pydantic.tools:parse_file_as',
|
||||
'pydantic.tools:parse_raw_as',
|
||||
'pydantic.types:ConstrainedBytes',
|
||||
'pydantic.types:ConstrainedDate',
|
||||
'pydantic.types:ConstrainedDecimal',
|
||||
'pydantic.types:ConstrainedFloat',
|
||||
'pydantic.types:ConstrainedFrozenSet',
|
||||
'pydantic.types:ConstrainedInt',
|
||||
'pydantic.types:ConstrainedList',
|
||||
'pydantic.types:ConstrainedSet',
|
||||
'pydantic.types:ConstrainedStr',
|
||||
'pydantic.types:JsonWrapper',
|
||||
'pydantic.types:NoneBytes',
|
||||
'pydantic.types:NoneStr',
|
||||
'pydantic.types:NoneStrBytes',
|
||||
'pydantic.types:StrBytes',
|
||||
'pydantic.typing:evaluate_forwardref',
|
||||
'pydantic.typing:AbstractSetIntStr',
|
||||
'pydantic.typing:AnyCallable',
|
||||
'pydantic.typing:AnyClassMethod',
|
||||
'pydantic.typing:CallableGenerator',
|
||||
'pydantic.typing:DictAny',
|
||||
'pydantic.typing:DictIntStrAny',
|
||||
'pydantic.typing:DictStrAny',
|
||||
'pydantic.typing:IntStr',
|
||||
'pydantic.typing:ListStr',
|
||||
'pydantic.typing:MappingIntStrAny',
|
||||
'pydantic.typing:NoArgAnyCallable',
|
||||
'pydantic.typing:NoneType',
|
||||
'pydantic.typing:ReprArgs',
|
||||
'pydantic.typing:SetStr',
|
||||
'pydantic.typing:StrPath',
|
||||
'pydantic.typing:TupleGenerator',
|
||||
'pydantic.typing:WithArgsTypes',
|
||||
'pydantic.typing:all_literal_values',
|
||||
'pydantic.typing:display_as_type',
|
||||
'pydantic.typing:get_all_type_hints',
|
||||
'pydantic.typing:get_args',
|
||||
'pydantic.typing:get_origin',
|
||||
'pydantic.typing:get_sub_types',
|
||||
'pydantic.typing:is_callable_type',
|
||||
'pydantic.typing:is_classvar',
|
||||
'pydantic.typing:is_finalvar',
|
||||
'pydantic.typing:is_literal_type',
|
||||
'pydantic.typing:is_namedtuple',
|
||||
'pydantic.typing:is_new_type',
|
||||
'pydantic.typing:is_none_type',
|
||||
'pydantic.typing:is_typeddict',
|
||||
'pydantic.typing:is_typeddict_special',
|
||||
'pydantic.typing:is_union',
|
||||
'pydantic.typing:new_type_supertype',
|
||||
'pydantic.typing:resolve_annotations',
|
||||
'pydantic.typing:typing_base',
|
||||
'pydantic.typing:update_field_forward_refs',
|
||||
'pydantic.typing:update_model_forward_refs',
|
||||
'pydantic.utils:ClassAttribute',
|
||||
'pydantic.utils:DUNDER_ATTRIBUTES',
|
||||
'pydantic.utils:PyObjectStr',
|
||||
'pydantic.utils:ValueItems',
|
||||
'pydantic.utils:almost_equal_floats',
|
||||
'pydantic.utils:get_discriminator_alias_and_values',
|
||||
'pydantic.utils:get_model',
|
||||
'pydantic.utils:get_unique_discriminator_alias',
|
||||
'pydantic.utils:in_ipython',
|
||||
'pydantic.utils:is_valid_identifier',
|
||||
'pydantic.utils:path_type',
|
||||
'pydantic.utils:validate_field_name',
|
||||
'pydantic:validate_model',
|
||||
}
|
||||
|
||||
|
||||
def getattr_migration(module: str) -> Callable[[str], Any]:
|
||||
"""Implement PEP 562 for objects that were either moved or removed on the migration
|
||||
to V2.
|
||||
|
||||
Args:
|
||||
module: The module name.
|
||||
|
||||
Returns:
|
||||
A callable that will raise an error if the object is not found.
|
||||
"""
|
||||
# This avoids circular import with errors.py.
|
||||
from .errors import PydanticImportError
|
||||
|
||||
def wrapper(name: str) -> object:
|
||||
"""Raise an error if the object is not found, or warn if it was moved.
|
||||
|
||||
In case it was moved, it still returns the object.
|
||||
|
||||
Args:
|
||||
name: The object name.
|
||||
|
||||
Returns:
|
||||
The object.
|
||||
"""
|
||||
if name == '__path__':
|
||||
raise AttributeError(f'module {module!r} has no attribute {name!r}')
|
||||
|
||||
import warnings
|
||||
|
||||
from ._internal._validators import import_string
|
||||
|
||||
import_path = f'{module}:{name}'
|
||||
if import_path in MOVED_IN_V2.keys():
|
||||
new_location = MOVED_IN_V2[import_path]
|
||||
warnings.warn(f'`{import_path}` has been moved to `{new_location}`.')
|
||||
return import_string(MOVED_IN_V2[import_path])
|
||||
if import_path in DEPRECATED_MOVED_IN_V2:
|
||||
# skip the warning here because a deprecation warning will be raised elsewhere
|
||||
return import_string(DEPRECATED_MOVED_IN_V2[import_path])
|
||||
if import_path in REDIRECT_TO_V1:
|
||||
new_location = REDIRECT_TO_V1[import_path]
|
||||
warnings.warn(
|
||||
f'`{import_path}` has been removed. We are importing from `{new_location}` instead.'
|
||||
'See the migration guide for more details: https://docs.pydantic.dev/latest/migration/'
|
||||
)
|
||||
return import_string(REDIRECT_TO_V1[import_path])
|
||||
if import_path == 'pydantic:BaseSettings':
|
||||
raise PydanticImportError(
|
||||
'`BaseSettings` has been moved to the `pydantic-settings` package. '
|
||||
f'See https://docs.pydantic.dev/{version_short()}/migration/#basesettings-has-moved-to-pydantic-settings '
|
||||
'for more details.'
|
||||
)
|
||||
if import_path in REMOVED_IN_V2:
|
||||
raise PydanticImportError(f'`{import_path}` has been removed in V2.')
|
||||
globals: Dict[str, Any] = sys.modules[module].__dict__
|
||||
if name in globals:
|
||||
return globals[name]
|
||||
raise AttributeError(f'module {module!r} has no attribute {name!r}')
|
||||
|
||||
return wrapper
|
||||
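
Assuming a module wires this up with `__getattr__ = getattr_migration(__name__)` (as the small shim modules below do), the lookup behaviour is roughly:

from pydantic.errors import PydanticImportError

try:
    from pydantic import parse_file_as          # listed in REMOVED_IN_V2 above
except PydanticImportError as exc:
    print(exc)                                  # `pydantic:parse_file_as` has been removed in V2.

from pydantic.generics import GenericModel      # MOVED_IN_V2: warns and returns pydantic.BaseModel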
@@ -0,0 +1,44 @@
"""Alias generators for converting between different capitalization conventions."""
import re

__all__ = ('to_pascal', 'to_camel', 'to_snake')


def to_pascal(snake: str) -> str:
    """Convert a snake_case string to PascalCase.

    Args:
        snake: The string to convert.

    Returns:
        The PascalCase string.
    """
    camel = snake.title()
    return re.sub('([0-9A-Za-z])_(?=[0-9A-Z])', lambda m: m.group(1), camel)


def to_camel(snake: str) -> str:
    """Convert a snake_case string to camelCase.

    Args:
        snake: The string to convert.

    Returns:
        The converted camelCase string.
    """
    camel = to_pascal(snake)
    return re.sub('(^_*[A-Z])', lambda m: m.group(1).lower(), camel)


def to_snake(camel: str) -> str:
    """Convert a PascalCase or camelCase string to snake_case.

    Args:
        camel: The string to convert.

    Returns:
        The converted string in snake_case.
    """
    snake = re.sub(r'([a-zA-Z])([0-9])', lambda m: f'{m.group(1)}_{m.group(2)}', camel)
    snake = re.sub(r'([a-z0-9])([A-Z])', lambda m: f'{m.group(1)}_{m.group(2)}', snake)
    return snake.lower()
|
||||
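
These generators are typically plugged into model config as `alias_generator`. A short sketch of the expected conversions:

from pydantic import BaseModel, ConfigDict
from pydantic.alias_generators import to_camel, to_snake

assert to_camel('sign_up_ts') == 'signUpTs'
assert to_snake('HttpResponseCode') == 'http_response_code'

class User(BaseModel):
    model_config = ConfigDict(alias_generator=to_camel, populate_by_name=True)

    first_name: str
    sign_up_ts: int

user = User.model_validate({'firstName': 'Ada', 'signUpTs': 1})
assert user.model_dump(by_alias=True) == {'firstName': 'Ada', 'signUpTs': 1}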
@@ -0,0 +1,120 @@
|
||||
"""Type annotations to use with `__get_pydantic_core_schema__` and `__get_pydantic_json_schema__`."""
|
||||
from __future__ import annotations as _annotations
|
||||
|
||||
from typing import TYPE_CHECKING, Any, Union
|
||||
|
||||
from pydantic_core import core_schema
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .json_schema import JsonSchemaMode, JsonSchemaValue
|
||||
|
||||
CoreSchemaOrField = Union[
|
||||
core_schema.CoreSchema,
|
||||
core_schema.ModelField,
|
||||
core_schema.DataclassField,
|
||||
core_schema.TypedDictField,
|
||||
core_schema.ComputedField,
|
||||
]
|
||||
|
||||
__all__ = 'GetJsonSchemaHandler', 'GetCoreSchemaHandler'
|
||||
|
||||
|
||||
class GetJsonSchemaHandler:
|
||||
"""Handler to call into the next JSON schema generation function.
|
||||
|
||||
Attributes:
|
||||
mode: Json schema mode, can be `validation` or `serialization`.
|
||||
"""
|
||||
|
||||
mode: JsonSchemaMode
|
||||
|
||||
def __call__(self, __core_schema: CoreSchemaOrField) -> JsonSchemaValue:
|
||||
"""Call the inner handler and get the JsonSchemaValue it returns.
|
||||
This will call the next JSON schema modifying function up until it calls
|
||||
into `pydantic.json_schema.GenerateJsonSchema`, which will raise a
|
||||
`pydantic.errors.PydanticInvalidForJsonSchema` error if it cannot generate
|
||||
a JSON schema.
|
||||
|
||||
Args:
|
||||
__core_schema: A `pydantic_core.core_schema.CoreSchema`.
|
||||
|
||||
Returns:
|
||||
JsonSchemaValue: The JSON schema generated by the inner JSON schema modify
|
||||
functions.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def resolve_ref_schema(self, __maybe_ref_json_schema: JsonSchemaValue) -> JsonSchemaValue:
|
||||
"""Get the real schema for a `{"$ref": ...}` schema.
|
||||
If the schema given is not a `$ref` schema, it will be returned as is.
|
||||
This means you don't have to check before calling this function.
|
||||
|
||||
Args:
|
||||
__maybe_ref_json_schema: A JsonSchemaValue, ref based or not.
|
||||
|
||||
Raises:
|
||||
LookupError: If the ref is not found.
|
||||
|
||||
Returns:
|
||||
JsonSchemaValue: A JsonSchemaValue that has no `$ref`.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
class GetCoreSchemaHandler:
|
||||
"""Handler to call into the next CoreSchema schema generation function."""
|
||||
|
||||
def __call__(self, __source_type: Any) -> core_schema.CoreSchema:
|
||||
"""Call the inner handler and get the CoreSchema it returns.
|
||||
This will call the next CoreSchema modifying function up until it calls
|
||||
into Pydantic's internal schema generation machinery, which will raise a
|
||||
`pydantic.errors.PydanticSchemaGenerationError` error if it cannot generate
|
||||
a CoreSchema for the given source type.
|
||||
|
||||
Args:
|
||||
__source_type: The input type.
|
||||
|
||||
Returns:
|
||||
CoreSchema: The `pydantic-core` CoreSchema generated.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def generate_schema(self, __source_type: Any) -> core_schema.CoreSchema:
|
||||
"""Generate a schema unrelated to the current context.
|
||||
Use this function if e.g. you are handling schema generation for a sequence
|
||||
and want to generate a schema for its items.
|
||||
Otherwise, you may end up doing something like applying a `min_length` constraint
|
||||
that was intended for the sequence itself to its items!
|
||||
|
||||
Args:
|
||||
__source_type: The input type.
|
||||
|
||||
Returns:
|
||||
CoreSchema: The `pydantic-core` CoreSchema generated.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def resolve_ref_schema(self, __maybe_ref_schema: core_schema.CoreSchema) -> core_schema.CoreSchema:
|
||||
"""Get the real schema for a `definition-ref` schema.
|
||||
If the schema given is not a `definition-ref` schema, it will be returned as is.
|
||||
This means you don't have to check before calling this function.
|
||||
|
||||
Args:
|
||||
__maybe_ref_schema: A `CoreSchema`, `ref`-based or not.
|
||||
|
||||
Raises:
|
||||
LookupError: If the `ref` is not found.
|
||||
|
||||
Returns:
|
||||
A concrete `CoreSchema`.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
@property
|
||||
def field_name(self) -> str | None:
|
||||
"""Get the name of the closest field to this validator."""
|
||||
raise NotImplementedError
|
||||
|
||||
def _get_types_namespace(self) -> dict[str, Any] | None:
|
||||
"""Internal method used during type resolution for serializer annotations."""
|
||||
raise NotImplementedError
|
||||
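
A custom type can use the handler protocol above by delegating to `handler(...)` for the base schema and post-processing the result. A minimal sketch; the `Truncated` type and its 10-character limit are made up for illustration:

from typing import Any
from pydantic import BaseModel, GetCoreSchemaHandler
from pydantic_core import CoreSchema, core_schema

class Truncated(str):
    """Hypothetical str subtype that is truncated to 10 characters after validation."""

    @classmethod
    def __get_pydantic_core_schema__(cls, source_type: Any, handler: GetCoreSchemaHandler) -> CoreSchema:
        schema = handler(str)  # delegate to the next schema generation step for plain `str`
        return core_schema.no_info_after_validator_function(lambda v: cls(v[:10]), schema)

class Model(BaseModel):
    name: Truncated

print(Model(name='abcdefghijklmno').name)  # 'abcdefghij'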
@@ -0,0 +1,4 @@
"""`class_validators` module is a backport module from V1."""
from ._migration import getattr_migration

__getattr__ = getattr_migration(__name__)
|
||||
603
Backend/venv/lib/python3.12/site-packages/pydantic/color.py
Normal file
@@ -0,0 +1,603 @@
|
||||
"""Color definitions are used as per the CSS3
|
||||
[CSS Color Module Level 3](http://www.w3.org/TR/css3-color/#svg-color) specification.
|
||||
|
||||
A few colors have multiple names referring to the sames colors, eg. `grey` and `gray` or `aqua` and `cyan`.
|
||||
|
||||
In these cases the _last_ color when sorted alphabetically takes preferences,
|
||||
eg. `Color((0, 255, 255)).as_named() == 'cyan'` because "cyan" comes after "aqua".
|
||||
|
||||
Warning: Deprecated
|
||||
The `Color` class is deprecated, use `pydantic_extra_types` instead.
|
||||
See [`pydantic-extra-types.Color`](../usage/types/extra_types/color_types.md)
|
||||
for more information.
|
||||
"""
|
||||
import math
|
||||
import re
|
||||
from colorsys import hls_to_rgb, rgb_to_hls
|
||||
from typing import Any, Callable, Optional, Tuple, Type, Union, cast
|
||||
|
||||
from pydantic_core import CoreSchema, PydanticCustomError, core_schema
|
||||
from typing_extensions import deprecated
|
||||
|
||||
from ._internal import _repr
|
||||
from ._internal._schema_generation_shared import GetJsonSchemaHandler as _GetJsonSchemaHandler
|
||||
from .json_schema import JsonSchemaValue
|
||||
from .warnings import PydanticDeprecatedSince20
|
||||
|
||||
ColorTuple = Union[Tuple[int, int, int], Tuple[int, int, int, float]]
|
||||
ColorType = Union[ColorTuple, str]
|
||||
HslColorTuple = Union[Tuple[float, float, float], Tuple[float, float, float, float]]
|
||||
|
||||
|
||||
class RGBA:
|
||||
"""Internal use only as a representation of a color."""
|
||||
|
||||
__slots__ = 'r', 'g', 'b', 'alpha', '_tuple'
|
||||
|
||||
def __init__(self, r: float, g: float, b: float, alpha: Optional[float]):
|
||||
self.r = r
|
||||
self.g = g
|
||||
self.b = b
|
||||
self.alpha = alpha
|
||||
|
||||
self._tuple: Tuple[float, float, float, Optional[float]] = (r, g, b, alpha)
|
||||
|
||||
def __getitem__(self, item: Any) -> Any:
|
||||
return self._tuple[item]
|
||||
|
||||
|
||||
# these are not compiled here to avoid import slowdown, they'll be compiled the first time they're used, then cached
|
||||
_r_255 = r'(\d{1,3}(?:\.\d+)?)'
|
||||
_r_comma = r'\s*,\s*'
|
||||
_r_alpha = r'(\d(?:\.\d+)?|\.\d+|\d{1,2}%)'
|
||||
_r_h = r'(-?\d+(?:\.\d+)?|-?\.\d+)(deg|rad|turn)?'
|
||||
_r_sl = r'(\d{1,3}(?:\.\d+)?)%'
|
||||
r_hex_short = r'\s*(?:#|0x)?([0-9a-f])([0-9a-f])([0-9a-f])([0-9a-f])?\s*'
|
||||
r_hex_long = r'\s*(?:#|0x)?([0-9a-f]{2})([0-9a-f]{2})([0-9a-f]{2})([0-9a-f]{2})?\s*'
|
||||
# CSS3 RGB examples: rgb(0, 0, 0), rgba(0, 0, 0, 0.5), rgba(0, 0, 0, 50%)
|
||||
r_rgb = rf'\s*rgba?\(\s*{_r_255}{_r_comma}{_r_255}{_r_comma}{_r_255}(?:{_r_comma}{_r_alpha})?\s*\)\s*'
|
||||
# CSS3 HSL examples: hsl(270, 60%, 50%), hsla(270, 60%, 50%, 0.5), hsla(270, 60%, 50%, 50%)
|
||||
r_hsl = rf'\s*hsla?\(\s*{_r_h}{_r_comma}{_r_sl}{_r_comma}{_r_sl}(?:{_r_comma}{_r_alpha})?\s*\)\s*'
|
||||
# CSS4 RGB examples: rgb(0 0 0), rgb(0 0 0 / 0.5), rgb(0 0 0 / 50%), rgba(0 0 0 / 50%)
|
||||
r_rgb_v4_style = rf'\s*rgba?\(\s*{_r_255}\s+{_r_255}\s+{_r_255}(?:\s*/\s*{_r_alpha})?\s*\)\s*'
|
||||
# CSS4 HSL examples: hsl(270 60% 50%), hsl(270 60% 50% / 0.5), hsl(270 60% 50% / 50%), hsla(270 60% 50% / 50%)
|
||||
r_hsl_v4_style = rf'\s*hsla?\(\s*{_r_h}\s+{_r_sl}\s+{_r_sl}(?:\s*/\s*{_r_alpha})?\s*\)\s*'
|
||||
|
||||
# colors where the two hex characters are the same, if all colors match this the short version of hex colors can be used
|
||||
repeat_colors = {int(c * 2, 16) for c in '0123456789abcdef'}
|
||||
rads = 2 * math.pi
|
||||
|
||||
|
||||
@deprecated(
|
||||
'The `Color` class is deprecated, use `pydantic_extra_types` instead. '
|
||||
'See https://docs.pydantic.dev/latest/api/pydantic_extra_types_color/.',
|
||||
category=PydanticDeprecatedSince20,
|
||||
)
|
||||
class Color(_repr.Representation):
|
||||
"""Represents a color."""
|
||||
|
||||
__slots__ = '_original', '_rgba'
|
||||
|
||||
def __init__(self, value: ColorType) -> None:
|
||||
self._rgba: RGBA
|
||||
self._original: ColorType
|
||||
if isinstance(value, (tuple, list)):
|
||||
self._rgba = parse_tuple(value)
|
||||
elif isinstance(value, str):
|
||||
self._rgba = parse_str(value)
|
||||
elif isinstance(value, Color):
|
||||
self._rgba = value._rgba
|
||||
value = value._original
|
||||
else:
|
||||
raise PydanticCustomError(
|
||||
'color_error', 'value is not a valid color: value must be a tuple, list or string'
|
||||
)
|
||||
|
||||
# if we've got here value must be a valid color
|
||||
self._original = value
|
||||
|
||||
@classmethod
|
||||
def __get_pydantic_json_schema__(
|
||||
cls, core_schema: core_schema.CoreSchema, handler: _GetJsonSchemaHandler
|
||||
) -> JsonSchemaValue:
|
||||
field_schema = {}
|
||||
field_schema.update(type='string', format='color')
|
||||
return field_schema
|
||||
|
||||
def original(self) -> ColorType:
|
||||
"""Original value passed to `Color`."""
|
||||
return self._original
|
||||
|
||||
def as_named(self, *, fallback: bool = False) -> str:
|
||||
"""Returns the name of the color if it can be found in `COLORS_BY_VALUE` dictionary,
|
||||
otherwise returns the hexadecimal representation of the color or raises `ValueError`.
|
||||
|
||||
Args:
|
||||
fallback: If True, falls back to returning the hexadecimal representation of
|
||||
the color instead of raising a ValueError when no named color is found.
|
||||
|
||||
Returns:
|
||||
The name of the color, or the hexadecimal representation of the color.
|
||||
|
||||
Raises:
|
||||
ValueError: When no named color is found and fallback is `False`.
|
||||
"""
|
||||
if self._rgba.alpha is None:
|
||||
rgb = cast(Tuple[int, int, int], self.as_rgb_tuple())
|
||||
try:
|
||||
return COLORS_BY_VALUE[rgb]
|
||||
except KeyError as e:
|
||||
if fallback:
|
||||
return self.as_hex()
|
||||
else:
|
||||
raise ValueError('no named color found, use fallback=True, as_hex() or as_rgb()') from e
|
||||
else:
|
||||
return self.as_hex()
|
||||
|
||||
def as_hex(self) -> str:
|
||||
"""Returns the hexadecimal representation of the color.
|
||||
|
||||
The hex string can be 3, 4, 6, or 8 characters long, depending on whether a "short"
representation of the color is possible and whether an alpha channel is present.
|
||||
|
||||
Returns:
|
||||
The hexadecimal representation of the color.
|
||||
"""
|
||||
values = [float_to_255(c) for c in self._rgba[:3]]
|
||||
if self._rgba.alpha is not None:
|
||||
values.append(float_to_255(self._rgba.alpha))
|
||||
|
||||
as_hex = ''.join(f'{v:02x}' for v in values)
|
||||
if all(c in repeat_colors for c in values):
|
||||
as_hex = ''.join(as_hex[c] for c in range(0, len(as_hex), 2))
|
||||
return '#' + as_hex
|
||||
|
||||
def as_rgb(self) -> str:
|
||||
"""Color as an `rgb(<r>, <g>, <b>)` or `rgba(<r>, <g>, <b>, <a>)` string."""
|
||||
if self._rgba.alpha is None:
|
||||
return f'rgb({float_to_255(self._rgba.r)}, {float_to_255(self._rgba.g)}, {float_to_255(self._rgba.b)})'
|
||||
else:
|
||||
return (
|
||||
f'rgba({float_to_255(self._rgba.r)}, {float_to_255(self._rgba.g)}, {float_to_255(self._rgba.b)}, '
|
||||
f'{round(self._alpha_float(), 2)})'
|
||||
)
|
||||
|
||||
def as_rgb_tuple(self, *, alpha: Optional[bool] = None) -> ColorTuple:
|
||||
"""Returns the color as an RGB or RGBA tuple.
|
||||
|
||||
Args:
|
||||
alpha: Whether to include the alpha channel. There are three options for this input:
|
||||
|
||||
- `None` (default): Include alpha only if it's set. (e.g. not `None`)
|
||||
- `True`: Always include alpha.
|
||||
- `False`: Always omit alpha.
|
||||
|
||||
Returns:
|
||||
A tuple that contains the values of the red, green, and blue channels in the range 0 to 255.
|
||||
If alpha is included, it is in the range 0 to 1.
|
||||
"""
|
||||
r, g, b = (float_to_255(c) for c in self._rgba[:3])
|
||||
if alpha is None:
|
||||
if self._rgba.alpha is None:
|
||||
return r, g, b
|
||||
else:
|
||||
return r, g, b, self._alpha_float()
|
||||
elif alpha:
|
||||
return r, g, b, self._alpha_float()
|
||||
else:
|
||||
# alpha is False
|
||||
return r, g, b
|
||||
|
||||
def as_hsl(self) -> str:
|
||||
"""Color as an `hsl(<h>, <s>, <l>)` or `hsl(<h>, <s>, <l>, <a>)` string."""
|
||||
if self._rgba.alpha is None:
|
||||
h, s, li = self.as_hsl_tuple(alpha=False) # type: ignore
|
||||
return f'hsl({h * 360:0.0f}, {s:0.0%}, {li:0.0%})'
|
||||
else:
|
||||
h, s, li, a = self.as_hsl_tuple(alpha=True) # type: ignore
|
||||
return f'hsl({h * 360:0.0f}, {s:0.0%}, {li:0.0%}, {round(a, 2)})'
|
||||
|
||||
def as_hsl_tuple(self, *, alpha: Optional[bool] = None) -> HslColorTuple:
|
||||
"""Returns the color as an HSL or HSLA tuple.
|
||||
|
||||
Args:
|
||||
alpha: Whether to include the alpha channel.
|
||||
|
||||
- `None` (default): Include the alpha channel only if it's set (e.g. not `None`).
|
||||
- `True`: Always include alpha.
|
||||
- `False`: Always omit alpha.
|
||||
|
||||
Returns:
|
||||
The color as a tuple of hue, saturation, lightness, and alpha (if included).
|
||||
All elements are in the range 0 to 1.
|
||||
|
||||
Note:
|
||||
This is HSL as used in HTML and most other places, not HLS as used in Python's `colorsys`.
|
||||
"""
|
||||
h, l, s = rgb_to_hls(self._rgba.r, self._rgba.g, self._rgba.b) # noqa: E741
|
||||
if alpha is None:
|
||||
if self._rgba.alpha is None:
|
||||
return h, s, l
|
||||
else:
|
||||
return h, s, l, self._alpha_float()
|
||||
if alpha:
|
||||
return h, s, l, self._alpha_float()
|
||||
else:
|
||||
# alpha is False
|
||||
return h, s, l
|
||||
|
||||
def _alpha_float(self) -> float:
|
||||
return 1 if self._rgba.alpha is None else self._rgba.alpha
|
||||
|
||||
@classmethod
|
||||
def __get_pydantic_core_schema__(
|
||||
cls, source: Type[Any], handler: Callable[[Any], CoreSchema]
|
||||
) -> core_schema.CoreSchema:
|
||||
return core_schema.with_info_plain_validator_function(
|
||||
cls._validate, serialization=core_schema.to_string_ser_schema()
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def _validate(cls, __input_value: Any, _: Any) -> 'Color':
|
||||
return cls(__input_value)
|
||||
|
||||
def __str__(self) -> str:
|
||||
return self.as_named(fallback=True)
|
||||
|
||||
def __repr_args__(self) -> '_repr.ReprArgs':
|
||||
return [(None, self.as_named(fallback=True))] + [('rgb', self.as_rgb_tuple())]
|
||||
|
||||
def __eq__(self, other: Any) -> bool:
|
||||
return isinstance(other, Color) and self.as_rgb_tuple() == other.as_rgb_tuple()
|
||||
|
||||
def __hash__(self) -> int:
|
||||
return hash(self.as_rgb_tuple())
|
||||
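
A few representative conversions using the formats parsed below; note that using `Color` emits a `PydanticDeprecatedSince20` warning, per the decorator above:

from pydantic.color import Color

print(Color('#09f').as_rgb_tuple())           # (0, 153, 255)
print(Color((0, 255, 255)).as_named())        # 'cyan'
print(Color('red').as_hex())                  # '#f00'  (short form, all channels repeat)
print(Color('rgb(255, 0, 0, 0.5)').as_rgb())  # 'rgba(255, 0, 0, 0.5)'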
|
||||
|
||||
def parse_tuple(value: Tuple[Any, ...]) -> RGBA:
|
||||
"""Parse a tuple or list to get RGBA values.
|
||||
|
||||
Args:
|
||||
value: A tuple or list.
|
||||
|
||||
Returns:
|
||||
An `RGBA` tuple parsed from the input tuple.
|
||||
|
||||
Raises:
|
||||
PydanticCustomError: If tuple is not valid.
|
||||
"""
|
||||
if len(value) == 3:
|
||||
r, g, b = (parse_color_value(v) for v in value)
|
||||
return RGBA(r, g, b, None)
|
||||
elif len(value) == 4:
|
||||
r, g, b = (parse_color_value(v) for v in value[:3])
|
||||
return RGBA(r, g, b, parse_float_alpha(value[3]))
|
||||
else:
|
||||
raise PydanticCustomError('color_error', 'value is not a valid color: tuples must have length 3 or 4')
|
||||
|
||||
|
||||
def parse_str(value: str) -> RGBA:
|
||||
"""Parse a string representing a color to an RGBA tuple.
|
||||
|
||||
Possible formats for the input string include:
|
||||
|
||||
* named color, see `COLORS_BY_NAME`
|
||||
* hex short eg. `<prefix>fff` (prefix can be `#`, `0x` or nothing)
|
||||
* hex long eg. `<prefix>ffffff` (prefix can be `#`, `0x` or nothing)
|
||||
* `rgb(<r>, <g>, <b>)`
|
||||
* `rgba(<r>, <g>, <b>, <a>)`
|
||||
|
||||
Args:
|
||||
value: A string representing a color.
|
||||
|
||||
Returns:
|
||||
An `RGBA` tuple parsed from the input string.
|
||||
|
||||
Raises:
|
||||
ValueError: If the input string cannot be parsed to an RGBA tuple.
|
||||
"""
|
||||
value_lower = value.lower()
|
||||
try:
|
||||
r, g, b = COLORS_BY_NAME[value_lower]
|
||||
except KeyError:
|
||||
pass
|
||||
else:
|
||||
return ints_to_rgba(r, g, b, None)
|
||||
|
||||
m = re.fullmatch(r_hex_short, value_lower)
|
||||
if m:
|
||||
*rgb, a = m.groups()
|
||||
r, g, b = (int(v * 2, 16) for v in rgb)
|
||||
if a:
|
||||
alpha: Optional[float] = int(a * 2, 16) / 255
|
||||
else:
|
||||
alpha = None
|
||||
return ints_to_rgba(r, g, b, alpha)
|
||||
|
||||
m = re.fullmatch(r_hex_long, value_lower)
|
||||
if m:
|
||||
*rgb, a = m.groups()
|
||||
r, g, b = (int(v, 16) for v in rgb)
|
||||
if a:
|
||||
alpha = int(a, 16) / 255
|
||||
else:
|
||||
alpha = None
|
||||
return ints_to_rgba(r, g, b, alpha)
|
||||
|
||||
m = re.fullmatch(r_rgb, value_lower) or re.fullmatch(r_rgb_v4_style, value_lower)
|
||||
if m:
|
||||
return ints_to_rgba(*m.groups()) # type: ignore
|
||||
|
||||
m = re.fullmatch(r_hsl, value_lower) or re.fullmatch(r_hsl_v4_style, value_lower)
|
||||
if m:
|
||||
return parse_hsl(*m.groups()) # type: ignore
|
||||
|
||||
raise PydanticCustomError('color_error', 'value is not a valid color: string not recognised as a valid color')
|
||||
|
||||
|
||||
def ints_to_rgba(r: Union[int, str], g: Union[int, str], b: Union[int, str], alpha: Optional[float] = None) -> RGBA:
|
||||
"""Converts integer or string values for RGB color and an optional alpha value to an `RGBA` object.
|
||||
|
||||
Args:
|
||||
r: An integer or string representing the red color value.
|
||||
g: An integer or string representing the green color value.
|
||||
b: An integer or string representing the blue color value.
|
||||
alpha: A float representing the alpha value. Defaults to None.
|
||||
|
||||
Returns:
|
||||
An instance of the `RGBA` class with the corresponding color and alpha values.
|
||||
"""
|
||||
return RGBA(parse_color_value(r), parse_color_value(g), parse_color_value(b), parse_float_alpha(alpha))
|
||||
|
||||
|
||||
def parse_color_value(value: Union[int, str], max_val: int = 255) -> float:
|
||||
"""Parse the color value provided and return a number between 0 and 1.
|
||||
|
||||
Args:
|
||||
value: An integer or string color value.
|
||||
max_val: Maximum range value. Defaults to 255.
|
||||
|
||||
Raises:
|
||||
PydanticCustomError: If the value is not a valid color.
|
||||
|
||||
Returns:
|
||||
A number between 0 and 1.
|
||||
"""
|
||||
try:
|
||||
color = float(value)
|
||||
except ValueError:
|
||||
raise PydanticCustomError('color_error', 'value is not a valid color: color values must be a valid number')
|
||||
if 0 <= color <= max_val:
|
||||
return color / max_val
|
||||
else:
|
||||
raise PydanticCustomError(
|
||||
'color_error',
|
||||
'value is not a valid color: color values must be in the range 0 to {max_val}',
|
||||
{'max_val': max_val},
|
||||
)
|
||||
|
||||
|
||||
def parse_float_alpha(value: Union[None, str, float, int]) -> Optional[float]:
|
||||
"""Parse an alpha value checking it's a valid float in the range 0 to 1.
|
||||
|
||||
Args:
|
||||
value: The input value to parse.
|
||||
|
||||
Returns:
|
||||
The parsed value as a float, or `None` if the value was None or equal 1.
|
||||
|
||||
Raises:
|
||||
PydanticCustomError: If the input value cannot be successfully parsed as a float in the expected range.
|
||||
"""
|
||||
if value is None:
|
||||
return None
|
||||
try:
|
||||
if isinstance(value, str) and value.endswith('%'):
|
||||
alpha = float(value[:-1]) / 100
|
||||
else:
|
||||
alpha = float(value)
|
||||
except ValueError:
|
||||
raise PydanticCustomError('color_error', 'value is not a valid color: alpha values must be a valid float')
|
||||
|
||||
if math.isclose(alpha, 1):
|
||||
return None
|
||||
elif 0 <= alpha <= 1:
|
||||
return alpha
|
||||
else:
|
||||
raise PydanticCustomError('color_error', 'value is not a valid color: alpha values must be in the range 0 to 1')
|
||||
|
||||
|
||||
def parse_hsl(h: str, h_units: str, sat: str, light: str, alpha: Optional[float] = None) -> RGBA:
|
||||
"""Parse raw hue, saturation, lightness, and alpha values and convert to RGBA.
|
||||
|
||||
Args:
|
||||
h: The hue value.
|
||||
h_units: The unit for hue value.
|
||||
sat: The saturation value.
|
||||
light: The lightness value.
|
||||
alpha: Alpha value.
|
||||
|
||||
Returns:
|
||||
An instance of `RGBA`.
|
||||
"""
|
||||
s_value, l_value = parse_color_value(sat, 100), parse_color_value(light, 100)
|
||||
|
||||
h_value = float(h)
|
||||
if h_units in {None, 'deg'}:
|
||||
h_value = h_value % 360 / 360
|
||||
elif h_units == 'rad':
|
||||
h_value = h_value % rads / rads
|
||||
else:
|
||||
# turns
|
||||
h_value = h_value % 1
|
||||
|
||||
r, g, b = hls_to_rgb(h_value, l_value, s_value)
|
||||
return RGBA(r, g, b, parse_float_alpha(alpha))
|
||||
|
||||
|
||||
def float_to_255(c: float) -> int:
|
||||
"""Converts a float value between 0 and 1 (inclusive) to an integer between 0 and 255 (inclusive).
|
||||
|
||||
Args:
|
||||
c: The float value to be converted. Must be between 0 and 1 (inclusive).
|
||||
|
||||
Returns:
|
||||
The integer equivalent of the given float value rounded to the nearest whole number.
|
||||
|
||||
Raises:
|
||||
ValueError: If the given float value is outside the acceptable range of 0 to 1 (inclusive).
|
||||
"""
|
||||
return int(round(c * 255))
|
||||
|
||||
|
||||
COLORS_BY_NAME = {
|
||||
'aliceblue': (240, 248, 255),
|
||||
'antiquewhite': (250, 235, 215),
|
||||
'aqua': (0, 255, 255),
|
||||
'aquamarine': (127, 255, 212),
|
||||
'azure': (240, 255, 255),
|
||||
'beige': (245, 245, 220),
|
||||
'bisque': (255, 228, 196),
|
||||
'black': (0, 0, 0),
|
||||
'blanchedalmond': (255, 235, 205),
|
||||
'blue': (0, 0, 255),
|
||||
'blueviolet': (138, 43, 226),
|
||||
'brown': (165, 42, 42),
|
||||
'burlywood': (222, 184, 135),
|
||||
'cadetblue': (95, 158, 160),
|
||||
'chartreuse': (127, 255, 0),
|
||||
'chocolate': (210, 105, 30),
|
||||
'coral': (255, 127, 80),
|
||||
'cornflowerblue': (100, 149, 237),
|
||||
'cornsilk': (255, 248, 220),
|
||||
'crimson': (220, 20, 60),
|
||||
'cyan': (0, 255, 255),
|
||||
'darkblue': (0, 0, 139),
|
||||
'darkcyan': (0, 139, 139),
|
||||
'darkgoldenrod': (184, 134, 11),
|
||||
'darkgray': (169, 169, 169),
|
||||
'darkgreen': (0, 100, 0),
|
||||
'darkgrey': (169, 169, 169),
|
||||
'darkkhaki': (189, 183, 107),
|
||||
'darkmagenta': (139, 0, 139),
|
||||
'darkolivegreen': (85, 107, 47),
|
||||
'darkorange': (255, 140, 0),
|
||||
'darkorchid': (153, 50, 204),
|
||||
'darkred': (139, 0, 0),
|
||||
'darksalmon': (233, 150, 122),
|
||||
'darkseagreen': (143, 188, 143),
|
||||
'darkslateblue': (72, 61, 139),
|
||||
'darkslategray': (47, 79, 79),
|
||||
'darkslategrey': (47, 79, 79),
|
||||
'darkturquoise': (0, 206, 209),
|
||||
'darkviolet': (148, 0, 211),
|
||||
'deeppink': (255, 20, 147),
|
||||
'deepskyblue': (0, 191, 255),
|
||||
'dimgray': (105, 105, 105),
|
||||
'dimgrey': (105, 105, 105),
|
||||
'dodgerblue': (30, 144, 255),
|
||||
'firebrick': (178, 34, 34),
|
||||
'floralwhite': (255, 250, 240),
|
||||
'forestgreen': (34, 139, 34),
|
||||
'fuchsia': (255, 0, 255),
|
||||
'gainsboro': (220, 220, 220),
|
||||
'ghostwhite': (248, 248, 255),
|
||||
'gold': (255, 215, 0),
|
||||
'goldenrod': (218, 165, 32),
|
||||
'gray': (128, 128, 128),
|
||||
'green': (0, 128, 0),
|
||||
'greenyellow': (173, 255, 47),
|
||||
'grey': (128, 128, 128),
|
||||
'honeydew': (240, 255, 240),
|
||||
'hotpink': (255, 105, 180),
|
||||
'indianred': (205, 92, 92),
|
||||
'indigo': (75, 0, 130),
|
||||
'ivory': (255, 255, 240),
|
||||
'khaki': (240, 230, 140),
|
||||
'lavender': (230, 230, 250),
|
||||
'lavenderblush': (255, 240, 245),
|
||||
'lawngreen': (124, 252, 0),
|
||||
'lemonchiffon': (255, 250, 205),
|
||||
'lightblue': (173, 216, 230),
|
||||
'lightcoral': (240, 128, 128),
|
||||
'lightcyan': (224, 255, 255),
|
||||
'lightgoldenrodyellow': (250, 250, 210),
|
||||
'lightgray': (211, 211, 211),
|
||||
'lightgreen': (144, 238, 144),
|
||||
'lightgrey': (211, 211, 211),
|
||||
'lightpink': (255, 182, 193),
|
||||
'lightsalmon': (255, 160, 122),
|
||||
'lightseagreen': (32, 178, 170),
|
||||
'lightskyblue': (135, 206, 250),
|
||||
'lightslategray': (119, 136, 153),
|
||||
'lightslategrey': (119, 136, 153),
|
||||
'lightsteelblue': (176, 196, 222),
|
||||
'lightyellow': (255, 255, 224),
|
||||
'lime': (0, 255, 0),
|
||||
'limegreen': (50, 205, 50),
|
||||
'linen': (250, 240, 230),
|
||||
'magenta': (255, 0, 255),
|
||||
'maroon': (128, 0, 0),
|
||||
'mediumaquamarine': (102, 205, 170),
|
||||
'mediumblue': (0, 0, 205),
|
||||
'mediumorchid': (186, 85, 211),
|
||||
'mediumpurple': (147, 112, 219),
|
||||
'mediumseagreen': (60, 179, 113),
|
||||
'mediumslateblue': (123, 104, 238),
|
||||
'mediumspringgreen': (0, 250, 154),
|
||||
'mediumturquoise': (72, 209, 204),
|
||||
'mediumvioletred': (199, 21, 133),
|
||||
'midnightblue': (25, 25, 112),
|
||||
'mintcream': (245, 255, 250),
|
||||
'mistyrose': (255, 228, 225),
|
||||
'moccasin': (255, 228, 181),
|
||||
'navajowhite': (255, 222, 173),
|
||||
'navy': (0, 0, 128),
|
||||
'oldlace': (253, 245, 230),
|
||||
'olive': (128, 128, 0),
|
||||
'olivedrab': (107, 142, 35),
|
||||
'orange': (255, 165, 0),
|
||||
'orangered': (255, 69, 0),
|
||||
'orchid': (218, 112, 214),
|
||||
'palegoldenrod': (238, 232, 170),
|
||||
'palegreen': (152, 251, 152),
|
||||
'paleturquoise': (175, 238, 238),
|
||||
'palevioletred': (219, 112, 147),
|
||||
'papayawhip': (255, 239, 213),
|
||||
'peachpuff': (255, 218, 185),
|
||||
'peru': (205, 133, 63),
|
||||
'pink': (255, 192, 203),
|
||||
'plum': (221, 160, 221),
|
||||
'powderblue': (176, 224, 230),
|
||||
'purple': (128, 0, 128),
|
||||
'red': (255, 0, 0),
|
||||
'rosybrown': (188, 143, 143),
|
||||
'royalblue': (65, 105, 225),
|
||||
'saddlebrown': (139, 69, 19),
|
||||
'salmon': (250, 128, 114),
|
||||
'sandybrown': (244, 164, 96),
|
||||
'seagreen': (46, 139, 87),
|
||||
'seashell': (255, 245, 238),
|
||||
'sienna': (160, 82, 45),
|
||||
'silver': (192, 192, 192),
|
||||
'skyblue': (135, 206, 235),
|
||||
'slateblue': (106, 90, 205),
|
||||
'slategray': (112, 128, 144),
|
||||
'slategrey': (112, 128, 144),
|
||||
'snow': (255, 250, 250),
|
||||
'springgreen': (0, 255, 127),
|
||||
'steelblue': (70, 130, 180),
|
||||
'tan': (210, 180, 140),
|
||||
'teal': (0, 128, 128),
|
||||
'thistle': (216, 191, 216),
|
||||
'tomato': (255, 99, 71),
|
||||
'turquoise': (64, 224, 208),
|
||||
'violet': (238, 130, 238),
|
||||
'wheat': (245, 222, 179),
|
||||
'white': (255, 255, 255),
|
||||
'whitesmoke': (245, 245, 245),
|
||||
'yellow': (255, 255, 0),
|
||||
'yellowgreen': (154, 205, 50),
|
||||
}
|
||||
|
||||
COLORS_BY_VALUE = {v: k for k, v in COLORS_BY_NAME.items()}
|
||||
872
Backend/venv/lib/python3.12/site-packages/pydantic/config.py
Normal file
872
Backend/venv/lib/python3.12/site-packages/pydantic/config.py
Normal file
@@ -0,0 +1,872 @@
|
||||
"""Configuration for Pydantic models."""
|
||||
from __future__ import annotations as _annotations
|
||||
|
||||
from typing import TYPE_CHECKING, Any, Callable, Dict, List, Type, Union
|
||||
|
||||
from typing_extensions import Literal, TypeAlias, TypedDict
|
||||
|
||||
from ._migration import getattr_migration
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ._internal._generate_schema import GenerateSchema as _GenerateSchema
|
||||
|
||||
__all__ = ('ConfigDict',)
|
||||
|
||||
|
||||
JsonValue: TypeAlias = Union[int, float, str, bool, None, List['JsonValue'], 'JsonDict']
|
||||
JsonDict: TypeAlias = Dict[str, JsonValue]
|
||||
|
||||
JsonEncoder = Callable[[Any], Any]
|
||||
|
||||
JsonSchemaExtraCallable: TypeAlias = Union[
|
||||
Callable[[JsonDict], None],
|
||||
Callable[[JsonDict, Type[Any]], None],
|
||||
]
|
||||
|
||||
ExtraValues = Literal['allow', 'ignore', 'forbid']
|
||||
|
||||
|
||||
class ConfigDict(TypedDict, total=False):
|
||||
"""A TypedDict for configuring Pydantic behaviour."""
|
||||
|
||||
title: str | None
|
||||
"""The title for the generated JSON schema, defaults to the model's name"""
|
||||
|
||||
str_to_lower: bool
|
||||
"""Whether to convert all characters to lowercase for str types. Defaults to `False`."""
|
||||
|
||||
str_to_upper: bool
|
||||
"""Whether to convert all characters to uppercase for str types. Defaults to `False`."""
|
||||
str_strip_whitespace: bool
|
||||
"""Whether to strip leading and trailing whitespace for str types."""
|
||||
|
||||
str_min_length: int
|
||||
"""The minimum length for str types. Defaults to `None`."""
|
||||
|
||||
str_max_length: int | None
|
||||
"""The maximum length for str types. Defaults to `None`."""
|
||||
|
||||
extra: ExtraValues | None
|
||||
"""
|
||||
Whether to ignore, allow, or forbid extra attributes during model initialization. Defaults to `'ignore'`.
|
||||
|
||||
You can configure how pydantic handles the attributes that are not defined in the model:
|
||||
|
||||
* `allow` - Allow any extra attributes.
|
||||
* `forbid` - Forbid any extra attributes.
|
||||
* `ignore` - Ignore any extra attributes.
|
||||
|
||||
```py
|
||||
from pydantic import BaseModel, ConfigDict
|
||||
|
||||
|
||||
class User(BaseModel):
|
||||
model_config = ConfigDict(extra='ignore') # (1)!
|
||||
|
||||
name: str
|
||||
|
||||
|
||||
user = User(name='John Doe', age=20) # (2)!
|
||||
print(user)
|
||||
#> name='John Doe'
|
||||
```
|
||||
|
||||
1. This is the default behaviour.
|
||||
2. The `age` argument is ignored.
|
||||
|
||||
Instead, with `extra='allow'`, the `age` argument is included:
|
||||
|
||||
```py
|
||||
from pydantic import BaseModel, ConfigDict
|
||||
|
||||
|
||||
class User(BaseModel):
|
||||
model_config = ConfigDict(extra='allow')
|
||||
|
||||
name: str
|
||||
|
||||
|
||||
user = User(name='John Doe', age=20) # (1)!
|
||||
print(user)
|
||||
#> name='John Doe' age=20
|
||||
```
|
||||
|
||||
1. The `age` argument is included.
|
||||
|
||||
With `extra='forbid'`, an error is raised:
|
||||
|
||||
```py
|
||||
from pydantic import BaseModel, ConfigDict, ValidationError
|
||||
|
||||
|
||||
class User(BaseModel):
|
||||
model_config = ConfigDict(extra='forbid')
|
||||
|
||||
name: str
|
||||
|
||||
|
||||
try:
|
||||
User(name='John Doe', age=20)
|
||||
except ValidationError as e:
|
||||
print(e)
|
||||
'''
|
||||
1 validation error for User
|
||||
age
|
||||
Extra inputs are not permitted [type=extra_forbidden, input_value=20, input_type=int]
|
||||
'''
|
||||
```
|
||||
"""
|
||||
|
||||
frozen: bool
|
||||
"""
|
||||
Whether models are faux-immutable, i.e. whether `__setattr__` is allowed. Setting this to `True` also generates
|
||||
a `__hash__()` method for the model. This makes instances of the model potentially hashable if all the
|
||||
attributes are hashable. Defaults to `False`.
|
||||
|
||||
Note:
|
||||
On V1, this setting was called `allow_mutation`, and was `True` by default.
|
||||
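For illustration, a minimal sketch of the behaviour described above (the error message shown is indicative):

```py
from pydantic import BaseModel, ConfigDict, ValidationError


class Point(BaseModel):
    model_config = ConfigDict(frozen=True)

    x: int
    y: int


p = Point(x=1, y=2)
# frozen models get a __hash__ derived from their fields
print(hash(p) == hash(Point(x=1, y=2)))
#> True

try:
    p.x = 3  # assignment is rejected because the instance is faux-immutable
except ValidationError as e:
    print(e)
    '''
    1 validation error for Point
    x
      Instance is frozen [type=frozen_instance, input_value=3, input_type=int]
    '''
```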
"""
|
||||
|
||||
populate_by_name: bool
|
||||
"""
|
||||
Whether an aliased field may be populated by its name as given by the model
|
||||
attribute, as well as the alias. Defaults to `False`.
|
||||
|
||||
Note:
|
||||
The name of this configuration setting was changed in **v2.0** from
|
||||
`allow_population_by_field_name` to `populate_by_name`.
|
||||
|
||||
```py
|
||||
from pydantic import BaseModel, ConfigDict, Field
|
||||
|
||||
|
||||
class User(BaseModel):
|
||||
model_config = ConfigDict(populate_by_name=True)
|
||||
|
||||
name: str = Field(alias='full_name') # (1)!
|
||||
age: int
|
||||
|
||||
|
||||
user = User(full_name='John Doe', age=20) # (2)!
|
||||
print(user)
|
||||
#> name='John Doe' age=20
|
||||
user = User(name='John Doe', age=20) # (3)!
|
||||
print(user)
|
||||
#> name='John Doe' age=20
|
||||
```
|
||||
|
||||
1. The field `'name'` has an alias `'full_name'`.
|
||||
2. The model is populated by the alias `'full_name'`.
|
||||
3. The model is populated by the field name `'name'`.
|
||||
"""
|
||||
|
||||
use_enum_values: bool
|
||||
"""
|
||||
Whether to populate models with the `value` property of enums, rather than the raw enum.
|
||||
This may be useful if you want to serialize `model.model_dump()` later. Defaults to `False`.
|
||||
|
||||
!!! note
|
||||
If you have an `Optional[Enum]` value that you set a default for, you need to use `validate_default=True`
|
||||
for said Field to ensure that the `use_enum_values` flag takes effect on the default, as extracting an
|
||||
enum's value occurs during validation, not serialization.
|
||||
|
||||
```py
|
||||
from enum import Enum
|
||||
from typing import Optional
|
||||
|
||||
from pydantic import BaseModel, ConfigDict, Field
|
||||
|
||||
|
||||
class SomeEnum(Enum):
|
||||
FOO = 'foo'
|
||||
BAR = 'bar'
|
||||
BAZ = 'baz'
|
||||
|
||||
|
||||
class SomeModel(BaseModel):
|
||||
model_config = ConfigDict(use_enum_values=True)
|
||||
|
||||
some_enum: SomeEnum
|
||||
another_enum: Optional[SomeEnum] = Field(default=SomeEnum.FOO, validate_default=True)
|
||||
|
||||
|
||||
model1 = SomeModel(some_enum=SomeEnum.BAR)
|
||||
print(model1.model_dump())
|
||||
# {'some_enum': 'bar', 'another_enum': 'foo'}
|
||||
|
||||
model2 = SomeModel(some_enum=SomeEnum.BAR, another_enum=SomeEnum.BAZ)
|
||||
print(model2.model_dump())
|
||||
#> {'some_enum': 'bar', 'another_enum': 'baz'}
|
||||
```
|
||||
"""
|
||||
|
||||
validate_assignment: bool
|
||||
"""
|
||||
Whether to validate the data when the model is changed. Defaults to `False`.
|
||||
|
||||
The default behavior of Pydantic is to validate the data when the model is created.
|
||||
|
||||
In case the user changes the data after the model is created, the model is _not_ revalidated.
|
||||
|
||||
```py
|
||||
from pydantic import BaseModel
|
||||
|
||||
class User(BaseModel):
|
||||
name: str
|
||||
|
||||
user = User(name='John Doe') # (1)!
|
||||
print(user)
|
||||
#> name='John Doe'
|
||||
user.name = 123 # (2)!
|
||||
print(user)
|
||||
#> name=123
|
||||
```
|
||||
|
||||
1. The validation happens only when the model is created.
|
||||
2. The validation does not happen when the data is changed.
|
||||
|
||||
In case you want to revalidate the model when the data is changed, you can use `validate_assignment=True`:
|
||||
|
||||
```py
|
||||
from pydantic import BaseModel, ValidationError
|
||||
|
||||
class User(BaseModel, validate_assignment=True): # (1)!
|
||||
name: str
|
||||
|
||||
user = User(name='John Doe') # (2)!
|
||||
print(user)
|
||||
#> name='John Doe'
|
||||
try:
|
||||
user.name = 123 # (3)!
|
||||
except ValidationError as e:
|
||||
print(e)
|
||||
'''
|
||||
1 validation error for User
|
||||
name
|
||||
Input should be a valid string [type=string_type, input_value=123, input_type=int]
|
||||
'''
|
||||
```
|
||||
|
||||
1. You can either use class keyword arguments, or `model_config` to set `validate_assignment=True`.
|
||||
2. The validation happens when the model is created.
|
||||
3. The validation _also_ happens when the data is changed.
|
||||
"""
|
||||
|
||||
arbitrary_types_allowed: bool
|
||||
"""
|
||||
Whether arbitrary types are allowed for field types. Defaults to `False`.
|
||||
|
||||
```py
|
||||
from pydantic import BaseModel, ConfigDict, ValidationError
|
||||
|
||||
# This is not a pydantic model, it's an arbitrary class
|
||||
class Pet:
|
||||
def __init__(self, name: str):
|
||||
self.name = name
|
||||
|
||||
class Model(BaseModel):
|
||||
model_config = ConfigDict(arbitrary_types_allowed=True)
|
||||
|
||||
pet: Pet
|
||||
owner: str
|
||||
|
||||
pet = Pet(name='Hedwig')
|
||||
# A simple check of instance type is used to validate the data
|
||||
model = Model(owner='Harry', pet=pet)
|
||||
print(model)
|
||||
#> pet=<__main__.Pet object at 0x0123456789ab> owner='Harry'
|
||||
print(model.pet)
|
||||
#> <__main__.Pet object at 0x0123456789ab>
|
||||
print(model.pet.name)
|
||||
#> Hedwig
|
||||
print(type(model.pet))
|
||||
#> <class '__main__.Pet'>
|
||||
try:
|
||||
# If the value is not an instance of the type, it's invalid
|
||||
Model(owner='Harry', pet='Hedwig')
|
||||
except ValidationError as e:
|
||||
print(e)
|
||||
'''
|
||||
1 validation error for Model
|
||||
pet
|
||||
Input should be an instance of Pet [type=is_instance_of, input_value='Hedwig', input_type=str]
|
||||
'''
|
||||
|
||||
# Nothing in the instance of the arbitrary type is checked
|
||||
# Here name probably should have been a str, but it's not validated
|
||||
pet2 = Pet(name=42)
|
||||
model2 = Model(owner='Harry', pet=pet2)
|
||||
print(model2)
|
||||
#> pet=<__main__.Pet object at 0x0123456789ab> owner='Harry'
|
||||
print(model2.pet)
|
||||
#> <__main__.Pet object at 0x0123456789ab>
|
||||
print(model2.pet.name)
|
||||
#> 42
|
||||
print(type(model2.pet))
|
||||
#> <class '__main__.Pet'>
|
||||
```
|
||||
"""
|
||||
|
||||
from_attributes: bool
|
||||
"""
|
||||
Whether to build models and look up discriminators of tagged unions using python object attributes.
|
||||
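A minimal sketch of validating a plain Python object via its attributes (the ORM-style class here is illustrative only):

```py
from pydantic import BaseModel, ConfigDict


class PersonORM:
    # an arbitrary, non-pydantic object with matching attributes
    def __init__(self, name: str, age: int):
        self.name = name
        self.age = age


class Person(BaseModel):
    model_config = ConfigDict(from_attributes=True)

    name: str
    age: int


person = Person.model_validate(PersonORM('Anna', 30))
print(person)
#> name='Anna' age=30
```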
"""
|
||||
|
||||
loc_by_alias: bool
|
||||
"""Whether to use the actual key provided in the data (e.g. alias) for error `loc`s rather than the field's name. Defaults to `True`."""
|
||||
|
||||
alias_generator: Callable[[str], str] | None
|
||||
"""
|
||||
A callable that takes a field name and returns an alias for it.
|
||||
|
||||
If data source field names do not match your code style (e.g. CamelCase fields),
|
||||
you can automatically generate aliases using `alias_generator`:
|
||||
|
||||
```py
|
||||
from pydantic import BaseModel, ConfigDict
|
||||
from pydantic.alias_generators import to_pascal
|
||||
|
||||
class Voice(BaseModel):
|
||||
model_config = ConfigDict(alias_generator=to_pascal)
|
||||
|
||||
name: str
|
||||
language_code: str
|
||||
|
||||
voice = Voice(Name='Filiz', LanguageCode='tr-TR')
|
||||
print(voice.language_code)
|
||||
#> tr-TR
|
||||
print(voice.model_dump(by_alias=True))
|
||||
#> {'Name': 'Filiz', 'LanguageCode': 'tr-TR'}
|
||||
```
|
||||
|
||||
Note:
|
||||
Pydantic offers three built-in alias generators: [`to_pascal`][pydantic.alias_generators.to_pascal],
|
||||
[`to_camel`][pydantic.alias_generators.to_camel], and [`to_snake`][pydantic.alias_generators.to_snake].
|
||||
"""
|
||||
|
||||
ignored_types: tuple[type, ...]
|
||||
"""A tuple of types that may occur as values of class attributes without annotations. This is
|
||||
typically used for custom descriptors (classes that behave like `property`). If an attribute is set on a
|
||||
class without an annotation and has a type that is not in this tuple (or otherwise recognized by
|
||||
_pydantic_), an error will be raised. Defaults to `()`.
|
||||
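A minimal sketch with an illustrative custom descriptor (the descriptor class is hypothetical):

```py
from pydantic import BaseModel, ConfigDict


class AnswerDescriptor:
    # a simple descriptor, similar in spirit to `property`
    def __get__(self, obj, objtype=None):
        return 42


class Model(BaseModel):
    model_config = ConfigDict(ignored_types=(AnswerDescriptor,))

    # unannotated class attribute; ignored instead of raising an error
    answer = AnswerDescriptor()

    name: str


print(Model(name='x'))
#> name='x'
```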
"""
|
||||
|
||||
allow_inf_nan: bool
|
||||
"""Whether to allow infinity (`+inf` an `-inf`) and NaN values to float fields. Defaults to `True`."""
|
||||
|
||||
json_schema_extra: JsonDict | JsonSchemaExtraCallable | None
|
||||
"""A dict or callable to provide extra JSON schema properties. Defaults to `None`."""
|
||||
|
||||
json_encoders: dict[type[object], JsonEncoder] | None
|
||||
"""
|
||||
A `dict` of custom JSON encoders for specific types. Defaults to `None`.
|
||||
|
||||
!!! warning "Deprecated"
|
||||
This config option is a carryover from v1.
|
||||
We originally planned to remove it in v2 but didn't have a 1:1 replacement so we are keeping it for now.
|
||||
It is still deprecated and will likely be removed in the future.
|
||||
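A minimal sketch of the v1-style usage, assuming this deprecated option still applies to JSON serialization in this version (the output shown is indicative and may differ between versions):

```py
from datetime import timedelta

from pydantic import BaseModel, ConfigDict


class Model(BaseModel):
    model_config = ConfigDict(
        # custom encoder used when dumping to JSON
        json_encoders={timedelta: lambda td: f'{td.total_seconds()}s'}
    )

    interval: timedelta


print(Model(interval=timedelta(minutes=2)).model_dump_json())
#> {"interval":"120.0s"}
```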
"""
|
||||
|
||||
# new in V2
|
||||
strict: bool
|
||||
"""
|
||||
_(new in V2)_ If `True`, strict validation is applied to all fields on the model.
|
||||
|
||||
By default, Pydantic attempts to coerce values to the correct type, when possible.
|
||||
|
||||
There are situations in which you may want to disable this behavior, and instead raise an error if a value's type
|
||||
does not match the field's type annotation.
|
||||
|
||||
To configure strict mode for all fields on a model, you can set `strict=True` on the model.
|
||||
|
||||
```py
|
||||
from pydantic import BaseModel, ConfigDict
|
||||
|
||||
class Model(BaseModel):
|
||||
model_config = ConfigDict(strict=True)
|
||||
|
||||
name: str
|
||||
age: int
|
||||
```
|
||||
|
||||
See [Strict Mode](../concepts/strict_mode.md) for more details.
|
||||
|
||||
See the [Conversion Table](../concepts/conversion_table.md) for more details on how Pydantic converts data in both
|
||||
strict and lax modes.
|
||||
"""
|
||||
# whether instances of models and dataclasses (including subclass instances) should re-validate, default 'never'
|
||||
revalidate_instances: Literal['always', 'never', 'subclass-instances']
|
||||
"""
|
||||
When and how to revalidate models and dataclasses during validation. Accepts the string
|
||||
values of `'never'`, `'always'` and `'subclass-instances'`. Defaults to `'never'`.
|
||||
|
||||
- `'never'` will not revalidate models and dataclasses during validation
|
||||
- `'always'` will revalidate models and dataclasses during validation
|
||||
- `'subclass-instances'` will revalidate models and dataclasses during validation if the instance is a
|
||||
subclass of the model or dataclass
|
||||
|
||||
By default, model and dataclass instances are not revalidated during validation.
|
||||
|
||||
```py
|
||||
from typing import List
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
class User(BaseModel, revalidate_instances='never'): # (1)!
|
||||
hobbies: List[str]
|
||||
|
||||
class SubUser(User):
|
||||
sins: List[str]
|
||||
|
||||
class Transaction(BaseModel):
|
||||
user: User
|
||||
|
||||
my_user = User(hobbies=['reading'])
|
||||
t = Transaction(user=my_user)
|
||||
print(t)
|
||||
#> user=User(hobbies=['reading'])
|
||||
|
||||
my_user.hobbies = [1] # (2)!
|
||||
t = Transaction(user=my_user) # (3)!
|
||||
print(t)
|
||||
#> user=User(hobbies=[1])
|
||||
|
||||
my_sub_user = SubUser(hobbies=['scuba diving'], sins=['lying'])
|
||||
t = Transaction(user=my_sub_user)
|
||||
print(t)
|
||||
#> user=SubUser(hobbies=['scuba diving'], sins=['lying'])
|
||||
```
|
||||
|
||||
1. `revalidate_instances` is set to `'never'` by default.
|
||||
2. The assignment is not validated, unless you set `validate_assignment` to `True` in the model's config.
|
||||
3. Since `revalidate_instances` is set to `never`, this is not revalidated.
|
||||
|
||||
If you want to revalidate instances during validation, you can set `revalidate_instances` to `'always'`
|
||||
in the model's config.
|
||||
|
||||
```py
|
||||
from typing import List
|
||||
|
||||
from pydantic import BaseModel, ValidationError
|
||||
|
||||
class User(BaseModel, revalidate_instances='always'): # (1)!
|
||||
hobbies: List[str]
|
||||
|
||||
class SubUser(User):
|
||||
sins: List[str]
|
||||
|
||||
class Transaction(BaseModel):
|
||||
user: User
|
||||
|
||||
my_user = User(hobbies=['reading'])
|
||||
t = Transaction(user=my_user)
|
||||
print(t)
|
||||
#> user=User(hobbies=['reading'])
|
||||
|
||||
my_user.hobbies = [1]
|
||||
try:
|
||||
t = Transaction(user=my_user) # (2)!
|
||||
except ValidationError as e:
|
||||
print(e)
|
||||
'''
|
||||
1 validation error for Transaction
|
||||
user.hobbies.0
|
||||
Input should be a valid string [type=string_type, input_value=1, input_type=int]
|
||||
'''
|
||||
|
||||
my_sub_user = SubUser(hobbies=['scuba diving'], sins=['lying'])
|
||||
t = Transaction(user=my_sub_user)
|
||||
print(t) # (3)!
|
||||
#> user=User(hobbies=['scuba diving'])
|
||||
```
|
||||
|
||||
1. `revalidate_instances` is set to `'always'`.
|
||||
2. The model is revalidated, since `revalidate_instances` is set to `'always'`.
|
||||
3. Using `'never'` we would have gotten `user=SubUser(hobbies=['scuba diving'], sins=['lying'])`.
|
||||
|
||||
It's also possible to set `revalidate_instances` to `'subclass-instances'` to only revalidate instances
|
||||
of subclasses of the model.
|
||||
|
||||
```py
|
||||
from typing import List
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
class User(BaseModel, revalidate_instances='subclass-instances'): # (1)!
|
||||
hobbies: List[str]
|
||||
|
||||
class SubUser(User):
|
||||
sins: List[str]
|
||||
|
||||
class Transaction(BaseModel):
|
||||
user: User
|
||||
|
||||
my_user = User(hobbies=['reading'])
|
||||
t = Transaction(user=my_user)
|
||||
print(t)
|
||||
#> user=User(hobbies=['reading'])
|
||||
|
||||
my_user.hobbies = [1]
|
||||
t = Transaction(user=my_user) # (2)!
|
||||
print(t)
|
||||
#> user=User(hobbies=[1])
|
||||
|
||||
my_sub_user = SubUser(hobbies=['scuba diving'], sins=['lying'])
|
||||
t = Transaction(user=my_sub_user)
|
||||
print(t) # (3)!
|
||||
#> user=User(hobbies=['scuba diving'])
|
||||
```
|
||||
|
||||
1. `revalidate_instances` is set to `'subclass-instances'`.
|
||||
2. This is not revalidated, since `my_user` is not a subclass of `User`.
|
||||
3. Using `'never'` we would have gotten `user=SubUser(hobbies=['scuba diving'], sins=['lying'])`.
|
||||
"""
|
||||
|
||||
ser_json_timedelta: Literal['iso8601', 'float']
|
||||
"""
|
||||
The format of JSON serialized timedeltas. Accepts the string values of `'iso8601'` and
|
||||
`'float'`. Defaults to `'iso8601'`.
|
||||
|
||||
- `'iso8601'` will serialize timedeltas to ISO 8601 durations.
|
||||
- `'float'` will serialize timedeltas to the total number of seconds.
|
||||
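For example, a minimal sketch of serializing with the `'float'` format (the output shown is indicative):

```py
from datetime import timedelta

from pydantic import BaseModel, ConfigDict


class Model(BaseModel):
    model_config = ConfigDict(ser_json_timedelta='float')

    delay: timedelta


# the timedelta is dumped as total seconds instead of an ISO 8601 duration
print(Model(delay=timedelta(minutes=1, seconds=30)).model_dump_json())
#> {"delay":90.0}
```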
"""
|
||||
|
||||
ser_json_bytes: Literal['utf8', 'base64']
|
||||
"""
|
||||
The encoding of JSON serialized bytes. Accepts the string values of `'utf8'` and `'base64'`.
|
||||
Defaults to `'utf8'`.
|
||||
|
||||
- `'utf8'` will serialize bytes to UTF-8 strings.
|
||||
- `'base64'` will serialize bytes to URL safe base64 strings.
|
||||
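For example, a minimal sketch of serializing bytes as base64 (the output shown is indicative):

```py
from pydantic import BaseModel, ConfigDict


class Model(BaseModel):
    model_config = ConfigDict(ser_json_bytes='base64')

    payload: bytes


# bytes are dumped as a URL-safe base64 string instead of a UTF-8 string
print(Model(payload=b'hello world').model_dump_json())
#> {"payload":"aGVsbG8gd29ybGQ="}
```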
"""
|
||||
|
||||
# whether to validate default values during validation, default False
|
||||
validate_default: bool
|
||||
"""Whether to validate default values during validation. Defaults to `False`."""
|
||||
|
||||
validate_return: bool
|
||||
"""whether to validate the return value from call validators. Defaults to `False`."""
|
||||
|
||||
protected_namespaces: tuple[str, ...]
|
||||
"""
|
||||
A `tuple` of strings that prevent models from having fields whose names conflict with them.
|
||||
Defaults to `('model_',)`.
|
||||
|
||||
Pydantic prevents collisions between model attributes and `BaseModel`'s own methods by
|
||||
namespacing them with the prefix `model_`.
|
||||
|
||||
```py
|
||||
import warnings
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
warnings.filterwarnings('error') # Raise warnings as errors
|
||||
|
||||
try:
|
||||
|
||||
class Model(BaseModel):
|
||||
model_prefixed_field: str
|
||||
|
||||
except UserWarning as e:
|
||||
print(e)
|
||||
'''
|
||||
Field "model_prefixed_field" has conflict with protected namespace "model_".
|
||||
|
||||
You may be able to resolve this warning by setting `model_config['protected_namespaces'] = ()`.
|
||||
'''
|
||||
```
|
||||
|
||||
You can customize this behavior using the `protected_namespaces` setting:
|
||||
|
||||
```py
|
||||
import warnings
|
||||
|
||||
from pydantic import BaseModel, ConfigDict
|
||||
|
||||
warnings.filterwarnings('error') # Raise warnings as errors
|
||||
|
||||
try:
|
||||
|
||||
class Model(BaseModel):
|
||||
model_prefixed_field: str
|
||||
also_protect_field: str
|
||||
|
||||
model_config = ConfigDict(
|
||||
protected_namespaces=('protect_me_', 'also_protect_')
|
||||
)
|
||||
|
||||
except UserWarning as e:
|
||||
print(e)
|
||||
'''
|
||||
Field "also_protect_field" has conflict with protected namespace "also_protect_".
|
||||
|
||||
You may be able to resolve this warning by setting `model_config['protected_namespaces'] = ('protect_me_',)`.
|
||||
'''
|
||||
```
|
||||
|
||||
While Pydantic will only emit a warning when an item is in a protected namespace but does not actually have a collision,
|
||||
an error _is_ raised if there is an actual collision with an existing attribute:
|
||||
|
||||
```py
|
||||
from pydantic import BaseModel
|
||||
|
||||
try:
|
||||
|
||||
class Model(BaseModel):
|
||||
model_validate: str
|
||||
|
||||
except NameError as e:
|
||||
print(e)
|
||||
'''
|
||||
Field "model_validate" conflicts with member <bound method BaseModel.model_validate of <class 'pydantic.main.BaseModel'>> of protected namespace "model_".
|
||||
'''
|
||||
```
|
||||
"""
|
||||
|
||||
hide_input_in_errors: bool
|
||||
"""
|
||||
Whether to hide inputs when printing errors. Defaults to `False`.
|
||||
|
||||
Pydantic shows the input value and type when it raises `ValidationError` during the validation.
|
||||
|
||||
```py
|
||||
from pydantic import BaseModel, ValidationError
|
||||
|
||||
class Model(BaseModel):
|
||||
a: str
|
||||
|
||||
try:
|
||||
Model(a=123)
|
||||
except ValidationError as e:
|
||||
print(e)
|
||||
'''
|
||||
1 validation error for Model
|
||||
a
|
||||
Input should be a valid string [type=string_type, input_value=123, input_type=int]
|
||||
'''
|
||||
```
|
||||
|
||||
You can hide the input value and type by setting the `hide_input_in_errors` config to `True`.
|
||||
|
||||
```py
|
||||
from pydantic import BaseModel, ConfigDict, ValidationError
|
||||
|
||||
class Model(BaseModel):
|
||||
a: str
|
||||
model_config = ConfigDict(hide_input_in_errors=True)
|
||||
|
||||
try:
|
||||
Model(a=123)
|
||||
except ValidationError as e:
|
||||
print(e)
|
||||
'''
|
||||
1 validation error for Model
|
||||
a
|
||||
Input should be a valid string [type=string_type]
|
||||
'''
|
||||
```
|
||||
"""
|
||||
|
||||
defer_build: bool
|
||||
"""
|
||||
Whether to defer model validator and serializer construction until the first model validation.
|
||||
|
||||
This can be useful to avoid the overhead of building models which are only
|
||||
used nested within other models, or when you want to manually define type namespace via
|
||||
[`Model.model_rebuild(_types_namespace=...)`][pydantic.BaseModel.model_rebuild]. Defaults to False.
|
||||
"""
|
||||
|
||||
plugin_settings: dict[str, object] | None
|
||||
"""A `dict` of settings for plugins. Defaults to `None`.
|
||||
|
||||
See [Pydantic Plugins](../concepts/plugins.md) for details.
|
||||
"""
|
||||
|
||||
schema_generator: type[_GenerateSchema] | None
|
||||
"""
|
||||
A custom core schema generator class to use when generating JSON schemas.
|
||||
Useful if you want to change the way types are validated across an entire model/schema. Defaults to `None`.
|
||||
|
||||
The `GenerateSchema` interface is subject to change, currently only the `string_schema` method is public.
|
||||
|
||||
See [#6737](https://github.com/pydantic/pydantic/pull/6737) for details.
|
||||
"""
|
||||
|
||||
json_schema_serialization_defaults_required: bool
|
||||
"""
|
||||
Whether fields with default values should be marked as required in the serialization schema. Defaults to `False`.
|
||||
|
||||
This ensures that the serialization schema will reflect the fact a field with a default will always be present
|
||||
when serializing the model, even though it is not required for validation.
|
||||
|
||||
However, there are scenarios where this may be undesirable — in particular, if you want to share the schema
|
||||
between validation and serialization, and don't mind fields with defaults being marked as not required during
|
||||
serialization. See [#7209](https://github.com/pydantic/pydantic/issues/7209) for more details.
|
||||
|
||||
```py
|
||||
from pydantic import BaseModel, ConfigDict
|
||||
|
||||
class Model(BaseModel):
|
||||
a: str = 'a'
|
||||
|
||||
model_config = ConfigDict(json_schema_serialization_defaults_required=True)
|
||||
|
||||
print(Model.model_json_schema(mode='validation'))
|
||||
'''
|
||||
{
|
||||
'properties': {'a': {'default': 'a', 'title': 'A', 'type': 'string'}},
|
||||
'title': 'Model',
|
||||
'type': 'object',
|
||||
}
|
||||
'''
|
||||
print(Model.model_json_schema(mode='serialization'))
|
||||
'''
|
||||
{
|
||||
'properties': {'a': {'default': 'a', 'title': 'A', 'type': 'string'}},
|
||||
'required': ['a'],
|
||||
'title': 'Model',
|
||||
'type': 'object',
|
||||
}
|
||||
'''
|
||||
```
|
||||
"""
|
||||
|
||||
json_schema_mode_override: Literal['validation', 'serialization', None]
|
||||
"""
|
||||
If not `None`, the specified mode will be used to generate the JSON schema regardless of what `mode` was passed to
|
||||
the function call. Defaults to `None`.
|
||||
|
||||
This provides a way to force the JSON schema generation to reflect a specific mode, e.g., to always use the
|
||||
validation schema.
|
||||
|
||||
It can be useful when using frameworks (such as FastAPI) that may generate different schemas for validation
|
||||
and serialization that must both be referenced from the same schema; when this happens, we automatically append
|
||||
`-Input` to the definition reference for the validation schema and `-Output` to the definition reference for the
|
||||
serialization schema. By specifying a `json_schema_mode_override` though, this prevents the conflict between
|
||||
the validation and serialization schemas (since both will use the specified schema), and so prevents the suffixes
|
||||
from being added to the definition references.
|
||||
|
||||
```py
|
||||
from pydantic import BaseModel, ConfigDict, Json
|
||||
|
||||
class Model(BaseModel):
|
||||
a: Json[int] # requires a string to validate, but will dump an int
|
||||
|
||||
print(Model.model_json_schema(mode='serialization'))
|
||||
'''
|
||||
{
|
||||
'properties': {'a': {'title': 'A', 'type': 'integer'}},
|
||||
'required': ['a'],
|
||||
'title': 'Model',
|
||||
'type': 'object',
|
||||
}
|
||||
'''
|
||||
|
||||
class ForceInputModel(Model):
|
||||
# the following ensures that even with mode='serialization', we
|
||||
# will get the schema that would be generated for validation.
|
||||
model_config = ConfigDict(json_schema_mode_override='validation')
|
||||
|
||||
print(ForceInputModel.model_json_schema(mode='serialization'))
|
||||
'''
|
||||
{
|
||||
'properties': {
|
||||
'a': {
|
||||
'contentMediaType': 'application/json',
|
||||
'contentSchema': {'type': 'integer'},
|
||||
'title': 'A',
|
||||
'type': 'string',
|
||||
}
|
||||
},
|
||||
'required': ['a'],
|
||||
'title': 'ForceInputModel',
|
||||
'type': 'object',
|
||||
}
|
||||
'''
|
||||
```
|
||||
"""
|
||||
|
||||
coerce_numbers_to_str: bool
|
||||
"""
|
||||
If `True`, enables automatic coercion of any `Number` type to `str` in "lax" (non-strict) mode. Defaults to `False`.
|
||||
|
||||
Pydantic doesn't allow number types (`int`, `float`, `Decimal`) to be coerced as type `str` by default.
|
||||
|
||||
```py
|
||||
from decimal import Decimal
|
||||
|
||||
from pydantic import BaseModel, ConfigDict, ValidationError
|
||||
|
||||
class Model(BaseModel):
|
||||
value: str
|
||||
|
||||
try:
|
||||
print(Model(value=42))
|
||||
except ValidationError as e:
|
||||
print(e)
|
||||
'''
|
||||
1 validation error for Model
|
||||
value
|
||||
Input should be a valid string [type=string_type, input_value=42, input_type=int]
|
||||
'''
|
||||
|
||||
class Model(BaseModel):
|
||||
model_config = ConfigDict(coerce_numbers_to_str=True)
|
||||
|
||||
value: str
|
||||
|
||||
repr(Model(value=42).value)
|
||||
#> "42"
|
||||
repr(Model(value=42.13).value)
|
||||
#> "42.13"
|
||||
repr(Model(value=Decimal('42.13')).value)
|
||||
#> "42.13"
|
||||
```
|
||||
"""
|
||||
|
||||
regex_engine: Literal['rust-regex', 'python-re']
|
||||
"""
|
||||
The regex engine to be used for pattern validation.
|
||||
Defaults to `'rust-regex'`.
|
||||
|
||||
- `rust-regex` uses the [`regex`](https://docs.rs/regex) Rust crate,
|
||||
which is non-backtracking and therefore more DDoS resistant, but does not support all regex features.
|
||||
- `python-re` uses the [`re`](https://docs.python.org/3/library/re.html) module,
|
||||
which supports all regex features, but may be slower.
|
||||
|
||||
```py
|
||||
from pydantic import BaseModel, ConfigDict, Field, ValidationError
|
||||
|
||||
class Model(BaseModel):
|
||||
model_config = ConfigDict(regex_engine='python-re')
|
||||
|
||||
value: str = Field(pattern=r'^abc(?=def)')
|
||||
|
||||
print(Model(value='abcdef').value)
|
||||
#> abcdef
|
||||
|
||||
try:
|
||||
print(Model(value='abxyzcdef'))
|
||||
except ValidationError as e:
|
||||
print(e)
|
||||
'''
|
||||
1 validation error for Model
|
||||
value
|
||||
String should match pattern '^abc(?=def)' [type=string_pattern_mismatch, input_value='abxyzcdef', input_type=str]
|
||||
'''
|
||||
```
|
||||
"""
|
||||
|
||||
validation_error_cause: bool
|
||||
"""
|
||||
If `True`, python exceptions that were part of a validation failure will be shown as an exception group as a cause. Can be useful for debugging. Defaults to `False`.
|
||||
|
||||
Note:
|
||||
Python 3.10 and older don't support exception groups natively; on Python <= 3.10 the backport must be installed: `pip install exceptiongroup`.
|
||||
|
||||
Note:
|
||||
The structure of validation errors is likely to change in future pydantic versions. Pydantic offers no guarantees about the structure of validation errors. This should be used for visual traceback debugging only.
|
||||
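A minimal sketch, assuming the underlying exception is attached as the `__cause__` of the `ValidationError` as described above (the exact structure of the cause is not guaranteed):

```py
from pydantic import BaseModel, ConfigDict, ValidationError, field_validator


class Model(BaseModel):
    model_config = ConfigDict(validation_error_cause=True)

    x: int

    @field_validator('x')
    @classmethod
    def boom(cls, v: int) -> int:
        raise ValueError('user-raised error')


try:
    Model(x=1)
except ValidationError as exc:
    # the original exception(s) are attached as the cause for debugging
    print(exc.__cause__ is not None)
    #> True
```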
"""
|
||||
|
||||
|
||||
__getattr__ = getattr_migration(__name__)
|
||||
@@ -0,0 +1,312 @@
|
||||
"""Provide an enhanced dataclass that performs validation."""
|
||||
from __future__ import annotations as _annotations
|
||||
|
||||
import dataclasses
|
||||
import sys
|
||||
import types
|
||||
from typing import TYPE_CHECKING, Any, Callable, Generic, NoReturn, TypeVar, overload
|
||||
|
||||
from typing_extensions import Literal, TypeGuard, dataclass_transform
|
||||
|
||||
from ._internal import _config, _decorators, _typing_extra
|
||||
from ._internal import _dataclasses as _pydantic_dataclasses
|
||||
from ._migration import getattr_migration
|
||||
from .config import ConfigDict
|
||||
from .fields import Field, FieldInfo
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ._internal._dataclasses import PydanticDataclass
|
||||
|
||||
__all__ = 'dataclass', 'rebuild_dataclass'
|
||||
|
||||
_T = TypeVar('_T')
|
||||
|
||||
if sys.version_info >= (3, 10):
|
||||
|
||||
@dataclass_transform(field_specifiers=(dataclasses.field, Field))
|
||||
@overload
|
||||
def dataclass(
|
||||
*,
|
||||
init: Literal[False] = False,
|
||||
repr: bool = True,
|
||||
eq: bool = True,
|
||||
order: bool = False,
|
||||
unsafe_hash: bool = False,
|
||||
frozen: bool = False,
|
||||
config: ConfigDict | type[object] | None = None,
|
||||
validate_on_init: bool | None = None,
|
||||
kw_only: bool = ...,
|
||||
slots: bool = ...,
|
||||
) -> Callable[[type[_T]], type[PydanticDataclass]]: # type: ignore
|
||||
...
|
||||
|
||||
@dataclass_transform(field_specifiers=(dataclasses.field, Field))
|
||||
@overload
|
||||
def dataclass(
|
||||
_cls: type[_T], # type: ignore
|
||||
*,
|
||||
init: Literal[False] = False,
|
||||
repr: bool = True,
|
||||
eq: bool = True,
|
||||
order: bool = False,
|
||||
unsafe_hash: bool = False,
|
||||
frozen: bool = False,
|
||||
config: ConfigDict | type[object] | None = None,
|
||||
validate_on_init: bool | None = None,
|
||||
kw_only: bool = ...,
|
||||
slots: bool = ...,
|
||||
) -> type[PydanticDataclass]:
|
||||
...
|
||||
|
||||
else:
|
||||
|
||||
@dataclass_transform(field_specifiers=(dataclasses.field, Field))
|
||||
@overload
|
||||
def dataclass(
|
||||
*,
|
||||
init: Literal[False] = False,
|
||||
repr: bool = True,
|
||||
eq: bool = True,
|
||||
order: bool = False,
|
||||
unsafe_hash: bool = False,
|
||||
frozen: bool = False,
|
||||
config: ConfigDict | type[object] | None = None,
|
||||
validate_on_init: bool | None = None,
|
||||
) -> Callable[[type[_T]], type[PydanticDataclass]]: # type: ignore
|
||||
...
|
||||
|
||||
@dataclass_transform(field_specifiers=(dataclasses.field, Field))
|
||||
@overload
|
||||
def dataclass(
|
||||
_cls: type[_T], # type: ignore
|
||||
*,
|
||||
init: Literal[False] = False,
|
||||
repr: bool = True,
|
||||
eq: bool = True,
|
||||
order: bool = False,
|
||||
unsafe_hash: bool = False,
|
||||
frozen: bool = False,
|
||||
config: ConfigDict | type[object] | None = None,
|
||||
validate_on_init: bool | None = None,
|
||||
) -> type[PydanticDataclass]:
|
||||
...
|
||||
|
||||
|
||||
@dataclass_transform(field_specifiers=(dataclasses.field, Field))
|
||||
def dataclass(
|
||||
_cls: type[_T] | None = None,
|
||||
*,
|
||||
init: Literal[False] = False,
|
||||
repr: bool = True,
|
||||
eq: bool = True,
|
||||
order: bool = False,
|
||||
unsafe_hash: bool = False,
|
||||
frozen: bool = False,
|
||||
config: ConfigDict | type[object] | None = None,
|
||||
validate_on_init: bool | None = None,
|
||||
kw_only: bool = False,
|
||||
slots: bool = False,
|
||||
) -> Callable[[type[_T]], type[PydanticDataclass]] | type[PydanticDataclass]:
|
||||
"""Usage docs: https://docs.pydantic.dev/2.5/concepts/dataclasses/
|
||||
|
||||
A decorator used to create a Pydantic-enhanced dataclass, similar to the standard Python `dataclass`,
|
||||
but with added validation.
|
||||
|
||||
This function should be used similarly to `dataclasses.dataclass`.
|
||||
|
||||
Args:
|
||||
_cls: The target `dataclass`.
|
||||
init: Included for signature compatibility with `dataclasses.dataclass`, and is passed through to
|
||||
`dataclasses.dataclass` when appropriate. If specified, must be set to `False`, as pydantic inserts its
|
||||
own `__init__` function.
|
||||
repr: A boolean indicating whether a `__repr__` method should be generated for the class.
|
||||
eq: Determines if a `__eq__` should be generated for the class.
|
||||
order: Determines if comparison magic methods should be generated, such as `__lt__`, but not `__eq__`.
|
||||
unsafe_hash: Determines if an unsafe hashing function should be included in the class.
|
||||
frozen: Determines if the generated class should be a 'frozen' `dataclass`, which does not allow its
|
||||
attributes to be modified after it has been initialized.
|
||||
config: A configuration for the `dataclass` generation.
|
||||
validate_on_init: A deprecated parameter included for backwards compatibility; in V2, all Pydantic dataclasses
|
||||
are validated on init.
|
||||
kw_only: Determines if `__init__` method parameters must be specified by keyword only. Defaults to `False`.
|
||||
slots: Determines if the generated class should be a 'slots' `dataclass`, which does not allow the addition of
|
||||
new attributes after instantiation.
|
||||
|
||||
Returns:
|
||||
A decorator that accepts a class as its argument and returns a Pydantic `dataclass`.
|
||||
|
||||
Raises:
|
||||
AssertionError: Raised if `init` is not `False` or `validate_on_init` is `False`.
|
||||
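Example of typical usage (a minimal sketch; the output shown is indicative):

```py
from pydantic.dataclasses import dataclass


@dataclass
class User:
    id: int
    name: str = 'John Doe'


user = User(id='42')  # the string '42' is validated and coerced to the int 42
print(user)
#> User(id=42, name='John Doe')
```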
"""
|
||||
assert init is False, 'pydantic.dataclasses.dataclass only supports init=False'
|
||||
assert validate_on_init is not False, 'validate_on_init=False is no longer supported'
|
||||
|
||||
if sys.version_info >= (3, 10):
|
||||
kwargs = dict(kw_only=kw_only, slots=slots)
|
||||
|
||||
def make_pydantic_fields_compatible(cls: type[Any]) -> None:
|
||||
"""Make sure that stdlib `dataclasses` understands `Field` kwargs like `kw_only`
|
||||
To do that, we simply change
|
||||
`x: int = pydantic.Field(..., kw_only=True)`
|
||||
into
|
||||
`x: int = dataclasses.field(default=pydantic.Field(..., kw_only=True), kw_only=True)`
|
||||
"""
|
||||
for field_name in cls.__annotations__:
|
||||
try:
|
||||
field_value = getattr(cls, field_name)
|
||||
except AttributeError:
|
||||
# no default value has been set for this field
|
||||
continue
|
||||
if isinstance(field_value, FieldInfo) and field_value.kw_only:
|
||||
setattr(cls, field_name, dataclasses.field(default=field_value, kw_only=True))
|
||||
|
||||
else:
|
||||
kwargs = {}
|
||||
|
||||
def make_pydantic_fields_compatible(_) -> None:
|
||||
return None
|
||||
|
||||
def create_dataclass(cls: type[Any]) -> type[PydanticDataclass]:
|
||||
"""Create a Pydantic dataclass from a regular dataclass.
|
||||
|
||||
Args:
|
||||
cls: The class to create the Pydantic dataclass from.
|
||||
|
||||
Returns:
|
||||
A Pydantic dataclass.
|
||||
"""
|
||||
original_cls = cls
|
||||
|
||||
config_dict = config
|
||||
if config_dict is None:
|
||||
# if not explicitly provided, read from the type
|
||||
cls_config = getattr(cls, '__pydantic_config__', None)
|
||||
if cls_config is not None:
|
||||
config_dict = cls_config
|
||||
config_wrapper = _config.ConfigWrapper(config_dict)
|
||||
decorators = _decorators.DecoratorInfos.build(cls)
|
||||
|
||||
# Keep track of the original __doc__ so that we can restore it after applying the dataclasses decorator
|
||||
# Otherwise, classes with no __doc__ will have their signature added into the JSON schema description,
|
||||
# since dataclasses.dataclass will set this as the __doc__
|
||||
original_doc = cls.__doc__
|
||||
|
||||
if _pydantic_dataclasses.is_builtin_dataclass(cls):
|
||||
# Don't preserve the docstring for vanilla dataclasses, as it may include the signature
|
||||
# This matches v1 behavior, and there was an explicit test for it
|
||||
original_doc = None
|
||||
|
||||
# We don't want to add validation to the existing std lib dataclass, so we will subclass it
|
||||
# If the class is generic, we need to make sure the subclass also inherits from Generic
|
||||
# with all the same parameters.
|
||||
bases = (cls,)
|
||||
if issubclass(cls, Generic):
|
||||
generic_base = Generic[cls.__parameters__] # type: ignore
|
||||
bases = bases + (generic_base,)
|
||||
cls = types.new_class(cls.__name__, bases)
|
||||
|
||||
make_pydantic_fields_compatible(cls)
|
||||
|
||||
cls = dataclasses.dataclass( # type: ignore[call-overload]
|
||||
cls,
|
||||
# the value of init here doesn't affect anything except that it makes it easier to generate a signature
|
||||
init=True,
|
||||
repr=repr,
|
||||
eq=eq,
|
||||
order=order,
|
||||
unsafe_hash=unsafe_hash,
|
||||
frozen=frozen,
|
||||
**kwargs,
|
||||
)
|
||||
|
||||
cls.__pydantic_decorators__ = decorators # type: ignore
|
||||
cls.__doc__ = original_doc
|
||||
cls.__module__ = original_cls.__module__
|
||||
cls.__qualname__ = original_cls.__qualname__
|
||||
pydantic_complete = _pydantic_dataclasses.complete_dataclass(
|
||||
cls, config_wrapper, raise_errors=False, types_namespace=None
|
||||
)
|
||||
cls.__pydantic_complete__ = pydantic_complete # type: ignore
|
||||
return cls
|
||||
|
||||
if _cls is None:
|
||||
return create_dataclass
|
||||
|
||||
return create_dataclass(_cls)
|
||||
|
||||
|
||||
__getattr__ = getattr_migration(__name__)
|
||||
|
||||
if (3, 8) <= sys.version_info < (3, 11):
|
||||
# Monkeypatch dataclasses.InitVar so that typing doesn't error if it occurs as a type when evaluating type hints
|
||||
# Starting in 3.11, typing.get_type_hints will not raise an error if the retrieved type hints are not callable.
|
||||
|
||||
def _call_initvar(*args: Any, **kwargs: Any) -> NoReturn:
|
||||
"""This function does nothing but raise an error that is as similar as possible to what you'd get
|
||||
if you were to try calling `InitVar[int]()` without this monkeypatch. The whole purpose is just
|
||||
to ensure typing._type_check does not error if the type hint evaluates to `InitVar[<parameter>]`.
|
||||
"""
|
||||
raise TypeError("'InitVar' object is not callable")
|
||||
|
||||
dataclasses.InitVar.__call__ = _call_initvar
|
||||
|
||||
|
||||
def rebuild_dataclass(
|
||||
cls: type[PydanticDataclass],
|
||||
*,
|
||||
force: bool = False,
|
||||
raise_errors: bool = True,
|
||||
_parent_namespace_depth: int = 2,
|
||||
_types_namespace: dict[str, Any] | None = None,
|
||||
) -> bool | None:
|
||||
"""Try to rebuild the pydantic-core schema for the dataclass.
|
||||
|
||||
This may be necessary when one of the annotations is a ForwardRef which could not be resolved during
|
||||
the initial attempt to build the schema, and automatic rebuilding fails.
|
||||
|
||||
This is analogous to `BaseModel.model_rebuild`.
|
||||
|
||||
Args:
|
||||
cls: The class to build the dataclass core schema for.
|
||||
force: Whether to force the rebuilding of the model schema, defaults to `False`.
|
||||
raise_errors: Whether to raise errors, defaults to `True`.
|
||||
_parent_namespace_depth: The depth level of the parent namespace, defaults to 2.
|
||||
_types_namespace: The types namespace, defaults to `None`.
|
||||
|
||||
Returns:
|
||||
Returns `None` if the schema is already "complete" and rebuilding was not required.
|
||||
If rebuilding _was_ required, returns `True` if rebuilding was successful, otherwise `False`.
|
||||
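A minimal sketch of forcing a rebuild for a dataclass with a self-referencing annotation (illustrative only):

```py
from typing import Optional

from pydantic.dataclasses import dataclass, rebuild_dataclass


@dataclass
class Node:
    value: int
    parent: Optional['Node'] = None  # forward reference to the class itself


# force a rebuild of the pydantic-core schema; returns True on success
print(rebuild_dataclass(Node, force=True))
#> True
```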
"""
|
||||
if not force and cls.__pydantic_complete__:
|
||||
return None
|
||||
else:
|
||||
if _types_namespace is not None:
|
||||
types_namespace: dict[str, Any] | None = _types_namespace.copy()
|
||||
else:
|
||||
if _parent_namespace_depth > 0:
|
||||
frame_parent_ns = _typing_extra.parent_frame_namespace(parent_depth=_parent_namespace_depth) or {}
|
||||
# Note: we may need to add something similar to cls.__pydantic_parent_namespace__ from BaseModel
|
||||
# here when implementing handling of recursive generics. See BaseModel.model_rebuild for reference.
|
||||
types_namespace = frame_parent_ns
|
||||
else:
|
||||
types_namespace = {}
|
||||
|
||||
types_namespace = _typing_extra.get_cls_types_namespace(cls, types_namespace)
|
||||
return _pydantic_dataclasses.complete_dataclass(
|
||||
cls,
|
||||
_config.ConfigWrapper(cls.__pydantic_config__, check=False),
|
||||
raise_errors=raise_errors,
|
||||
types_namespace=types_namespace,
|
||||
)
|
||||
|
||||
|
||||
def is_pydantic_dataclass(__cls: type[Any]) -> TypeGuard[type[PydanticDataclass]]:
|
||||
"""Whether a class is a pydantic dataclass.
|
||||
|
||||
Args:
|
||||
__cls: The class.
|
||||
|
||||
Returns:
|
||||
`True` if the class is a pydantic dataclass, `False` otherwise.
|
||||
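A minimal sketch contrasting a stdlib dataclass with its pydantic-enhanced counterpart (illustrative only):

```py
import dataclasses

import pydantic


@dataclasses.dataclass
class StdLibUser:
    id: int


# wrapping the stdlib dataclass creates a new, validated dataclass
PydanticUser = pydantic.dataclasses.dataclass(StdLibUser)

print(pydantic.dataclasses.is_pydantic_dataclass(StdLibUser))
#> False
print(pydantic.dataclasses.is_pydantic_dataclass(PydanticUser))
#> True
```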
"""
|
||||
return dataclasses.is_dataclass(__cls) and '__pydantic_validator__' in __cls.__dict__
|
||||
@@ -0,0 +1,4 @@
|
||||
"""The `datetime_parse` module is a backport module from V1."""
|
||||
from ._migration import getattr_migration
|
||||
|
||||
__getattr__ = getattr_migration(__name__)
|
||||
@@ -0,0 +1,4 @@
|
||||
"""The `decorator` module is a backport module from V1."""
|
||||
from ._migration import getattr_migration
|
||||
|
||||
__getattr__ = getattr_migration(__name__)
|
||||
@@ -0,0 +1,253 @@
|
||||
"""Old `@validator` and `@root_validator` function validators from V1."""
|
||||
|
||||
from __future__ import annotations as _annotations
|
||||
|
||||
from functools import partial, partialmethod
|
||||
from types import FunctionType
|
||||
from typing import TYPE_CHECKING, Any, Callable, TypeVar, Union, overload
|
||||
from warnings import warn
|
||||
|
||||
from typing_extensions import Literal, Protocol, TypeAlias
|
||||
|
||||
from .._internal import _decorators, _decorators_v1
|
||||
from ..errors import PydanticUserError
|
||||
from ..warnings import PydanticDeprecatedSince20
|
||||
|
||||
_ALLOW_REUSE_WARNING_MESSAGE = '`allow_reuse` is deprecated and will be ignored; it should no longer be necessary'
|
||||
|
||||
|
||||
if TYPE_CHECKING:
|
||||
|
||||
class _OnlyValueValidatorClsMethod(Protocol):
|
||||
def __call__(self, __cls: Any, __value: Any) -> Any:
|
||||
...
|
||||
|
||||
class _V1ValidatorWithValuesClsMethod(Protocol):
|
||||
def __call__(self, __cls: Any, __value: Any, values: dict[str, Any]) -> Any:
|
||||
...
|
||||
|
||||
class _V1ValidatorWithValuesKwOnlyClsMethod(Protocol):
|
||||
def __call__(self, __cls: Any, __value: Any, *, values: dict[str, Any]) -> Any:
|
||||
...
|
||||
|
||||
class _V1ValidatorWithKwargsClsMethod(Protocol):
|
||||
def __call__(self, __cls: Any, **kwargs: Any) -> Any:
|
||||
...
|
||||
|
||||
class _V1ValidatorWithValuesAndKwargsClsMethod(Protocol):
|
||||
def __call__(self, __cls: Any, values: dict[str, Any], **kwargs: Any) -> Any:
|
||||
...
|
||||
|
||||
class _V1RootValidatorClsMethod(Protocol):
|
||||
def __call__(
|
||||
self, __cls: Any, __values: _decorators_v1.RootValidatorValues
|
||||
) -> _decorators_v1.RootValidatorValues:
|
||||
...
|
||||
|
||||
V1Validator = Union[
|
||||
_OnlyValueValidatorClsMethod,
|
||||
_V1ValidatorWithValuesClsMethod,
|
||||
_V1ValidatorWithValuesKwOnlyClsMethod,
|
||||
_V1ValidatorWithKwargsClsMethod,
|
||||
_V1ValidatorWithValuesAndKwargsClsMethod,
|
||||
_decorators_v1.V1ValidatorWithValues,
|
||||
_decorators_v1.V1ValidatorWithValuesKwOnly,
|
||||
_decorators_v1.V1ValidatorWithKwargs,
|
||||
_decorators_v1.V1ValidatorWithValuesAndKwargs,
|
||||
]
|
||||
|
||||
V1RootValidator = Union[
|
||||
_V1RootValidatorClsMethod,
|
||||
_decorators_v1.V1RootValidatorFunction,
|
||||
]
|
||||
|
||||
_PartialClsOrStaticMethod: TypeAlias = Union[classmethod[Any, Any, Any], staticmethod[Any, Any], partialmethod[Any]]
|
||||
|
||||
# Allow either a V1 (assumed pre=False) or a V2 (assumed mode='after') validator
|
||||
# We lie to type checkers and say we return the same thing we get
|
||||
# but in reality we return a proxy object that _mostly_ behaves like the wrapped thing
|
||||
_V1ValidatorType = TypeVar('_V1ValidatorType', V1Validator, _PartialClsOrStaticMethod)
|
||||
_V1RootValidatorFunctionType = TypeVar(
|
||||
'_V1RootValidatorFunctionType',
|
||||
_decorators_v1.V1RootValidatorFunction,
|
||||
_V1RootValidatorClsMethod,
|
||||
_PartialClsOrStaticMethod,
|
||||
)
|
||||
else:
|
||||
# See PyCharm issues https://youtrack.jetbrains.com/issue/PY-21915
|
||||
# and https://youtrack.jetbrains.com/issue/PY-51428
|
||||
DeprecationWarning = PydanticDeprecatedSince20
|
||||
|
||||
|
||||
def validator(
|
||||
__field: str,
|
||||
*fields: str,
|
||||
pre: bool = False,
|
||||
each_item: bool = False,
|
||||
always: bool = False,
|
||||
check_fields: bool | None = None,
|
||||
allow_reuse: bool = False,
|
||||
) -> Callable[[_V1ValidatorType], _V1ValidatorType]:
|
||||
"""Decorate methods on the class indicating that they should be used to validate fields.
|
||||
|
||||
Args:
|
||||
__field (str): The first field the validator should be called on; this is separate
|
||||
from `fields` to ensure an error is raised if you don't pass at least one.
|
||||
*fields (str): Additional field(s) the validator should be called on.
|
||||
pre (bool, optional): Whether or not this validator should be called before the standard
|
||||
validators (else after). Defaults to False.
|
||||
each_item (bool, optional): For complex objects (sets, lists etc.) whether to validate
|
||||
individual elements rather than the whole object. Defaults to False.
|
||||
always (bool, optional): Whether this method and other validators should be called even if
|
||||
the value is missing. Defaults to False.
|
||||
check_fields (bool | None, optional): Whether to check that the fields actually exist on the model.
|
||||
Defaults to None.
|
||||
allow_reuse (bool, optional): Whether to track and raise an error if another validator refers to
|
||||
the decorated function. Defaults to False.
|
||||
|
||||
Returns:
|
||||
Callable: A decorator that can be used to decorate a
|
||||
function to be used as a validator.
|
||||
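A minimal sketch of the deprecated V1-style usage (decoration emits a deprecation warning; the output shown is indicative):

```py
from pydantic import BaseModel, validator


class Model(BaseModel):
    name: str

    @validator('name')
    def name_must_not_be_blank(cls, v: str) -> str:
        assert v.strip(), 'name must not be blank'
        return v


print(Model(name=' Samuel '))
#> name=' Samuel '
```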
"""
|
||||
if allow_reuse is True: # pragma: no cover
|
||||
warn(_ALLOW_REUSE_WARNING_MESSAGE, DeprecationWarning)
|
||||
fields = tuple((__field, *fields))
|
||||
if isinstance(fields[0], FunctionType):
|
||||
raise PydanticUserError(
|
||||
'`@validator` should be used with fields and keyword arguments, not bare. '
|
||||
"E.g. usage should be `@validator('<field_name>', ...)`",
|
||||
code='validator-no-fields',
|
||||
)
|
||||
elif not all(isinstance(field, str) for field in fields):
|
||||
raise PydanticUserError(
|
||||
'`@validator` fields should be passed as separate string args. '
|
||||
"E.g. usage should be `@validator('<field_name_1>', '<field_name_2>', ...)`",
|
||||
code='validator-invalid-fields',
|
||||
)
|
||||
|
||||
warn(
|
||||
'Pydantic V1 style `@validator` validators are deprecated.'
|
||||
' You should migrate to Pydantic V2 style `@field_validator` validators,'
|
||||
' see the migration guide for more details',
|
||||
DeprecationWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
|
||||
mode: Literal['before', 'after'] = 'before' if pre is True else 'after'
|
||||
|
||||
def dec(f: Any) -> _decorators.PydanticDescriptorProxy[Any]:
|
||||
if _decorators.is_instance_method_from_sig(f):
|
||||
raise PydanticUserError(
|
||||
'`@validator` cannot be applied to instance methods', code='validator-instance-method'
|
||||
)
|
||||
# auto apply the @classmethod decorator
|
||||
f = _decorators.ensure_classmethod_based_on_signature(f)
|
||||
wrap = _decorators_v1.make_generic_v1_field_validator
|
||||
validator_wrapper_info = _decorators.ValidatorDecoratorInfo(
|
||||
fields=fields,
|
||||
mode=mode,
|
||||
each_item=each_item,
|
||||
always=always,
|
||||
check_fields=check_fields,
|
||||
)
|
||||
return _decorators.PydanticDescriptorProxy(f, validator_wrapper_info, shim=wrap)
|
||||
|
||||
return dec # type: ignore[return-value]
|
||||
|
||||
|
||||
@overload
|
||||
def root_validator(
|
||||
*,
|
||||
# if you don't specify `pre` the default is `pre=False`
|
||||
# which means you need to specify `skip_on_failure=True`
|
||||
skip_on_failure: Literal[True],
|
||||
allow_reuse: bool = ...,
|
||||
) -> Callable[
|
||||
[_V1RootValidatorFunctionType],
|
||||
_V1RootValidatorFunctionType,
|
||||
]:
|
||||
...
|
||||
|
||||
|
||||
@overload
|
||||
def root_validator(
|
||||
*,
|
||||
# if you specify `pre=True` then you don't need to specify
|
||||
# `skip_on_failure`, in fact it is not allowed as an argument!
|
||||
pre: Literal[True],
|
||||
allow_reuse: bool = ...,
|
||||
) -> Callable[
|
||||
[_V1RootValidatorFunctionType],
|
||||
_V1RootValidatorFunctionType,
|
||||
]:
|
||||
...
|
||||
|
||||
|
||||
@overload
|
||||
def root_validator(
|
||||
*,
|
||||
# if you explicitly specify `pre=False` then you
|
||||
# MUST specify `skip_on_failure=True`
|
||||
pre: Literal[False],
|
||||
skip_on_failure: Literal[True],
|
||||
allow_reuse: bool = ...,
|
||||
) -> Callable[
|
||||
[_V1RootValidatorFunctionType],
|
||||
_V1RootValidatorFunctionType,
|
||||
]:
|
||||
...
|
||||
|
||||
|
||||
def root_validator(
|
||||
*__args,
|
||||
pre: bool = False,
|
||||
skip_on_failure: bool = False,
|
||||
allow_reuse: bool = False,
|
||||
) -> Any:
|
||||
"""Decorate methods on a model indicating that they should be used to validate (and perhaps
|
||||
modify) data either before or after standard model parsing/validation is performed.
|
||||
|
||||
Args:
|
||||
pre (bool, optional): Whether this validator should be called before the standard
|
||||
validators (else after). Defaults to False.
|
||||
skip_on_failure (bool, optional): Whether to stop validation and return as soon as a
|
||||
failure is encountered. Defaults to False.
|
||||
allow_reuse (bool, optional): Whether to track and raise an error if another validator
|
||||
refers to the decorated function. Defaults to False.
|
||||
|
||||
Returns:
|
||||
Any: A decorator that can be used to decorate a function to be used as a root_validator.
|
||||
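A minimal sketch of the deprecated V1-style usage (decoration emits a deprecation warning; `skip_on_failure=True` is required when `pre=False`, and the output shown is indicative):

```py
from pydantic import BaseModel, root_validator


class Window(BaseModel):
    width: int
    height: int

    @root_validator(skip_on_failure=True)
    def check_area(cls, values):
        assert values['width'] * values['height'] < 10_000, 'window is too large'
        return values


print(Window(width=100, height=50))
#> width=100 height=50
```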
"""
|
||||
warn(
|
||||
'Pydantic V1 style `@root_validator` validators are deprecated.'
|
||||
' You should migrate to Pydantic V2 style `@model_validator` validators,'
|
||||
' see the migration guide for more details',
|
||||
DeprecationWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
|
||||
if __args:
|
||||
# Ensure a nice error is raised if someone attempts to use the bare decorator
|
||||
return root_validator()(*__args) # type: ignore
|
||||
|
||||
if allow_reuse is True: # pragma: no cover
|
||||
warn(_ALLOW_REUSE_WARNING_MESSAGE, DeprecationWarning)
|
||||
mode: Literal['before', 'after'] = 'before' if pre is True else 'after'
|
||||
if pre is False and skip_on_failure is not True:
|
||||
raise PydanticUserError(
|
||||
'If you use `@root_validator` with pre=False (the default) you MUST specify `skip_on_failure=True`.'
|
||||
' Note that `@root_validator` is deprecated and should be replaced with `@model_validator`.',
|
||||
code='root-validator-pre-skip',
|
||||
)
|
||||
|
||||
wrap = partial(_decorators_v1.make_v1_generic_root_validator, pre=pre)
|
||||
|
||||
def dec(f: Callable[..., Any] | classmethod[Any, Any, Any] | staticmethod[Any, Any]) -> Any:
|
||||
if _decorators.is_instance_method_from_sig(f):
|
||||
raise TypeError('`@root_validator` cannot be applied to instance methods')
|
||||
# auto apply the @classmethod decorator
|
||||
res = _decorators.ensure_classmethod_based_on_signature(f)
|
||||
dec_info = _decorators.RootValidatorDecoratorInfo(mode=mode)
|
||||
return _decorators.PydanticDescriptorProxy(res, dec_info, shim=wrap)
|
||||
|
||||
return dec
|
||||
Some files were not shown because too many files have changed in this diff.