Iliyan Angelov
2025-12-01 06:50:10 +02:00
parent 91f51bc6fe
commit 62c1fe5951
4682 changed files with 544807 additions and 31208 deletions


@@ -0,0 +1,50 @@
from .main import BaseConfig as BaseConfig
from .main import PydanticSchemaGenerationError as PydanticSchemaGenerationError
from .main import RequiredParam as RequiredParam
from .main import Undefined as Undefined
from .main import UndefinedType as UndefinedType
from .main import Url as Url
from .main import Validator as Validator
from .main import _get_model_config as _get_model_config
from .main import _is_error_wrapper as _is_error_wrapper
from .main import _is_model_class as _is_model_class
from .main import _is_model_field as _is_model_field
from .main import _is_undefined as _is_undefined
from .main import _model_dump as _model_dump
from .main import _model_rebuild as _model_rebuild
from .main import copy_field_info as copy_field_info
from .main import create_body_model as create_body_model
from .main import evaluate_forwardref as evaluate_forwardref
from .main import get_annotation_from_field_info as get_annotation_from_field_info
from .main import get_cached_model_fields as get_cached_model_fields
from .main import get_compat_model_name_map as get_compat_model_name_map
from .main import get_definitions as get_definitions
from .main import get_missing_field_error as get_missing_field_error
from .main import get_schema_from_model_field as get_schema_from_model_field
from .main import is_bytes_field as is_bytes_field
from .main import is_bytes_sequence_field as is_bytes_sequence_field
from .main import is_scalar_field as is_scalar_field
from .main import is_scalar_sequence_field as is_scalar_sequence_field
from .main import is_sequence_field as is_sequence_field
from .main import serialize_sequence_value as serialize_sequence_value
from .main import (
with_info_plain_validator_function as with_info_plain_validator_function,
)
from .may_v1 import CoreSchema as CoreSchema
from .may_v1 import GetJsonSchemaHandler as GetJsonSchemaHandler
from .may_v1 import JsonSchemaValue as JsonSchemaValue
from .may_v1 import _normalize_errors as _normalize_errors
from .model_field import ModelField as ModelField
from .shared import PYDANTIC_V2 as PYDANTIC_V2
from .shared import PYDANTIC_VERSION_MINOR_TUPLE as PYDANTIC_VERSION_MINOR_TUPLE
from .shared import annotation_is_pydantic_v1 as annotation_is_pydantic_v1
from .shared import field_annotation_is_scalar as field_annotation_is_scalar
from .shared import (
is_uploadfile_or_nonable_uploadfile_annotation as is_uploadfile_or_nonable_uploadfile_annotation,
)
from .shared import (
is_uploadfile_sequence_annotation as is_uploadfile_sequence_annotation,
)
from .shared import lenient_issubclass as lenient_issubclass
from .shared import sequence_types as sequence_types
from .shared import value_is_sequence as value_is_sequence
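
The redundant `import X as X` aliasing above is the PEP 484 convention that marks a name as an explicit re-export, so strict type checkers (e.g. mypy with --no-implicit-reexport) allow importing it from this module. A minimal sketch of the same idiom for a hypothetical package:

# compat/__init__.py (hypothetical package): re-export implementation names so
# that "from compat import parse" type-checks; a bare "from ._impl import parse"
# would be treated as a private, implicit re-export by strict checkers.
from ._impl import parse as parse
from ._impl import serialize as serialize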


@@ -0,0 +1,362 @@
import sys
from functools import lru_cache
from typing import (
Any,
Dict,
List,
Sequence,
Tuple,
Type,
)
from fastapi._compat import may_v1
from fastapi._compat.shared import PYDANTIC_V2, lenient_issubclass
from fastapi.types import ModelNameMap
from pydantic import BaseModel
from typing_extensions import Literal
from .model_field import ModelField
if PYDANTIC_V2:
from .v2 import BaseConfig as BaseConfig
from .v2 import FieldInfo as FieldInfo
from .v2 import PydanticSchemaGenerationError as PydanticSchemaGenerationError
from .v2 import RequiredParam as RequiredParam
from .v2 import Undefined as Undefined
from .v2 import UndefinedType as UndefinedType
from .v2 import Url as Url
from .v2 import Validator as Validator
from .v2 import evaluate_forwardref as evaluate_forwardref
from .v2 import get_missing_field_error as get_missing_field_error
from .v2 import (
with_info_plain_validator_function as with_info_plain_validator_function,
)
else:
from .v1 import BaseConfig as BaseConfig # type: ignore[assignment]
from .v1 import FieldInfo as FieldInfo
from .v1 import ( # type: ignore[assignment]
PydanticSchemaGenerationError as PydanticSchemaGenerationError,
)
from .v1 import RequiredParam as RequiredParam
from .v1 import Undefined as Undefined
from .v1 import UndefinedType as UndefinedType
from .v1 import Url as Url # type: ignore[assignment]
from .v1 import Validator as Validator
from .v1 import evaluate_forwardref as evaluate_forwardref
from .v1 import get_missing_field_error as get_missing_field_error
from .v1 import ( # type: ignore[assignment]
with_info_plain_validator_function as with_info_plain_validator_function,
)
@lru_cache
def get_cached_model_fields(model: Type[BaseModel]) -> List[ModelField]:
if lenient_issubclass(model, may_v1.BaseModel):
from fastapi._compat import v1
return v1.get_model_fields(model)
else:
from . import v2
return v2.get_model_fields(model) # type: ignore[return-value]
def _is_undefined(value: object) -> bool:
if isinstance(value, may_v1.UndefinedType):
return True
elif PYDANTIC_V2:
from . import v2
return isinstance(value, v2.UndefinedType)
return False
def _get_model_config(model: BaseModel) -> Any:
if isinstance(model, may_v1.BaseModel):
from fastapi._compat import v1
return v1._get_model_config(model)
elif PYDANTIC_V2:
from . import v2
return v2._get_model_config(model)
def _model_dump(
model: BaseModel, mode: Literal["json", "python"] = "json", **kwargs: Any
) -> Any:
if isinstance(model, may_v1.BaseModel):
from fastapi._compat import v1
return v1._model_dump(model, mode=mode, **kwargs)
elif PYDANTIC_V2:
from . import v2
return v2._model_dump(model, mode=mode, **kwargs)
def _is_error_wrapper(exc: Exception) -> bool:
if isinstance(exc, may_v1.ErrorWrapper):
return True
elif PYDANTIC_V2:
from . import v2
return isinstance(exc, v2.ErrorWrapper)
return False
def copy_field_info(*, field_info: FieldInfo, annotation: Any) -> FieldInfo:
if isinstance(field_info, may_v1.FieldInfo):
from fastapi._compat import v1
return v1.copy_field_info(field_info=field_info, annotation=annotation)
else:
assert PYDANTIC_V2
from . import v2
return v2.copy_field_info(field_info=field_info, annotation=annotation)
def create_body_model(
*, fields: Sequence[ModelField], model_name: str
) -> Type[BaseModel]:
if fields and isinstance(fields[0], may_v1.ModelField):
from fastapi._compat import v1
return v1.create_body_model(fields=fields, model_name=model_name)
else:
assert PYDANTIC_V2
from . import v2
return v2.create_body_model(fields=fields, model_name=model_name) # type: ignore[arg-type]
def get_annotation_from_field_info(
annotation: Any, field_info: FieldInfo, field_name: str
) -> Any:
if isinstance(field_info, may_v1.FieldInfo):
from fastapi._compat import v1
return v1.get_annotation_from_field_info(
annotation=annotation, field_info=field_info, field_name=field_name
)
else:
assert PYDANTIC_V2
from . import v2
return v2.get_annotation_from_field_info(
annotation=annotation, field_info=field_info, field_name=field_name
)
def is_bytes_field(field: ModelField) -> bool:
if isinstance(field, may_v1.ModelField):
from fastapi._compat import v1
return v1.is_bytes_field(field)
else:
assert PYDANTIC_V2
from . import v2
return v2.is_bytes_field(field) # type: ignore[arg-type]
def is_bytes_sequence_field(field: ModelField) -> bool:
if isinstance(field, may_v1.ModelField):
from fastapi._compat import v1
return v1.is_bytes_sequence_field(field)
else:
assert PYDANTIC_V2
from . import v2
return v2.is_bytes_sequence_field(field) # type: ignore[arg-type]
def is_scalar_field(field: ModelField) -> bool:
if isinstance(field, may_v1.ModelField):
from fastapi._compat import v1
return v1.is_scalar_field(field)
else:
assert PYDANTIC_V2
from . import v2
return v2.is_scalar_field(field) # type: ignore[arg-type]
def is_scalar_sequence_field(field: ModelField) -> bool:
if isinstance(field, may_v1.ModelField):
from fastapi._compat import v1
return v1.is_scalar_sequence_field(field)
else:
assert PYDANTIC_V2
from . import v2
return v2.is_scalar_sequence_field(field) # type: ignore[arg-type]
def is_sequence_field(field: ModelField) -> bool:
if isinstance(field, may_v1.ModelField):
from fastapi._compat import v1
return v1.is_sequence_field(field)
else:
assert PYDANTIC_V2
from . import v2
return v2.is_sequence_field(field) # type: ignore[arg-type]
def serialize_sequence_value(*, field: ModelField, value: Any) -> Sequence[Any]:
if isinstance(field, may_v1.ModelField):
from fastapi._compat import v1
return v1.serialize_sequence_value(field=field, value=value)
else:
assert PYDANTIC_V2
from . import v2
return v2.serialize_sequence_value(field=field, value=value) # type: ignore[arg-type]
def _model_rebuild(model: Type[BaseModel]) -> None:
if lenient_issubclass(model, may_v1.BaseModel):
from fastapi._compat import v1
v1._model_rebuild(model)
elif PYDANTIC_V2:
from . import v2
v2._model_rebuild(model)
def get_compat_model_name_map(fields: List[ModelField]) -> ModelNameMap:
v1_model_fields = [
field for field in fields if isinstance(field, may_v1.ModelField)
]
if v1_model_fields:
from fastapi._compat import v1
v1_flat_models = v1.get_flat_models_from_fields(
v1_model_fields, known_models=set()
)
all_flat_models = v1_flat_models
else:
all_flat_models = set()
if PYDANTIC_V2:
from . import v2
v2_model_fields = [
field for field in fields if isinstance(field, v2.ModelField)
]
v2_flat_models = v2.get_flat_models_from_fields(
v2_model_fields, known_models=set()
)
all_flat_models = all_flat_models.union(v2_flat_models)
model_name_map = v2.get_model_name_map(all_flat_models)
return model_name_map
from fastapi._compat import v1
model_name_map = v1.get_model_name_map(all_flat_models)
return model_name_map
def get_definitions(
*,
fields: List[ModelField],
model_name_map: ModelNameMap,
separate_input_output_schemas: bool = True,
) -> Tuple[
Dict[
Tuple[ModelField, Literal["validation", "serialization"]],
may_v1.JsonSchemaValue,
],
Dict[str, Dict[str, Any]],
]:
if sys.version_info < (3, 14):
v1_fields = [field for field in fields if isinstance(field, may_v1.ModelField)]
v1_field_maps, v1_definitions = may_v1.get_definitions(
fields=v1_fields,
model_name_map=model_name_map,
separate_input_output_schemas=separate_input_output_schemas,
)
if not PYDANTIC_V2:
return v1_field_maps, v1_definitions
else:
from . import v2
v2_fields = [field for field in fields if isinstance(field, v2.ModelField)]
v2_field_maps, v2_definitions = v2.get_definitions(
fields=v2_fields,
model_name_map=model_name_map,
separate_input_output_schemas=separate_input_output_schemas,
)
all_definitions = {**v1_definitions, **v2_definitions}
all_field_maps = {**v1_field_maps, **v2_field_maps}
return all_field_maps, all_definitions
    # Pydantic v1 is not supported on Python 3.14 and later
else:
from . import v2
v2_fields = [field for field in fields if isinstance(field, v2.ModelField)]
v2_field_maps, v2_definitions = v2.get_definitions(
fields=v2_fields,
model_name_map=model_name_map,
separate_input_output_schemas=separate_input_output_schemas,
)
return v2_field_maps, v2_definitions
def get_schema_from_model_field(
*,
field: ModelField,
model_name_map: ModelNameMap,
field_mapping: Dict[
Tuple[ModelField, Literal["validation", "serialization"]],
may_v1.JsonSchemaValue,
],
separate_input_output_schemas: bool = True,
) -> Dict[str, Any]:
if isinstance(field, may_v1.ModelField):
from fastapi._compat import v1
return v1.get_schema_from_model_field(
field=field,
model_name_map=model_name_map,
field_mapping=field_mapping,
separate_input_output_schemas=separate_input_output_schemas,
)
else:
assert PYDANTIC_V2
from . import v2
return v2.get_schema_from_model_field(
field=field, # type: ignore[arg-type]
model_name_map=model_name_map,
field_mapping=field_mapping, # type: ignore[arg-type]
separate_input_output_schemas=separate_input_output_schemas,
)
def _is_model_field(value: Any) -> bool:
if isinstance(value, may_v1.ModelField):
return True
elif PYDANTIC_V2:
from . import v2
return isinstance(value, v2.ModelField)
return False
def _is_model_class(value: Any) -> bool:
if lenient_issubclass(value, may_v1.BaseModel):
return True
elif PYDANTIC_V2:
from . import v2
return lenient_issubclass(value, v2.BaseModel) # type: ignore[attr-defined]
return False
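
Every wrapper above follows the same dispatch shape: probe the value against the Pydantic v1 compat types first, then fall back to the v2 implementation, so v1 and v2 models can coexist in one app. A standalone sketch of that pattern, using hypothetical V1Field/V2Field stand-ins rather than FastAPI's classes:

from typing import Any

class V1Field:  # stand-in for may_v1.ModelField
    pass

class V2Field:  # stand-in for the v2 ModelField
    pass

def is_scalar(field: Any) -> bool:
    # Dispatch on the concrete runtime type, as main.py does above; the v1
    # branch would delegate to v1.is_scalar_field, the fallback to v2's.
    if isinstance(field, V1Field):
        return True
    return False

print(is_scalar(V1Field()))  # True
print(is_scalar(V2Field()))  # False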


@@ -0,0 +1,123 @@
import sys
from typing import Any, Dict, List, Literal, Sequence, Tuple, Type, Union
from fastapi.types import ModelNameMap
if sys.version_info >= (3, 14):
class AnyUrl:
pass
class BaseConfig:
pass
class BaseModel:
pass
class Color:
pass
class CoreSchema:
pass
class ErrorWrapper:
pass
class FieldInfo:
pass
class GetJsonSchemaHandler:
pass
class JsonSchemaValue:
pass
class ModelField:
pass
class NameEmail:
pass
class RequiredParam:
pass
class SecretBytes:
pass
class SecretStr:
pass
class Undefined:
pass
class UndefinedType:
pass
class Url:
pass
from .v2 import ValidationError, create_model
def get_definitions(
*,
fields: List[ModelField],
model_name_map: ModelNameMap,
separate_input_output_schemas: bool = True,
) -> Tuple[
Dict[
Tuple[ModelField, Literal["validation", "serialization"]], JsonSchemaValue
],
Dict[str, Dict[str, Any]],
]:
return {}, {} # pragma: no cover
else:
from .v1 import AnyUrl as AnyUrl
from .v1 import BaseConfig as BaseConfig
from .v1 import BaseModel as BaseModel
from .v1 import Color as Color
from .v1 import CoreSchema as CoreSchema
from .v1 import ErrorWrapper as ErrorWrapper
from .v1 import FieldInfo as FieldInfo
from .v1 import GetJsonSchemaHandler as GetJsonSchemaHandler
from .v1 import JsonSchemaValue as JsonSchemaValue
from .v1 import ModelField as ModelField
from .v1 import NameEmail as NameEmail
from .v1 import RequiredParam as RequiredParam
from .v1 import SecretBytes as SecretBytes
from .v1 import SecretStr as SecretStr
from .v1 import Undefined as Undefined
from .v1 import UndefinedType as UndefinedType
from .v1 import Url as Url
from .v1 import ValidationError, create_model
from .v1 import get_definitions as get_definitions
RequestErrorModel: Type[BaseModel] = create_model("Request")
def _normalize_errors(errors: Sequence[Any]) -> List[Dict[str, Any]]:
use_errors: List[Any] = []
for error in errors:
if isinstance(error, ErrorWrapper):
new_errors = ValidationError( # type: ignore[call-arg]
errors=[error], model=RequestErrorModel
).errors()
use_errors.extend(new_errors)
elif isinstance(error, list):
use_errors.extend(_normalize_errors(error))
else:
use_errors.append(error)
return use_errors
def _regenerate_error_with_loc(
*, errors: Sequence[Any], loc_prefix: Tuple[Union[str, int], ...]
) -> List[Dict[str, Any]]:
updated_loc_errors: List[Any] = [
{**err, "loc": loc_prefix + err.get("loc", ())}
for err in _normalize_errors(errors)
]
return updated_loc_errors
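
The loc-prefixing above is plain dict rewriting; a standalone sketch of the expected effect on one already-normalized error (sample data, not real FastAPI output):

# Prefix a location onto a normalized error, as _regenerate_error_with_loc does.
err = {"type": "missing", "loc": ("name",), "msg": "field required"}
loc_prefix = ("body", "item")
updated = {**err, "loc": loc_prefix + err.get("loc", ())}
print(updated["loc"])  # ('body', 'item', 'name')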


@@ -0,0 +1,53 @@
from typing import (
Any,
Dict,
List,
Tuple,
Union,
)
from fastapi.types import IncEx
from pydantic.fields import FieldInfo
from typing_extensions import Literal, Protocol
class ModelField(Protocol):
field_info: "FieldInfo"
name: str
mode: Literal["validation", "serialization"] = "validation"
_version: Literal["v1", "v2"] = "v1"
@property
def alias(self) -> str: ...
@property
def required(self) -> bool: ...
@property
def default(self) -> Any: ...
@property
def type_(self) -> Any: ...
def get_default(self) -> Any: ...
def validate(
self,
value: Any,
values: Dict[str, Any] = {}, # noqa: B006
*,
loc: Tuple[Union[int, str], ...] = (),
) -> Tuple[Any, Union[List[Dict[str, Any]], None]]: ...
def serialize(
self,
value: Any,
*,
mode: Literal["json", "python"] = "json",
include: Union[IncEx, None] = None,
exclude: Union[IncEx, None] = None,
by_alias: bool = True,
exclude_unset: bool = False,
exclude_defaults: bool = False,
exclude_none: bool = False,
) -> Any: ...
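
Because ModelField is a typing.Protocol, the concrete v1 and v2 field classes satisfy it structurally, without inheriting from it. A minimal sketch of that mechanism with a hypothetical protocol and implementation:

from typing import Protocol

class Named(Protocol):
    name: str

    @property
    def alias(self) -> str: ...

class PlainField:
    # No inheritance from Named: matching attributes are enough for a static
    # type checker to accept PlainField wherever Named is expected.
    def __init__(self, name: str) -> None:
        self.name = name

    @property
    def alias(self) -> str:
        return self.name

def describe(field: Named) -> str:
    return field.alias

print(describe(PlainField("item_id")))  # item_id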


@@ -0,0 +1,211 @@
import sys
import types
import typing
from collections import deque
from dataclasses import is_dataclass
from typing import (
Any,
Deque,
FrozenSet,
List,
Mapping,
Sequence,
Set,
Tuple,
Type,
Union,
)
from fastapi._compat import may_v1
from fastapi.types import UnionType
from pydantic import BaseModel
from pydantic.version import VERSION as PYDANTIC_VERSION
from starlette.datastructures import UploadFile
from typing_extensions import Annotated, get_args, get_origin
# Copied from Pydantic v2, compatible with v1
if sys.version_info < (3, 9):
    # Pydantic no longer supports Python 3.8, so this may be incorrect, but the
    # code that uses it is never reached in this codebase: it is part of a copy
    # of Pydantic's lenient_issubclass, kept only for compatibility with v1
    # TODO: remove when dropping support for Python 3.8
WithArgsTypes: Tuple[Any, ...] = ()
elif sys.version_info < (3, 10):
WithArgsTypes: tuple[Any, ...] = (typing._GenericAlias, types.GenericAlias) # type: ignore[attr-defined]
else:
WithArgsTypes: tuple[Any, ...] = (
typing._GenericAlias, # type: ignore[attr-defined]
types.GenericAlias,
types.UnionType,
) # pyright: ignore[reportAttributeAccessIssue]
PYDANTIC_VERSION_MINOR_TUPLE = tuple(int(x) for x in PYDANTIC_VERSION.split(".")[:2])
PYDANTIC_V2 = PYDANTIC_VERSION_MINOR_TUPLE[0] == 2
sequence_annotation_to_type = {
Sequence: list,
List: list,
list: list,
Tuple: tuple,
tuple: tuple,
Set: set,
set: set,
FrozenSet: frozenset,
frozenset: frozenset,
Deque: deque,
deque: deque,
}
sequence_types = tuple(sequence_annotation_to_type.keys())
Url: Type[Any]
# Copied from Pydantic v2, compatible with v1
def lenient_issubclass(
cls: Any, class_or_tuple: Union[Type[Any], Tuple[Type[Any], ...], None]
) -> bool:
try:
return isinstance(cls, type) and issubclass(cls, class_or_tuple) # type: ignore[arg-type]
except TypeError: # pragma: no cover
if isinstance(cls, WithArgsTypes):
return False
raise # pragma: no cover
def _annotation_is_sequence(annotation: Union[Type[Any], None]) -> bool:
if lenient_issubclass(annotation, (str, bytes)):
return False
return lenient_issubclass(annotation, sequence_types) # type: ignore[arg-type]
def field_annotation_is_sequence(annotation: Union[Type[Any], None]) -> bool:
origin = get_origin(annotation)
if origin is Union or origin is UnionType:
for arg in get_args(annotation):
if field_annotation_is_sequence(arg):
return True
return False
return _annotation_is_sequence(annotation) or _annotation_is_sequence(
get_origin(annotation)
)
def value_is_sequence(value: Any) -> bool:
return isinstance(value, sequence_types) and not isinstance(value, (str, bytes)) # type: ignore[arg-type]
def _annotation_is_complex(annotation: Union[Type[Any], None]) -> bool:
return (
lenient_issubclass(
annotation, (BaseModel, may_v1.BaseModel, Mapping, UploadFile)
)
or _annotation_is_sequence(annotation)
or is_dataclass(annotation)
)
def field_annotation_is_complex(annotation: Union[Type[Any], None]) -> bool:
origin = get_origin(annotation)
if origin is Union or origin is UnionType:
return any(field_annotation_is_complex(arg) for arg in get_args(annotation))
if origin is Annotated:
return field_annotation_is_complex(get_args(annotation)[0])
return (
_annotation_is_complex(annotation)
or _annotation_is_complex(origin)
or hasattr(origin, "__pydantic_core_schema__")
or hasattr(origin, "__get_pydantic_core_schema__")
)
def field_annotation_is_scalar(annotation: Any) -> bool:
# handle Ellipsis here to make tuple[int, ...] work nicely
return annotation is Ellipsis or not field_annotation_is_complex(annotation)
def field_annotation_is_scalar_sequence(annotation: Union[Type[Any], None]) -> bool:
origin = get_origin(annotation)
if origin is Union or origin is UnionType:
at_least_one_scalar_sequence = False
for arg in get_args(annotation):
if field_annotation_is_scalar_sequence(arg):
at_least_one_scalar_sequence = True
continue
elif not field_annotation_is_scalar(arg):
return False
return at_least_one_scalar_sequence
return field_annotation_is_sequence(annotation) and all(
field_annotation_is_scalar(sub_annotation)
for sub_annotation in get_args(annotation)
)
def is_bytes_or_nonable_bytes_annotation(annotation: Any) -> bool:
if lenient_issubclass(annotation, bytes):
return True
origin = get_origin(annotation)
if origin is Union or origin is UnionType:
for arg in get_args(annotation):
if lenient_issubclass(arg, bytes):
return True
return False
def is_uploadfile_or_nonable_uploadfile_annotation(annotation: Any) -> bool:
if lenient_issubclass(annotation, UploadFile):
return True
origin = get_origin(annotation)
if origin is Union or origin is UnionType:
for arg in get_args(annotation):
if lenient_issubclass(arg, UploadFile):
return True
return False
def is_bytes_sequence_annotation(annotation: Any) -> bool:
origin = get_origin(annotation)
if origin is Union or origin is UnionType:
at_least_one = False
for arg in get_args(annotation):
if is_bytes_sequence_annotation(arg):
at_least_one = True
continue
return at_least_one
return field_annotation_is_sequence(annotation) and all(
is_bytes_or_nonable_bytes_annotation(sub_annotation)
for sub_annotation in get_args(annotation)
)
def is_uploadfile_sequence_annotation(annotation: Any) -> bool:
origin = get_origin(annotation)
if origin is Union or origin is UnionType:
at_least_one = False
for arg in get_args(annotation):
if is_uploadfile_sequence_annotation(arg):
at_least_one = True
continue
return at_least_one
return field_annotation_is_sequence(annotation) and all(
is_uploadfile_or_nonable_uploadfile_annotation(sub_annotation)
for sub_annotation in get_args(annotation)
)
def annotation_is_pydantic_v1(annotation: Any) -> bool:
if lenient_issubclass(annotation, may_v1.BaseModel):
return True
origin = get_origin(annotation)
if origin is Union or origin is UnionType:
for arg in get_args(annotation):
if lenient_issubclass(arg, may_v1.BaseModel):
return True
if field_annotation_is_sequence(annotation):
for sub_annotation in get_args(annotation):
if annotation_is_pydantic_v1(sub_annotation):
return True
return False
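
All of the helpers above share one move: unwrap Union (and Annotated) annotations with get_origin/get_args and recurse into the arguments. A standalone sketch of that recursion for a simplified "is sequence" check (not FastAPI's exact predicate, which also handles types.UnionType from the X | Y syntax):

from typing import List, Optional, Union, get_args, get_origin

CONTAINERS = (list, set, frozenset, tuple)

def anno_is_sequence(annotation) -> bool:
    origin = get_origin(annotation)
    if origin is Union:
        # Optional[List[int]] is Union[List[int], None]: check each arm.
        return any(anno_is_sequence(arg) for arg in get_args(annotation))
    return origin in CONTAINERS or annotation in CONTAINERS

print(anno_is_sequence(List[int]))            # True
print(anno_is_sequence(Optional[List[int]]))  # True
print(anno_is_sequence(int))                  # False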


@@ -0,0 +1,312 @@
from copy import copy
from dataclasses import dataclass, is_dataclass
from enum import Enum
from typing import (
Any,
Callable,
Dict,
List,
Sequence,
Set,
Tuple,
Type,
Union,
)
from fastapi._compat import shared
from fastapi.openapi.constants import REF_PREFIX as REF_PREFIX
from fastapi.types import ModelNameMap
from pydantic.version import VERSION as PYDANTIC_VERSION
from typing_extensions import Literal
PYDANTIC_VERSION_MINOR_TUPLE = tuple(int(x) for x in PYDANTIC_VERSION.split(".")[:2])
PYDANTIC_V2 = PYDANTIC_VERSION_MINOR_TUPLE[0] == 2
# Keeping old "Required" functionality from Pydantic V1, without
# shadowing typing.Required.
RequiredParam: Any = Ellipsis
if not PYDANTIC_V2:
from pydantic import BaseConfig as BaseConfig
from pydantic import BaseModel as BaseModel
from pydantic import ValidationError as ValidationError
from pydantic import create_model as create_model
from pydantic.class_validators import Validator as Validator
from pydantic.color import Color as Color
from pydantic.error_wrappers import ErrorWrapper as ErrorWrapper
from pydantic.errors import MissingError
from pydantic.fields import ( # type: ignore[attr-defined]
SHAPE_FROZENSET,
SHAPE_LIST,
SHAPE_SEQUENCE,
SHAPE_SET,
SHAPE_SINGLETON,
SHAPE_TUPLE,
SHAPE_TUPLE_ELLIPSIS,
)
from pydantic.fields import FieldInfo as FieldInfo
from pydantic.fields import ModelField as ModelField # type: ignore[attr-defined]
from pydantic.fields import Undefined as Undefined # type: ignore[attr-defined]
from pydantic.fields import ( # type: ignore[attr-defined]
UndefinedType as UndefinedType,
)
from pydantic.networks import AnyUrl as AnyUrl
from pydantic.networks import NameEmail as NameEmail
from pydantic.schema import TypeModelSet as TypeModelSet
from pydantic.schema import (
field_schema,
model_process_schema,
)
from pydantic.schema import (
get_annotation_from_field_info as get_annotation_from_field_info,
)
from pydantic.schema import get_flat_models_from_field as get_flat_models_from_field
from pydantic.schema import (
get_flat_models_from_fields as get_flat_models_from_fields,
)
from pydantic.schema import get_model_name_map as get_model_name_map
from pydantic.types import SecretBytes as SecretBytes
from pydantic.types import SecretStr as SecretStr
from pydantic.typing import evaluate_forwardref as evaluate_forwardref
from pydantic.utils import lenient_issubclass as lenient_issubclass
else:
from pydantic.v1 import BaseConfig as BaseConfig # type: ignore[assignment]
from pydantic.v1 import BaseModel as BaseModel # type: ignore[assignment]
from pydantic.v1 import ( # type: ignore[assignment]
ValidationError as ValidationError,
)
from pydantic.v1 import create_model as create_model # type: ignore[no-redef]
from pydantic.v1.class_validators import Validator as Validator
from pydantic.v1.color import Color as Color # type: ignore[assignment]
from pydantic.v1.error_wrappers import ErrorWrapper as ErrorWrapper
from pydantic.v1.errors import MissingError
from pydantic.v1.fields import (
SHAPE_FROZENSET,
SHAPE_LIST,
SHAPE_SEQUENCE,
SHAPE_SET,
SHAPE_SINGLETON,
SHAPE_TUPLE,
SHAPE_TUPLE_ELLIPSIS,
)
from pydantic.v1.fields import FieldInfo as FieldInfo # type: ignore[assignment]
from pydantic.v1.fields import ModelField as ModelField
from pydantic.v1.fields import Undefined as Undefined
from pydantic.v1.fields import UndefinedType as UndefinedType
from pydantic.v1.networks import AnyUrl as AnyUrl
from pydantic.v1.networks import ( # type: ignore[assignment]
NameEmail as NameEmail,
)
from pydantic.v1.schema import TypeModelSet as TypeModelSet
from pydantic.v1.schema import (
field_schema,
model_process_schema,
)
from pydantic.v1.schema import (
get_annotation_from_field_info as get_annotation_from_field_info,
)
from pydantic.v1.schema import (
get_flat_models_from_field as get_flat_models_from_field,
)
from pydantic.v1.schema import (
get_flat_models_from_fields as get_flat_models_from_fields,
)
from pydantic.v1.schema import get_model_name_map as get_model_name_map
from pydantic.v1.types import ( # type: ignore[assignment]
SecretBytes as SecretBytes,
)
from pydantic.v1.types import ( # type: ignore[assignment]
SecretStr as SecretStr,
)
from pydantic.v1.typing import evaluate_forwardref as evaluate_forwardref
from pydantic.v1.utils import lenient_issubclass as lenient_issubclass
GetJsonSchemaHandler = Any
JsonSchemaValue = Dict[str, Any]
CoreSchema = Any
Url = AnyUrl
sequence_shapes = {
SHAPE_LIST,
SHAPE_SET,
SHAPE_FROZENSET,
SHAPE_TUPLE,
SHAPE_SEQUENCE,
SHAPE_TUPLE_ELLIPSIS,
}
sequence_shape_to_type = {
SHAPE_LIST: list,
SHAPE_SET: set,
SHAPE_TUPLE: tuple,
SHAPE_SEQUENCE: list,
SHAPE_TUPLE_ELLIPSIS: list,
}
@dataclass
class GenerateJsonSchema:
ref_template: str
class PydanticSchemaGenerationError(Exception):
pass
RequestErrorModel: Type[BaseModel] = create_model("Request")
def with_info_plain_validator_function(
function: Callable[..., Any],
*,
ref: Union[str, None] = None,
metadata: Any = None,
serialization: Any = None,
) -> Any:
return {}
def get_model_definitions(
*,
flat_models: Set[Union[Type[BaseModel], Type[Enum]]],
model_name_map: Dict[Union[Type[BaseModel], Type[Enum]], str],
) -> Dict[str, Any]:
definitions: Dict[str, Dict[str, Any]] = {}
for model in flat_models:
m_schema, m_definitions, m_nested_models = model_process_schema(
model, model_name_map=model_name_map, ref_prefix=REF_PREFIX
)
definitions.update(m_definitions)
model_name = model_name_map[model]
definitions[model_name] = m_schema
for m_schema in definitions.values():
if "description" in m_schema:
m_schema["description"] = m_schema["description"].split("\f")[0]
return definitions
def is_pv1_scalar_field(field: ModelField) -> bool:
from fastapi import params
field_info = field.field_info
if not (
field.shape == SHAPE_SINGLETON
and not lenient_issubclass(field.type_, BaseModel)
and not lenient_issubclass(field.type_, dict)
and not shared.field_annotation_is_sequence(field.type_)
and not is_dataclass(field.type_)
and not isinstance(field_info, params.Body)
):
return False
if field.sub_fields:
if not all(is_pv1_scalar_field(f) for f in field.sub_fields):
return False
return True
def is_pv1_scalar_sequence_field(field: ModelField) -> bool:
if (field.shape in sequence_shapes) and not lenient_issubclass(
field.type_, BaseModel
):
if field.sub_fields is not None:
for sub_field in field.sub_fields:
if not is_pv1_scalar_field(sub_field):
return False
return True
if shared._annotation_is_sequence(field.type_):
return True
return False
def _model_rebuild(model: Type[BaseModel]) -> None:
model.update_forward_refs()
def _model_dump(
model: BaseModel, mode: Literal["json", "python"] = "json", **kwargs: Any
) -> Any:
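    # Pydantic v1's .dict() has no "mode" parameter, so "mode" is accepted here
    # only to mirror the v2 signature and is intentionally ignored.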
return model.dict(**kwargs)
def _get_model_config(model: BaseModel) -> Any:
return model.__config__ # type: ignore[attr-defined]
def get_schema_from_model_field(
*,
field: ModelField,
model_name_map: ModelNameMap,
field_mapping: Dict[
Tuple[ModelField, Literal["validation", "serialization"]], JsonSchemaValue
],
separate_input_output_schemas: bool = True,
) -> Dict[str, Any]:
return field_schema( # type: ignore[no-any-return]
field, model_name_map=model_name_map, ref_prefix=REF_PREFIX
)[0]
# def get_compat_model_name_map(fields: List[ModelField]) -> ModelNameMap:
# models = get_flat_models_from_fields(fields, known_models=set())
# return get_model_name_map(models) # type: ignore[no-any-return]
def get_definitions(
*,
fields: List[ModelField],
model_name_map: ModelNameMap,
separate_input_output_schemas: bool = True,
) -> Tuple[
Dict[Tuple[ModelField, Literal["validation", "serialization"]], JsonSchemaValue],
Dict[str, Dict[str, Any]],
]:
models = get_flat_models_from_fields(fields, known_models=set())
return {}, get_model_definitions(flat_models=models, model_name_map=model_name_map)
def is_scalar_field(field: ModelField) -> bool:
return is_pv1_scalar_field(field)
def is_sequence_field(field: ModelField) -> bool:
return field.shape in sequence_shapes or shared._annotation_is_sequence(field.type_)
def is_scalar_sequence_field(field: ModelField) -> bool:
return is_pv1_scalar_sequence_field(field)
def is_bytes_field(field: ModelField) -> bool:
return lenient_issubclass(field.type_, bytes) # type: ignore[no-any-return]
def is_bytes_sequence_field(field: ModelField) -> bool:
return field.shape in sequence_shapes and lenient_issubclass(field.type_, bytes)
def copy_field_info(*, field_info: FieldInfo, annotation: Any) -> FieldInfo:
return copy(field_info)
def serialize_sequence_value(*, field: ModelField, value: Any) -> Sequence[Any]:
return sequence_shape_to_type[field.shape](value) # type: ignore[no-any-return]
def get_missing_field_error(loc: Tuple[str, ...]) -> Dict[str, Any]:
missing_field_error = ErrorWrapper(MissingError(), loc=loc)
new_error = ValidationError([missing_field_error], RequestErrorModel)
return new_error.errors()[0] # type: ignore[return-value]
def create_body_model(
*, fields: Sequence[ModelField], model_name: str
) -> Type[BaseModel]:
BodyModel = create_model(model_name)
for f in fields:
BodyModel.__fields__[f.name] = f # type: ignore[index]
return BodyModel
def get_model_fields(model: Type[BaseModel]) -> List[ModelField]:
return list(model.__fields__.values()) # type: ignore[attr-defined]
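
The sequence_shape_to_type table above rebuilds the declared container from a validated value; a standalone sketch of that table-driven coercion with made-up shape constants (not Pydantic's real SHAPE_* values):

SHAPE_LIST, SHAPE_SET, SHAPE_TUPLE = 1, 2, 3
shape_to_type = {SHAPE_LIST: list, SHAPE_SET: set, SHAPE_TUPLE: tuple}

def coerce(shape: int, value):
    # Rebuild the declared container type from any iterable, as
    # serialize_sequence_value does with field.shape above.
    return shape_to_type[shape](value)

print(coerce(SHAPE_TUPLE, [1, 2, 3]))  # (1, 2, 3)
print(coerce(SHAPE_SET, [1, 1, 2]))    # {1, 2}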


@@ -0,0 +1,479 @@
import re
import warnings
from copy import copy, deepcopy
from dataclasses import dataclass
from enum import Enum
from typing import (
Any,
Dict,
List,
Sequence,
Set,
Tuple,
Type,
Union,
cast,
)
from fastapi._compat import may_v1, shared
from fastapi.openapi.constants import REF_TEMPLATE
from fastapi.types import IncEx, ModelNameMap
from pydantic import BaseModel, TypeAdapter, create_model
from pydantic import PydanticSchemaGenerationError as PydanticSchemaGenerationError
from pydantic import PydanticUndefinedAnnotation as PydanticUndefinedAnnotation
from pydantic import ValidationError as ValidationError
from pydantic._internal._schema_generation_shared import ( # type: ignore[attr-defined]
GetJsonSchemaHandler as GetJsonSchemaHandler,
)
from pydantic._internal._typing_extra import eval_type_lenient
from pydantic._internal._utils import lenient_issubclass as lenient_issubclass
from pydantic.fields import FieldInfo as FieldInfo
from pydantic.json_schema import GenerateJsonSchema as GenerateJsonSchema
from pydantic.json_schema import JsonSchemaValue as JsonSchemaValue
from pydantic_core import CoreSchema as CoreSchema
from pydantic_core import PydanticUndefined, PydanticUndefinedType
from pydantic_core import Url as Url
from typing_extensions import Annotated, Literal, get_args, get_origin
try:
from pydantic_core.core_schema import (
with_info_plain_validator_function as with_info_plain_validator_function,
)
except ImportError: # pragma: no cover
from pydantic_core.core_schema import (
general_plain_validator_function as with_info_plain_validator_function, # noqa: F401
)
RequiredParam = PydanticUndefined
Undefined = PydanticUndefined
UndefinedType = PydanticUndefinedType
evaluate_forwardref = eval_type_lenient
Validator = Any
class BaseConfig:
pass
class ErrorWrapper(Exception):
pass
@dataclass
class ModelField:
field_info: FieldInfo
name: str
mode: Literal["validation", "serialization"] = "validation"
@property
def alias(self) -> str:
a = self.field_info.alias
return a if a is not None else self.name
@property
def required(self) -> bool:
return self.field_info.is_required()
@property
def default(self) -> Any:
return self.get_default()
@property
def type_(self) -> Any:
return self.field_info.annotation
def __post_init__(self) -> None:
with warnings.catch_warnings():
            # Pydantic >= 2.12.0 warns about field-specific metadata that is
            # unused (e.g. `TypeAdapter(Annotated[int, Field(alias='b')])`). In
            # some cases we end up building the type adapter from a model field
            # annotation, so we need to ignore the warning:
if shared.PYDANTIC_VERSION_MINOR_TUPLE >= (2, 12):
from pydantic.warnings import UnsupportedFieldAttributeWarning
warnings.simplefilter(
"ignore", category=UnsupportedFieldAttributeWarning
)
self._type_adapter: TypeAdapter[Any] = TypeAdapter(
Annotated[self.field_info.annotation, self.field_info]
)
def get_default(self) -> Any:
if self.field_info.is_required():
return Undefined
return self.field_info.get_default(call_default_factory=True)
def validate(
self,
value: Any,
values: Dict[str, Any] = {}, # noqa: B006
*,
loc: Tuple[Union[int, str], ...] = (),
) -> Tuple[Any, Union[List[Dict[str, Any]], None]]:
try:
return (
self._type_adapter.validate_python(value, from_attributes=True),
None,
)
except ValidationError as exc:
return None, may_v1._regenerate_error_with_loc(
errors=exc.errors(include_url=False), loc_prefix=loc
)
def serialize(
self,
value: Any,
*,
mode: Literal["json", "python"] = "json",
include: Union[IncEx, None] = None,
exclude: Union[IncEx, None] = None,
by_alias: bool = True,
exclude_unset: bool = False,
exclude_defaults: bool = False,
exclude_none: bool = False,
) -> Any:
        # Callers of this method pass a value that was already validated with
        # self._type_adapter.validate_python(value)
return self._type_adapter.dump_python(
value,
mode=mode,
include=include,
exclude=exclude,
by_alias=by_alias,
exclude_unset=exclude_unset,
exclude_defaults=exclude_defaults,
exclude_none=exclude_none,
)
def __hash__(self) -> int:
# Each ModelField is unique for our purposes, to allow making a dict from
# ModelField to its JSON Schema.
return id(self)
def get_annotation_from_field_info(
annotation: Any, field_info: FieldInfo, field_name: str
) -> Any:
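    # In Pydantic v2 the constraints live on the FieldInfo itself, so the
    # annotation is returned unchanged; the v1 counterpart merges them in.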
return annotation
def _model_rebuild(model: Type[BaseModel]) -> None:
model.model_rebuild()
def _model_dump(
model: BaseModel, mode: Literal["json", "python"] = "json", **kwargs: Any
) -> Any:
return model.model_dump(mode=mode, **kwargs)
def _get_model_config(model: BaseModel) -> Any:
return model.model_config
def get_schema_from_model_field(
*,
field: ModelField,
model_name_map: ModelNameMap,
field_mapping: Dict[
Tuple[ModelField, Literal["validation", "serialization"]], JsonSchemaValue
],
separate_input_output_schemas: bool = True,
) -> Dict[str, Any]:
override_mode: Union[Literal["validation"], None] = (
None if separate_input_output_schemas else "validation"
)
# This expects that GenerateJsonSchema was already used to generate the definitions
json_schema = field_mapping[(field, override_mode or field.mode)]
if "$ref" not in json_schema:
# TODO remove when deprecating Pydantic v1
# Ref: https://github.com/pydantic/pydantic/blob/d61792cc42c80b13b23e3ffa74bc37ec7c77f7d1/pydantic/schema.py#L207
json_schema["title"] = field.field_info.title or field.alias.title().replace(
"_", " "
)
return json_schema
def get_definitions(
*,
fields: Sequence[ModelField],
model_name_map: ModelNameMap,
separate_input_output_schemas: bool = True,
) -> Tuple[
Dict[Tuple[ModelField, Literal["validation", "serialization"]], JsonSchemaValue],
Dict[str, Dict[str, Any]],
]:
schema_generator = GenerateJsonSchema(ref_template=REF_TEMPLATE)
override_mode: Union[Literal["validation"], None] = (
None if separate_input_output_schemas else "validation"
)
validation_fields = [field for field in fields if field.mode == "validation"]
serialization_fields = [field for field in fields if field.mode == "serialization"]
flat_validation_models = get_flat_models_from_fields(
validation_fields, known_models=set()
)
flat_serialization_models = get_flat_models_from_fields(
serialization_fields, known_models=set()
)
flat_validation_model_fields = [
ModelField(
field_info=FieldInfo(annotation=model),
name=model.__name__,
mode="validation",
)
for model in flat_validation_models
]
flat_serialization_model_fields = [
ModelField(
field_info=FieldInfo(annotation=model),
name=model.__name__,
mode="serialization",
)
for model in flat_serialization_models
]
flat_model_fields = flat_validation_model_fields + flat_serialization_model_fields
input_types = {f.type_ for f in fields}
unique_flat_model_fields = {
f for f in flat_model_fields if f.type_ not in input_types
}
inputs = [
(field, override_mode or field.mode, field._type_adapter.core_schema)
for field in list(fields) + list(unique_flat_model_fields)
]
field_mapping, definitions = schema_generator.generate_definitions(inputs=inputs)
for item_def in cast(Dict[str, Dict[str, Any]], definitions).values():
if "description" in item_def:
item_description = cast(str, item_def["description"]).split("\f")[0]
item_def["description"] = item_description
new_mapping, new_definitions = _remap_definitions_and_field_mappings(
model_name_map=model_name_map,
definitions=definitions, # type: ignore[arg-type]
field_mapping=field_mapping,
)
return new_mapping, new_definitions
def _replace_refs(
*,
schema: Dict[str, Any],
old_name_to_new_name_map: Dict[str, str],
) -> Dict[str, Any]:
new_schema = deepcopy(schema)
for key, value in new_schema.items():
if key == "$ref":
value = schema["$ref"]
if isinstance(value, str):
ref_name = schema["$ref"].split("/")[-1]
if ref_name in old_name_to_new_name_map:
new_name = old_name_to_new_name_map[ref_name]
new_schema["$ref"] = REF_TEMPLATE.format(model=new_name)
continue
if isinstance(value, dict):
new_schema[key] = _replace_refs(
schema=value,
old_name_to_new_name_map=old_name_to_new_name_map,
)
elif isinstance(value, list):
new_value = []
for item in value:
if isinstance(item, dict):
new_item = _replace_refs(
schema=item,
old_name_to_new_name_map=old_name_to_new_name_map,
)
new_value.append(new_item)
else:
new_value.append(item)
new_schema[key] = new_value
return new_schema
def _remap_definitions_and_field_mappings(
*,
model_name_map: ModelNameMap,
definitions: Dict[str, Any],
field_mapping: Dict[
Tuple[ModelField, Literal["validation", "serialization"]], JsonSchemaValue
],
) -> Tuple[
Dict[Tuple[ModelField, Literal["validation", "serialization"]], JsonSchemaValue],
Dict[str, Any],
]:
old_name_to_new_name_map = {}
for field_key, schema in field_mapping.items():
model = field_key[0].type_
if model not in model_name_map:
continue
new_name = model_name_map[model]
old_name = schema["$ref"].split("/")[-1]
if old_name in {f"{new_name}-Input", f"{new_name}-Output"}:
continue
old_name_to_new_name_map[old_name] = new_name
new_field_mapping: Dict[
Tuple[ModelField, Literal["validation", "serialization"]], JsonSchemaValue
] = {}
for field_key, schema in field_mapping.items():
new_schema = _replace_refs(
schema=schema,
old_name_to_new_name_map=old_name_to_new_name_map,
)
new_field_mapping[field_key] = new_schema
new_definitions = {}
for key, value in definitions.items():
if key in old_name_to_new_name_map:
new_key = old_name_to_new_name_map[key]
else:
new_key = key
new_value = _replace_refs(
schema=value,
old_name_to_new_name_map=old_name_to_new_name_map,
)
new_definitions[new_key] = new_value
return new_field_mapping, new_definitions
def is_scalar_field(field: ModelField) -> bool:
from fastapi import params
return shared.field_annotation_is_scalar(
field.field_info.annotation
) and not isinstance(field.field_info, params.Body)
def is_sequence_field(field: ModelField) -> bool:
return shared.field_annotation_is_sequence(field.field_info.annotation)
def is_scalar_sequence_field(field: ModelField) -> bool:
return shared.field_annotation_is_scalar_sequence(field.field_info.annotation)
def is_bytes_field(field: ModelField) -> bool:
return shared.is_bytes_or_nonable_bytes_annotation(field.type_)
def is_bytes_sequence_field(field: ModelField) -> bool:
return shared.is_bytes_sequence_annotation(field.type_)
def copy_field_info(*, field_info: FieldInfo, annotation: Any) -> FieldInfo:
cls = type(field_info)
merged_field_info = cls.from_annotation(annotation)
new_field_info = copy(field_info)
new_field_info.metadata = merged_field_info.metadata
new_field_info.annotation = merged_field_info.annotation
return new_field_info
def serialize_sequence_value(*, field: ModelField, value: Any) -> Sequence[Any]:
origin_type = get_origin(field.field_info.annotation) or field.field_info.annotation
assert issubclass(origin_type, shared.sequence_types) # type: ignore[arg-type]
return shared.sequence_annotation_to_type[origin_type](value) # type: ignore[no-any-return]
def get_missing_field_error(loc: Tuple[str, ...]) -> Dict[str, Any]:
error = ValidationError.from_exception_data(
"Field required", [{"type": "missing", "loc": loc, "input": {}}]
).errors(include_url=False)[0]
error["input"] = None
return error # type: ignore[return-value]
def create_body_model(
*, fields: Sequence[ModelField], model_name: str
) -> Type[BaseModel]:
field_params = {f.name: (f.field_info.annotation, f.field_info) for f in fields}
BodyModel: Type[BaseModel] = create_model(model_name, **field_params) # type: ignore[call-overload]
return BodyModel
def get_model_fields(model: Type[BaseModel]) -> List[ModelField]:
return [
ModelField(field_info=field_info, name=name)
for name, field_info in model.model_fields.items()
]
# Duplicate of several schema functions from Pydantic v1 to make them compatible with
# Pydantic v2 and allow mixing the models
TypeModelOrEnum = Union[Type["BaseModel"], Type[Enum]]
TypeModelSet = Set[TypeModelOrEnum]
def normalize_name(name: str) -> str:
return re.sub(r"[^a-zA-Z0-9.\-_]", "_", name)
def get_model_name_map(unique_models: TypeModelSet) -> Dict[TypeModelOrEnum, str]:
name_model_map = {}
conflicting_names: Set[str] = set()
for model in unique_models:
model_name = normalize_name(model.__name__)
if model_name in conflicting_names:
model_name = get_long_model_name(model)
name_model_map[model_name] = model
elif model_name in name_model_map:
conflicting_names.add(model_name)
conflicting_model = name_model_map.pop(model_name)
name_model_map[get_long_model_name(conflicting_model)] = conflicting_model
name_model_map[get_long_model_name(model)] = model
else:
name_model_map[model_name] = model
return {v: k for k, v in name_model_map.items()}
def get_flat_models_from_model(
model: Type["BaseModel"], known_models: Union[TypeModelSet, None] = None
) -> TypeModelSet:
known_models = known_models or set()
fields = get_model_fields(model)
get_flat_models_from_fields(fields, known_models=known_models)
return known_models
def get_flat_models_from_annotation(
annotation: Any, known_models: TypeModelSet
) -> TypeModelSet:
origin = get_origin(annotation)
if origin is not None:
for arg in get_args(annotation):
if lenient_issubclass(arg, (BaseModel, Enum)) and arg not in known_models:
known_models.add(arg)
if lenient_issubclass(arg, BaseModel):
get_flat_models_from_model(arg, known_models=known_models)
else:
get_flat_models_from_annotation(arg, known_models=known_models)
return known_models
def get_flat_models_from_field(
field: ModelField, known_models: TypeModelSet
) -> TypeModelSet:
field_type = field.type_
if lenient_issubclass(field_type, BaseModel):
if field_type in known_models:
return known_models
known_models.add(field_type)
get_flat_models_from_model(field_type, known_models=known_models)
elif lenient_issubclass(field_type, Enum):
known_models.add(field_type)
else:
get_flat_models_from_annotation(field_type, known_models=known_models)
return known_models
def get_flat_models_from_fields(
fields: Sequence[ModelField], known_models: TypeModelSet
) -> TypeModelSet:
for field in fields:
get_flat_models_from_field(field, known_models=known_models)
return known_models
def get_long_model_name(model: TypeModelOrEnum) -> str:
return f"{model.__module__}__{model.__qualname__}".replace(".", "__")