This commit is contained in:
Iliyan Angelov
2025-12-06 03:27:35 +02:00
parent 7667eb5eda
commit 5a8ca3c475
2211 changed files with 28086 additions and 37066 deletions


@@ -3,9 +3,8 @@
from __future__ import annotations
import sys
from collections.abc import Iterator
from configparser import ConfigParser
from typing import Any, Callable
from typing import Any, Callable, Iterator
from mypy.errorcodes import ErrorCode
from mypy.expandtype import expand_type, expand_type_by_instance
@@ -15,7 +14,6 @@ from mypy.nodes import (
ARG_OPT,
ARG_POS,
ARG_STAR2,
INVARIANT,
MDEF,
Argument,
AssignmentStmt,
@@ -47,24 +45,26 @@ from mypy.options import Options
from mypy.plugin import (
CheckerPluginInterface,
ClassDefContext,
FunctionContext,
MethodContext,
Plugin,
ReportConfigContext,
SemanticAnalyzerPluginInterface,
)
from mypy.plugins import dataclasses
from mypy.plugins.common import (
deserialize_and_fixup_type,
)
from mypy.semanal import set_callable_name
from mypy.server.trigger import make_wildcard_trigger
from mypy.state import state
from mypy.type_visitor import TypeTranslator
from mypy.typeops import map_type_from_supertype
from mypy.types import (
AnyType,
CallableType,
Instance,
NoneType,
Overloaded,
Type,
TypeOfAny,
TypeType,
@@ -79,6 +79,12 @@ from mypy.version import __version__ as mypy_version
from pydantic._internal import _fields
from pydantic.version import parse_mypy_version
try:
from mypy.types import TypeVarDef # type: ignore[attr-defined]
except ImportError: # pragma: no cover
# Backward-compatible with TypeVarDef from Mypy 0.930.
from mypy.types import TypeVarType as TypeVarDef
CONFIGFILE_KEY = 'pydantic-mypy'
METADATA_KEY = 'pydantic-mypy-metadata'
BASEMODEL_FULLNAME = 'pydantic.main.BaseModel'
@@ -96,11 +102,10 @@ DECORATOR_FULLNAMES = {
'pydantic.deprecated.class_validators.validator',
'pydantic.deprecated.class_validators.root_validator',
}
IMPLICIT_CLASSMETHOD_DECORATOR_FULLNAMES = DECORATOR_FULLNAMES - {'pydantic.functional_serializers.model_serializer'}
MYPY_VERSION_TUPLE = parse_mypy_version(mypy_version)
BUILTINS_NAME = 'builtins'
BUILTINS_NAME = 'builtins' if MYPY_VERSION_TUPLE >= (0, 930) else '__builtins__'
# Increment version if plugin changes and mypy caches should be invalidated
__version__ = 2
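For orientation, mypy loads a plugin through a module-level `plugin` function that returns the plugin class; a minimal sketch of that entry point (the real module defines an equivalent):

def plugin(version: str) -> type[Plugin]:
    # `version` is the running mypy version string; plugins may branch on it
    return PydanticPlugin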
@@ -129,12 +134,12 @@ class PydanticPlugin(Plugin):
self._plugin_data = self.plugin_config.to_data()
super().__init__(options)
def get_base_class_hook(self, fullname: str) -> Callable[[ClassDefContext], None] | None:
def get_base_class_hook(self, fullname: str) -> Callable[[ClassDefContext], bool] | None:
"""Update Pydantic model class."""
sym = self.lookup_fully_qualified(fullname)
if sym and isinstance(sym.node, TypeInfo): # pragma: no branch
# No branching may occur if the mypy cache has not been cleared
if sym.node.has_base(BASEMODEL_FULLNAME):
if any(base.fullname == BASEMODEL_FULLNAME for base in sym.node.mro):
return self._pydantic_model_class_maker_callback
return None
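A sketch of what this base-class check matches (hypothetical classes, assuming pydantic v2):

from pydantic import BaseModel

class Animal(BaseModel): ...  # matched: BaseModel is in the MRO
class Dog(Animal): ...        # matched transitively through Animal
class Plain: ...              # not matched; no hook is returned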
@@ -144,12 +149,28 @@ class PydanticPlugin(Plugin):
return self._pydantic_model_metaclass_marker_callback
return None
def get_function_hook(self, fullname: str) -> Callable[[FunctionContext], Type] | None:
"""Adjust the return type of the `Field` function."""
sym = self.lookup_fully_qualified(fullname)
if sym and sym.fullname == FIELD_FULLNAME:
return self._pydantic_field_callback
return None
def get_method_hook(self, fullname: str) -> Callable[[MethodContext], Type] | None:
"""Adjust return type of `from_orm` method call."""
if fullname.endswith('.from_orm'):
return from_attributes_callback
return None
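A sketch of the `from_orm` call this method hook validates (`Row` is a hypothetical ORM-style object; `from_orm` is the deprecated v1-style spelling of `from_attributes` validation):

from pydantic import BaseModel, ConfigDict

class User(BaseModel):
    model_config = ConfigDict(from_attributes=True)
    id: int

class Row:  # hypothetical ORM-style object
    id = 1

User.from_orm(Row())  # allowed because from_attributes=True; otherwise the plugin reports an error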
def get_class_decorator_hook(self, fullname: str) -> Callable[[ClassDefContext], None] | None:
"""Mark pydantic.dataclasses as dataclass.
Mypy 1.1.1 added support for the `@dataclass_transform` decorator.
"""
if fullname == DATACLASS_FULLNAME and MYPY_VERSION_TUPLE < (1, 1):
return dataclasses.dataclass_class_maker_callback # type: ignore[return-value]
return None
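A sketch of the case this hook covers on mypy < 1.1, where the plugin itself must mark `pydantic.dataclasses.dataclass` classes as dataclasses (on newer mypy, `@dataclass_transform` handles it):

from pydantic.dataclasses import dataclass

@dataclass
class Point:
    x: int
    y: int = 0

Point(x=1)  # the synthesized __init__ is only visible to older mypy via this hook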
def report_config_data(self, ctx: ReportConfigContext) -> dict[str, Any]:
"""Return all plugin config data.
@@ -157,9 +178,9 @@ class PydanticPlugin(Plugin):
"""
return self._plugin_data
def _pydantic_model_class_maker_callback(self, ctx: ClassDefContext) -> None:
def _pydantic_model_class_maker_callback(self, ctx: ClassDefContext) -> bool:
transformer = PydanticModelTransformer(ctx.cls, ctx.reason, ctx.api, self.plugin_config)
transformer.transform()
return transformer.transform()
def _pydantic_model_metaclass_marker_callback(self, ctx: ClassDefContext) -> None:
"""Reset dataclass_transform_spec attribute of ModelMetaclass.
@@ -174,6 +195,54 @@ class PydanticPlugin(Plugin):
if getattr(info_metaclass.type, 'dataclass_transform_spec', None):
info_metaclass.type.dataclass_transform_spec = None
def _pydantic_field_callback(self, ctx: FunctionContext) -> Type:
"""Extract the type of the `default` argument from the Field function, and use it as the return type.
In particular:
* Check whether the `default` and `default_factory` arguments are specified.
* Output an error if both are specified.
* Retrieve the type of whichever argument is specified and use it as the return type of the function.
"""
default_any_type = ctx.default_return_type
assert ctx.callee_arg_names[0] == 'default', '"default" is no longer the first argument in Field()'
assert ctx.callee_arg_names[1] == 'default_factory', '"default_factory" is no longer the second argument in Field()'
default_args = ctx.args[0]
default_factory_args = ctx.args[1]
if default_args and default_factory_args:
error_default_and_default_factory_specified(ctx.api, ctx.context)
return default_any_type
if default_args:
default_type = ctx.arg_types[0][0]
default_arg = default_args[0]
# Fallback to default Any type if the field is required
if not isinstance(default_arg, EllipsisExpr):
return default_type
elif default_factory_args:
default_factory_type = ctx.arg_types[1][0]
# Functions which use `ParamSpec` can be overloaded, exposing the callable's types as a parameter
# Pydantic calls the default factory without any argument, so we retrieve the first item
if isinstance(default_factory_type, Overloaded):
default_factory_type = default_factory_type.items[0]
if isinstance(default_factory_type, CallableType):
ret_type = default_factory_type.ret_type
# mypy doesn't think `ret_type` has `args` (though you'd expect it to); check via getattr in case this varies by mypy version
args = getattr(ret_type, 'args', None)
if args:
if all(isinstance(arg, TypeVarType) for arg in args):
# Looks like the default factory is a type like `list` or `dict`, replace all args with `Any`
ret_type.args = tuple(default_any_type for _ in args) # type: ignore[attr-defined]
return ret_type
return default_any_type
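A sketch of the inference `_pydantic_field_callback` performs (assuming pydantic v2's `Field`):

from pydantic import BaseModel, Field

class Model(BaseModel):
    a: int = Field(default=1)                   # return type taken from `default`
    b: list[int] = Field(default_factory=list)  # return type taken from the factory's return type
    c: int = Field(...)                         # Ellipsis default: falls back to Any (field stays required)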
class PydanticPluginConfig:
"""A Pydantic mypy plugin config holder.
@@ -238,9 +307,6 @@ def from_attributes_callback(ctx: MethodContext) -> Type:
pydantic_metadata = model_type.type.metadata.get(METADATA_KEY)
if pydantic_metadata is None:
return ctx.default_return_type
if not model_type.type.has_base(BASEMODEL_FULLNAME):
# not a Pydantic v2 model
return ctx.default_return_type
from_attributes = pydantic_metadata.get('config', {}).get('from_attributes')
if from_attributes is not True:
error_from_attributes(model_type.type.name, ctx.api, ctx.context)
@@ -254,10 +320,8 @@ class PydanticModelField:
self,
name: str,
alias: str | None,
is_frozen: bool,
has_dynamic_alias: bool,
has_default: bool,
strict: bool | None,
line: int,
column: int,
type: Type | None,
@@ -265,103 +329,41 @@ class PydanticModelField:
):
self.name = name
self.alias = alias
self.is_frozen = is_frozen
self.has_dynamic_alias = has_dynamic_alias
self.has_default = has_default
self.strict = strict
self.line = line
self.column = column
self.type = type
self.info = info
def to_argument(
self,
current_info: TypeInfo,
typed: bool,
model_strict: bool,
force_optional: bool,
use_alias: bool,
api: SemanticAnalyzerPluginInterface,
force_typevars_invariant: bool,
is_root_model_root: bool,
) -> Argument:
def to_argument(self, current_info: TypeInfo, typed: bool, force_optional: bool, use_alias: bool) -> Argument:
"""Based on mypy.plugins.dataclasses.DataclassAttribute.to_argument."""
variable = self.to_var(current_info, api, use_alias, force_typevars_invariant)
strict = model_strict if self.strict is None else self.strict
if typed or strict:
type_annotation = self.expand_type(current_info, api, include_root_type=True)
else:
type_annotation = AnyType(TypeOfAny.explicit)
return Argument(
variable=variable,
type_annotation=type_annotation,
variable=self.to_var(current_info, use_alias),
type_annotation=self.expand_type(current_info) if typed else AnyType(TypeOfAny.explicit),
initializer=None,
kind=ARG_OPT
if is_root_model_root
else (ARG_NAMED_OPT if force_optional or self.has_default else ARG_NAMED),
kind=ARG_NAMED_OPT if force_optional or self.has_default else ARG_NAMED,
)
def expand_type(
self,
current_info: TypeInfo,
api: SemanticAnalyzerPluginInterface,
force_typevars_invariant: bool = False,
include_root_type: bool = False,
) -> Type | None:
def expand_type(self, current_info: TypeInfo) -> Type | None:
"""Based on mypy.plugins.dataclasses.DataclassAttribute.expand_type."""
if force_typevars_invariant:
# In some cases, mypy will emit an error "Cannot use a covariant type variable as a parameter"
# To prevent that, we add an option to replace typevars with invariant ones while building certain
# method signatures (in particular, `__init__`). There may be a better way to do this, if this causes
# us problems in the future, we should look into why the dataclasses plugin doesn't have this issue.
if isinstance(self.type, TypeVarType):
modified_type = self.type.copy_modified()
modified_type.variance = INVARIANT
self.type = modified_type
if self.type is not None and self.info.self_type is not None:
# In general, it is not safe to call `expand_type()` during semantic analysis,
# The getattr in the next line is used to prevent errors in legacy versions of mypy without this attribute
if self.type is not None and getattr(self.info, 'self_type', None) is not None:
# In general, it is not safe to call `expand_type()` during semantic analysis,
# however this plugin is called very late, so all types should be fully ready.
# Also, it is tricky to avoid eager expansion of Self types here (e.g. because
# we serialize attributes).
with state.strict_optional_set(api.options.strict_optional):
filled_with_typevars = fill_typevars(current_info)
# Cannot be TupleType as current_info represents a Pydantic model:
assert isinstance(filled_with_typevars, Instance)
if force_typevars_invariant:
for arg in filled_with_typevars.args:
if isinstance(arg, TypeVarType):
arg.variance = INVARIANT
expanded_type = expand_type(self.type, {self.info.self_type.id: filled_with_typevars})
if include_root_type and isinstance(expanded_type, Instance) and is_root_model(expanded_type.type):
# When a root model is used as a field, Pydantic accepts both an instance of the root model
# and instances of the `root` field type:
root_type = expanded_type.type['root'].type
if root_type is None:
# Happens if the hint for 'root' has unsolved forward references
return expanded_type
expanded_root_type = expand_type_by_instance(root_type, expanded_type)
expanded_type = UnionType([expanded_type, expanded_root_type])
return expanded_type
return expand_type(self.type, {self.info.self_type.id: fill_typevars(current_info)})
return self.type
def to_var(
self,
current_info: TypeInfo,
api: SemanticAnalyzerPluginInterface,
use_alias: bool,
force_typevars_invariant: bool = False,
) -> Var:
def to_var(self, current_info: TypeInfo, use_alias: bool) -> Var:
"""Based on mypy.plugins.dataclasses.DataclassAttribute.to_var."""
if use_alias and self.alias is not None:
name = self.alias
else:
name = self.name
return Var(name, self.expand_type(current_info, api, force_typevars_invariant))
return Var(name, self.expand_type(current_info))
def serialize(self) -> JsonDict:
"""Based on mypy.plugins.dataclasses.DataclassAttribute.serialize."""
@@ -369,10 +371,8 @@ class PydanticModelField:
return {
'name': self.name,
'alias': self.alias,
'is_frozen': self.is_frozen,
'has_dynamic_alias': self.has_dynamic_alias,
'has_default': self.has_default,
'strict': self.strict,
'line': self.line,
'column': self.column,
'type': self.type.serialize(),
@@ -385,38 +385,12 @@ class PydanticModelField:
typ = deserialize_and_fixup_type(data.pop('type'), api)
return cls(type=typ, info=info, **data)
def expand_typevar_from_subtype(self, sub_type: TypeInfo, api: SemanticAnalyzerPluginInterface) -> None:
def expand_typevar_from_subtype(self, sub_type: TypeInfo) -> None:
"""Expands type vars in the context of a subtype when an attribute is inherited
from a generic super type.
"""
if self.type is not None:
with state.strict_optional_set(api.options.strict_optional):
self.type = map_type_from_supertype(self.type, sub_type, self.info)
class PydanticModelClassVar:
"""Based on mypy.plugins.dataclasses.DataclassAttribute.
ClassVars are ignored by subclasses.
Attributes:
name: the ClassVar name
"""
def __init__(self, name: str):
self.name = name
@classmethod
def deserialize(cls, data: JsonDict) -> PydanticModelClassVar:
"""Based on mypy.plugins.dataclasses.DataclassAttribute.deserialize."""
data = data.copy()
return cls(**data)
def serialize(self) -> JsonDict:
"""Based on mypy.plugins.dataclasses.DataclassAttribute.serialize."""
return {
'name': self.name,
}
self.type = map_type_from_supertype(self.type, sub_type, self.info)
class PydanticModelTransformer:
@@ -431,10 +405,7 @@ class PydanticModelTransformer:
'frozen',
'from_attributes',
'populate_by_name',
'validate_by_alias',
'validate_by_name',
'alias_generator',
'strict',
}
def __init__(
@@ -461,26 +432,25 @@ class PydanticModelTransformer:
* stores the fields, config, and if the class is settings in the mypy metadata for access by subclasses
"""
info = self._cls.info
is_a_root_model = is_root_model(info)
is_root_model = any(ROOT_MODEL_FULLNAME in base.fullname for base in info.mro[:-1])
config = self.collect_config()
fields, class_vars = self.collect_fields_and_class_vars(config, is_a_root_model)
if fields is None or class_vars is None:
fields = self.collect_fields(config, is_root_model)
if fields is None:
# Some definitions are not ready. We need another pass.
return False
for field in fields:
if field.type is None:
return False
is_settings = info.has_base(BASESETTINGS_FULLNAME)
self.add_initializer(fields, config, is_settings, is_a_root_model)
self.add_model_construct_method(fields, config, is_settings, is_a_root_model)
self.set_frozen(fields, self._api, frozen=config.frozen is True)
is_settings = any(base.fullname == BASESETTINGS_FULLNAME for base in info.mro[:-1])
self.add_initializer(fields, config, is_settings, is_root_model)
self.add_model_construct_method(fields, config, is_settings)
self.set_frozen(fields, frozen=config.frozen is True)
self.adjust_decorator_signatures()
info.metadata[METADATA_KEY] = {
'fields': {field.name: field.serialize() for field in fields},
'class_vars': {class_var.name: class_var.serialize() for class_var in class_vars},
'config': config.get_values_dict(),
}
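For illustration, an approximate shape of the metadata stored under `METADATA_KEY` for subclasses to deserialize (keys follow the dict built above; values are hypothetical):

# info.metadata['pydantic-mypy-metadata'] == {
#     'fields': {'x': {'name': 'x', 'alias': None, 'has_default': False, ...}},
#     'class_vars': {'y': {'name': 'y'}},
#     'config': {'frozen': True, 'populate_by_name': None, ...},
# }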
@@ -494,13 +464,13 @@ class PydanticModelTransformer:
Teach mypy this by marking any function whose outermost decorator is a `validator()`,
`field_validator()` or `serializer()` call as a `classmethod`.
"""
for sym in self._cls.info.names.values():
for name, sym in self._cls.info.names.items():
if isinstance(sym.node, Decorator):
first_dec = sym.node.original_decorators[0]
if (
isinstance(first_dec, CallExpr)
and isinstance(first_dec.callee, NameExpr)
and first_dec.callee.fullname in IMPLICIT_CLASSMETHOD_DECORATOR_FULLNAMES
and first_dec.callee.fullname in DECORATOR_FULLNAMES
# @model_validator(mode="after") is an exception; it expects a regular method
and not (
first_dec.callee.fullname == MODEL_VALIDATOR_FULLNAME
@@ -543,7 +513,7 @@ class PydanticModelTransformer:
for arg_name, arg in zip(stmt.rvalue.arg_names, stmt.rvalue.args):
if arg_name is None:
continue
config.update(self.get_config_update(arg_name, arg, lax_extra=True))
config.update(self.get_config_update(arg_name, arg))
elif isinstance(stmt.rvalue, DictExpr): # dict literals
for key_expr, value_expr in stmt.rvalue.items:
if not isinstance(key_expr, StrExpr):
@@ -574,7 +544,7 @@ class PydanticModelTransformer:
if (
stmt
and config.has_alias_generator
and not (config.validate_by_name or config.populate_by_name)
and not config.populate_by_name
and self.plugin_config.warn_required_dynamic_aliases
):
error_required_dynamic_aliases(self._api, stmt)
@@ -589,13 +559,11 @@ class PydanticModelTransformer:
config.setdefault(name, value)
return config
def collect_fields_and_class_vars(
self, model_config: ModelConfigData, is_root_model: bool
) -> tuple[list[PydanticModelField] | None, list[PydanticModelClassVar] | None]:
def collect_fields(self, model_config: ModelConfigData, is_root_model: bool) -> list[PydanticModelField] | None:
"""Collects the fields for the model, accounting for parent classes."""
cls = self._cls
# First, collect fields and ClassVars belonging to any class in the MRO, ignoring duplicates.
# First, collect fields belonging to any class in the MRO, ignoring duplicates.
#
# We iterate through the MRO in reverse because attrs defined in the parent must appear
# earlier in the attributes list than attrs defined in the child. See:
@@ -605,11 +573,10 @@ class PydanticModelTransformer:
# in the parent. We can implement this via a dict without disrupting the attr order
# because dicts preserve insertion order in Python 3.7+.
found_fields: dict[str, PydanticModelField] = {}
found_class_vars: dict[str, PydanticModelClassVar] = {}
for info in reversed(cls.info.mro[1:-1]): # 0 is the current class, -2 is BaseModel, -1 is object
# if BASEMODEL_METADATA_TAG_KEY in info.metadata and BASEMODEL_METADATA_KEY not in info.metadata:
# # We haven't processed the base class yet. Need another pass.
# return None, None
# return None
if METADATA_KEY not in info.metadata:
continue
@@ -622,7 +589,8 @@ class PydanticModelTransformer:
# TODO: We shouldn't be performing type operations during the main
# semantic analysis pass, since some TypeInfo attributes might
# still be in flux. This should be performed in a later phase.
field.expand_typevar_from_subtype(cls.info, self._api)
with state.strict_optional_set(self._api.options.strict_optional):
field.expand_typevar_from_subtype(cls.info)
found_fields[name] = field
sym_node = cls.info.names.get(name)
@@ -631,31 +599,20 @@ class PydanticModelTransformer:
'BaseModel field may only be overridden by another field',
sym_node.node,
)
# Collect ClassVars
for name, data in info.metadata[METADATA_KEY]['class_vars'].items():
found_class_vars[name] = PydanticModelClassVar.deserialize(data)
# Second, collect fields and ClassVars belonging to the current class.
# Second, collect fields belonging to the current class.
current_field_names: set[str] = set()
current_class_vars_names: set[str] = set()
for stmt in self._get_assignment_statements_from_block(cls.defs):
maybe_field = self.collect_field_or_class_var_from_stmt(stmt, model_config, found_class_vars)
if maybe_field is None:
continue
lhs = stmt.lvalues[0]
assert isinstance(lhs, NameExpr) # collect_field_or_class_var_from_stmt guarantees this
if isinstance(maybe_field, PydanticModelField):
maybe_field = self.collect_field_from_stmt(stmt, model_config)
if maybe_field is not None:
lhs = stmt.lvalues[0]
if is_root_model and lhs.name != 'root':
error_extra_fields_on_root_model(self._api, stmt)
else:
current_field_names.add(lhs.name)
found_fields[lhs.name] = maybe_field
elif isinstance(maybe_field, PydanticModelClassVar):
current_class_vars_names.add(lhs.name)
found_class_vars[lhs.name] = maybe_field
return list(found_fields.values()), list(found_class_vars.values())
return list(found_fields.values())
def _get_assignment_statements_from_if_statement(self, stmt: IfStmt) -> Iterator[AssignmentStmt]:
for body in stmt.body:
@@ -671,15 +628,14 @@ class PydanticModelTransformer:
elif isinstance(stmt, IfStmt):
yield from self._get_assignment_statements_from_if_statement(stmt)
def collect_field_or_class_var_from_stmt( # noqa C901
self, stmt: AssignmentStmt, model_config: ModelConfigData, class_vars: dict[str, PydanticModelClassVar]
) -> PydanticModelField | PydanticModelClassVar | None:
def collect_field_from_stmt( # noqa C901
self, stmt: AssignmentStmt, model_config: ModelConfigData
) -> PydanticModelField | None:
"""Get pydantic model field from statement.
Args:
stmt: The statement.
model_config: Configuration settings for the model.
class_vars: ClassVars already known to be defined on the model.
Returns:
A pydantic model field if it could find the field in statement. Otherwise, `None`.
@@ -702,10 +658,6 @@ class PydanticModelTransformer:
# Eventually, we may want to attempt to respect model_config['ignored_types']
return None
if lhs.name in class_vars:
# Class vars are not fields and are not required to be annotated
return None
# The assignment does not have an annotation, and it's not anything else we recognize
error_untyped_fields(self._api, stmt)
return None
@@ -750,7 +702,7 @@ class PydanticModelTransformer:
# x: ClassVar[int] is not a field
if node.is_classvar:
return PydanticModelClassVar(lhs.name)
return None
# x: InitVar[int] is not supported in BaseModel
node_type = get_proper_type(node.type)
@@ -761,7 +713,6 @@ class PydanticModelTransformer:
)
has_default = self.get_has_default(stmt)
strict = self.get_strict(stmt)
if sym.type is None and node.is_final and node.is_inferred:
# This follows the logic from the dataclasses plugin. The following comment is taken verbatim:
@@ -781,27 +732,16 @@ class PydanticModelTransformer:
)
node.type = AnyType(TypeOfAny.from_error)
if node.is_final and has_default:
# TODO this path should be removed (see https://github.com/pydantic/pydantic/issues/11119)
return PydanticModelClassVar(lhs.name)
alias, has_dynamic_alias = self.get_alias_info(stmt)
if (
has_dynamic_alias
and not (model_config.validate_by_name or model_config.populate_by_name)
and self.plugin_config.warn_required_dynamic_aliases
):
if has_dynamic_alias and not model_config.populate_by_name and self.plugin_config.warn_required_dynamic_aliases:
error_required_dynamic_aliases(self._api, stmt)
is_frozen = self.is_field_frozen(stmt)
init_type = self._infer_dataclass_attr_init_type(sym, lhs.name, stmt)
return PydanticModelField(
name=lhs.name,
has_dynamic_alias=has_dynamic_alias,
has_default=has_default,
strict=strict,
alias=alias,
is_frozen=is_frozen,
line=stmt.line,
column=stmt.column,
type=init_type,
@@ -857,42 +797,32 @@ class PydanticModelTransformer:
return # Don't generate an __init__ if one already exists
typed = self.plugin_config.init_typed
model_strict = bool(config.strict)
use_alias = not (config.validate_by_name or config.populate_by_name) and config.validate_by_alias is not False
requires_dynamic_aliases = bool(config.has_alias_generator and not config.validate_by_name)
args = self.get_field_arguments(
fields,
typed=typed,
model_strict=model_strict,
requires_dynamic_aliases=requires_dynamic_aliases,
use_alias=use_alias,
is_settings=is_settings,
is_root_model=is_root_model,
force_typevars_invariant=True,
)
use_alias = config.populate_by_name is not True
requires_dynamic_aliases = bool(config.has_alias_generator and not config.populate_by_name)
with state.strict_optional_set(self._api.options.strict_optional):
args = self.get_field_arguments(
fields,
typed=typed,
requires_dynamic_aliases=requires_dynamic_aliases,
use_alias=use_alias,
is_settings=is_settings,
)
if is_root_model:
# convert root argument to positional argument
args[0].kind = ARG_POS if args[0].kind == ARG_NAMED else ARG_OPT
if is_settings:
base_settings_node = self._api.lookup_fully_qualified(BASESETTINGS_FULLNAME).node
assert isinstance(base_settings_node, TypeInfo)
if '__init__' in base_settings_node.names:
base_settings_init_node = base_settings_node.names['__init__'].node
assert isinstance(base_settings_init_node, FuncDef)
if base_settings_init_node is not None and base_settings_init_node.type is not None:
func_type = base_settings_init_node.type
assert isinstance(func_type, CallableType)
for arg_idx, arg_name in enumerate(func_type.arg_names):
if arg_name is None or arg_name.startswith('__') or not arg_name.startswith('_'):
continue
analyzed_variable_type = self._api.anal_type(func_type.arg_types[arg_idx])
if analyzed_variable_type is not None and arg_name == '_cli_settings_source':
# _cli_settings_source is defined as CliSettingsSource[Any], and as such
# the Any causes issues with --disallow-any-explicit. As a workaround, change
# the Any type (as if CliSettingsSource was left unparameterized):
analyzed_variable_type = analyzed_variable_type.accept(
ChangeExplicitTypeOfAny(TypeOfAny.from_omitted_generics)
)
variable = Var(arg_name, analyzed_variable_type)
args.append(Argument(variable, analyzed_variable_type, None, ARG_OPT))
if is_settings:
base_settings_node = self._api.lookup_fully_qualified(BASESETTINGS_FULLNAME).node
if '__init__' in base_settings_node.names:
base_settings_init_node = base_settings_node.names['__init__'].node
if base_settings_init_node is not None and base_settings_init_node.type is not None:
func_type = base_settings_init_node.type
for arg_idx, arg_name in enumerate(func_type.arg_names):
if arg_name.startswith('__') or not arg_name.startswith('_'):
continue
analyzed_variable_type = self._api.anal_type(func_type.arg_types[arg_idx])
variable = Var(arg_name, analyzed_variable_type)
args.append(Argument(variable, analyzed_variable_type, None, ARG_OPT))
if not self.should_init_forbid_extra(fields, config):
var = Var('kwargs')
@@ -901,11 +831,7 @@ class PydanticModelTransformer:
add_method(self._api, self._cls, '__init__', args=args, return_type=NoneType())
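A sketch of the checking the synthesized `__init__` enables (assuming `init_typed = true` in the plugin config; the error text is approximate):

from pydantic import BaseModel

class User(BaseModel):
    name: str

User(name='a')  # ok
User(name=1)    # mypy: Argument "name" to "User" has incompatible type "int"; expected "str"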
def add_model_construct_method(
self,
fields: list[PydanticModelField],
config: ModelConfigData,
is_settings: bool,
is_root_model: bool,
self, fields: list[PydanticModelField], config: ModelConfigData, is_settings: bool
) -> None:
"""Adds a fully typed `model_construct` classmethod to the class.
@@ -917,19 +843,13 @@ class PydanticModelTransformer:
fields_set_argument = Argument(Var('_fields_set', optional_set_str), optional_set_str, None, ARG_OPT)
with state.strict_optional_set(self._api.options.strict_optional):
args = self.get_field_arguments(
fields,
typed=True,
model_strict=bool(config.strict),
requires_dynamic_aliases=False,
use_alias=False,
is_settings=is_settings,
is_root_model=is_root_model,
fields, typed=True, requires_dynamic_aliases=False, use_alias=False, is_settings=is_settings
)
if not self.should_init_forbid_extra(fields, config):
var = Var('kwargs')
args.append(Argument(var, AnyType(TypeOfAny.explicit), None, ARG_STAR2))
args = args + [fields_set_argument] if is_root_model else [fields_set_argument] + args
args = [fields_set_argument] + args
add_method(
self._api,
@@ -940,7 +860,7 @@ class PydanticModelTransformer:
is_classmethod=True,
)
def set_frozen(self, fields: list[PydanticModelField], api: SemanticAnalyzerPluginInterface, frozen: bool) -> None:
def set_frozen(self, fields: list[PydanticModelField], frozen: bool) -> None:
"""Marks all fields as properties so that attempts to set them trigger mypy errors.
This is the same approach used by the attrs and dataclasses plugins.
@@ -951,21 +871,27 @@ class PydanticModelTransformer:
if sym_node is not None:
var = sym_node.node
if isinstance(var, Var):
var.is_property = frozen or field.is_frozen
var.is_property = frozen
elif isinstance(var, PlaceholderNode) and not self._api.final_iteration:
# See https://github.com/pydantic/pydantic/issues/5191 to hit this branch for test coverage
self._api.defer()
# `var` can also be a FuncDef or Decorator node (e.g. when overriding a field with a function or property).
# In that case, we don't want to do anything. Mypy will already raise an error that a field was not properly
# overridden.
else: # pragma: no cover
# I don't know whether it's possible to hit this branch, but I've added it for safety
try:
var_str = str(var)
except TypeError:
# This happens for PlaceholderNode; perhaps it will happen for other types in the future.
var_str = repr(var)
detail = f'sym_node.node: {var_str} (of type {var.__class__})'
error_unexpected_behavior(detail, self._api, self._cls)
else:
var = field.to_var(info, api, use_alias=False)
var = field.to_var(info, use_alias=False)
var.info = info
var.is_property = frozen
var._fullname = info.fullname + '.' + var.name
info.names[var.name] = SymbolTableNode(MDEF, var)
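A sketch of the effect of `set_frozen` (assuming `ConfigDict(frozen=True)`; the error text is approximate):

from pydantic import BaseModel, ConfigDict

class Settings(BaseModel):
    model_config = ConfigDict(frozen=True)
    host: str

s = Settings(host='localhost')
s.host = 'other'  # mypy: Property "host" defined in "Settings" is read-only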
def get_config_update(self, name: str, arg: Expression, lax_extra: bool = False) -> ModelConfigData | None:
def get_config_update(self, name: str, arg: Expression) -> ModelConfigData | None:
"""Determines the config update due to a single kwarg in the ConfigDict definition.
Warns if a tracked config attribute is set to a value the plugin doesn't know how to interpret (e.g., an int)
@@ -978,16 +904,7 @@ class PydanticModelTransformer:
elif isinstance(arg, MemberExpr):
forbid_extra = arg.name == 'forbid'
else:
if not lax_extra:
# Only emit an error for other types of `arg` (e.g., `NameExpr`, `ConditionalExpr`, etc.) when
# reading from a config class, etc. If a ConfigDict is used, then we don't want to emit an error
# because you'll get type checking from the ConfigDict itself.
#
# It would be nice if we could introspect the types better otherwise, but I don't know what the API
# is to evaluate an expr into its type and then check if that type is compatible with the expected
# type. Note that you can still get proper type checking via: `model_config = ConfigDict(...)`, just
# if you don't use an explicit string, the plugin won't be able to infer whether extra is forbidden.
error_invalid_config_value(name, self._api, arg)
error_invalid_config_value(name, self._api, arg)
return None
return ModelConfigData(forbid_extra=forbid_extra)
if name == 'alias_generator':
@@ -1022,22 +939,6 @@ class PydanticModelTransformer:
# Has no default if the "default value" is Ellipsis (i.e., `field_name: Annotation = ...`)
return not isinstance(expr, EllipsisExpr)
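For illustration, the two shapes this distinguishes:

from pydantic import BaseModel

class Model(BaseModel):
    required: int = ...  # EllipsisExpr: treated as having no default
    optional: int = 1    # any other rvalue counts as a default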
@staticmethod
def get_strict(stmt: AssignmentStmt) -> bool | None:
"""Returns a the `strict` value of a field if defined, otherwise `None`."""
expr = stmt.rvalue
if isinstance(expr, CallExpr) and isinstance(expr.callee, RefExpr) and expr.callee.fullname == FIELD_FULLNAME:
for arg, name in zip(expr.args, expr.arg_names):
if name != 'strict':
continue
if isinstance(arg, NameExpr):
if arg.fullname == 'builtins.True':
return True
elif arg.fullname == 'builtins.False':
return False
return None
return None
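A sketch of the field-level `strict` values this reads (a pydantic v2 `Field` parameter):

from pydantic import BaseModel, Field

class Model(BaseModel):
    a: int = Field(strict=True)   # get_strict -> True
    b: int = Field(strict=False)  # get_strict -> False
    c: int = 1                    # not a Field() call: get_strict -> None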
@staticmethod
def get_alias_info(stmt: AssignmentStmt) -> tuple[str | None, bool]:
"""Returns a pair (alias, has_dynamic_alias), extracted from the declaration of the field defined in `stmt`.
@@ -1056,53 +957,23 @@ class PydanticModelTransformer:
# Assigned value is not a call to pydantic.fields.Field
return None, False
if 'validation_alias' in expr.arg_names:
arg = expr.args[expr.arg_names.index('validation_alias')]
elif 'alias' in expr.arg_names:
arg = expr.args[expr.arg_names.index('alias')]
else:
return None, False
if isinstance(arg, StrExpr):
return arg.value, False
else:
return None, True
@staticmethod
def is_field_frozen(stmt: AssignmentStmt) -> bool:
"""Returns whether the field is frozen, extracted from the declaration of the field defined in `stmt`.
Note that this is only whether the field was declared to be frozen in a `<field_name> = Field(frozen=True)`
sense; this does not determine whether the field is frozen because the entire model is frozen; that is
handled separately.
"""
expr = stmt.rvalue
if isinstance(expr, TempNode):
# TempNode means annotation-only
return False
if not (
isinstance(expr, CallExpr) and isinstance(expr.callee, RefExpr) and expr.callee.fullname == FIELD_FULLNAME
):
# Assigned value is not a call to pydantic.fields.Field
return False
for i, arg_name in enumerate(expr.arg_names):
if arg_name == 'frozen':
arg = expr.args[i]
return isinstance(arg, NameExpr) and arg.fullname == 'builtins.True'
return False
if arg_name != 'alias':
continue
arg = expr.args[i]
if isinstance(arg, StrExpr):
return arg.value, False
else:
return None, True
return None, False
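A sketch of the alias cases distinguished above (`make_alias` is a hypothetical helper standing in for any non-literal expression):

from pydantic import BaseModel, Field

def make_alias(name: str) -> str:
    return name.upper()

class Model(BaseModel):
    a: int = Field(alias='A')              # -> ('A', False): static string alias
    b: int = Field(alias=make_alias('b'))  # -> (None, True): dynamic alias, unknown to the plugin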
def get_field_arguments(
self,
fields: list[PydanticModelField],
typed: bool,
model_strict: bool,
use_alias: bool,
requires_dynamic_aliases: bool,
is_settings: bool,
is_root_model: bool,
force_typevars_invariant: bool = False,
) -> list[Argument]:
"""Helper function used during the construction of the `__init__` and `model_construct` method signatures.
@@ -1111,14 +982,7 @@ class PydanticModelTransformer:
info = self._cls.info
arguments = [
field.to_argument(
info,
typed=typed,
model_strict=model_strict,
force_optional=requires_dynamic_aliases or is_settings,
use_alias=use_alias,
api=self._api,
force_typevars_invariant=force_typevars_invariant,
is_root_model_root=is_root_model and field.name == 'root',
info, typed=typed, force_optional=requires_dynamic_aliases or is_settings, use_alias=use_alias
)
for field in fields
if not (use_alias and field.has_dynamic_alias)
@@ -1131,7 +995,7 @@ class PydanticModelTransformer:
We disallow arbitrary kwargs if the extra config setting is "forbid", or if the plugin config says to,
*unless* a required dynamic alias is present (since then we can't determine a valid signature).
"""
if not (config.validate_by_name or config.populate_by_name):
if not config.populate_by_name:
if self.is_dynamic_alias_present(fields, bool(config.has_alias_generator)):
return False
if config.forbid_extra:
@@ -1153,20 +1017,6 @@ class PydanticModelTransformer:
return False
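A sketch of what forbidding extra arguments means for the synthesized signature (assuming `ConfigDict(extra='forbid')`; the error text is approximate):

from pydantic import BaseModel, ConfigDict

class Strict(BaseModel):
    model_config = ConfigDict(extra='forbid')
    x: int

Strict(x=1, y=2)  # mypy: Unexpected keyword argument "y" for "Strict" (no **kwargs catch-all is added)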
class ChangeExplicitTypeOfAny(TypeTranslator):
"""A type translator used to change type of Any's, if explicit."""
def __init__(self, type_of_any: int) -> None:
self._type_of_any = type_of_any
super().__init__()
def visit_any(self, t: AnyType) -> Type: # noqa: D102
if t.type_of_any == TypeOfAny.explicit:
return t.copy_modified(type_of_any=self._type_of_any)
else:
return t
class ModelConfigData:
"""Pydantic mypy plugin model config class."""
@@ -1176,19 +1026,13 @@ class ModelConfigData:
frozen: bool | None = None,
from_attributes: bool | None = None,
populate_by_name: bool | None = None,
validate_by_alias: bool | None = None,
validate_by_name: bool | None = None,
has_alias_generator: bool | None = None,
strict: bool | None = None,
):
self.forbid_extra = forbid_extra
self.frozen = frozen
self.from_attributes = from_attributes
self.populate_by_name = populate_by_name
self.validate_by_alias = validate_by_alias
self.validate_by_name = validate_by_name
self.has_alias_generator = has_alias_generator
self.strict = strict
def get_values_dict(self) -> dict[str, Any]:
"""Returns a dict of Pydantic model config names to their values.
@@ -1210,11 +1054,6 @@ class ModelConfigData:
setattr(self, key, value)
def is_root_model(info: TypeInfo) -> bool:
"""Return whether the type info is a root model subclass (or the `RootModel` class itself)."""
return info.has_base(ROOT_MODEL_FULLNAME)
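A sketch of the root-model special case, where `root` becomes the single positional `__init__` argument (pydantic v2 `RootModel`):

from pydantic import RootModel

class Pets(RootModel[list[str]]):
    pass

Pets(['cat', 'dog'])  # positional `root` argument; any other field is rejected by the plugin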
ERROR_ORM = ErrorCode('pydantic-orm', 'Invalid from_attributes call', 'Pydantic')
ERROR_CONFIG = ErrorCode('pydantic-config', 'Invalid config value', 'Pydantic')
ERROR_ALIAS = ErrorCode('pydantic-alias', 'Dynamic alias disallowed', 'Pydantic')
@@ -1263,6 +1102,11 @@ def error_extra_fields_on_root_model(api: CheckerPluginInterface, context: Conte
api.fail('Only `root` is allowed as a field of a `RootModel`', context, code=ERROR_EXTRA_FIELD_ROOT_MODEL)
def error_default_and_default_factory_specified(api: CheckerPluginInterface, context: Context) -> None:
"""Emits an error when `Field` has both `default` and `default_factory` together."""
api.fail('Field default and default_factory cannot be specified together', context, code=ERROR_FIELD_DEFAULTS)
def add_method(
api: SemanticAnalyzerPluginInterface | CheckerPluginInterface,
cls: ClassDef,
@@ -1270,7 +1114,7 @@ def add_method(
args: list[Argument],
return_type: Type,
self_type: Type | None = None,
tvar_def: TypeVarType | None = None,
tvar_def: TypeVarDef | None = None,
is_classmethod: bool = False,
) -> None:
"""Very closely related to `mypy.plugins.common.add_method_to_class`, with a few pydantic-specific changes."""
@@ -1293,16 +1137,6 @@ def add_method(
first = [Argument(Var('_cls'), self_type, None, ARG_POS, True)]
else:
self_type = self_type or fill_typevars(info)
# `self` is positional *ONLY* here, but this can't be expressed
# fully in the mypy internal API. ARG_POS is the closest we can get.
# Using ARG_POS will, however, give mypy errors if a `self` field
# is present on a model:
#
# Name "self" already defined (possibly by an import) [no-redef]
#
# As a workaround, we give this argument a name that will
# never conflict. By its positional nature, this name will not
# be used or exposed to users.
first = [Argument(Var('__pydantic_self__'), self_type, None, ARG_POS)]
args = first + args
@@ -1313,9 +1147,9 @@ def add_method(
arg_names.append(arg.variable.name)
arg_kinds.append(arg.kind)
signature = CallableType(
arg_types, arg_kinds, arg_names, return_type, function_type, variables=[tvar_def] if tvar_def else None
)
signature = CallableType(arg_types, arg_kinds, arg_names, return_type, function_type)
if tvar_def:
signature.variables = [tvar_def]
func = FuncDef(name, args, Block([PassStmt()]))
func.info = info
@@ -1367,7 +1201,7 @@ def parse_toml(config_file: str) -> dict[str, Any] | None:
except ImportError: # pragma: no cover
import warnings
warnings.warn('No TOML parser installed, cannot read configuration from `pyproject.toml`.', stacklevel=2)
warnings.warn('No TOML parser installed, cannot read configuration from `pyproject.toml`.')
return None
with open(config_file, 'rb') as rf: