
pydantic.mypy

This module includes classes and functions designed specifically for use with the mypy plugin.
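
The plugin is enabled by adding pydantic.mypy to the plugins list of your mypy configuration. As a rough illustration of its effect (a sketch with an illustrative User model, not part of this module), the plugin synthesizes a typed, fields-aware __init__ for BaseModel subclasses so that mypy can check constructor calls:

from pydantic import BaseModel


class User(BaseModel):
    id: int
    name: str = 'Jane Doe'


User(id=1)               # accepted: `name` has a default and may be omitted
# User(name='Alice')     # with the plugin enabled, mypy reports the missing `id` argument
# User(id='not an int')  # flagged as a type error when the `init_typed` setting is enabled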

ModelConfigData

ModelConfigData(
    forbid_extra=None,
    frozen=None,
    from_attributes=None,
    populate_by_name=None,
    has_alias_generator=None,
)

Pydantic mypy plugin model config class.

Source code in pydantic/mypy.py, lines 1011-1023
def __init__(
    self,
    forbid_extra: bool | None = None,
    frozen: bool | None = None,
    from_attributes: bool | None = None,
    populate_by_name: bool | None = None,
    has_alias_generator: bool | None = None,
):
    self.forbid_extra = forbid_extra
    self.frozen = frozen
    self.from_attributes = from_attributes
    self.populate_by_name = populate_by_name
    self.has_alias_generator = has_alias_generator

get_values_dict

get_values_dict()

Returns a dict of Pydantic model config names to their values.

A config value is included only if it is not None.

Source code in pydantic/mypy.py, lines 1025-1030
def get_values_dict(self) -> dict[str, Any]:
    """Returns a dict of Pydantic model config names to their values.

    It includes the config if config value is not `None`.
    """
    return {k: v for k, v in self.__dict__.items() if v is not None}

setdefault

setdefault(key, value)

Sets a default value for a Pydantic model config attribute if its current value is None.

Source code in pydantic/mypy.py, lines 1039-1042
def setdefault(self, key: str, value: Any) -> None:
    """Set default value for Pydantic model config if config value is `None`."""
    if getattr(self, key) is None:
        setattr(self, key, value)

update

update(config)

Update Pydantic model config values.

Source code in pydantic/mypy.py, lines 1032-1037
def update(self, config: ModelConfigData | None) -> None:
    """Update Pydantic model config values."""
    if config is None:
        return
    for k, v in config.get_values_dict().items():
        setattr(self, k, v)
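
ModelConfigData is internal plugin machinery, but its merge semantics are easy to see in isolation. A minimal sketch (assuming mypy is installed, since importing pydantic.mypy requires it):

from pydantic.mypy import ModelConfigData

base = ModelConfigData(forbid_extra=True)
child = ModelConfigData(frozen=True)

base.update(child)                          # copies only the non-None values from `child`
base.setdefault('populate_by_name', False)  # fills the value because it is still None
print(base.get_values_dict())
# {'forbid_extra': True, 'frozen': True, 'populate_by_name': False}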

PydanticModelField

PydanticModelField(
    name,
    alias,
    has_dynamic_alias,
    has_default,
    line,
    column,
    type,
    info,
)

Based on mypy.plugins.dataclasses.DataclassAttribute.

Source code in pydantic/mypy.py, lines 318-336
def __init__(
    self,
    name: str,
    alias: str | None,
    has_dynamic_alias: bool,
    has_default: bool,
    line: int,
    column: int,
    type: Type | None,
    info: TypeInfo,
):
    self.name = name
    self.alias = alias
    self.has_dynamic_alias = has_dynamic_alias
    self.has_default = has_default
    self.line = line
    self.column = column
    self.type = type
    self.info = info

deserialize classmethod

deserialize(info, data, api)

Based on mypy.plugins.dataclasses.DataclassAttribute.deserialize.

Source code in pydantic/mypy.py, lines 379-384
@classmethod
def deserialize(cls, info: TypeInfo, data: JsonDict, api: SemanticAnalyzerPluginInterface) -> PydanticModelField:
    """Based on mypy.plugins.dataclasses.DataclassAttribute.deserialize."""
    data = data.copy()
    typ = deserialize_and_fixup_type(data.pop('type'), api)
    return cls(type=typ, info=info, **data)

expand_type

expand_type(current_info)

Based on mypy.plugins.dataclasses.DataclassAttribute.expand_type.

Source code in pydantic/mypy.py, lines 347-355
def expand_type(self, current_info: TypeInfo) -> Type | None:
    """Based on mypy.plugins.dataclasses.DataclassAttribute.expand_type."""
    if self.type is not None and self.info.self_type is not None:
        # In general, it is not safe to call `expand_type()` during semantic analysis,
        # however this plugin is called very late, so all types should be fully ready.
        # Also, it is tricky to avoid eager expansion of Self types here (e.g. because
        # we serialize attributes).
        return expand_type(self.type, {self.info.self_type.id: fill_typevars(current_info)})
    return self.type

expand_typevar_from_subtype

expand_typevar_from_subtype(sub_type)

Expands type vars in the context of a subtype when an attribute is inherited from a generic super type.

Source code in pydantic/mypy.py, lines 386-391
def expand_typevar_from_subtype(self, sub_type: TypeInfo) -> None:
    """Expands type vars in the context of a subtype when an attribute is inherited
    from a generic super type.
    """
    if self.type is not None:
        self.type = map_type_from_supertype(self.type, sub_type, self.info)

serialize

serialize()

Based on mypy.plugins.dataclasses.DataclassAttribute.serialize.

Source code in pydantic/mypy.py, lines 366-377
def serialize(self) -> JsonDict:
    """Based on mypy.plugins.dataclasses.DataclassAttribute.serialize."""
    assert self.type
    return {
        'name': self.name,
        'alias': self.alias,
        'has_dynamic_alias': self.has_dynamic_alias,
        'has_default': self.has_default,
        'line': self.line,
        'column': self.column,
        'type': self.type.serialize(),
    }

to_argument

to_argument(current_info, typed, force_optional, use_alias)

Based on mypy.plugins.dataclasses.DataclassAttribute.to_argument.

Source code in pydantic/mypy.py, lines 338-345
def to_argument(self, current_info: TypeInfo, typed: bool, force_optional: bool, use_alias: bool) -> Argument:
    """Based on mypy.plugins.dataclasses.DataclassAttribute.to_argument."""
    return Argument(
        variable=self.to_var(current_info, use_alias),
        type_annotation=self.expand_type(current_info) if typed else AnyType(TypeOfAny.explicit),
        initializer=None,
        kind=ARG_NAMED_OPT if force_optional or self.has_default else ARG_NAMED,
    )

to_var

to_var(current_info, use_alias)

Based on mypy.plugins.dataclasses.DataclassAttribute.to_var.

Source code in pydantic/mypy.py, lines 357-364
def to_var(self, current_info: TypeInfo, use_alias: bool) -> Var:
    """Based on mypy.plugins.dataclasses.DataclassAttribute.to_var."""
    if use_alias and self.alias is not None:
        name = self.alias
    else:
        name = self.name

    return Var(name, self.expand_type(current_info))

PydanticModelTransformer

PydanticModelTransformer(cls, reason, api, plugin_config)

Transform the BaseModel subclass according to the plugin settings.

Attributes:

tracked_config_fields (set[str]): The set of config fields whose values the plugin tracks.

Source code in pydantic/mypy.py, lines 409-420
def __init__(
    self,
    cls: ClassDef,
    reason: Expression | Statement,
    api: SemanticAnalyzerPluginInterface,
    plugin_config: PydanticPluginConfig,
) -> None:
    self._cls = cls
    self._reason = reason
    self._api = api

    self.plugin_config = plugin_config

add_initializer

add_initializer(fields, config, is_settings)

Adds a fields-aware __init__ method to the class.

Depending on the plugin settings, the arguments of the added __init__ are annotated with the field types or with Any.

Source code in pydantic/mypy.py, lines 783-819
def add_initializer(self, fields: list[PydanticModelField], config: ModelConfigData, is_settings: bool) -> None:
    """Adds a fields-aware `__init__` method to the class.

    The added `__init__` will be annotated with types vs. all `Any` depending on the plugin settings.
    """
    if '__init__' in self._cls.info.names and not self._cls.info.names['__init__'].plugin_generated:
        return  # Don't generate an __init__ if one already exists

    typed = self.plugin_config.init_typed
    use_alias = config.populate_by_name is not True
    requires_dynamic_aliases = bool(config.has_alias_generator and not config.populate_by_name)
    with state.strict_optional_set(self._api.options.strict_optional):
        args = self.get_field_arguments(
            fields,
            typed=typed,
            requires_dynamic_aliases=requires_dynamic_aliases,
            use_alias=use_alias,
            is_settings=is_settings,
        )
        if is_settings:
            base_settings_info = self._api.lookup_fully_qualified(BASESETTINGS_FULLNAME).node.defn.info
            base_settings_init_arguments = base_settings_info.names['__init__'].node.arguments
            settings_init_arguments = []
            a: Argument
            for a in base_settings_init_arguments:
                if a.variable.name.startswith('__') or not a.variable.name.startswith('_'):
                    continue
                analyzed_variable_type = self._api.anal_type(a.variable.type)
                variable = Var(a.variable.name, analyzed_variable_type)
                settings_init_arguments.append(Argument(variable, analyzed_variable_type, None, ARG_OPT))
            args.extend(settings_init_arguments)

    if not self.should_init_forbid_extra(fields, config):
        var = Var('kwargs')
        args.append(Argument(var, AnyType(TypeOfAny.explicit), None, ARG_STAR2))

    add_method(self._api, self._cls, '__init__', args=args, return_type=NoneType())
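
A sketch of how alias handling shows up in the generated signature, using illustrative Event models and assuming default plugin settings:

from pydantic import BaseModel, ConfigDict, Field


class Event(BaseModel):
    start_ts: int = Field(alias='startTs')


Event(startTs=1)     # without populate_by_name, the generated __init__ uses the alias
# Event(start_ts=1)  # mypy (and runtime validation) would reject the field name here


class EventByName(BaseModel):
    model_config = ConfigDict(populate_by_name=True)

    start_ts: int = Field(alias='startTs')


EventByName(start_ts=1)  # with populate_by_name=True, the generated __init__ uses field names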

add_model_construct_method

add_model_construct_method(fields, config, is_settings)

Adds a fully typed model_construct classmethod to the class.

Similar to the fields-aware __init__ method, but it always uses the field names (not aliases) and does not treat settings fields as optional.

Source code in pydantic/mypy.py, lines 821-849
def add_model_construct_method(
    self, fields: list[PydanticModelField], config: ModelConfigData, is_settings: bool
) -> None:
    """Adds a fully typed `model_construct` classmethod to the class.

    Similar to the fields-aware __init__ method, but always uses the field names (not aliases),
    and does not treat settings fields as optional.
    """
    set_str = self._api.named_type(f'{BUILTINS_NAME}.set', [self._api.named_type(f'{BUILTINS_NAME}.str')])
    optional_set_str = UnionType([set_str, NoneType()])
    fields_set_argument = Argument(Var('_fields_set', optional_set_str), optional_set_str, None, ARG_OPT)
    with state.strict_optional_set(self._api.options.strict_optional):
        args = self.get_field_arguments(
            fields, typed=True, requires_dynamic_aliases=False, use_alias=False, is_settings=is_settings
        )
    if not self.should_init_forbid_extra(fields, config):
        var = Var('kwargs')
        args.append(Argument(var, AnyType(TypeOfAny.explicit), None, ARG_STAR2))

    args = [fields_set_argument] + args

    add_method(
        self._api,
        self._cls,
        'model_construct',
        args=args,
        return_type=fill_typevars(self._cls.info),
        is_classmethod=True,
    )
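
For example, the generated signature lets mypy check model_construct calls (a sketch with an illustrative Point model):

from pydantic import BaseModel


class Point(BaseModel):
    x: int
    y: int = 0


Point.model_construct(x=1)                     # `y` has a default and may be omitted
Point.model_construct(_fields_set={'x'}, x=1)  # `_fields_set` is typed as `set[str] | None`
# Point.model_construct(x='nope')              # model_construct skips runtime validation,
#                                              # but the typed signature lets mypy flag this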

adjust_decorator_signatures

adjust_decorator_signatures()

When we decorate a function f with pydantic.validator(...), pydantic.field_validator or pydantic.serializer(...), mypy sees f as a regular method taking a self instance, even though pydantic internally wraps f with classmethod if necessary.

Teach mypy this by marking any function whose outermost decorator is a validator(), field_validator() or serializer() call as a classmethod.

Source code in pydantic/mypy.py, lines 456-481
def adjust_decorator_signatures(self) -> None:
    """When we decorate a function `f` with `pydantic.validator(...)`, `pydantic.field_validator`
    or `pydantic.serializer(...)`, mypy sees `f` as a regular method taking a `self` instance,
    even though pydantic internally wraps `f` with `classmethod` if necessary.

    Teach mypy this by marking any function whose outermost decorator is a `validator()`,
    `field_validator()` or `serializer()` call as a `classmethod`.
    """
    for name, sym in self._cls.info.names.items():
        if isinstance(sym.node, Decorator):
            first_dec = sym.node.original_decorators[0]
            if (
                isinstance(first_dec, CallExpr)
                and isinstance(first_dec.callee, NameExpr)
                and first_dec.callee.fullname in DECORATOR_FULLNAMES
                # @model_validator(mode="after") is an exception, it expects a regular method
                and not (
                    first_dec.callee.fullname == MODEL_VALIDATOR_FULLNAME
                    and any(
                        first_dec.arg_names[i] == 'mode' and isinstance(arg, StrExpr) and arg.value == 'after'
                        for i, arg in enumerate(first_dec.args)
                    )
                )
            ):
                # TODO: Only do this if the first argument of the decorated function is `cls`
                sym.node.func.is_class = True
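
The effect, sketched on an illustrative Order model:

from pydantic import BaseModel, field_validator, model_validator


class Order(BaseModel):
    quantity: int

    @field_validator('quantity')
    def check_quantity(cls, v: int) -> int:
        # the plugin marks this as a classmethod, so `cls` (not `self`) type-checks
        return v if v > 0 else 1

    @model_validator(mode='after')
    def check_consistency(self) -> 'Order':
        # mode='after' model validators remain regular instance methods
        return self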

collect_config

collect_config()

Collects the values of the config attributes that are used by the plugin, accounting for parent classes.

Source code in pydantic/mypy.py, lines 483-557
def collect_config(self) -> ModelConfigData:  # noqa: C901 (ignore complexity)
    """Collects the values of the config attributes that are used by the plugin, accounting for parent classes."""
    cls = self._cls
    config = ModelConfigData()

    has_config_kwargs = False
    has_config_from_namespace = False

    # Handle `class MyModel(BaseModel, <name>=<expr>, ...):`
    for name, expr in cls.keywords.items():
        config_data = self.get_config_update(name, expr)
        if config_data:
            has_config_kwargs = True
            config.update(config_data)

    # Handle `model_config`
    stmt: Statement | None = None
    for stmt in cls.defs.body:
        if not isinstance(stmt, (AssignmentStmt, ClassDef)):
            continue

        if isinstance(stmt, AssignmentStmt):
            lhs = stmt.lvalues[0]
            if not isinstance(lhs, NameExpr) or lhs.name != 'model_config':
                continue

            if isinstance(stmt.rvalue, CallExpr):  # calls to `dict` or `ConfigDict`
                for arg_name, arg in zip(stmt.rvalue.arg_names, stmt.rvalue.args):
                    if arg_name is None:
                        continue
                    config.update(self.get_config_update(arg_name, arg))
            elif isinstance(stmt.rvalue, DictExpr):  # dict literals
                for key_expr, value_expr in stmt.rvalue.items:
                    if not isinstance(key_expr, StrExpr):
                        continue
                    config.update(self.get_config_update(key_expr.value, value_expr))

        elif isinstance(stmt, ClassDef):
            if stmt.name != 'Config':  # 'deprecated' Config-class
                continue
            for substmt in stmt.defs.body:
                if not isinstance(substmt, AssignmentStmt):
                    continue
                lhs = substmt.lvalues[0]
                if not isinstance(lhs, NameExpr):
                    continue
                config.update(self.get_config_update(lhs.name, substmt.rvalue))

        if has_config_kwargs:
            self._api.fail(
                'Specifying config in two places is ambiguous, use either Config attribute or class kwargs',
                cls,
            )
            break

        has_config_from_namespace = True

    if has_config_kwargs or has_config_from_namespace:
        if (
            stmt
            and config.has_alias_generator
            and not config.populate_by_name
            and self.plugin_config.warn_required_dynamic_aliases
        ):
            error_required_dynamic_aliases(self._api, stmt)

    for info in cls.info.mro[1:]:  # 0 is the current class
        if METADATA_KEY not in info.metadata:
            continue

        # Each class depends on the set of fields in its ancestors
        self._api.add_plugin_dependency(make_wildcard_trigger(info.fullname))
        for name, value in info.metadata[METADATA_KEY]['config'].items():
            config.setdefault(name, value)
    return config
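
For illustration, the two supported ways of declaring config, plus the mixed form the plugin rejects (a sketch; pydantic itself may tolerate the mixed form at runtime, but the plugin reports it as ambiguous):

from pydantic import BaseModel, ConfigDict


class ViaKeywords(BaseModel, frozen=True):    # config via class keywords
    x: int


class ViaModelConfig(BaseModel):
    model_config = ConfigDict(frozen=True)    # config via `model_config`

    x: int


class Mixed(BaseModel, frozen=True):
    # flagged by the plugin: "Specifying config in two places is ambiguous, ..."
    model_config = ConfigDict(extra='forbid')

    x: int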

collect_field_from_stmt

collect_field_from_stmt(stmt, model_config)

Get pydantic model field from statement.

Parameters:

stmt (AssignmentStmt, required): The statement.
model_config (ModelConfigData, required): Configuration settings for the model.

Returns:

PydanticModelField | None: A Pydantic model field if one could be extracted from the statement; otherwise, None.

Source code in pydantic/mypy.py, lines 625-743
def collect_field_from_stmt(  # noqa C901
    self, stmt: AssignmentStmt, model_config: ModelConfigData
) -> PydanticModelField | None:
    """Get pydantic model field from statement.

    Args:
        stmt: The statement.
        model_config: Configuration settings for the model.

    Returns:
        A pydantic model field if it could find the field in statement. Otherwise, `None`.
    """
    cls = self._cls

    lhs = stmt.lvalues[0]
    if not isinstance(lhs, NameExpr) or not _fields.is_valid_field_name(lhs.name) or lhs.name == 'model_config':
        return None

    if not stmt.new_syntax:
        if (
            isinstance(stmt.rvalue, CallExpr)
            and isinstance(stmt.rvalue.callee, CallExpr)
            and isinstance(stmt.rvalue.callee.callee, NameExpr)
            and stmt.rvalue.callee.callee.fullname in DECORATOR_FULLNAMES
        ):
            # This is a (possibly-reused) validator or serializer, not a field
            # In particular, it looks something like: my_validator = validator('my_field')(f)
            # Eventually, we may want to attempt to respect model_config['ignored_types']
            return None

        # The assignment does not have an annotation, and it's not anything else we recognize
        error_untyped_fields(self._api, stmt)
        return None

    lhs = stmt.lvalues[0]
    if not isinstance(lhs, NameExpr):
        return None

    if not _fields.is_valid_field_name(lhs.name) or lhs.name == 'model_config':
        return None

    sym = cls.info.names.get(lhs.name)
    if sym is None:  # pragma: no cover
        # This is likely due to a star import (see the dataclasses plugin for a more detailed explanation)
        # This is the same logic used in the dataclasses plugin
        return None

    node = sym.node
    if isinstance(node, PlaceholderNode):  # pragma: no cover
        # See the PlaceholderNode docstring for more detail about how this can occur
        # Basically, it is an edge case when dealing with complex import logic

        # The dataclasses plugin now asserts this cannot happen, but I'd rather not error if it does..
        return None

    if isinstance(node, TypeAlias):
        self._api.fail(
            'Type aliases inside BaseModel definitions are not supported at runtime',
            node,
        )
        # Skip processing this node. This doesn't match the runtime behaviour,
        # but the only alternative would be to modify the SymbolTable,
        # and it's a little hairy to do that in a plugin.
        return None

    if not isinstance(node, Var):  # pragma: no cover
        # Don't know if this edge case still happens with the `is_valid_field` check above
        # but better safe than sorry

        # The dataclasses plugin now asserts this cannot happen, but I'd rather not error if it does..
        return None

    # x: ClassVar[int] is not a field
    if node.is_classvar:
        return None

    # x: InitVar[int] is not supported in BaseModel
    node_type = get_proper_type(node.type)
    if isinstance(node_type, Instance) and node_type.type.fullname == 'dataclasses.InitVar':
        self._api.fail(
            'InitVar is not supported in BaseModel',
            node,
        )

    has_default = self.get_has_default(stmt)

    if sym.type is None and node.is_final and node.is_inferred:
        # This follows the logic from the dataclasses plugin. The following comment is taken verbatim:
        #
        # This is a special case, assignment like x: Final = 42 is classified
        # annotated above, but mypy strips the `Final` turning it into x = 42.
        # We do not support inferred types in dataclasses, so we can try inferring
        # type for simple literals, and otherwise require an explicit type
        # argument for Final[...].
        typ = self._api.analyze_simple_literal_type(stmt.rvalue, is_final=True)
        if typ:
            node.type = typ
        else:
            self._api.fail(
                'Need type argument for Final[...] with non-literal default in BaseModel',
                stmt,
            )
            node.type = AnyType(TypeOfAny.from_error)

    alias, has_dynamic_alias = self.get_alias_info(stmt)
    if has_dynamic_alias and not model_config.populate_by_name and self.plugin_config.warn_required_dynamic_aliases:
        error_required_dynamic_aliases(self._api, stmt)

    init_type = self._infer_dataclass_attr_init_type(sym, lhs.name, stmt)
    return PydanticModelField(
        name=lhs.name,
        has_dynamic_alias=has_dynamic_alias,
        has_default=has_default,
        alias=alias,
        line=stmt.line,
        column=stmt.column,
        type=init_type,
        info=cls.info,
    )
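
A sketch of which assignments are (and are not) collected as fields, using an illustrative Item model:

from typing import ClassVar

from pydantic import BaseModel


class Item(BaseModel):
    name: str                      # a regular required field
    price: float = 0.0             # a field with a default
    registry: ClassVar[dict] = {}  # ClassVar annotations are not fields
    # count = 0                    # an untyped assignment would be reported as "Untyped fields disallowed"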

collect_fields

collect_fields(model_config)

Collects the fields for the model, accounting for parent classes.

Source code in pydantic/mypy.py, lines 559-609
def collect_fields(self, model_config: ModelConfigData) -> list[PydanticModelField] | None:
    """Collects the fields for the model, accounting for parent classes."""
    cls = self._cls

    # First, collect fields belonging to any class in the MRO, ignoring duplicates.
    #
    # We iterate through the MRO in reverse because attrs defined in the parent must appear
    # earlier in the attributes list than attrs defined in the child. See:
    # https://docs.python.org/3/library/dataclasses.html#inheritance
    #
    # However, we also want fields defined in the subtype to override ones defined
    # in the parent. We can implement this via a dict without disrupting the attr order
    # because dicts preserve insertion order in Python 3.7+.
    found_fields: dict[str, PydanticModelField] = {}
    for info in reversed(cls.info.mro[1:-1]):  # 0 is the current class, -2 is BaseModel, -1 is object
        # if BASEMODEL_METADATA_TAG_KEY in info.metadata and BASEMODEL_METADATA_KEY not in info.metadata:
        #     # We haven't processed the base class yet. Need another pass.
        #     return None
        if METADATA_KEY not in info.metadata:
            continue

        # Each class depends on the set of attributes in its dataclass ancestors.
        self._api.add_plugin_dependency(make_wildcard_trigger(info.fullname))

        for name, data in info.metadata[METADATA_KEY]['fields'].items():
            field = PydanticModelField.deserialize(info, data, self._api)
            # (The following comment comes directly from the dataclasses plugin)
            # TODO: We shouldn't be performing type operations during the main
            #       semantic analysis pass, since some TypeInfo attributes might
            #       still be in flux. This should be performed in a later phase.
            with state.strict_optional_set(self._api.options.strict_optional):
                field.expand_typevar_from_subtype(cls.info)
            found_fields[name] = field

            sym_node = cls.info.names.get(name)
            if sym_node and sym_node.node and not isinstance(sym_node.node, Var):
                self._api.fail(
                    'BaseModel field may only be overridden by another field',
                    sym_node.node,
                )

    # Second, collect fields belonging to the current class.
    current_field_names: set[str] = set()
    for stmt in self._get_assignment_statements_from_block(cls.defs):
        maybe_field = self.collect_field_from_stmt(stmt, model_config)
        if maybe_field is not None:
            lhs = stmt.lvalues[0]
            current_field_names.add(lhs.name)
            found_fields[lhs.name] = maybe_field

    return list(found_fields.values())

get_alias_info staticmethod

get_alias_info(stmt)

Returns a pair (alias, has_dynamic_alias), extracted from the declaration of the field defined in stmt.

has_dynamic_alias is True if and only if an alias is provided, but not as a string literal. If has_dynamic_alias is True, alias will be None.

Source code in pydantic/mypy.py, lines 930-956
@staticmethod
def get_alias_info(stmt: AssignmentStmt) -> tuple[str | None, bool]:
    """Returns a pair (alias, has_dynamic_alias), extracted from the declaration of the field defined in `stmt`.

    `has_dynamic_alias` is True if and only if an alias is provided, but not as a string literal.
    If `has_dynamic_alias` is True, `alias` will be None.
    """
    expr = stmt.rvalue
    if isinstance(expr, TempNode):
        # TempNode means annotation-only
        return None, False

    if not (
        isinstance(expr, CallExpr) and isinstance(expr.callee, RefExpr) and expr.callee.fullname == FIELD_FULLNAME
    ):
        # Assigned value is not a call to pydantic.fields.Field
        return None, False

    for i, arg_name in enumerate(expr.arg_names):
        if arg_name != 'alias':
            continue
        arg = expr.args[i]
        if isinstance(arg, StrExpr):
            return arg.value, False
        else:
            return None, True
    return None, False
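
A sketch of a static versus a dynamic alias; make_alias and Record are purely illustrative:

from pydantic import BaseModel, Field


def make_alias(name: str) -> str:
    return name.upper()


class Record(BaseModel):
    static: int = Field(alias='staticName')        # string literal: the alias is known statically
    dynamic: int = Field(alias=make_alias('dyn'))  # computed value: a "dynamic alias"
    # The dynamic alias cannot appear in the generated __init__; with warn_required_dynamic_aliases
    # enabled (and no populate_by_name), the plugin reports "Required dynamic aliases disallowed".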

get_config_update

get_config_update(name, arg)

Determines the config update due to a single kwarg in the ConfigDict definition.

Warns if a tracked config attribute is set to a value the plugin doesn't know how to interpret (e.g., an int).

Source code in pydantic/mypy.py, lines 882-906
def get_config_update(self, name: str, arg: Expression) -> ModelConfigData | None:
    """Determines the config update due to a single kwarg in the ConfigDict definition.

    Warns if a tracked config attribute is set to a value the plugin doesn't know how to interpret (e.g., an int)
    """
    if name not in self.tracked_config_fields:
        return None
    if name == 'extra':
        if isinstance(arg, StrExpr):
            forbid_extra = arg.value == 'forbid'
        elif isinstance(arg, MemberExpr):
            forbid_extra = arg.name == 'forbid'
        else:
            error_invalid_config_value(name, self._api, arg)
            return None
        return ModelConfigData(forbid_extra=forbid_extra)
    if name == 'alias_generator':
        has_alias_generator = True
        if isinstance(arg, NameExpr) and arg.fullname == 'builtins.None':
            has_alias_generator = False
        return ModelConfigData(has_alias_generator=has_alias_generator)
    if isinstance(arg, NameExpr) and arg.fullname in ('builtins.True', 'builtins.False'):
        return ModelConfigData(**{name: arg.fullname == 'builtins.True'})
    error_invalid_config_value(name, self._api, arg)
    return None

get_field_arguments

get_field_arguments(
    fields,
    typed,
    use_alias,
    requires_dynamic_aliases,
    is_settings,
)

Helper function used during the construction of the __init__ and model_construct method signatures.

Returns a list of mypy Argument instances for use in the generated signatures.

Source code in pydantic/mypy.py, lines 958-978
def get_field_arguments(
    self,
    fields: list[PydanticModelField],
    typed: bool,
    use_alias: bool,
    requires_dynamic_aliases: bool,
    is_settings: bool,
) -> list[Argument]:
    """Helper function used during the construction of the `__init__` and `model_construct` method signatures.

    Returns a list of mypy Argument instances for use in the generated signatures.
    """
    info = self._cls.info
    arguments = [
        field.to_argument(
            info, typed=typed, force_optional=requires_dynamic_aliases or is_settings, use_alias=use_alias
        )
        for field in fields
        if not (use_alias and field.has_dynamic_alias)
    ]
    return arguments

get_has_default staticmethod

get_has_default(stmt)

Returns a boolean indicating whether the field defined in stmt has a default value (i.e., is not a required field).

Source code in pydantic/mypy.py, lines 908-928
@staticmethod
def get_has_default(stmt: AssignmentStmt) -> bool:
    """Returns a boolean indicating whether the field defined in `stmt` is a required field."""
    expr = stmt.rvalue
    if isinstance(expr, TempNode):
        # TempNode means annotation-only, so has no default
        return False
    if isinstance(expr, CallExpr) and isinstance(expr.callee, RefExpr) and expr.callee.fullname == FIELD_FULLNAME:
        # The "default value" is a call to `Field`; at this point, the field has a default if and only if:
        # * there is a positional argument that is not `...`
        # * there is a keyword argument named "default" that is not `...`
        # * there is a "default_factory" that is not `None`
        for arg, name in zip(expr.args, expr.arg_names):
            # If name is None, then this arg is the default because it is the only positional argument.
            if name is None or name == 'default':
                return arg.__class__ is not EllipsisExpr
            if name == 'default_factory':
                return not (isinstance(arg, NameExpr) and arg.fullname == 'builtins.None')
        return False
    # Has no default if the "default value" is Ellipsis (i.e., `field_name: Annotation = ...`)
    return not isinstance(expr, EllipsisExpr)
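
The rules above, sketched as field declarations on an illustrative Defaults model:

from pydantic import BaseModel, Field


class Defaults(BaseModel):
    a: int                                 # annotation only: no default, required
    b: int = 3                             # plain default
    c: int = Field(default=3)              # `default=` keyword counts as a default
    d: list = Field(default_factory=list)  # a non-None `default_factory` counts as a default
    e: int = Field(...)                    # Ellipsis: still required
    f: int = ...                           # bare Ellipsis default: still required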

is_dynamic_alias_present staticmethod

is_dynamic_alias_present(fields, has_alias_generator)

Returns whether any fields on the model have a "dynamic alias", i.e., an alias that cannot be determined during static analysis.

Source code in pydantic/mypy.py, lines 993-1005
@staticmethod
def is_dynamic_alias_present(fields: list[PydanticModelField], has_alias_generator: bool) -> bool:
    """Returns whether any fields on the model have a "dynamic alias", i.e., an alias that cannot be
    determined during static analysis.
    """
    for field in fields:
        if field.has_dynamic_alias:
            return True
    if has_alias_generator:
        for field in fields:
            if field.alias is None:
                return True
    return False

set_frozen

set_frozen(fields, frozen)

Marks all fields as properties so that attempts to set them trigger mypy errors.

This is the same approach used by the attrs and dataclasses plugins.

Source code in pydantic/mypy.py, lines 851-880
def set_frozen(self, fields: list[PydanticModelField], frozen: bool) -> None:
    """Marks all fields as properties so that attempts to set them trigger mypy errors.

    This is the same approach used by the attrs and dataclasses plugins.
    """
    info = self._cls.info
    for field in fields:
        sym_node = info.names.get(field.name)
        if sym_node is not None:
            var = sym_node.node
            if isinstance(var, Var):
                var.is_property = frozen
            elif isinstance(var, PlaceholderNode) and not self._api.final_iteration:
                # See https://github.com/pydantic/pydantic/issues/5191 to hit this branch for test coverage
                self._api.defer()
            else:  # pragma: no cover
                # I don't know whether it's possible to hit this branch, but I've added it for safety
                try:
                    var_str = str(var)
                except TypeError:
                    # This happens for PlaceholderNode; perhaps it will happen for other types in the future..
                    var_str = repr(var)
                detail = f'sym_node.node: {var_str} (of type {var.__class__})'
                error_unexpected_behavior(detail, self._api, self._cls)
        else:
            var = field.to_var(info, use_alias=False)
            var.info = info
            var.is_property = frozen
            var._fullname = info.fullname + '.' + var.name
            info.names[var.name] = SymbolTableNode(MDEF, var)
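
For example, on a frozen model (a sketch with an illustrative FrozenPoint model):

from pydantic import BaseModel, ConfigDict


class FrozenPoint(BaseModel):
    model_config = ConfigDict(frozen=True)

    x: int


p = FrozenPoint(x=1)
# p.x = 2  # the fields are marked as properties, so mypy rejects this assignment
#          # (pydantic also raises a validation error for it at runtime)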

should_init_forbid_extra

should_init_forbid_extra(fields, config)

Indicates whether the generated __init__ should forbid extra arguments, i.e., omit the trailing **kwargs from its signature.

We disallow arbitrary kwargs if the extra config setting is "forbid", or if the plugin config says to, unless a required dynamic alias is present (since then we can't determine a valid signature).

Source code in pydantic/mypy.py, lines 980-991
def should_init_forbid_extra(self, fields: list[PydanticModelField], config: ModelConfigData) -> bool:
    """Indicates whether the generated `__init__` should get a `**kwargs` at the end of its signature.

    We disallow arbitrary kwargs if the extra config setting is "forbid", or if the plugin config says to,
    *unless* a required dynamic alias is present (since then we can't determine a valid signature).
    """
    if not config.populate_by_name:
        if self.is_dynamic_alias_present(fields, bool(config.has_alias_generator)):
            return False
    if config.forbid_extra:
        return True
    return self.plugin_config.init_forbid_extra
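
A sketch of the effect of extra='forbid' on the generated signature; the init_forbid_extra plugin setting removes the trailing **kwargs in the same way:

from pydantic import BaseModel, ConfigDict


class Strict(BaseModel):
    model_config = ConfigDict(extra='forbid')

    x: int


Strict(x=1)
# Strict(x=1, y=2)  # no trailing **kwargs is generated, so mypy reports the unexpected `y`
#                   # (runtime validation rejects it as well)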

transform

transform()

Configures the BaseModel subclass according to the plugin settings.

In particular:

  • determines the model config and fields,
  • adds a fields-aware signature for the __init__ and model_construct methods,
  • freezes the class if frozen = True,
  • stores the fields, the config, and whether the class is a settings class in the mypy metadata, for access by subclasses.
Source code in pydantic/mypy.py, lines 422-454
def transform(self) -> bool:
    """Configures the BaseModel subclass according to the plugin settings.

    In particular:

    * determines the model config and fields,
    * adds a fields-aware signature for the initializer and construct methods
    * freezes the class if frozen = True
    * stores the fields, config, and if the class is settings in the mypy metadata for access by subclasses
    """
    info = self._cls.info
    config = self.collect_config()
    fields = self.collect_fields(config)
    if fields is None:
        # Some definitions are not ready. We need another pass.
        return False
    for field in fields:
        if field.type is None:
            return False

    is_settings = any(base.fullname == BASESETTINGS_FULLNAME for base in info.mro[:-1])
    self.add_initializer(fields, config, is_settings)
    self.add_model_construct_method(fields, config, is_settings)
    self.set_frozen(fields, frozen=config.frozen is True)

    self.adjust_decorator_signatures()

    info.metadata[METADATA_KEY] = {
        'fields': {field.name: field.serialize() for field in fields},
        'config': config.get_values_dict(),
    }

    return True

PydanticPlugin

PydanticPlugin(options)

Bases: Plugin

The Pydantic mypy plugin.

Source code in pydantic/mypy.py, lines 131-134
def __init__(self, options: Options) -> None:
    self.plugin_config = PydanticPluginConfig(options)
    self._plugin_data = self.plugin_config.to_data()
    super().__init__(options)

get_base_class_hook

get_base_class_hook(fullname)

Update Pydantic model class.

Source code in pydantic/mypy.py, lines 136-143
def get_base_class_hook(self, fullname: str) -> Callable[[ClassDefContext], bool] | None:
    """Update Pydantic model class."""
    sym = self.lookup_fully_qualified(fullname)
    if sym and isinstance(sym.node, TypeInfo):  # pragma: no branch
        # No branching may occur if the mypy cache has not been cleared
        if any(base.fullname == BASEMODEL_FULLNAME for base in sym.node.mro):
            return self._pydantic_model_class_maker_callback
    return None

get_class_decorator_hook

get_class_decorator_hook(fullname)

Mark classes decorated with pydantic.dataclasses.dataclass as dataclasses.

Mypy version 1.1.1 added support for the @dataclass_transform decorator.

Source code in pydantic/mypy.py, lines 164-171
def get_class_decorator_hook(self, fullname: str) -> Callable[[ClassDefContext], None] | None:
    """Mark pydantic.dataclasses as dataclass.

    Mypy version 1.1.1 added support for `@dataclass_transform` decorator.
    """
    if fullname == DATACLASS_FULLNAME and MYPY_VERSION_TUPLE < (1, 1):
        return dataclasses.dataclass_class_maker_callback  # type: ignore[return-value]
    return None

get_function_hook

get_function_hook(fullname)

Adjust the return type of the Field function.

Source code in pydantic/mypy.py, lines 151-156
def get_function_hook(self, fullname: str) -> Callable[[FunctionContext], Type] | None:
    """Adjust the return type of the `Field` function."""
    sym = self.lookup_fully_qualified(fullname)
    if sym and sym.fullname == FIELD_FULLNAME:
        return self._pydantic_field_callback
    return None

get_metaclass_hook

get_metaclass_hook(fullname)

Update Pydantic ModelMetaclass definition.

Source code in pydantic/mypy.py, lines 145-149
def get_metaclass_hook(self, fullname: str) -> Callable[[ClassDefContext], None] | None:
    """Update Pydantic `ModelMetaclass` definition."""
    if fullname == MODEL_METACLASS_FULLNAME:
        return self._pydantic_model_metaclass_marker_callback
    return None

get_method_hook

get_method_hook(fullname)

Adjust the return type of from_orm method calls.

Source code in pydantic/mypy.py, lines 158-162
def get_method_hook(self, fullname: str) -> Callable[[MethodContext], Type] | None:
    """Adjust return type of `from_orm` method call."""
    if fullname.endswith('.from_orm'):
        return from_attributes_callback
    return None

report_config_data

report_config_data(ctx)

Return all plugin config data.

Used by mypy to determine whether the cache needs to be discarded.

Source code in pydantic/mypy.py, lines 173-178
def report_config_data(self, ctx: ReportConfigContext) -> dict[str, Any]:
    """Return all plugin config data.

    Used by mypy to determine if cache needs to be discarded.
    """
    return self._plugin_data

PydanticPluginConfig

PydanticPluginConfig(options)

A Pydantic mypy plugin config holder.

Attributes:

init_forbid_extra (bool): Whether the generated __init__ should forbid extra arguments (i.e., omit the trailing **kwargs).

init_typed (bool): Whether to annotate fields in the generated __init__.

warn_required_dynamic_aliases (bool): Whether to emit an error when a required dynamic alias is present.

debug_dataclass_transform (bool): Whether to skip resetting the dataclass_transform_spec attribute of ModelMetaclass, for testing purposes.

Source code in pydantic/mypy.py, lines 268-285
def __init__(self, options: Options) -> None:
    if options.config_file is None:  # pragma: no cover
        return

    toml_config = parse_toml(options.config_file)
    if toml_config is not None:
        config = toml_config.get('tool', {}).get('pydantic-mypy', {})
        for key in self.__slots__:
            setting = config.get(key, False)
            if not isinstance(setting, bool):
                raise ValueError(f'Configuration value must be a boolean for key: {key}')
            setattr(self, key, setting)
    else:
        plugin_config = ConfigParser()
        plugin_config.read(options.config_file)
        for key in self.__slots__:
            setting = plugin_config.getboolean(CONFIGFILE_KEY, key, fallback=False)
            setattr(self, key, setting)

to_data

to_data()

Returns a dict of config names to their values.

Source code in pydantic/mypy.py, lines 287-289
def to_data(self) -> dict[str, Any]:
    """Returns a dict of config names to their values."""
    return {key: getattr(self, key) for key in self.__slots__}

add_method

add_method(
    api,
    cls,
    name,
    args,
    return_type,
    self_type=None,
    tvar_def=None,
    is_classmethod=False,
)

Very closely related to mypy.plugins.common.add_method_to_class, with a few pydantic-specific changes.

Source code in pydantic/mypy.py, lines 1092-1167
def add_method(
    api: SemanticAnalyzerPluginInterface | CheckerPluginInterface,
    cls: ClassDef,
    name: str,
    args: list[Argument],
    return_type: Type,
    self_type: Type | None = None,
    tvar_def: TypeVarDef | None = None,
    is_classmethod: bool = False,
) -> None:
    """Very closely related to `mypy.plugins.common.add_method_to_class`, with a few pydantic-specific changes."""
    info = cls.info

    # First remove any previously generated methods with the same name
    # to avoid clashes and problems in the semantic analyzer.
    if name in info.names:
        sym = info.names[name]
        if sym.plugin_generated and isinstance(sym.node, FuncDef):
            cls.defs.body.remove(sym.node)  # pragma: no cover

    if isinstance(api, SemanticAnalyzerPluginInterface):
        function_type = api.named_type('builtins.function')
    else:
        function_type = api.named_generic_type('builtins.function', [])

    if is_classmethod:
        self_type = self_type or TypeType(fill_typevars(info))
        first = [Argument(Var('_cls'), self_type, None, ARG_POS, True)]
    else:
        self_type = self_type or fill_typevars(info)
        first = [Argument(Var('__pydantic_self__'), self_type, None, ARG_POS)]
    args = first + args

    arg_types, arg_names, arg_kinds = [], [], []
    for arg in args:
        assert arg.type_annotation, 'All arguments must be fully typed.'
        arg_types.append(arg.type_annotation)
        arg_names.append(arg.variable.name)
        arg_kinds.append(arg.kind)

    signature = CallableType(arg_types, arg_kinds, arg_names, return_type, function_type)
    if tvar_def:
        signature.variables = [tvar_def]

    func = FuncDef(name, args, Block([PassStmt()]))
    func.info = info
    func.type = set_callable_name(signature, func)
    func.is_class = is_classmethod
    func._fullname = info.fullname + '.' + name
    func.line = info.line

    # NOTE: we would like the plugin generated node to dominate, but we still
    # need to keep any existing definitions so they get semantically analyzed.
    if name in info.names:
        # Get a nice unique name instead.
        r_name = get_unique_redefinition_name(name, info.names)
        info.names[r_name] = info.names[name]

    # Add decorator for is_classmethod
    # The dataclasses plugin claims this is unnecessary for classmethods, but not including it results in a
    # signature incompatible with the superclass, which causes mypy errors to occur for every subclass of BaseModel.
    if is_classmethod:
        func.is_decorated = True
        v = Var(name, func.type)
        v.info = info
        v._fullname = func._fullname
        v.is_classmethod = True
        dec = Decorator(func, [NameExpr('classmethod')], v)
        dec.line = info.line
        sym = SymbolTableNode(MDEF, dec)
    else:
        sym = SymbolTableNode(MDEF, func)
    sym.plugin_generated = True
    info.names[name] = sym

    info.defn.defs.body.append(func)

error_default_and_default_factory_specified

error_default_and_default_factory_specified(api, context)

Emits an error when Field has both default and default_factory together.

Source code in pydantic/mypy.py, lines 1087-1089
def error_default_and_default_factory_specified(api: CheckerPluginInterface, context: Context) -> None:
    """Emits an error when `Field` has both `default` and `default_factory` together."""
    api.fail('Field default and default_factory cannot be specified together', context, code=ERROR_FIELD_DEFAULTS)

error_from_attributes

error_from_attributes(model_name, api, context)

Emits an error when the model does not have from_attributes=True.

Source code in pydantic/mypy.py, lines 1053-1055
def error_from_attributes(model_name: str, api: CheckerPluginInterface, context: Context) -> None:
    """Emits an error when the model does not have `from_attributes=True`."""
    api.fail(f'"{model_name}" does not have from_attributes=True', context, code=ERROR_ORM)

error_invalid_config_value

error_invalid_config_value(name, api, context)

Emits an error when the config value is invalid.

Source code in pydantic/mypy.py, lines 1058-1060
def error_invalid_config_value(name: str, api: SemanticAnalyzerPluginInterface, context: Context) -> None:
    """Emits an error when the config value is invalid."""
    api.fail(f'Invalid value for "Config.{name}"', context, code=ERROR_CONFIG)

error_required_dynamic_aliases

error_required_dynamic_aliases(api, context)

Emits required dynamic aliases error.

This will be called when warn_required_dynamic_aliases=True.

Source code in pydantic/mypy.py, lines 1063-1068
def error_required_dynamic_aliases(api: SemanticAnalyzerPluginInterface, context: Context) -> None:
    """Emits required dynamic aliases error.

    This will be called when `warn_required_dynamic_aliases=True`.
    """
    api.fail('Required dynamic aliases disallowed', context, code=ERROR_ALIAS)

error_unexpected_behavior

error_unexpected_behavior(detail, api, context)

Emits unexpected behavior error.

Source code in pydantic/mypy.py, lines 1071-1079
def error_unexpected_behavior(
    detail: str, api: CheckerPluginInterface | SemanticAnalyzerPluginInterface, context: Context
) -> None:  # pragma: no cover
    """Emits unexpected behavior error."""
    # Can't think of a good way to test this, but I confirmed it renders as desired by adding to a non-error path
    link = 'https://github.com/pydantic/pydantic/issues/new/choose'
    full_message = f'The pydantic mypy plugin ran into unexpected behavior: {detail}\n'
    full_message += f'Please consider reporting this bug at {link} so we can try to fix it!'
    api.fail(full_message, context, code=ERROR_UNEXPECTED)

error_untyped_fields

error_untyped_fields(api, context)

Emits an error when there is an untyped field in the model.

Source code in pydantic/mypy.py, lines 1082-1084
def error_untyped_fields(api: SemanticAnalyzerPluginInterface, context: Context) -> None:
    """Emits an error when there is an untyped field in the model."""
    api.fail('Untyped fields disallowed', context, code=ERROR_UNTYPED)

from_attributes_callback

from_attributes_callback(ctx)

Raise an error if from_attributes is not enabled.

Source code in pydantic/mypy.py, lines 292-312
def from_attributes_callback(ctx: MethodContext) -> Type:
    """Raise an error if from_attributes is not enabled."""
    model_type: Instance
    ctx_type = ctx.type
    if isinstance(ctx_type, TypeType):
        ctx_type = ctx_type.item
    if isinstance(ctx_type, CallableType) and isinstance(ctx_type.ret_type, Instance):
        model_type = ctx_type.ret_type  # called on the class
    elif isinstance(ctx_type, Instance):
        model_type = ctx_type  # called on an instance (unusual, but still valid)
    else:  # pragma: no cover
        detail = f'ctx.type: {ctx_type} (of type {ctx_type.__class__.__name__})'
        error_unexpected_behavior(detail, ctx.api, ctx.context)
        return ctx.default_return_type
    pydantic_metadata = model_type.type.metadata.get(METADATA_KEY)
    if pydantic_metadata is None:
        return ctx.default_return_type
    from_attributes = pydantic_metadata.get('config', {}).get('from_attributes')
    if from_attributes is not True:
        error_from_attributes(model_type.type.name, ctx.api, ctx.context)
    return ctx.default_return_type
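
A sketch of the check; from_orm is deprecated in pydantic v2 in favour of model_validate, but the plugin still guards calls to it. UserRow and the model classes are illustrative:

from pydantic import BaseModel, ConfigDict


class UserRow:
    id = 1
    name = 'Jane'


class UserModel(BaseModel):
    id: int
    name: str


# UserModel.from_orm(UserRow())  # plugin error: "UserModel" does not have from_attributes=True


class UserModelFromAttrs(BaseModel):
    model_config = ConfigDict(from_attributes=True)

    id: int
    name: str


# UserModelFromAttrs.from_orm(UserRow())  # accepted by the plugin (deprecation warning at runtime)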

parse_toml

parse_toml(config_file)

Returns a dict of config keys to values.

It reads the config from a TOML file and returns None if the file is not a TOML file.

Source code in pydantic/mypy.py, lines 1170-1190
def parse_toml(config_file: str) -> dict[str, Any] | None:
    """Returns a dict of config keys to values.

    It reads configs from toml file and returns `None` if the file is not a toml file.
    """
    if not config_file.endswith('.toml'):
        return None

    if sys.version_info >= (3, 11):
        import tomllib as toml_
    else:
        try:
            import tomli as toml_
        except ImportError:  # pragma: no cover
            import warnings

            warnings.warn('No TOML parser installed, cannot read configuration from `pyproject.toml`.')
            return None

    with open(config_file, 'rb') as rf:
        return toml_.load(rf)

plugin

plugin(version)

version is the mypy version string.

We might want to use this to print a warning if the mypy version being used is newer, or especially older, than we expect (or need).

Parameters:

version (str, required): The mypy version string.

Returns:

The Pydantic mypy plugin type.

Source code in pydantic/mypy.py, lines 113-125
def plugin(version: str) -> type[Plugin]:
    """`version` is the mypy version string.

    We might want to use this to print a warning if the mypy version being used is
    newer, or especially older, than we expect (or need).

    Args:
        version: The mypy version string.

    Return:
        The Pydantic mypy plugin type.
    """
    return PydanticPlugin