Não pode escolher mais do que 25 tópicos Os tópicos devem começar com uma letra ou um número, podem incluir traços ('-') e podem ter até 35 caracteres.
 
 
 
 

1774 linhas
79 KiB

  1. """Logic for creating models."""
  2. # Because `dict` is in the local namespace of the `BaseModel` class, we use `Dict` for annotations.
  3. # TODO v3 fallback to `dict` when the deprecated `dict` method gets removed.
  4. # ruff: noqa: UP035
  5. from __future__ import annotations as _annotations
  6. import operator
  7. import sys
  8. import types
  9. import typing
  10. import warnings
  11. from collections.abc import Generator, Mapping
  12. from copy import copy, deepcopy
  13. from functools import cached_property
  14. from typing import (
  15. TYPE_CHECKING,
  16. Any,
  17. Callable,
  18. ClassVar,
  19. Dict,
  20. Literal,
  21. TypeVar,
  22. Union,
  23. cast,
  24. overload,
  25. )
  26. import pydantic_core
  27. import typing_extensions
  28. from pydantic_core import PydanticUndefined, ValidationError
  29. from typing_extensions import Self, TypeAlias, Unpack
  30. from . import PydanticDeprecatedSince20, PydanticDeprecatedSince211
  31. from ._internal import (
  32. _config,
  33. _decorators,
  34. _fields,
  35. _forward_ref,
  36. _generics,
  37. _mock_val_ser,
  38. _model_construction,
  39. _namespace_utils,
  40. _repr,
  41. _typing_extra,
  42. _utils,
  43. )
  44. from ._migration import getattr_migration
  45. from .aliases import AliasChoices, AliasPath
  46. from .annotated_handlers import GetCoreSchemaHandler, GetJsonSchemaHandler
  47. from .config import ConfigDict
  48. from .errors import PydanticUndefinedAnnotation, PydanticUserError
  49. from .json_schema import DEFAULT_REF_TEMPLATE, GenerateJsonSchema, JsonSchemaMode, JsonSchemaValue, model_json_schema
  50. from .plugin._schema_validator import PluggableSchemaValidator
if TYPE_CHECKING:
    # Imports used only in annotations; kept out of the runtime import graph.
    from inspect import Signature
    from pathlib import Path

    from pydantic_core import CoreSchema, SchemaSerializer, SchemaValidator

    from ._internal._namespace_utils import MappingNamespace
    from ._internal._utils import AbstractSetIntStr, MappingIntStrAny
    from .deprecated.parse import Protocol as DeprecatedParseProtocol
    from .fields import ComputedFieldInfo, FieldInfo, ModelPrivateAttr
else:
    # See PyCharm issues https://youtrack.jetbrains.com/issue/PY-21915
    # and https://youtrack.jetbrains.com/issue/PY-51428
    # Shadow the builtin so deprecation warnings raised in this module use the
    # pydantic-specific warning category at runtime.
    DeprecationWarning = PydanticDeprecatedSince20
__all__ = 'BaseModel', 'create_model'

# Keep these type aliases available at runtime:
TupleGenerator: TypeAlias = Generator[tuple[str, Any], None, None]
# NOTE: In reality, `bool` should be replaced by `Literal[True]` but mypy fails to correctly apply bidirectional
# type inference (e.g. when using `{'a': {'b': True}}`):
# NOTE: Keep this type alias in sync with the stub definition in `pydantic-core`:
IncEx: TypeAlias = Union[set[int], set[str], Mapping[int, Union['IncEx', bool]], Mapping[str, Union['IncEx', bool]]]

# Shortcut for `object.__setattr__`, used to bypass models' custom `__setattr__` logic.
_object_setattr = _model_construction.object_setattr
  71. def _check_frozen(model_cls: type[BaseModel], name: str, value: Any) -> None:
  72. if model_cls.model_config.get('frozen'):
  73. error_type = 'frozen_instance'
  74. elif getattr(model_cls.__pydantic_fields__.get(name), 'frozen', False):
  75. error_type = 'frozen_field'
  76. else:
  77. return
  78. raise ValidationError.from_exception_data(
  79. model_cls.__name__, [{'type': error_type, 'loc': (name,), 'input': value}]
  80. )
def _model_field_setattr_handler(model: BaseModel, name: str, val: Any) -> None:
    """Set a regular model field: store the value in `__dict__` and record it as explicitly set."""
    model.__dict__[name] = val
    model.__pydantic_fields_set__.add(name)
  84. def _private_setattr_handler(model: BaseModel, name: str, val: Any) -> None:
  85. if getattr(model, '__pydantic_private__', None) is None:
  86. # While the attribute should be present at this point, this may not be the case if
  87. # users do unusual stuff with `model_post_init()` (which is where the `__pydantic_private__`
  88. # is initialized, by wrapping the user-defined `model_post_init()`), e.g. if they mock
  89. # the `model_post_init()` call. Ideally we should find a better way to init private attrs.
  90. object.__setattr__(model, '__pydantic_private__', {})
  91. model.__pydantic_private__[name] = val # pyright: ignore[reportOptionalSubscript]
# Dispatch table from a memoized attribute-kind tag to the handler that performs the
# assignment. Entries end up cached per class in `__pydantic_setattr_handlers__`, which
# is what makes repeated `__setattr__` calls fast.
_SIMPLE_SETATTR_HANDLERS: Mapping[str, Callable[[BaseModel, str, Any], None]] = {
    'model_field': _model_field_setattr_handler,
    'validate_assignment': lambda model, name, val: model.__pydantic_validator__.validate_assignment(model, name, val),  # pyright: ignore[reportAssignmentType]
    'private': _private_setattr_handler,
    'cached_property': lambda model, name, val: model.__dict__.__setitem__(name, val),
    'extra_known': lambda model, name, val: _object_setattr(model, name, val),
}
class BaseModel(metaclass=_model_construction.ModelMetaclass):
    """!!! abstract "Usage Documentation"
        [Models](../concepts/models.md)

    A base class for creating Pydantic models.

    Attributes:
        __class_vars__: The names of the class variables defined on the model.
        __private_attributes__: Metadata about the private attributes of the model.
        __signature__: The synthesized `__init__` [`Signature`][inspect.Signature] of the model.

        __pydantic_complete__: Whether model building is completed, or if there are still undefined fields.
        __pydantic_core_schema__: The core schema of the model.
        __pydantic_custom_init__: Whether the model has a custom `__init__` function.
        __pydantic_decorators__: Metadata containing the decorators defined on the model.
            This replaces `Model.__validators__` and `Model.__root_validators__` from Pydantic V1.
        __pydantic_generic_metadata__: Metadata for generic models; contains data used for a similar purpose to
            __args__, __origin__, __parameters__ in typing-module generics. May eventually be replaced by these.
        __pydantic_parent_namespace__: Parent namespace of the model, used for automatic rebuilding of models.
        __pydantic_post_init__: The name of the post-init method for the model, if defined.
        __pydantic_root_model__: Whether the model is a [`RootModel`][pydantic.root_model.RootModel].
        __pydantic_serializer__: The `pydantic-core` `SchemaSerializer` used to dump instances of the model.
        __pydantic_validator__: The `pydantic-core` `SchemaValidator` used to validate instances of the model.

        __pydantic_fields__: A dictionary of field names and their corresponding [`FieldInfo`][pydantic.fields.FieldInfo] objects.
        __pydantic_computed_fields__: A dictionary of computed field names and their corresponding [`ComputedFieldInfo`][pydantic.fields.ComputedFieldInfo] objects.

        __pydantic_extra__: A dictionary containing extra values, if [`extra`][pydantic.config.ConfigDict.extra]
            is set to `'allow'`.
        __pydantic_fields_set__: The names of fields explicitly set during instantiation.
        __pydantic_private__: Values of private attributes set on the model instance.
    """

    # Note: Many of the below class vars are defined in the metaclass, but we define them here for type checking purposes.

    model_config: ClassVar[ConfigDict] = ConfigDict()
    """
    Configuration for the model, should be a dictionary conforming to [`ConfigDict`][pydantic.config.ConfigDict].
    """

    __class_vars__: ClassVar[set[str]]
    """The names of the class variables defined on the model."""

    __private_attributes__: ClassVar[Dict[str, ModelPrivateAttr]]  # noqa: UP006
    """Metadata about the private attributes of the model."""

    __signature__: ClassVar[Signature]
    """The synthesized `__init__` [`Signature`][inspect.Signature] of the model."""

    __pydantic_complete__: ClassVar[bool] = False
    """Whether model building is completed, or if there are still undefined fields."""

    __pydantic_core_schema__: ClassVar[CoreSchema]
    """The core schema of the model."""

    __pydantic_custom_init__: ClassVar[bool]
    """Whether the model has a custom `__init__` method."""

    # Must be set for `GenerateSchema.model_schema` to work for a plain `BaseModel` annotation.
    __pydantic_decorators__: ClassVar[_decorators.DecoratorInfos] = _decorators.DecoratorInfos()
    """Metadata containing the decorators defined on the model.
    This replaces `Model.__validators__` and `Model.__root_validators__` from Pydantic V1."""

    __pydantic_generic_metadata__: ClassVar[_generics.PydanticGenericMetadata]
    """Metadata for generic models; contains data used for a similar purpose to
    __args__, __origin__, __parameters__ in typing-module generics. May eventually be replaced by these."""

    __pydantic_parent_namespace__: ClassVar[Dict[str, Any] | None] = None  # noqa: UP006
    """Parent namespace of the model, used for automatic rebuilding of models."""

    __pydantic_post_init__: ClassVar[None | Literal['model_post_init']]
    """The name of the post-init method for the model, if defined."""

    __pydantic_root_model__: ClassVar[bool] = False
    """Whether the model is a [`RootModel`][pydantic.root_model.RootModel]."""

    __pydantic_serializer__: ClassVar[SchemaSerializer]
    """The `pydantic-core` `SchemaSerializer` used to dump instances of the model."""

    __pydantic_validator__: ClassVar[SchemaValidator | PluggableSchemaValidator]
    """The `pydantic-core` `SchemaValidator` used to validate instances of the model."""

    __pydantic_fields__: ClassVar[Dict[str, FieldInfo]]  # noqa: UP006
    """A dictionary of field names and their corresponding [`FieldInfo`][pydantic.fields.FieldInfo] objects.
    This replaces `Model.__fields__` from Pydantic V1.
    """

    __pydantic_setattr_handlers__: ClassVar[Dict[str, Callable[[BaseModel, str, Any], None]]]  # noqa: UP006
    """`__setattr__` handlers. Memoizing the handlers leads to a dramatic performance improvement in `__setattr__`"""

    __pydantic_computed_fields__: ClassVar[Dict[str, ComputedFieldInfo]]  # noqa: UP006
    """A dictionary of computed field names and their corresponding [`ComputedFieldInfo`][pydantic.fields.ComputedFieldInfo] objects."""

    __pydantic_extra__: dict[str, Any] | None = _model_construction.NoInitField(init=False)
    """A dictionary containing extra values, if [`extra`][pydantic.config.ConfigDict.extra] is set to `'allow'`."""

    __pydantic_fields_set__: set[str] = _model_construction.NoInitField(init=False)
    """The names of fields explicitly set during instantiation."""

    __pydantic_private__: dict[str, Any] | None = _model_construction.NoInitField(init=False)
    """Values of private attributes set on the model instance."""

    if not TYPE_CHECKING:
        # Prevent `BaseModel` from being instantiated directly
        # (defined in an `if not TYPE_CHECKING` block for clarity and to avoid type checking errors):
        __pydantic_core_schema__ = _mock_val_ser.MockCoreSchema(
            'Pydantic models should inherit from BaseModel, BaseModel cannot be instantiated directly',
            code='base-model-instantiated',
        )
        __pydantic_validator__ = _mock_val_ser.MockValSer(
            'Pydantic models should inherit from BaseModel, BaseModel cannot be instantiated directly',
            val_or_ser='validator',
            code='base-model-instantiated',
        )
        __pydantic_serializer__ = _mock_val_ser.MockValSer(
            'Pydantic models should inherit from BaseModel, BaseModel cannot be instantiated directly',
            val_or_ser='serializer',
            code='base-model-instantiated',
        )

    __slots__ = '__dict__', '__pydantic_fields_set__', '__pydantic_extra__', '__pydantic_private__'
def __init__(self, /, **data: Any) -> None:
    """Create a new model by parsing and validating input data from keyword arguments.

    Raises [`ValidationError`][pydantic_core.ValidationError] if the input data cannot be
    validated to form a valid model.

    `self` is explicitly positional-only to allow `self` as a field name.
    """
    # `__tracebackhide__` tells pytest and some other tools to omit this function from tracebacks
    __tracebackhide__ = True
    # Validation fills in `self` in place (`self_instance=self`); the return value is only
    # inspected to detect a misbehaving custom model validator below.
    validated_self = self.__pydantic_validator__.validate_python(data, self_instance=self)
    if self is not validated_self:
        warnings.warn(
            'A custom validator is returning a value other than `self`.\n'
            "Returning anything other than `self` from a top level model validator isn't supported when validating via `__init__`.\n"
            'See the `model_validator` docs (https://docs.pydantic.dev/latest/concepts/validators/#model-validators) for more details.',
            stacklevel=2,
        )

# The following line sets a flag that we use to determine when `__init__` gets overridden by the user
__init__.__pydantic_base_init__ = True  # pyright: ignore[reportFunctionMemberAccess]
@_utils.deprecated_instance_property
@classmethod
def model_fields(cls) -> dict[str, FieldInfo]:
    """A mapping of field names to their respective [`FieldInfo`][pydantic.fields.FieldInfo] instances.

    !!! warning
        Accessing this attribute from a model instance is deprecated, and will not work in Pydantic V3.
        Instead, you should access this attribute from the model class.
    """
    # `getattr` with a default: `__pydantic_fields__` may be absent on `BaseModel` itself.
    return getattr(cls, '__pydantic_fields__', {})
@_utils.deprecated_instance_property
@classmethod
def model_computed_fields(cls) -> dict[str, ComputedFieldInfo]:
    """A mapping of computed field names to their respective [`ComputedFieldInfo`][pydantic.fields.ComputedFieldInfo] instances.

    !!! warning
        Accessing this attribute from a model instance is deprecated, and will not work in Pydantic V3.
        Instead, you should access this attribute from the model class.
    """
    # `getattr` with a default: `__pydantic_computed_fields__` may be absent on `BaseModel` itself.
    return getattr(cls, '__pydantic_computed_fields__', {})
@property
def model_extra(self) -> dict[str, Any] | None:
    """Get extra fields set during validation.

    Returns:
        A dictionary of extra fields, or `None` if `config.extra` is not set to `"allow"`.
    """
    return self.__pydantic_extra__
@property
def model_fields_set(self) -> set[str]:
    """Returns the set of fields that have been explicitly set on this model instance.

    Returns:
        A set of strings representing the fields that have been set,
        i.e. that were not filled from defaults.
    """
    return self.__pydantic_fields_set__
@classmethod
def model_construct(cls, _fields_set: set[str] | None = None, **values: Any) -> Self:  # noqa: C901
    """Creates a new instance of the `Model` class with validated data.

    Creates a new model setting `__dict__` and `__pydantic_fields_set__` from trusted or pre-validated data.
    Default values are respected, but no other validation is performed.

    !!! note
        `model_construct()` generally respects the `model_config.extra` setting on the provided model.
        That is, if `model_config.extra == 'allow'`, then all extra passed values are added to the model instance's `__dict__`
        and `__pydantic_extra__` fields. If `model_config.extra == 'ignore'` (the default), then all extra passed values are ignored.
        Because no validation is performed with a call to `model_construct()`, having `model_config.extra == 'forbid'` does not result in
        an error if extra values are passed, but they will be ignored.

    Args:
        _fields_set: A set of field names that were originally explicitly set during instantiation. If provided,
            this is directly used for the [`model_fields_set`][pydantic.BaseModel.model_fields_set] attribute.
            Otherwise, the field names from the `values` argument will be used.
        values: Trusted or pre-validated data dictionary.

    Returns:
        A new instance of the `Model` class with validated data.
    """
    m = cls.__new__(cls)
    fields_values: dict[str, Any] = {}
    fields_set = set()

    for name, field in cls.__pydantic_fields__.items():
        # 1) Prefer the (serialization) alias if it appears in `values`:
        if field.alias is not None and field.alias in values:
            fields_values[name] = values.pop(field.alias)
            fields_set.add(name)

        # 2) Then try validation aliases (a single alias, `AliasChoices`, or an `AliasPath`):
        if (name not in fields_set) and (field.validation_alias is not None):
            validation_aliases: list[str | AliasPath] = (
                field.validation_alias.choices
                if isinstance(field.validation_alias, AliasChoices)
                else [field.validation_alias]
            )

            for alias in validation_aliases:
                if isinstance(alias, str) and alias in values:
                    fields_values[name] = values.pop(alias)
                    fields_set.add(name)
                    break
                elif isinstance(alias, AliasPath):
                    value = alias.search_dict_for_path(values)
                    if value is not PydanticUndefined:
                        fields_values[name] = value
                        fields_set.add(name)
                        break

        # 3) Finally, fall back to the field name itself, or to the field default:
        if name not in fields_set:
            if name in values:
                fields_values[name] = values.pop(name)
                fields_set.add(name)
            elif not field.is_required():
                # Default factories may take the already-collected validated data.
                fields_values[name] = field.get_default(call_default_factory=True, validated_data=fields_values)
    if _fields_set is None:
        _fields_set = fields_set

    # Anything left in `values` is extra data; kept only when `extra='allow'`.
    _extra: dict[str, Any] | None = values if cls.model_config.get('extra') == 'allow' else None
    _object_setattr(m, '__dict__', fields_values)
    _object_setattr(m, '__pydantic_fields_set__', _fields_set)
    if not cls.__pydantic_root_model__:
        _object_setattr(m, '__pydantic_extra__', _extra)

    if cls.__pydantic_post_init__:
        m.model_post_init(None)
        # update private attributes with values set
        if hasattr(m, '__pydantic_private__') and m.__pydantic_private__ is not None:
            for k, v in values.items():
                if k in m.__private_attributes__:
                    m.__pydantic_private__[k] = v
    elif not cls.__pydantic_root_model__:
        # Note: if there are any private attributes, cls.__pydantic_post_init__ would exist
        # Since it doesn't, that means that `__pydantic_private__` should be set to None
        _object_setattr(m, '__pydantic_private__', None)

    return m
def model_copy(self, *, update: Mapping[str, Any] | None = None, deep: bool = False) -> Self:
    """!!! abstract "Usage Documentation"
        [`model_copy`](../concepts/serialization.md#model_copy)

    Returns a copy of the model.

    !!! note
        The underlying instance's [`__dict__`][object.__dict__] attribute is copied. This
        might have unexpected side effects if you store anything in it, on top of the model
        fields (e.g. the value of [cached properties][functools.cached_property]).

    Args:
        update: Values to change/add in the new model. Note: the data is not validated
            before creating the new model. You should trust this data.
        deep: Set to `True` to make a deep copy of the model.

    Returns:
        New model instance.
    """
    copied = self.__deepcopy__() if deep else self.__copy__()
    if update:
        if self.model_config.get('extra') == 'allow':
            # Known fields go into `__dict__`; unknown keys go into `__pydantic_extra__`.
            for k, v in update.items():
                if k in self.__pydantic_fields__:
                    copied.__dict__[k] = v
                else:
                    if copied.__pydantic_extra__ is None:
                        copied.__pydantic_extra__ = {}
                    copied.__pydantic_extra__[k] = v
        else:
            # No extras allowed: everything lands in `__dict__` directly.
            copied.__dict__.update(update)
        # All updated keys count as explicitly set, mirroring normal assignment.
        copied.__pydantic_fields_set__.update(update.keys())
    return copied
def model_dump(
    self,
    *,
    mode: Literal['json', 'python'] | str = 'python',
    include: IncEx | None = None,
    exclude: IncEx | None = None,
    context: Any | None = None,
    by_alias: bool | None = None,
    exclude_unset: bool = False,
    exclude_defaults: bool = False,
    exclude_none: bool = False,
    round_trip: bool = False,
    warnings: bool | Literal['none', 'warn', 'error'] = True,
    fallback: Callable[[Any], Any] | None = None,
    serialize_as_any: bool = False,
) -> dict[str, Any]:
    """!!! abstract "Usage Documentation"
        [`model_dump`](../concepts/serialization.md#modelmodel_dump)

    Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.

    Args:
        mode: The mode in which `to_python` should run.
            If mode is 'json', the output will only contain JSON serializable types.
            If mode is 'python', the output may contain non-JSON-serializable Python objects.
        include: A set of fields to include in the output.
        exclude: A set of fields to exclude from the output.
        context: Additional context to pass to the serializer.
        by_alias: Whether to use the field's alias in the dictionary key if defined.
        exclude_unset: Whether to exclude fields that have not been explicitly set.
        exclude_defaults: Whether to exclude fields that are set to their default value.
        exclude_none: Whether to exclude fields that have a value of `None`.
        round_trip: If True, dumped values should be valid as input for non-idempotent types such as Json[T].
        warnings: How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors,
            "error" raises a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError].
        fallback: A function to call when an unknown value is encountered. If not provided,
            a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError] error is raised.
        serialize_as_any: Whether to serialize fields with duck-typing serialization behavior.

    Returns:
        A dictionary representation of the model.
    """
    # Pure pass-through to the pydantic-core serializer built for this class.
    return self.__pydantic_serializer__.to_python(
        self,
        mode=mode,
        by_alias=by_alias,
        include=include,
        exclude=exclude,
        context=context,
        exclude_unset=exclude_unset,
        exclude_defaults=exclude_defaults,
        exclude_none=exclude_none,
        round_trip=round_trip,
        warnings=warnings,
        fallback=fallback,
        serialize_as_any=serialize_as_any,
    )
def model_dump_json(
    self,
    *,
    indent: int | None = None,
    include: IncEx | None = None,
    exclude: IncEx | None = None,
    context: Any | None = None,
    by_alias: bool | None = None,
    exclude_unset: bool = False,
    exclude_defaults: bool = False,
    exclude_none: bool = False,
    round_trip: bool = False,
    warnings: bool | Literal['none', 'warn', 'error'] = True,
    fallback: Callable[[Any], Any] | None = None,
    serialize_as_any: bool = False,
) -> str:
    """!!! abstract "Usage Documentation"
        [`model_dump_json`](../concepts/serialization.md#modelmodel_dump_json)

    Generates a JSON representation of the model using Pydantic's `to_json` method.

    Args:
        indent: Indentation to use in the JSON output. If None is passed, the output will be compact.
        include: Field(s) to include in the JSON output.
        exclude: Field(s) to exclude from the JSON output.
        context: Additional context to pass to the serializer.
        by_alias: Whether to serialize using field aliases.
        exclude_unset: Whether to exclude fields that have not been explicitly set.
        exclude_defaults: Whether to exclude fields that are set to their default value.
        exclude_none: Whether to exclude fields that have a value of `None`.
        round_trip: If True, dumped values should be valid as input for non-idempotent types such as Json[T].
        warnings: How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors,
            "error" raises a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError].
        fallback: A function to call when an unknown value is encountered. If not provided,
            a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError] error is raised.
        serialize_as_any: Whether to serialize fields with duck-typing serialization behavior.

    Returns:
        A JSON string representation of the model.
    """
    # `to_json` returns `bytes`; decode to `str` for the public API.
    return self.__pydantic_serializer__.to_json(
        self,
        indent=indent,
        include=include,
        exclude=exclude,
        context=context,
        by_alias=by_alias,
        exclude_unset=exclude_unset,
        exclude_defaults=exclude_defaults,
        exclude_none=exclude_none,
        round_trip=round_trip,
        warnings=warnings,
        fallback=fallback,
        serialize_as_any=serialize_as_any,
    ).decode()
@classmethod
def model_json_schema(
    cls,
    by_alias: bool = True,
    ref_template: str = DEFAULT_REF_TEMPLATE,
    schema_generator: type[GenerateJsonSchema] = GenerateJsonSchema,
    mode: JsonSchemaMode = 'validation',
) -> dict[str, Any]:
    """Generates a JSON schema for a model class.

    Args:
        by_alias: Whether to use attribute aliases or not.
        ref_template: The reference template.
        schema_generator: To override the logic used to generate the JSON schema, as a subclass of
            `GenerateJsonSchema` with your desired modifications
        mode: The mode in which to generate the schema.

    Returns:
        The JSON schema for the given model class.
    """
    # Delegates to the module-level `model_json_schema` helper imported from `.json_schema`.
    return model_json_schema(
        cls, by_alias=by_alias, ref_template=ref_template, schema_generator=schema_generator, mode=mode
    )
  467. @classmethod
  468. def model_parametrized_name(cls, params: tuple[type[Any], ...]) -> str:
  469. """Compute the class name for parametrizations of generic classes.
  470. This method can be overridden to achieve a custom naming scheme for generic BaseModels.
  471. Args:
  472. params: Tuple of types of the class. Given a generic class
  473. `Model` with 2 type variables and a concrete model `Model[str, int]`,
  474. the value `(str, int)` would be passed to `params`.
  475. Returns:
  476. String representing the new class where `params` are passed to `cls` as type variables.
  477. Raises:
  478. TypeError: Raised when trying to generate concrete names for non-generic models.
  479. """
  480. if not issubclass(cls, typing.Generic):
  481. raise TypeError('Concrete names should only be generated for generic models.')
  482. # Any strings received should represent forward references, so we handle them specially below.
  483. # If we eventually move toward wrapping them in a ForwardRef in __class_getitem__ in the future,
  484. # we may be able to remove this special case.
  485. param_names = [param if isinstance(param, str) else _repr.display_as_type(param) for param in params]
  486. params_component = ', '.join(param_names)
  487. return f'{cls.__name__}[{params_component}]'
  488. def model_post_init(self, context: Any, /) -> None:
  489. """Override this method to perform additional initialization after `__init__` and `model_construct`.
  490. This is useful if you want to do some validation that requires the entire model to be initialized.
  491. """
  492. pass
@classmethod
def model_rebuild(
    cls,
    *,
    force: bool = False,
    raise_errors: bool = True,
    _parent_namespace_depth: int = 2,
    _types_namespace: MappingNamespace | None = None,
) -> bool | None:
    """Try to rebuild the pydantic-core schema for the model.

    This may be necessary when one of the annotations is a ForwardRef which could not be resolved during
    the initial attempt to build the schema, and automatic rebuilding fails.

    Args:
        force: Whether to force the rebuilding of the model schema, defaults to `False`.
        raise_errors: Whether to raise errors, defaults to `True`.
        _parent_namespace_depth: The depth level of the parent namespace, defaults to 2.
        _types_namespace: The types namespace, defaults to `None`.

    Returns:
        Returns `None` if the schema is already "complete" and rebuilding was not required.
        If rebuilding _was_ required, returns `True` if rebuilding was successful, otherwise `False`.
    """
    if not force and cls.__pydantic_complete__:
        # Nothing to do: schema already built successfully.
        return None

    for attr in ('__pydantic_core_schema__', '__pydantic_validator__', '__pydantic_serializer__'):
        if attr in cls.__dict__ and not isinstance(getattr(cls, attr), _mock_val_ser.MockValSer):
            # Deleting the validator/serializer is necessary as otherwise they can get reused in
            # pydantic-core. We do so only if they aren't mock instances, otherwise — as `model_rebuild()`
            # isn't thread-safe — concurrent model instantiations can lead to the parent validator being used.
            # Same applies for the core schema that can be reused in schema generation.
            delattr(cls, attr)

    cls.__pydantic_complete__ = False

    # Build the namespace used to resolve forward references, in priority order:
    # explicit `_types_namespace` > caller's frame locals > nothing.
    if _types_namespace is not None:
        rebuild_ns = _types_namespace
    elif _parent_namespace_depth > 0:
        rebuild_ns = _typing_extra.parent_frame_namespace(parent_depth=_parent_namespace_depth, force=True) or {}
    else:
        rebuild_ns = {}

    parent_ns = _model_construction.unpack_lenient_weakvaluedict(cls.__pydantic_parent_namespace__) or {}
    ns_resolver = _namespace_utils.NsResolver(
        parent_namespace={**rebuild_ns, **parent_ns},
    )

    if not cls.__pydantic_fields_complete__:
        # Some field annotations were unresolved; retry resolving them first.
        typevars_map = _generics.get_model_typevars_map(cls)
        try:
            cls.__pydantic_fields__ = _fields.rebuild_model_fields(
                cls,
                ns_resolver=ns_resolver,
                typevars_map=typevars_map,
            )
        except NameError as e:
            exc = PydanticUndefinedAnnotation.from_name_error(e)
            _mock_val_ser.set_model_mocks(cls, f'`{exc.name}`')
            if raise_errors:
                raise exc from e

        if not raise_errors and not cls.__pydantic_fields_complete__:
            # No need to continue with schema gen, it is guaranteed to fail
            return False

    assert cls.__pydantic_fields_complete__

    return _model_construction.complete_model_class(
        cls,
        _config.ConfigWrapper(cls.model_config, check=False),
        raise_errors=raise_errors,
        ns_resolver=ns_resolver,
    )
@classmethod
def model_validate(
    cls,
    obj: Any,
    *,
    strict: bool | None = None,
    from_attributes: bool | None = None,
    context: Any | None = None,
    by_alias: bool | None = None,
    by_name: bool | None = None,
) -> Self:
    """Validate a pydantic model instance.

    Args:
        obj: The object to validate.
        strict: Whether to enforce types strictly.
        from_attributes: Whether to extract data from object attributes.
        context: Additional context to pass to the validator.
        by_alias: Whether to use the field's alias when validating against the provided input data.
        by_name: Whether to use the field's name when validating against the provided input data.

    Raises:
        ValidationError: If the object could not be validated.

    Returns:
        The validated model instance.
    """
    # `__tracebackhide__` tells pytest and some other tools to omit this function from tracebacks
    __tracebackhide__ = True

    # Disabling both alias and name lookup would make every field impossible to populate.
    if by_alias is False and by_name is not True:
        raise PydanticUserError(
            'At least one of `by_alias` or `by_name` must be set to True.',
            code='validate-by-alias-and-name-false',
        )

    return cls.__pydantic_validator__.validate_python(
        obj, strict=strict, from_attributes=from_attributes, context=context, by_alias=by_alias, by_name=by_name
    )
@classmethod
def model_validate_json(
    cls,
    json_data: str | bytes | bytearray,
    *,
    strict: bool | None = None,
    context: Any | None = None,
    by_alias: bool | None = None,
    by_name: bool | None = None,
) -> Self:
    """!!! abstract "Usage Documentation"
        [JSON Parsing](../concepts/json.md#json-parsing)

    Validate the given JSON data against the Pydantic model.

    Args:
        json_data: The JSON data to validate.
        strict: Whether to enforce types strictly.
        context: Extra variables to pass to the validator.
        by_alias: Whether to use the field's alias when validating against the provided input data.
        by_name: Whether to use the field's name when validating against the provided input data.

    Returns:
        The validated Pydantic model.

    Raises:
        ValidationError: If `json_data` is not a JSON string or the object could not be validated.
    """
    # `__tracebackhide__` tells pytest and some other tools to omit this function from tracebacks
    __tracebackhide__ = True

    # Disabling both alias and name lookup would make every field impossible to populate.
    if by_alias is False and by_name is not True:
        raise PydanticUserError(
            'At least one of `by_alias` or `by_name` must be set to True.',
            code='validate-by-alias-and-name-false',
        )

    return cls.__pydantic_validator__.validate_json(
        json_data, strict=strict, context=context, by_alias=by_alias, by_name=by_name
    )
  625. @classmethod
  626. def model_validate_strings(
  627. cls,
  628. obj: Any,
  629. *,
  630. strict: bool | None = None,
  631. context: Any | None = None,
  632. by_alias: bool | None = None,
  633. by_name: bool | None = None,
  634. ) -> Self:
  635. """Validate the given object with string data against the Pydantic model.
  636. Args:
  637. obj: The object containing string data to validate.
  638. strict: Whether to enforce types strictly.
  639. context: Extra variables to pass to the validator.
  640. by_alias: Whether to use the field's alias when validating against the provided input data.
  641. by_name: Whether to use the field's name when validating against the provided input data.
  642. Returns:
  643. The validated Pydantic model.
  644. """
  645. # `__tracebackhide__` tells pytest and some other tools to omit this function from tracebacks
  646. __tracebackhide__ = True
  647. if by_alias is False and by_name is not True:
  648. raise PydanticUserError(
  649. 'At least one of `by_alias` or `by_name` must be set to True.',
  650. code='validate-by-alias-and-name-false',
  651. )
  652. return cls.__pydantic_validator__.validate_strings(
  653. obj, strict=strict, context=context, by_alias=by_alias, by_name=by_name
  654. )
    @classmethod
    def __get_pydantic_core_schema__(cls, source: type[BaseModel], handler: GetCoreSchemaHandler, /) -> CoreSchema:
        """Deprecated `BaseModel` implementation of the core-schema hook; warns and falls back to `handler(source)`."""
        # This warning is only emitted when calling `super().__get_pydantic_core_schema__` from a model subclass.
        # In the generate schema logic, this method (`BaseModel.__get_pydantic_core_schema__`) is special cased to
        # *not* be called if not overridden.
        warnings.warn(
            'The `__get_pydantic_core_schema__` method of the `BaseModel` class is deprecated. If you are calling '
            '`super().__get_pydantic_core_schema__` when overriding the method on a Pydantic model, consider using '
            '`handler(source)` instead. However, note that overriding this method on models can lead to unexpected '
            'side effects.',
            PydanticDeprecatedSince211,
            stacklevel=2,
        )
        # Logic copied over from `GenerateSchema._model_schema`:
        # reuse a schema already set on this exact class, unless it is a mock placeholder.
        schema = cls.__dict__.get('__pydantic_core_schema__')
        if schema is not None and not isinstance(schema, _mock_val_ser.MockCoreSchema):
            return cls.__pydantic_core_schema__
        return handler(source)
    @classmethod
    def __get_pydantic_json_schema__(
        cls,
        core_schema: CoreSchema,
        handler: GetJsonSchemaHandler,
        /,
    ) -> JsonSchemaValue:
        """Hook into generating the model's JSON schema.

        Args:
            core_schema: A `pydantic-core` CoreSchema.
                You can ignore this argument and call the handler with a new CoreSchema,
                wrap this CoreSchema (`{'type': 'nullable', 'schema': current_schema}`),
                or just call the handler with the original schema.
            handler: Call into Pydantic's internal JSON schema generation.
                This will raise a `pydantic.errors.PydanticInvalidForJsonSchema` if JSON schema
                generation fails.
                Since this gets called by `BaseModel.model_json_schema` you can override the
                `schema_generator` argument to that function to change JSON schema generation globally
                for a type.

        Returns:
            A JSON schema, as a Python object.
        """
        # Default behavior: delegate straight to the standard JSON schema generation.
        return handler(core_schema)
    @classmethod
    def __pydantic_init_subclass__(cls, **kwargs: Any) -> None:
        """This is intended to behave just like `__init_subclass__`, but is called by `ModelMetaclass`
        only after the class is actually fully initialized. In particular, attributes like `model_fields` will
        be present when this is called.

        This is necessary because `__init_subclass__` will always be called by `type.__new__`,
        and it would require a prohibitively large refactor to the `ModelMetaclass` to ensure that
        `type.__new__` was called in such a manner that the class would already be sufficiently initialized.

        This will receive the same `kwargs` that would be passed to the standard `__init_subclass__`, namely,
        any kwargs passed to the class definition that aren't used internally by pydantic.

        Args:
            **kwargs: Any keyword arguments passed to the class definition that aren't used internally
                by pydantic.
        """
        # Intentionally a no-op: subclasses override this hook as needed.
        pass
    def __class_getitem__(
        cls, typevar_values: type[Any] | tuple[type[Any], ...]
    ) -> type[BaseModel] | _forward_ref.PydanticRecursiveRef:
        """Parametrize a generic model (e.g. `Model[int]`), creating and caching the concrete submodel."""
        # Fast path: a previously-built parametrization may already be cached.
        cached = _generics.get_cached_generic_type_early(cls, typevar_values)
        if cached is not None:
            return cached
        if cls is BaseModel:
            raise TypeError('Type parameters should be placed on typing.Generic, not BaseModel')
        if not hasattr(cls, '__parameters__'):
            raise TypeError(f'{cls} cannot be parametrized because it does not inherit from typing.Generic')
        if not cls.__pydantic_generic_metadata__['parameters'] and typing.Generic not in cls.__bases__:
            raise TypeError(f'{cls} is not a generic class')

        if not isinstance(typevar_values, tuple):
            typevar_values = (typevar_values,)

        # For a model `class Model[T, U, V = int](BaseModel): ...` parametrized with `(str, bool)`,
        # this gives us `{T: str, U: bool, V: int}`:
        typevars_map = _generics.map_generic_model_arguments(cls, typevar_values)

        # We also update the provided args to use defaults values (`(str, bool)` becomes `(str, bool, int)`):
        typevar_values = tuple(v for v in typevars_map.values())

        if _utils.all_identical(typevars_map.keys(), typevars_map.values()) and typevars_map:
            submodel = cls  # if arguments are equal to parameters it's the same object
            _generics.set_cached_generic_type(cls, typevar_values, submodel)
        else:
            parent_args = cls.__pydantic_generic_metadata__['args']
            if not parent_args:
                args = typevar_values
            else:
                # Partially-parametrized parent: substitute the new values into the parent's args.
                args = tuple(_generics.replace_types(arg, typevars_map) for arg in parent_args)

            origin = cls.__pydantic_generic_metadata__['origin'] or cls
            model_name = origin.model_parametrized_name(args)
            params = tuple(
                {param: None for param in _generics.iter_contained_typevars(typevars_map.values())}
            )  # use dict as ordered set
            with _generics.generic_recursion_self_type(origin, args) as maybe_self_type:
                # Re-check the cache inside the recursion guard before building anything.
                cached = _generics.get_cached_generic_type_late(cls, typevar_values, origin, args)
                if cached is not None:
                    return cached
                if maybe_self_type is not None:
                    return maybe_self_type

                # Attempt to rebuild the origin in case new types have been defined
                try:
                    # depth 2 gets you above this __class_getitem__ call.
                    # Note that we explicitly provide the parent ns, otherwise
                    # `model_rebuild` will use the parent ns no matter if it is the ns of a module.
                    # We don't want this here, as this has unexpected effects when a model
                    # is being parametrized during a forward annotation evaluation.
                    parent_ns = _typing_extra.parent_frame_namespace(parent_depth=2) or {}
                    origin.model_rebuild(_types_namespace=parent_ns)
                except PydanticUndefinedAnnotation:
                    # It's okay if it fails, it just means there are still undefined types
                    # that could be evaluated later.
                    pass

                submodel = _generics.create_generic_submodel(model_name, origin, args, params)
                _generics.set_cached_generic_type(cls, typevar_values, submodel, origin, args)

        return submodel
  766. def __copy__(self) -> Self:
  767. """Returns a shallow copy of the model."""
  768. cls = type(self)
  769. m = cls.__new__(cls)
  770. _object_setattr(m, '__dict__', copy(self.__dict__))
  771. _object_setattr(m, '__pydantic_extra__', copy(self.__pydantic_extra__))
  772. _object_setattr(m, '__pydantic_fields_set__', copy(self.__pydantic_fields_set__))
  773. if not hasattr(self, '__pydantic_private__') or self.__pydantic_private__ is None:
  774. _object_setattr(m, '__pydantic_private__', None)
  775. else:
  776. _object_setattr(
  777. m,
  778. '__pydantic_private__',
  779. {k: v for k, v in self.__pydantic_private__.items() if v is not PydanticUndefined},
  780. )
  781. return m
  782. def __deepcopy__(self, memo: dict[int, Any] | None = None) -> Self:
  783. """Returns a deep copy of the model."""
  784. cls = type(self)
  785. m = cls.__new__(cls)
  786. _object_setattr(m, '__dict__', deepcopy(self.__dict__, memo=memo))
  787. _object_setattr(m, '__pydantic_extra__', deepcopy(self.__pydantic_extra__, memo=memo))
  788. # This next line doesn't need a deepcopy because __pydantic_fields_set__ is a set[str],
  789. # and attempting a deepcopy would be marginally slower.
  790. _object_setattr(m, '__pydantic_fields_set__', copy(self.__pydantic_fields_set__))
  791. if not hasattr(self, '__pydantic_private__') or self.__pydantic_private__ is None:
  792. _object_setattr(m, '__pydantic_private__', None)
  793. else:
  794. _object_setattr(
  795. m,
  796. '__pydantic_private__',
  797. deepcopy({k: v for k, v in self.__pydantic_private__.items() if v is not PydanticUndefined}, memo=memo),
  798. )
  799. return m
  800. if not TYPE_CHECKING:
  801. # We put `__getattr__` in a non-TYPE_CHECKING block because otherwise, mypy allows arbitrary attribute access
  802. # The same goes for __setattr__ and __delattr__, see: https://github.com/pydantic/pydantic/issues/8643
        def __getattr__(self, item: str) -> Any:
            """Fallback lookup for private attributes and `extra` values (only invoked when normal lookup fails)."""
            private_attributes = object.__getattribute__(self, '__private_attributes__')
            if item in private_attributes:
                attribute = private_attributes[item]
                if hasattr(attribute, '__get__'):
                    # Descriptor-backed private attribute: delegate to its `__get__`.
                    return attribute.__get__(self, type(self))  # type: ignore

                try:
                    # Note: self.__pydantic_private__ cannot be None if self.__private_attributes__ has items
                    return self.__pydantic_private__[item]  # type: ignore
                except KeyError as exc:
                    raise AttributeError(f'{type(self).__name__!r} object has no attribute {item!r}') from exc
            else:
                # `__pydantic_extra__` can fail to be set if the model is not yet fully initialized.
                # See `BaseModel.__repr_args__` for more details
                try:
                    pydantic_extra = object.__getattribute__(self, '__pydantic_extra__')
                except AttributeError:
                    pydantic_extra = None

                if pydantic_extra:
                    try:
                        return pydantic_extra[item]
                    except KeyError as exc:
                        raise AttributeError(f'{type(self).__name__!r} object has no attribute {item!r}') from exc
                else:
                    if hasattr(self.__class__, item):
                        return super().__getattribute__(item)  # Raises AttributeError if appropriate
                    else:
                        # this is the current error
                        raise AttributeError(f'{type(self).__name__!r} object has no attribute {item!r}')
        def __setattr__(self, name: str, value: Any) -> None:
            """Set an attribute, dispatching through per-name handlers memoized in `__pydantic_setattr_handlers__`."""
            if (setattr_handler := self.__pydantic_setattr_handlers__.get(name)) is not None:
                # Fast path: a handler for this name was already memoized.
                setattr_handler(self, name, value)
            # if None is returned from _setattr_handler, the attribute was set directly
            elif (setattr_handler := self._setattr_handler(name, value)) is not None:
                setattr_handler(self, name, value)  # call here to not memo on possibly unknown fields
                self.__pydantic_setattr_handlers__[name] = setattr_handler  # memoize the handler for faster access
        def _setattr_handler(self, name: str, value: Any) -> Callable[[BaseModel, str, Any], None] | None:
            """Get a handler for setting an attribute on the model instance.

            Returns:
                A handler for setting an attribute on the model instance. Used for memoization of the handler.
                Memoizing the handlers leads to a dramatic performance improvement in `__setattr__`
                Returns `None` when memoization is not safe, then the attribute is set directly.
            """
            cls = self.__class__
            if name in cls.__class_vars__:
                raise AttributeError(
                    f'{name!r} is a ClassVar of `{cls.__name__}` and cannot be set on an instance. '
                    f'If you want to set a value on the class, use `{cls.__name__}.{name} = value`.'
                )
            elif not _fields.is_valid_field_name(name):
                # Underscore-prefixed (non-field) names: private attributes or free-form attrs.
                if (attribute := cls.__private_attributes__.get(name)) is not None:
                    if hasattr(attribute, '__set__'):
                        return lambda model, _name, val: attribute.__set__(model, val)
                    else:
                        return _SIMPLE_SETATTR_HANDLERS['private']
                else:
                    _object_setattr(self, name, value)
                    return None  # Can not return memoized handler with possibly freeform attr names

            attr = getattr(cls, name, None)
            # NOTE: We currently special case properties and `cached_property`, but we might need
            # to generalize this to all data/non-data descriptors at some point. For non-data descriptors
            # (such as `cached_property`), it isn't obvious though. `cached_property` caches the value
            # to the instance's `__dict__`, but other non-data descriptors might do things differently.
            if isinstance(attr, cached_property):
                return _SIMPLE_SETATTR_HANDLERS['cached_property']

            # Raises if the model is frozen; must happen before any mutation below.
            _check_frozen(cls, name, value)

            # We allow properties to be set only on non frozen models for now (to match dataclasses).
            # This can be changed if it ever gets requested.
            if isinstance(attr, property):
                return lambda model, _name, val: attr.__set__(model, val)
            elif cls.model_config.get('validate_assignment'):
                return _SIMPLE_SETATTR_HANDLERS['validate_assignment']
            elif name not in cls.__pydantic_fields__:
                if cls.model_config.get('extra') != 'allow':
                    # TODO - matching error
                    raise ValueError(f'"{cls.__name__}" object has no field "{name}"')
                elif attr is None:
                    # attribute does not exist, so put it in extra
                    self.__pydantic_extra__[name] = value
                    return None  # Can not return memoized handler with possibly freeform attr names
                else:
                    # attribute _does_ exist, and was not in extra, so update it
                    return _SIMPLE_SETATTR_HANDLERS['extra_known']
            else:
                return _SIMPLE_SETATTR_HANDLERS['model_field']
        def __delattr__(self, item: str) -> Any:
            """Delete a private attribute, cached property, model field, or extra value."""
            cls = self.__class__
            if item in self.__private_attributes__:
                attribute = self.__private_attributes__[item]
                if hasattr(attribute, '__delete__'):
                    # Descriptor-backed private attribute: delegate deletion to its `__delete__`.
                    attribute.__delete__(self)  # type: ignore
                    return

                try:
                    # Note: self.__pydantic_private__ cannot be None if self.__private_attributes__ has items
                    del self.__pydantic_private__[item]  # type: ignore
                    return
                except KeyError as exc:
                    raise AttributeError(f'{cls.__name__!r} object has no attribute {item!r}') from exc

            # Allow cached properties to be deleted (even if the class is frozen):
            attr = getattr(cls, item, None)
            if isinstance(attr, cached_property):
                return object.__delattr__(self, item)

            # Raises if the model is frozen; must happen before any mutation below.
            _check_frozen(cls, name=item, value=None)

            if item in self.__pydantic_fields__:
                object.__delattr__(self, item)
            elif self.__pydantic_extra__ is not None and item in self.__pydantic_extra__:
                del self.__pydantic_extra__[item]
            else:
                try:
                    object.__delattr__(self, item)
                except AttributeError:
                    raise AttributeError(f'{type(self).__name__!r} object has no attribute {item!r}')
        # Because we make use of `@dataclass_transform()`, `__replace__` is already synthesized by
        # type checkers, so we define the implementation in this `if not TYPE_CHECKING:` block:
        def __replace__(self, **changes: Any) -> Self:
            """Return a new model with the given field changes applied (delegates to `model_copy`)."""
            return self.model_copy(update=changes)
  919. def __getstate__(self) -> dict[Any, Any]:
  920. private = self.__pydantic_private__
  921. if private:
  922. private = {k: v for k, v in private.items() if v is not PydanticUndefined}
  923. return {
  924. '__dict__': self.__dict__,
  925. '__pydantic_extra__': self.__pydantic_extra__,
  926. '__pydantic_fields_set__': self.__pydantic_fields_set__,
  927. '__pydantic_private__': private,
  928. }
    def __setstate__(self, state: dict[Any, Any]) -> None:
        """Restore pickled state produced by `__getstate__`.

        Uses `_object_setattr` so restoration does not go through the model's own
        `__setattr__` machinery.
        """
        _object_setattr(self, '__pydantic_fields_set__', state.get('__pydantic_fields_set__', {}))
        _object_setattr(self, '__pydantic_extra__', state.get('__pydantic_extra__', {}))
        _object_setattr(self, '__pydantic_private__', state.get('__pydantic_private__', {}))
        _object_setattr(self, '__dict__', state.get('__dict__', {}))
  934. if not TYPE_CHECKING:
        def __eq__(self, other: Any) -> bool:
            """Compare models by generic origin type, private attrs, extra values, and field values."""
            if isinstance(other, BaseModel):
                # When comparing instances of generic types for equality, as long as all field values are equal,
                # only require their generic origin types to be equal, rather than exact type equality.
                # This prevents headaches like MyGeneric(x=1) != MyGeneric[Any](x=1).
                self_type = self.__pydantic_generic_metadata__['origin'] or self.__class__
                other_type = other.__pydantic_generic_metadata__['origin'] or other.__class__

                # Perform common checks first
                if not (
                    self_type == other_type
                    and getattr(self, '__pydantic_private__', None) == getattr(other, '__pydantic_private__', None)
                    and self.__pydantic_extra__ == other.__pydantic_extra__
                ):
                    return False

                # We only want to compare pydantic fields but ignoring fields is costly.
                # We'll perform a fast check first, and fallback only when needed
                # See GH-7444 and GH-7825 for rationale and a performance benchmark

                # First, do the fast (and sometimes faulty) __dict__ comparison
                if self.__dict__ == other.__dict__:
                    # If the check above passes, then pydantic fields are equal, we can return early
                    return True

                # We don't want to trigger unnecessary costly filtering of __dict__ on all unequal objects, so we return
                # early if there are no keys to ignore (we would just return False later on anyway)
                model_fields = type(self).__pydantic_fields__.keys()
                if self.__dict__.keys() <= model_fields and other.__dict__.keys() <= model_fields:
                    return False

                # If we reach here, there are non-pydantic-fields keys, mapped to unequal values, that we need to ignore
                # Resort to costly filtering of the __dict__ objects
                # We use operator.itemgetter because it is much faster than dict comprehensions
                # NOTE: Contrary to standard python class and instances, when the Model class has a default value for an
                # attribute and the model instance doesn't have a corresponding attribute, accessing the missing attribute
                # raises an error in BaseModel.__getattr__ instead of returning the class attribute
                # So we can use operator.itemgetter() instead of operator.attrgetter()
                getter = operator.itemgetter(*model_fields) if model_fields else lambda _: _utils._SENTINEL
                try:
                    return getter(self.__dict__) == getter(other.__dict__)
                except KeyError:
                    # In rare cases (such as when using the deprecated BaseModel.copy() method),
                    # the __dict__ may not contain all model fields, which is how we can get here.
                    # getter(self.__dict__) is much faster than any 'safe' method that accounts
                    # for missing keys, and wrapping it in a `try` doesn't slow things down much
                    # in the common case.
                    self_fields_proxy = _utils.SafeGetItemProxy(self.__dict__)
                    other_fields_proxy = _utils.SafeGetItemProxy(other.__dict__)
                    return getter(self_fields_proxy) == getter(other_fields_proxy)

            # other instance is not a BaseModel
            else:
                return NotImplemented  # delegate to the other item in the comparison
  983. if TYPE_CHECKING:
  984. # We put `__init_subclass__` in a TYPE_CHECKING block because, even though we want the type-checking benefits
  985. # described in the signature of `__init_subclass__` below, we don't want to modify the default behavior of
  986. # subclass initialization.
        def __init_subclass__(cls, **kwargs: Unpack[ConfigDict]):
            """This signature is included purely to help type-checkers check arguments to class declaration, which
            provides a way to conveniently set model_config key/value pairs.

            ```python
            from pydantic import BaseModel

            class MyModel(BaseModel, extra='allow'): ...
            ```

            However, this may be deceiving, since the _actual_ calls to `__init_subclass__` will not receive any
            of the config arguments, and will only receive any keyword arguments passed during class initialization
            that are _not_ expected keys in ConfigDict. (This is due to the way `ModelMetaclass.__new__` works.)

            Args:
                **kwargs: Keyword arguments passed to the class definition, which set model_config

            Note:
                You may want to override `__pydantic_init_subclass__` instead, which behaves similarly but is called
                *after* the class is fully initialized.
            """
  1003. def __iter__(self) -> TupleGenerator:
  1004. """So `dict(model)` works."""
  1005. yield from [(k, v) for (k, v) in self.__dict__.items() if not k.startswith('_')]
  1006. extra = self.__pydantic_extra__
  1007. if extra:
  1008. yield from extra.items()
    def __repr__(self) -> str:
        """Return `ClassName(arg=value, ...)` built from the `_repr.Representation` helpers."""
        return f'{self.__repr_name__()}({self.__repr_str__(", ")})'
    def __repr_args__(self) -> _repr.ReprArgs:
        """Yield the (name, value) pairs rendered by `__repr__`/`__str__`: fields, extras, then computed fields."""
        # Eagerly create the repr of computed fields, as this may trigger access of cached properties and as such
        # modify the instance's `__dict__`. If we don't do it now, it could happen when iterating over the `__dict__`
        # below if the instance happens to be referenced in a field, and would modify the `__dict__` size *during* iteration.
        computed_fields_repr_args = [
            (k, getattr(self, k)) for k, v in self.__pydantic_computed_fields__.items() if v.repr
        ]

        for k, v in self.__dict__.items():
            field = self.__pydantic_fields__.get(k)
            if field and field.repr:
                if v is not self:
                    yield k, v
                else:
                    # Self-referential value: use the recursion placeholder instead of recursing forever.
                    yield k, self.__repr_recursion__(v)
        # `__pydantic_extra__` can fail to be set if the model is not yet fully initialized.
        # This can happen if a `ValidationError` is raised during initialization and the instance's
        # repr is generated as part of the exception handling. Therefore, we use `getattr` here
        # with a fallback, even though the type hints indicate the attribute will always be present.
        try:
            pydantic_extra = object.__getattribute__(self, '__pydantic_extra__')
        except AttributeError:
            pydantic_extra = None

        if pydantic_extra is not None:
            yield from ((k, v) for k, v in pydantic_extra.items())
        yield from computed_fields_repr_args
    # take logic from `_repr.Representation` without the side effects of inheritance, see #5740
    # (assigning the unbound functions directly avoids adding `Representation` to the MRO)
    __repr_name__ = _repr.Representation.__repr_name__
    __repr_recursion__ = _repr.Representation.__repr_recursion__
    __repr_str__ = _repr.Representation.__repr_str__
    __pretty__ = _repr.Representation.__pretty__
    __rich_repr__ = _repr.Representation.__rich_repr__
    def __str__(self) -> str:
        """Return the space-separated repr args, without the surrounding class name."""
        return self.__repr_str__(' ')
  1044. # ##### Deprecated methods from v1 #####
    @property
    @typing_extensions.deprecated(
        'The `__fields__` attribute is deprecated, use `model_fields` instead.', category=None
    )
    def __fields__(self) -> dict[str, FieldInfo]:
        """Deprecated v1 accessor for the model's fields; warns, then returns `__pydantic_fields__`."""
        warnings.warn(
            'The `__fields__` attribute is deprecated, use `model_fields` instead.',
            category=PydanticDeprecatedSince20,
            stacklevel=2,
        )
        # Read from the class with a default, so partially-built classes don't raise.
        return getattr(type(self), '__pydantic_fields__', {})
    @property
    @typing_extensions.deprecated(
        'The `__fields_set__` attribute is deprecated, use `model_fields_set` instead.',
        category=None,
    )
    def __fields_set__(self) -> set[str]:
        """Deprecated v1 accessor for the set of explicitly-set field names; warns, then delegates."""
        warnings.warn(
            'The `__fields_set__` attribute is deprecated, use `model_fields_set` instead.',
            category=PydanticDeprecatedSince20,
            stacklevel=2,
        )
        return self.__pydantic_fields_set__
    @typing_extensions.deprecated('The `dict` method is deprecated; use `model_dump` instead.', category=None)
    def dict(  # noqa: D102
        self,
        *,
        include: IncEx | None = None,
        exclude: IncEx | None = None,
        by_alias: bool = False,
        exclude_unset: bool = False,
        exclude_defaults: bool = False,
        exclude_none: bool = False,
    ) -> Dict[str, Any]:  # noqa UP006
        # Deprecated v1-style dump: warn, then delegate all arguments unchanged to `model_dump`.
        warnings.warn(
            'The `dict` method is deprecated; use `model_dump` instead.',
            category=PydanticDeprecatedSince20,
            stacklevel=2,
        )
        return self.model_dump(
            include=include,
            exclude=exclude,
            by_alias=by_alias,
            exclude_unset=exclude_unset,
            exclude_defaults=exclude_defaults,
            exclude_none=exclude_none,
        )
    @typing_extensions.deprecated('The `json` method is deprecated; use `model_dump_json` instead.', category=None)
    def json(  # noqa: D102
        self,
        *,
        include: IncEx | None = None,
        exclude: IncEx | None = None,
        by_alias: bool = False,
        exclude_unset: bool = False,
        exclude_defaults: bool = False,
        exclude_none: bool = False,
        encoder: Callable[[Any], Any] | None = PydanticUndefined,  # type: ignore[assignment]
        models_as_dict: bool = PydanticUndefined,  # type: ignore[assignment]
        **dumps_kwargs: Any,
    ) -> str:
        # Deprecated v1-style JSON dump: warn, reject removed v1-only arguments, then delegate.
        warnings.warn(
            'The `json` method is deprecated; use `model_dump_json` instead.',
            category=PydanticDeprecatedSince20,
            stacklevel=2,
        )
        # `PydanticUndefined` defaults distinguish "not passed" from any real value.
        if encoder is not PydanticUndefined:
            raise TypeError('The `encoder` argument is no longer supported; use field serializers instead.')
        if models_as_dict is not PydanticUndefined:
            raise TypeError('The `models_as_dict` argument is no longer supported; use a model serializer instead.')
        if dumps_kwargs:
            raise TypeError('`dumps_kwargs` keyword arguments are no longer supported.')
        return self.model_dump_json(
            include=include,
            exclude=exclude,
            by_alias=by_alias,
            exclude_unset=exclude_unset,
            exclude_defaults=exclude_defaults,
            exclude_none=exclude_none,
        )
    @classmethod
    @typing_extensions.deprecated('The `parse_obj` method is deprecated; use `model_validate` instead.', category=None)
    def parse_obj(cls, obj: Any) -> Self:  # noqa: D102
        # Deprecated v1 entry point: warn, then delegate to `model_validate`.
        warnings.warn(
            'The `parse_obj` method is deprecated; use `model_validate` instead.',
            category=PydanticDeprecatedSince20,
            stacklevel=2,
        )
        return cls.model_validate(obj)
    @classmethod
    @typing_extensions.deprecated(
        'The `parse_raw` method is deprecated; if your data is JSON use `model_validate_json`, '
        'otherwise load the data then use `model_validate` instead.',
        category=None,
    )
    def parse_raw(  # noqa: D102
        cls,
        b: str | bytes,
        *,
        content_type: str | None = None,
        encoding: str = 'utf8',
        proto: DeprecatedParseProtocol | None = None,
        allow_pickle: bool = False,
    ) -> Self:  # pragma: no cover
        # Deprecated v1 entry point: warn, parse the raw payload with the legacy loader,
        # then validate the resulting object.
        warnings.warn(
            'The `parse_raw` method is deprecated; if your data is JSON use `model_validate_json`, '
            'otherwise load the data then use `model_validate` instead.',
            category=PydanticDeprecatedSince20,
            stacklevel=2,
        )
        from .deprecated import parse

        try:
            obj = parse.load_str_bytes(
                b,
                proto=proto,
                content_type=content_type,
                encoding=encoding,
                allow_pickle=allow_pickle,
            )
        except (ValueError, TypeError) as exc:
            import json

            # try to match V1
            # Map the concrete parse failure onto the closest v1-style error code.
            if isinstance(exc, UnicodeDecodeError):
                type_str = 'value_error.unicodedecode'
            elif isinstance(exc, json.JSONDecodeError):
                type_str = 'value_error.jsondecode'
            elif isinstance(exc, ValueError):
                type_str = 'value_error'
            else:
                type_str = 'type_error'

            # ctx is missing here, but since we've added `input` to the error, we're not pretending it's the same
            error: pydantic_core.InitErrorDetails = {
                # The type: ignore on the next line is to ignore the requirement of LiteralString
                'type': pydantic_core.PydanticCustomError(type_str, str(exc)),  # type: ignore
                'loc': ('__root__',),
                'input': b,
            }
            raise pydantic_core.ValidationError.from_exception_data(cls.__name__, [error])
        return cls.model_validate(obj)
    @classmethod
    @typing_extensions.deprecated(
        'The `parse_file` method is deprecated; load the data from file, then if your data is JSON '
        'use `model_validate_json`, otherwise `model_validate` instead.',
        category=None,
    )
    def parse_file(  # noqa: D102
        cls,
        path: str | Path,
        *,
        content_type: str | None = None,
        encoding: str = 'utf8',
        proto: DeprecatedParseProtocol | None = None,
        allow_pickle: bool = False,
    ) -> Self:
        # Deprecated v1 entry point: warn, load the file with the legacy loader, then
        # delegate to `parse_obj` (which itself warns and calls `model_validate`).
        warnings.warn(
            'The `parse_file` method is deprecated; load the data from file, then if your data is JSON '
            'use `model_validate_json`, otherwise `model_validate` instead.',
            category=PydanticDeprecatedSince20,
            stacklevel=2,
        )
        from .deprecated import parse

        obj = parse.load_file(
            path,
            proto=proto,
            content_type=content_type,
            encoding=encoding,
            allow_pickle=allow_pickle,
        )
        return cls.parse_obj(obj)
    @classmethod
    @typing_extensions.deprecated(
        'The `from_orm` method is deprecated; set '
        "`model_config['from_attributes']=True` and use `model_validate` instead.",
        category=None,
    )
    def from_orm(cls, obj: Any) -> Self:  # noqa: D102
        # Deprecated v1 entry point: warn, require `from_attributes` config, then validate.
        warnings.warn(
            'The `from_orm` method is deprecated; set '
            "`model_config['from_attributes']=True` and use `model_validate` instead.",
            category=PydanticDeprecatedSince20,
            stacklevel=2,
        )
        if not cls.model_config.get('from_attributes', None):
            raise PydanticUserError(
                'You must set the config attribute `from_attributes=True` to use from_orm', code=None
            )
        return cls.model_validate(obj)
  1232. @classmethod
  1233. @typing_extensions.deprecated('The `construct` method is deprecated; use `model_construct` instead.', category=None)
  1234. def construct(cls, _fields_set: set[str] | None = None, **values: Any) -> Self: # noqa: D102
  1235. warnings.warn(
  1236. 'The `construct` method is deprecated; use `model_construct` instead.',
  1237. category=PydanticDeprecatedSince20,
  1238. stacklevel=2,
  1239. )
  1240. return cls.model_construct(_fields_set=_fields_set, **values)
    @typing_extensions.deprecated(
        'The `copy` method is deprecated; use `model_copy` instead. '
        'See the docstring of `BaseModel.copy` for details about how to handle `include` and `exclude`.',
        category=None,
    )
    def copy(
        self,
        *,
        include: AbstractSetIntStr | MappingIntStrAny | None = None,
        exclude: AbstractSetIntStr | MappingIntStrAny | None = None,
        update: Dict[str, Any] | None = None,  # noqa UP006
        deep: bool = False,
    ) -> Self:  # pragma: no cover
        """Returns a copy of the model.

        !!! warning "Deprecated"
            This method is now deprecated; use `model_copy` instead.

        If you need `include` or `exclude`, use:

        ```python {test="skip" lint="skip"}
        data = self.model_dump(include=include, exclude=exclude, round_trip=True)
        data = {**data, **(update or {})}
        copied = self.model_validate(data)
        ```

        Args:
            include: Optional set or mapping specifying which fields to include in the copied model.
            exclude: Optional set or mapping specifying which fields to exclude in the copied model.
            update: Optional dictionary of field-value pairs to override field values in the copied model.
            deep: If True, the values of fields that are Pydantic models will be deep-copied.

        Returns:
            A copy of the model with included, excluded and updated fields as specified.
        """
        warnings.warn(
            'The `copy` method is deprecated; use `model_copy` instead. '
            'See the docstring of `BaseModel.copy` for details about how to handle `include` and `exclude`.',
            category=PydanticDeprecatedSince20,
            stacklevel=2,
        )
        from .deprecated import copy_internals

        # Collect the field values to carry over (filtered by include/exclude), with any
        # `update` entries layered on top.
        values = dict(
            copy_internals._iter(
                self, to_dict=False, by_alias=False, include=include, exclude=exclude, exclude_unset=False
            ),
            **(update or {}),
        )
        # Private attributes are copied separately; drop any that were never assigned.
        if self.__pydantic_private__ is None:
            private = None
        else:
            private = {k: v for k, v in self.__pydantic_private__.items() if v is not PydanticUndefined}
        # Extra (non-field) attributes also travel separately from regular field values.
        if self.__pydantic_extra__ is None:
            extra: dict[str, Any] | None = None
        else:
            extra = self.__pydantic_extra__.copy()
            for k in list(self.__pydantic_extra__):
                if k not in values:  # k was in the exclude
                    extra.pop(k)
            # Move updated extras out of `values` and back into `extra`, so they are not
            # treated as regular fields on the copy.
            for k in list(values):
                if k in self.__pydantic_extra__:  # k must have come from extra
                    extra[k] = values.pop(k)
        # new `__pydantic_fields_set__` can have unset optional fields with a set value in `update` kwarg
        if update:
            fields_set = self.__pydantic_fields_set__ | update.keys()
        else:
            fields_set = set(self.__pydantic_fields_set__)
        # removing excluded fields from `__pydantic_fields_set__`
        if exclude:
            fields_set -= set(exclude)
        return copy_internals._copy_and_set_values(self, values, fields_set, extra, private, deep=deep)
  1307. @classmethod
  1308. @typing_extensions.deprecated('The `schema` method is deprecated; use `model_json_schema` instead.', category=None)
  1309. def schema( # noqa: D102
  1310. cls, by_alias: bool = True, ref_template: str = DEFAULT_REF_TEMPLATE
  1311. ) -> Dict[str, Any]: # noqa UP006
  1312. warnings.warn(
  1313. 'The `schema` method is deprecated; use `model_json_schema` instead.',
  1314. category=PydanticDeprecatedSince20,
  1315. stacklevel=2,
  1316. )
  1317. return cls.model_json_schema(by_alias=by_alias, ref_template=ref_template)
  1318. @classmethod
  1319. @typing_extensions.deprecated(
  1320. 'The `schema_json` method is deprecated; use `model_json_schema` and json.dumps instead.',
  1321. category=None,
  1322. )
  1323. def schema_json( # noqa: D102
  1324. cls, *, by_alias: bool = True, ref_template: str = DEFAULT_REF_TEMPLATE, **dumps_kwargs: Any
  1325. ) -> str: # pragma: no cover
  1326. warnings.warn(
  1327. 'The `schema_json` method is deprecated; use `model_json_schema` and json.dumps instead.',
  1328. category=PydanticDeprecatedSince20,
  1329. stacklevel=2,
  1330. )
  1331. import json
  1332. from .deprecated.json import pydantic_encoder
  1333. return json.dumps(
  1334. cls.model_json_schema(by_alias=by_alias, ref_template=ref_template),
  1335. default=pydantic_encoder,
  1336. **dumps_kwargs,
  1337. )
  1338. @classmethod
  1339. @typing_extensions.deprecated('The `validate` method is deprecated; use `model_validate` instead.', category=None)
  1340. def validate(cls, value: Any) -> Self: # noqa: D102
  1341. warnings.warn(
  1342. 'The `validate` method is deprecated; use `model_validate` instead.',
  1343. category=PydanticDeprecatedSince20,
  1344. stacklevel=2,
  1345. )
  1346. return cls.model_validate(value)
  1347. @classmethod
  1348. @typing_extensions.deprecated(
  1349. 'The `update_forward_refs` method is deprecated; use `model_rebuild` instead.',
  1350. category=None,
  1351. )
  1352. def update_forward_refs(cls, **localns: Any) -> None: # noqa: D102
  1353. warnings.warn(
  1354. 'The `update_forward_refs` method is deprecated; use `model_rebuild` instead.',
  1355. category=PydanticDeprecatedSince20,
  1356. stacklevel=2,
  1357. )
  1358. if localns: # pragma: no cover
  1359. raise TypeError('`localns` arguments are not longer accepted.')
  1360. cls.model_rebuild(force=True)
  1361. @typing_extensions.deprecated(
  1362. 'The private method `_iter` will be removed and should no longer be used.', category=None
  1363. )
  1364. def _iter(self, *args: Any, **kwargs: Any) -> Any:
  1365. warnings.warn(
  1366. 'The private method `_iter` will be removed and should no longer be used.',
  1367. category=PydanticDeprecatedSince20,
  1368. stacklevel=2,
  1369. )
  1370. from .deprecated import copy_internals
  1371. return copy_internals._iter(self, *args, **kwargs)
  1372. @typing_extensions.deprecated(
  1373. 'The private method `_copy_and_set_values` will be removed and should no longer be used.',
  1374. category=None,
  1375. )
  1376. def _copy_and_set_values(self, *args: Any, **kwargs: Any) -> Any:
  1377. warnings.warn(
  1378. 'The private method `_copy_and_set_values` will be removed and should no longer be used.',
  1379. category=PydanticDeprecatedSince20,
  1380. stacklevel=2,
  1381. )
  1382. from .deprecated import copy_internals
  1383. return copy_internals._copy_and_set_values(self, *args, **kwargs)
  1384. @classmethod
  1385. @typing_extensions.deprecated(
  1386. 'The private method `_get_value` will be removed and should no longer be used.',
  1387. category=None,
  1388. )
  1389. def _get_value(cls, *args: Any, **kwargs: Any) -> Any:
  1390. warnings.warn(
  1391. 'The private method `_get_value` will be removed and should no longer be used.',
  1392. category=PydanticDeprecatedSince20,
  1393. stacklevel=2,
  1394. )
  1395. from .deprecated import copy_internals
  1396. return copy_internals._get_value(cls, *args, **kwargs)
  1397. @typing_extensions.deprecated(
  1398. 'The private method `_calculate_keys` will be removed and should no longer be used.',
  1399. category=None,
  1400. )
  1401. def _calculate_keys(self, *args: Any, **kwargs: Any) -> Any:
  1402. warnings.warn(
  1403. 'The private method `_calculate_keys` will be removed and should no longer be used.',
  1404. category=PydanticDeprecatedSince20,
  1405. stacklevel=2,
  1406. )
  1407. from .deprecated import copy_internals
  1408. return copy_internals._calculate_keys(self, *args, **kwargs)
# Type variable bound to `BaseModel`, used to type `create_model`'s return when an explicit base is given.
ModelT = TypeVar('ModelT', bound=BaseModel)
# Overload: no `__base__` given — the new model derives from `BaseModel` directly.
@overload
def create_model(
    model_name: str,
    /,
    *,
    __config__: ConfigDict | None = None,
    __doc__: str | None = None,
    __base__: None = None,
    __module__: str = __name__,
    __validators__: dict[str, Callable[..., Any]] | None = None,
    __cls_kwargs__: dict[str, Any] | None = None,
    **field_definitions: Any | tuple[str, Any],
) -> type[BaseModel]: ...
# Overload: explicit `__base__` — the return type is narrowed to the supplied base model type.
@overload
def create_model(
    model_name: str,
    /,
    *,
    __config__: ConfigDict | None = None,
    __doc__: str | None = None,
    __base__: type[ModelT] | tuple[type[ModelT], ...],
    __module__: str = __name__,
    __validators__: dict[str, Callable[..., Any]] | None = None,
    __cls_kwargs__: dict[str, Any] | None = None,
    **field_definitions: Any | tuple[str, Any],
) -> type[ModelT]: ...
  1436. def create_model( # noqa: C901
  1437. model_name: str,
  1438. /,
  1439. *,
  1440. __config__: ConfigDict | None = None,
  1441. __doc__: str | None = None,
  1442. __base__: type[ModelT] | tuple[type[ModelT], ...] | None = None,
  1443. __module__: str | None = None,
  1444. __validators__: dict[str, Callable[..., Any]] | None = None,
  1445. __cls_kwargs__: dict[str, Any] | None = None,
  1446. # TODO PEP 747: replace `Any` by the TypeForm:
  1447. **field_definitions: Any | tuple[str, Any],
  1448. ) -> type[ModelT]:
  1449. """!!! abstract "Usage Documentation"
  1450. [Dynamic Model Creation](../concepts/models.md#dynamic-model-creation)
  1451. Dynamically creates and returns a new Pydantic model, in other words, `create_model` dynamically creates a
  1452. subclass of [`BaseModel`][pydantic.BaseModel].
  1453. Args:
  1454. model_name: The name of the newly created model.
  1455. __config__: The configuration of the new model.
  1456. __doc__: The docstring of the new model.
  1457. __base__: The base class or classes for the new model.
  1458. __module__: The name of the module that the model belongs to;
  1459. if `None`, the value is taken from `sys._getframe(1)`
  1460. __validators__: A dictionary of methods that validate fields. The keys are the names of the validation methods to
  1461. be added to the model, and the values are the validation methods themselves. You can read more about functional
  1462. validators [here](https://docs.pydantic.dev/2.9/concepts/validators/#field-validators).
  1463. __cls_kwargs__: A dictionary of keyword arguments for class creation, such as `metaclass`.
  1464. **field_definitions: Field definitions of the new model. Either:
  1465. - a single element, representing the type annotation of the field.
  1466. - a two-tuple, the first element being the type and the second element the assigned value
  1467. (either a default or the [`Field()`][pydantic.Field] function).
  1468. Returns:
  1469. The new [model][pydantic.BaseModel].
  1470. Raises:
  1471. PydanticUserError: If `__base__` and `__config__` are both passed.
  1472. """
  1473. if __base__ is None:
  1474. __base__ = (cast('type[ModelT]', BaseModel),)
  1475. elif not isinstance(__base__, tuple):
  1476. __base__ = (__base__,)
  1477. __cls_kwargs__ = __cls_kwargs__ or {}
  1478. fields: dict[str, Any] = {}
  1479. annotations: dict[str, Any] = {}
  1480. for f_name, f_def in field_definitions.items():
  1481. if isinstance(f_def, tuple):
  1482. if len(f_def) != 2:
  1483. raise PydanticUserError(
  1484. f'Field definition for {f_name!r} should a single element representing the type or a two-tuple, the first element '
  1485. 'being the type and the second element the assigned value (either a default or the `Field()` function).',
  1486. code='create-model-field-definitions',
  1487. )
  1488. annotations[f_name] = f_def[0]
  1489. fields[f_name] = f_def[1]
  1490. else:
  1491. annotations[f_name] = f_def
  1492. if __module__ is None:
  1493. f = sys._getframe(1)
  1494. __module__ = f.f_globals['__name__']
  1495. namespace: dict[str, Any] = {'__annotations__': annotations, '__module__': __module__}
  1496. if __doc__:
  1497. namespace.update({'__doc__': __doc__})
  1498. if __validators__:
  1499. namespace.update(__validators__)
  1500. namespace.update(fields)
  1501. if __config__:
  1502. namespace['model_config'] = __config__
  1503. resolved_bases = types.resolve_bases(__base__)
  1504. meta, ns, kwds = types.prepare_class(model_name, resolved_bases, kwds=__cls_kwargs__)
  1505. if resolved_bases is not __base__:
  1506. ns['__orig_bases__'] = __base__
  1507. namespace.update(ns)
  1508. return meta(
  1509. model_name,
  1510. resolved_bases,
  1511. namespace,
  1512. __pydantic_reset_parent_namespace__=False,
  1513. _create_model_module=__module__,
  1514. **kwds,
  1515. )
# Module-level `__getattr__` (PEP 562): routes access to removed/moved v1 names through the migration shim.
__getattr__ = getattr_migration(__name__)