Update cherrypy==18.9.0

parent 2fc618c01f
commit 51196a7fb1

137 changed files with 44442 additions and 11582 deletions
lib/pydantic/_internal/__init__.py (new file, empty)
lib/pydantic/_internal/_config.py (new file, 322 lines)
```python
from __future__ import annotations as _annotations

import warnings
from contextlib import contextmanager
from typing import (
    TYPE_CHECKING,
    Any,
    Callable,
    cast,
)

from pydantic_core import core_schema
from typing_extensions import (
    Literal,
    Self,
)

from ..aliases import AliasGenerator
from ..config import ConfigDict, ExtraValues, JsonDict, JsonEncoder, JsonSchemaExtraCallable
from ..errors import PydanticUserError
from ..warnings import PydanticDeprecatedSince20

if not TYPE_CHECKING:
    # See PyCharm issues https://youtrack.jetbrains.com/issue/PY-21915
    # and https://youtrack.jetbrains.com/issue/PY-51428
    DeprecationWarning = PydanticDeprecatedSince20

if TYPE_CHECKING:
    from .._internal._schema_generation_shared import GenerateSchema

DEPRECATION_MESSAGE = 'Support for class-based `config` is deprecated, use ConfigDict instead.'


class ConfigWrapper:
    """Internal wrapper for Config which exposes ConfigDict items as attributes."""

    __slots__ = ('config_dict',)

    config_dict: ConfigDict

    # all annotations are copied directly from ConfigDict, and should be kept up to date, a test will fail if they
    # stop matching
    title: str | None
    str_to_lower: bool
    str_to_upper: bool
    str_strip_whitespace: bool
    str_min_length: int
    str_max_length: int | None
    extra: ExtraValues | None
    frozen: bool
    populate_by_name: bool
    use_enum_values: bool
    validate_assignment: bool
    arbitrary_types_allowed: bool
    from_attributes: bool
    # whether to use the actual key provided in the data (e.g. alias or first alias for "field required" errors) instead of field_names
    # to construct error `loc`s, default `True`
    loc_by_alias: bool
    alias_generator: Callable[[str], str] | AliasGenerator | None
    ignored_types: tuple[type, ...]
    allow_inf_nan: bool
    json_schema_extra: JsonDict | JsonSchemaExtraCallable | None
    json_encoders: dict[type[object], JsonEncoder] | None

    # new in V2
    strict: bool
    # whether instances of models and dataclasses (including subclass instances) should re-validate, default 'never'
    revalidate_instances: Literal['always', 'never', 'subclass-instances']
    ser_json_timedelta: Literal['iso8601', 'float']
    ser_json_bytes: Literal['utf8', 'base64']
    ser_json_inf_nan: Literal['null', 'constants']
    # whether to validate default values during validation, default False
    validate_default: bool
    validate_return: bool
    protected_namespaces: tuple[str, ...]
    hide_input_in_errors: bool
    defer_build: bool
    plugin_settings: dict[str, object] | None
    schema_generator: type[GenerateSchema] | None
    json_schema_serialization_defaults_required: bool
    json_schema_mode_override: Literal['validation', 'serialization', None]
    coerce_numbers_to_str: bool
    regex_engine: Literal['rust-regex', 'python-re']
    validation_error_cause: bool

    def __init__(self, config: ConfigDict | dict[str, Any] | type[Any] | None, *, check: bool = True):
        if check:
            self.config_dict = prepare_config(config)
        else:
            self.config_dict = cast(ConfigDict, config)

    @classmethod
    def for_model(cls, bases: tuple[type[Any], ...], namespace: dict[str, Any], kwargs: dict[str, Any]) -> Self:
        """Build a new `ConfigWrapper` instance for a `BaseModel`.

        The config wrapper is built based on (in descending order of priority):
        - options from `kwargs`
        - options from the `namespace`
        - options from the base classes (`bases`)

        Args:
            bases: A tuple of base classes.
            namespace: The namespace of the class being created.
            kwargs: The kwargs passed to the class being created.

        Returns:
            A `ConfigWrapper` instance for `BaseModel`.
        """
        config_new = ConfigDict()
        for base in bases:
            config = getattr(base, 'model_config', None)
            if config:
                config_new.update(config.copy())

        config_class_from_namespace = namespace.get('Config')
        config_dict_from_namespace = namespace.get('model_config')

        if config_class_from_namespace and config_dict_from_namespace:
            raise PydanticUserError('"Config" and "model_config" cannot be used together', code='config-both')

        config_from_namespace = config_dict_from_namespace or prepare_config(config_class_from_namespace)

        config_new.update(config_from_namespace)

        for k in list(kwargs.keys()):
            if k in config_keys:
                config_new[k] = kwargs.pop(k)

        return cls(config_new)

    # we don't show `__getattr__` to type checkers so missing attributes cause errors
    if not TYPE_CHECKING:  # pragma: no branch

        def __getattr__(self, name: str) -> Any:
            try:
                return self.config_dict[name]
            except KeyError:
                try:
                    return config_defaults[name]
                except KeyError:
                    raise AttributeError(f'Config has no attribute {name!r}') from None

    def core_config(self, obj: Any) -> core_schema.CoreConfig:
        """Create a pydantic-core config, `obj` is just used to populate `title` if not set in config.

        Pass `obj=None` if you do not want to attempt to infer the `title`.

        We don't use getattr here since we don't want to populate with defaults.

        Args:
            obj: An object used to populate `title` if not set in config.

        Returns:
            A `CoreConfig` object created from config.
        """

        def dict_not_none(**kwargs: Any) -> Any:
            return {k: v for k, v in kwargs.items() if v is not None}

        core_config = core_schema.CoreConfig(
            **dict_not_none(
                title=self.config_dict.get('title') or (obj and obj.__name__),
                extra_fields_behavior=self.config_dict.get('extra'),
                allow_inf_nan=self.config_dict.get('allow_inf_nan'),
                populate_by_name=self.config_dict.get('populate_by_name'),
                str_strip_whitespace=self.config_dict.get('str_strip_whitespace'),
                str_to_lower=self.config_dict.get('str_to_lower'),
                str_to_upper=self.config_dict.get('str_to_upper'),
                strict=self.config_dict.get('strict'),
                ser_json_timedelta=self.config_dict.get('ser_json_timedelta'),
                ser_json_bytes=self.config_dict.get('ser_json_bytes'),
                ser_json_inf_nan=self.config_dict.get('ser_json_inf_nan'),
                from_attributes=self.config_dict.get('from_attributes'),
                loc_by_alias=self.config_dict.get('loc_by_alias'),
                revalidate_instances=self.config_dict.get('revalidate_instances'),
                validate_default=self.config_dict.get('validate_default'),
                str_max_length=self.config_dict.get('str_max_length'),
                str_min_length=self.config_dict.get('str_min_length'),
                hide_input_in_errors=self.config_dict.get('hide_input_in_errors'),
                coerce_numbers_to_str=self.config_dict.get('coerce_numbers_to_str'),
                regex_engine=self.config_dict.get('regex_engine'),
                validation_error_cause=self.config_dict.get('validation_error_cause'),
            )
        )
        return core_config

    def __repr__(self):
        c = ', '.join(f'{k}={v!r}' for k, v in self.config_dict.items())
        return f'ConfigWrapper({c})'


class ConfigWrapperStack:
    """A stack of `ConfigWrapper` instances."""

    def __init__(self, config_wrapper: ConfigWrapper):
        self._config_wrapper_stack: list[ConfigWrapper] = [config_wrapper]

    @property
    def tail(self) -> ConfigWrapper:
        return self._config_wrapper_stack[-1]

    @contextmanager
    def push(self, config_wrapper: ConfigWrapper | ConfigDict | None):
        if config_wrapper is None:
            yield
            return

        if not isinstance(config_wrapper, ConfigWrapper):
            config_wrapper = ConfigWrapper(config_wrapper, check=False)

        self._config_wrapper_stack.append(config_wrapper)
        try:
            yield
        finally:
            self._config_wrapper_stack.pop()


config_defaults = ConfigDict(
    title=None,
    str_to_lower=False,
    str_to_upper=False,
    str_strip_whitespace=False,
    str_min_length=0,
    str_max_length=None,
    # let the model / dataclass decide how to handle it
    extra=None,
    frozen=False,
    populate_by_name=False,
    use_enum_values=False,
    validate_assignment=False,
    arbitrary_types_allowed=False,
    from_attributes=False,
    loc_by_alias=True,
    alias_generator=None,
    ignored_types=(),
    allow_inf_nan=True,
    json_schema_extra=None,
    strict=False,
    revalidate_instances='never',
    ser_json_timedelta='iso8601',
    ser_json_bytes='utf8',
    ser_json_inf_nan='null',
    validate_default=False,
    validate_return=False,
    protected_namespaces=('model_',),
    hide_input_in_errors=False,
    json_encoders=None,
    defer_build=False,
    plugin_settings=None,
    schema_generator=None,
    json_schema_serialization_defaults_required=False,
    json_schema_mode_override=None,
    coerce_numbers_to_str=False,
    regex_engine='rust-regex',
    validation_error_cause=False,
)


def prepare_config(config: ConfigDict | dict[str, Any] | type[Any] | None) -> ConfigDict:
    """Create a `ConfigDict` instance from an existing dict, a class (e.g. old class-based config) or None.

    Args:
        config: The input config.

    Returns:
        A ConfigDict object created from config.
    """
    if config is None:
        return ConfigDict()

    if not isinstance(config, dict):
        warnings.warn(DEPRECATION_MESSAGE, DeprecationWarning)
        config = {k: getattr(config, k) for k in dir(config) if not k.startswith('__')}

    config_dict = cast(ConfigDict, config)
    check_deprecated(config_dict)
    return config_dict


config_keys = set(ConfigDict.__annotations__.keys())


V2_REMOVED_KEYS = {
    'allow_mutation',
    'error_msg_templates',
    'fields',
    'getter_dict',
    'smart_union',
    'underscore_attrs_are_private',
    'json_loads',
    'json_dumps',
    'copy_on_model_validation',
    'post_init_call',
}
V2_RENAMED_KEYS = {
    'allow_population_by_field_name': 'populate_by_name',
    'anystr_lower': 'str_to_lower',
    'anystr_strip_whitespace': 'str_strip_whitespace',
    'anystr_upper': 'str_to_upper',
    'keep_untouched': 'ignored_types',
    'max_anystr_length': 'str_max_length',
    'min_anystr_length': 'str_min_length',
    'orm_mode': 'from_attributes',
    'schema_extra': 'json_schema_extra',
    'validate_all': 'validate_default',
}


def check_deprecated(config_dict: ConfigDict) -> None:
    """Check for deprecated config keys and warn the user.

    Args:
        config_dict: The input config.
    """
    deprecated_removed_keys = V2_REMOVED_KEYS & config_dict.keys()
    deprecated_renamed_keys = V2_RENAMED_KEYS.keys() & config_dict.keys()
    if deprecated_removed_keys or deprecated_renamed_keys:
        renamings = {k: V2_RENAMED_KEYS[k] for k in sorted(deprecated_renamed_keys)}
        renamed_bullets = [f'* {k!r} has been renamed to {v!r}' for k, v in renamings.items()]
        removed_bullets = [f'* {k!r} has been removed' for k in sorted(deprecated_removed_keys)]
        message = '\n'.join(['Valid config keys have changed in V2:'] + renamed_bullets + removed_bullets)
        warnings.warn(message, UserWarning)
```
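For context, a minimal usage sketch (not part of the diff itself), assuming a pydantic v2 install where `pydantic._internal._config` is importable; `OldStyleConfig` is an invented example class. It shows the deprecated class-based config path through `prepare_config` and the `config_defaults` fallback in `__getattr__`:

```python
import warnings

from pydantic._internal._config import ConfigWrapper

class OldStyleConfig:  # hypothetical V1-style class-based config
    str_to_lower = True
    allow_mutation = False  # removed in V2, triggers a UserWarning

with warnings.catch_warnings():
    warnings.simplefilter('ignore')  # class-based config + removed key both warn
    wrapper = ConfigWrapper(OldStyleConfig)

print(wrapper.str_to_lower)  # True, read from the converted config_dict
print(wrapper.strict)        # False, falls back to config_defaults via __getattr__
```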
lib/pydantic/_internal/_core_metadata.py (new file, 92 lines)
```python
from __future__ import annotations as _annotations

import typing
from typing import Any

import typing_extensions

if typing.TYPE_CHECKING:
    from ._schema_generation_shared import (
        CoreSchemaOrField as CoreSchemaOrField,
    )
    from ._schema_generation_shared import (
        GetJsonSchemaFunction,
    )


class CoreMetadata(typing_extensions.TypedDict, total=False):
    """A `TypedDict` for holding the metadata dict of the schema.

    Attributes:
        pydantic_js_functions: List of JSON schema functions.
        pydantic_js_prefer_positional_arguments: Whether the JSON schema generator will
            prefer positional over keyword arguments for an 'arguments' schema.
    """

    pydantic_js_functions: list[GetJsonSchemaFunction]
    pydantic_js_annotation_functions: list[GetJsonSchemaFunction]

    # If `pydantic_js_prefer_positional_arguments` is True, the JSON schema generator will
    # prefer positional over keyword arguments for an 'arguments' schema.
    pydantic_js_prefer_positional_arguments: bool | None

    pydantic_typed_dict_cls: type[Any] | None  # TODO: Consider moving this into the pydantic-core TypedDictSchema


class CoreMetadataHandler:
    """Because the metadata field in pydantic_core is of type `Any`, we can't assume much about its contents.

    This class is used to interact with the metadata field on a CoreSchema object in a consistent
    way throughout pydantic.
    """

    __slots__ = ('_schema',)

    def __init__(self, schema: CoreSchemaOrField):
        self._schema = schema

        metadata = schema.get('metadata')
        if metadata is None:
            schema['metadata'] = CoreMetadata()
        elif not isinstance(metadata, dict):
            raise TypeError(f'CoreSchema metadata should be a dict; got {metadata!r}.')

    @property
    def metadata(self) -> CoreMetadata:
        """Retrieves the metadata dict from the schema, initializing it to a dict if it is None
        and raising an error if it is not a dict.
        """
        metadata = self._schema.get('metadata')
        if metadata is None:
            self._schema['metadata'] = metadata = CoreMetadata()
        if not isinstance(metadata, dict):
            raise TypeError(f'CoreSchema metadata should be a dict; got {metadata!r}.')
        return metadata


def build_metadata_dict(
    *,  # force keyword arguments to make it easier to modify this signature in a backwards-compatible way
    js_functions: list[GetJsonSchemaFunction] | None = None,
    js_annotation_functions: list[GetJsonSchemaFunction] | None = None,
    js_prefer_positional_arguments: bool | None = None,
    typed_dict_cls: type[Any] | None = None,
    initial_metadata: Any | None = None,
) -> Any:
    """Builds a dict to use as the metadata field of a CoreSchema object in a manner that is consistent
    with the CoreMetadataHandler class.
    """
    if initial_metadata is not None and not isinstance(initial_metadata, dict):
        raise TypeError(f'CoreSchema metadata should be a dict; got {initial_metadata!r}.')

    metadata = CoreMetadata(
        pydantic_js_functions=js_functions or [],
        pydantic_js_annotation_functions=js_annotation_functions or [],
        pydantic_js_prefer_positional_arguments=js_prefer_positional_arguments,
        pydantic_typed_dict_cls=typed_dict_cls,
    )
    metadata = {k: v for k, v in metadata.items() if v is not None}

    if initial_metadata is not None:
        metadata = {**initial_metadata, **metadata}

    return metadata
```
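As an illustration (not in the diff), a small sketch of attaching and reading core-schema metadata, assuming upstream pydantic v2 import paths:

```python
from pydantic_core import core_schema

from pydantic._internal._core_metadata import CoreMetadataHandler, build_metadata_dict

# build_metadata_dict drops None-valued keys, so only the two list keys survive here
metadata = build_metadata_dict(js_functions=[], js_annotation_functions=[])
schema = core_schema.int_schema(metadata=metadata)

handler = CoreMetadataHandler(schema)
print(handler.metadata)  # {'pydantic_js_functions': [], 'pydantic_js_annotation_functions': []}
```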
lib/pydantic/_internal/_core_utils.py (new file, 570 lines)
```python
from __future__ import annotations

import os
from collections import defaultdict
from typing import (
    Any,
    Callable,
    Hashable,
    TypeVar,
    Union,
)

from pydantic_core import CoreSchema, core_schema
from pydantic_core import validate_core_schema as _validate_core_schema
from typing_extensions import TypeAliasType, TypeGuard, get_args, get_origin

from . import _repr
from ._typing_extra import is_generic_alias

AnyFunctionSchema = Union[
    core_schema.AfterValidatorFunctionSchema,
    core_schema.BeforeValidatorFunctionSchema,
    core_schema.WrapValidatorFunctionSchema,
    core_schema.PlainValidatorFunctionSchema,
]


FunctionSchemaWithInnerSchema = Union[
    core_schema.AfterValidatorFunctionSchema,
    core_schema.BeforeValidatorFunctionSchema,
    core_schema.WrapValidatorFunctionSchema,
]

CoreSchemaField = Union[
    core_schema.ModelField, core_schema.DataclassField, core_schema.TypedDictField, core_schema.ComputedField
]
CoreSchemaOrField = Union[core_schema.CoreSchema, CoreSchemaField]

_CORE_SCHEMA_FIELD_TYPES = {'typed-dict-field', 'dataclass-field', 'model-field', 'computed-field'}
_FUNCTION_WITH_INNER_SCHEMA_TYPES = {'function-before', 'function-after', 'function-wrap'}
_LIST_LIKE_SCHEMA_WITH_ITEMS_TYPES = {'list', 'set', 'frozenset'}

_DEFINITIONS_CACHE_METADATA_KEY = 'pydantic.definitions_cache'

TAGGED_UNION_TAG_KEY = 'pydantic.internal.tagged_union_tag'
"""
Used in a `Tag` schema to specify the tag used for a discriminated union.
"""
HAS_INVALID_SCHEMAS_METADATA_KEY = 'pydantic.internal.invalid'
"""Used to mark a schema that is invalid because it refers to a definition that was not yet defined when the
schema was first encountered.
"""


def is_core_schema(
    schema: CoreSchemaOrField,
) -> TypeGuard[CoreSchema]:
    return schema['type'] not in _CORE_SCHEMA_FIELD_TYPES


def is_core_schema_field(
    schema: CoreSchemaOrField,
) -> TypeGuard[CoreSchemaField]:
    return schema['type'] in _CORE_SCHEMA_FIELD_TYPES


def is_function_with_inner_schema(
    schema: CoreSchemaOrField,
) -> TypeGuard[FunctionSchemaWithInnerSchema]:
    return schema['type'] in _FUNCTION_WITH_INNER_SCHEMA_TYPES


def is_list_like_schema_with_items_schema(
    schema: CoreSchema,
) -> TypeGuard[core_schema.ListSchema | core_schema.SetSchema | core_schema.FrozenSetSchema]:
    return schema['type'] in _LIST_LIKE_SCHEMA_WITH_ITEMS_TYPES


def get_type_ref(type_: type[Any], args_override: tuple[type[Any], ...] | None = None) -> str:
    """Produces the ref to be used for this type by pydantic_core's core schemas.

    This `args_override` argument was added for the purpose of creating valid recursive references
    when creating generic models without needing to create a concrete class.
    """
    origin = get_origin(type_) or type_

    args = get_args(type_) if is_generic_alias(type_) else (args_override or ())
    generic_metadata = getattr(type_, '__pydantic_generic_metadata__', None)
    if generic_metadata:
        origin = generic_metadata['origin'] or origin
        args = generic_metadata['args'] or args

    module_name = getattr(origin, '__module__', '<No __module__>')
    if isinstance(origin, TypeAliasType):
        type_ref = f'{module_name}.{origin.__name__}:{id(origin)}'
    else:
        try:
            qualname = getattr(origin, '__qualname__', f'<No __qualname__: {origin}>')
        except Exception:
            qualname = getattr(origin, '__qualname__', '<No __qualname__>')
        type_ref = f'{module_name}.{qualname}:{id(origin)}'

    arg_refs: list[str] = []
    for arg in args:
        if isinstance(arg, str):
            # Handle string literals as a special case; we may be able to remove this special handling if we
            # wrap them in a ForwardRef at some point.
            arg_ref = f'{arg}:str-{id(arg)}'
        else:
            arg_ref = f'{_repr.display_as_type(arg)}:{id(arg)}'
        arg_refs.append(arg_ref)
    if arg_refs:
        type_ref = f'{type_ref}[{",".join(arg_refs)}]'
    return type_ref


def get_ref(s: core_schema.CoreSchema) -> None | str:
    """Get the ref from the schema if it has one.
    This exists just for type checking to work correctly.
    """
    return s.get('ref', None)


def collect_definitions(schema: core_schema.CoreSchema) -> dict[str, core_schema.CoreSchema]:
    defs: dict[str, CoreSchema] = {}

    def _record_valid_refs(s: core_schema.CoreSchema, recurse: Recurse) -> core_schema.CoreSchema:
        ref = get_ref(s)
        if ref:
            defs[ref] = s
        return recurse(s, _record_valid_refs)

    walk_core_schema(schema, _record_valid_refs)

    return defs


def define_expected_missing_refs(
    schema: core_schema.CoreSchema, allowed_missing_refs: set[str]
) -> core_schema.CoreSchema | None:
    if not allowed_missing_refs:
        # in this case, there are no missing refs to potentially substitute, so there's no need to walk the schema
        # this is a common case (will be hit for all non-generic models), so it's worth optimizing for
        return None

    refs = collect_definitions(schema).keys()

    expected_missing_refs = allowed_missing_refs.difference(refs)
    if expected_missing_refs:
        definitions: list[core_schema.CoreSchema] = [
            # TODO: Replace this with a (new) CoreSchema that, if present at any level, makes validation fail
            # Issue: https://github.com/pydantic/pydantic-core/issues/619
            core_schema.none_schema(ref=ref, metadata={HAS_INVALID_SCHEMAS_METADATA_KEY: True})
            for ref in expected_missing_refs
        ]
        return core_schema.definitions_schema(schema, definitions)
    return None


def collect_invalid_schemas(schema: core_schema.CoreSchema) -> bool:
    invalid = False

    def _is_schema_valid(s: core_schema.CoreSchema, recurse: Recurse) -> core_schema.CoreSchema:
        nonlocal invalid
        if 'metadata' in s:
            metadata = s['metadata']
            if HAS_INVALID_SCHEMAS_METADATA_KEY in metadata:
                invalid = metadata[HAS_INVALID_SCHEMAS_METADATA_KEY]
                return s
        return recurse(s, _is_schema_valid)

    walk_core_schema(schema, _is_schema_valid)
    return invalid


T = TypeVar('T')


Recurse = Callable[[core_schema.CoreSchema, 'Walk'], core_schema.CoreSchema]
Walk = Callable[[core_schema.CoreSchema, Recurse], core_schema.CoreSchema]

# TODO: Should we move _WalkCoreSchema into pydantic_core proper?
# Issue: https://github.com/pydantic/pydantic-core/issues/615


class _WalkCoreSchema:
    def __init__(self):
        self._schema_type_to_method = self._build_schema_type_to_method()

    def _build_schema_type_to_method(self) -> dict[core_schema.CoreSchemaType, Recurse]:
        mapping: dict[core_schema.CoreSchemaType, Recurse] = {}
        key: core_schema.CoreSchemaType
        for key in get_args(core_schema.CoreSchemaType):
            method_name = f"handle_{key.replace('-', '_')}_schema"
            mapping[key] = getattr(self, method_name, self._handle_other_schemas)
        return mapping

    def walk(self, schema: core_schema.CoreSchema, f: Walk) -> core_schema.CoreSchema:
        return f(schema, self._walk)

    def _walk(self, schema: core_schema.CoreSchema, f: Walk) -> core_schema.CoreSchema:
        schema = self._schema_type_to_method[schema['type']](schema.copy(), f)
        ser_schema: core_schema.SerSchema | None = schema.get('serialization')  # type: ignore
        if ser_schema:
            schema['serialization'] = self._handle_ser_schemas(ser_schema, f)
        return schema

    def _handle_other_schemas(self, schema: core_schema.CoreSchema, f: Walk) -> core_schema.CoreSchema:
        sub_schema = schema.get('schema', None)
        if sub_schema is not None:
            schema['schema'] = self.walk(sub_schema, f)  # type: ignore
        return schema

    def _handle_ser_schemas(self, ser_schema: core_schema.SerSchema, f: Walk) -> core_schema.SerSchema:
        schema: core_schema.CoreSchema | None = ser_schema.get('schema', None)
        if schema is not None:
            ser_schema['schema'] = self.walk(schema, f)  # type: ignore
        return_schema: core_schema.CoreSchema | None = ser_schema.get('return_schema', None)
        if return_schema is not None:
            ser_schema['return_schema'] = self.walk(return_schema, f)  # type: ignore
        return ser_schema

    def handle_definitions_schema(self, schema: core_schema.DefinitionsSchema, f: Walk) -> core_schema.CoreSchema:
        new_definitions: list[core_schema.CoreSchema] = []
        for definition in schema['definitions']:
            if 'schema_ref' in definition and 'ref' in definition:
                # This indicates a purposely indirect reference
                # We want to keep such references around for implications related to JSON schema, etc.:
                new_definitions.append(definition)
                # However, we still need to walk the referenced definition:
                self.walk(definition, f)
                continue

            updated_definition = self.walk(definition, f)
            if 'ref' in updated_definition:
                # If the updated definition schema doesn't have a 'ref', it shouldn't go in the definitions
                # This is most likely to happen due to replacing something with a definition reference, in
                # which case it should certainly not go in the definitions list
                new_definitions.append(updated_definition)
        new_inner_schema = self.walk(schema['schema'], f)

        if not new_definitions and len(schema) == 3:
            # This means we'd be returning a "trivial" definitions schema that just wrapped the inner schema
            return new_inner_schema

        new_schema = schema.copy()
        new_schema['schema'] = new_inner_schema
        new_schema['definitions'] = new_definitions
        return new_schema

    def handle_list_schema(self, schema: core_schema.ListSchema, f: Walk) -> core_schema.CoreSchema:
        items_schema = schema.get('items_schema')
        if items_schema is not None:
            schema['items_schema'] = self.walk(items_schema, f)
        return schema

    def handle_set_schema(self, schema: core_schema.SetSchema, f: Walk) -> core_schema.CoreSchema:
        items_schema = schema.get('items_schema')
        if items_schema is not None:
            schema['items_schema'] = self.walk(items_schema, f)
        return schema

    def handle_frozenset_schema(self, schema: core_schema.FrozenSetSchema, f: Walk) -> core_schema.CoreSchema:
        items_schema = schema.get('items_schema')
        if items_schema is not None:
            schema['items_schema'] = self.walk(items_schema, f)
        return schema

    def handle_generator_schema(self, schema: core_schema.GeneratorSchema, f: Walk) -> core_schema.CoreSchema:
        items_schema = schema.get('items_schema')
        if items_schema is not None:
            schema['items_schema'] = self.walk(items_schema, f)
        return schema

    def handle_tuple_schema(self, schema: core_schema.TupleSchema, f: Walk) -> core_schema.CoreSchema:
        schema['items_schema'] = [self.walk(v, f) for v in schema['items_schema']]
        return schema

    def handle_dict_schema(self, schema: core_schema.DictSchema, f: Walk) -> core_schema.CoreSchema:
        keys_schema = schema.get('keys_schema')
        if keys_schema is not None:
            schema['keys_schema'] = self.walk(keys_schema, f)
        values_schema = schema.get('values_schema')
        if values_schema:
            schema['values_schema'] = self.walk(values_schema, f)
        return schema

    def handle_function_schema(self, schema: AnyFunctionSchema, f: Walk) -> core_schema.CoreSchema:
        if not is_function_with_inner_schema(schema):
            return schema
        schema['schema'] = self.walk(schema['schema'], f)
        return schema

    def handle_union_schema(self, schema: core_schema.UnionSchema, f: Walk) -> core_schema.CoreSchema:
        new_choices: list[CoreSchema | tuple[CoreSchema, str]] = []
        for v in schema['choices']:
            if isinstance(v, tuple):
                new_choices.append((self.walk(v[0], f), v[1]))
            else:
                new_choices.append(self.walk(v, f))
        schema['choices'] = new_choices
        return schema

    def handle_tagged_union_schema(self, schema: core_schema.TaggedUnionSchema, f: Walk) -> core_schema.CoreSchema:
        new_choices: dict[Hashable, core_schema.CoreSchema] = {}
        for k, v in schema['choices'].items():
            new_choices[k] = v if isinstance(v, (str, int)) else self.walk(v, f)
        schema['choices'] = new_choices
        return schema

    def handle_chain_schema(self, schema: core_schema.ChainSchema, f: Walk) -> core_schema.CoreSchema:
        schema['steps'] = [self.walk(v, f) for v in schema['steps']]
        return schema

    def handle_lax_or_strict_schema(self, schema: core_schema.LaxOrStrictSchema, f: Walk) -> core_schema.CoreSchema:
        schema['lax_schema'] = self.walk(schema['lax_schema'], f)
        schema['strict_schema'] = self.walk(schema['strict_schema'], f)
        return schema

    def handle_json_or_python_schema(self, schema: core_schema.JsonOrPythonSchema, f: Walk) -> core_schema.CoreSchema:
        schema['json_schema'] = self.walk(schema['json_schema'], f)
        schema['python_schema'] = self.walk(schema['python_schema'], f)
        return schema

    def handle_model_fields_schema(self, schema: core_schema.ModelFieldsSchema, f: Walk) -> core_schema.CoreSchema:
        extras_schema = schema.get('extras_schema')
        if extras_schema is not None:
            schema['extras_schema'] = self.walk(extras_schema, f)
        replaced_fields: dict[str, core_schema.ModelField] = {}
        replaced_computed_fields: list[core_schema.ComputedField] = []
        for computed_field in schema.get('computed_fields', ()):
            replaced_field = computed_field.copy()
            replaced_field['return_schema'] = self.walk(computed_field['return_schema'], f)
            replaced_computed_fields.append(replaced_field)
        if replaced_computed_fields:
            schema['computed_fields'] = replaced_computed_fields
        for k, v in schema['fields'].items():
            replaced_field = v.copy()
            replaced_field['schema'] = self.walk(v['schema'], f)
            replaced_fields[k] = replaced_field
        schema['fields'] = replaced_fields
        return schema

    def handle_typed_dict_schema(self, schema: core_schema.TypedDictSchema, f: Walk) -> core_schema.CoreSchema:
        extras_schema = schema.get('extras_schema')
        if extras_schema is not None:
            schema['extras_schema'] = self.walk(extras_schema, f)
        replaced_computed_fields: list[core_schema.ComputedField] = []
        for computed_field in schema.get('computed_fields', ()):
            replaced_field = computed_field.copy()
            replaced_field['return_schema'] = self.walk(computed_field['return_schema'], f)
            replaced_computed_fields.append(replaced_field)
        if replaced_computed_fields:
            schema['computed_fields'] = replaced_computed_fields
        replaced_fields: dict[str, core_schema.TypedDictField] = {}
        for k, v in schema['fields'].items():
            replaced_field = v.copy()
            replaced_field['schema'] = self.walk(v['schema'], f)
            replaced_fields[k] = replaced_field
        schema['fields'] = replaced_fields
        return schema

    def handle_dataclass_args_schema(self, schema: core_schema.DataclassArgsSchema, f: Walk) -> core_schema.CoreSchema:
        replaced_fields: list[core_schema.DataclassField] = []
        replaced_computed_fields: list[core_schema.ComputedField] = []
        for computed_field in schema.get('computed_fields', ()):
            replaced_field = computed_field.copy()
            replaced_field['return_schema'] = self.walk(computed_field['return_schema'], f)
            replaced_computed_fields.append(replaced_field)
        if replaced_computed_fields:
            schema['computed_fields'] = replaced_computed_fields
        for field in schema['fields']:
            replaced_field = field.copy()
            replaced_field['schema'] = self.walk(field['schema'], f)
            replaced_fields.append(replaced_field)
        schema['fields'] = replaced_fields
        return schema

    def handle_arguments_schema(self, schema: core_schema.ArgumentsSchema, f: Walk) -> core_schema.CoreSchema:
        replaced_arguments_schema: list[core_schema.ArgumentsParameter] = []
        for param in schema['arguments_schema']:
            replaced_param = param.copy()
            replaced_param['schema'] = self.walk(param['schema'], f)
            replaced_arguments_schema.append(replaced_param)
        schema['arguments_schema'] = replaced_arguments_schema
        if 'var_args_schema' in schema:
            schema['var_args_schema'] = self.walk(schema['var_args_schema'], f)
        if 'var_kwargs_schema' in schema:
            schema['var_kwargs_schema'] = self.walk(schema['var_kwargs_schema'], f)
        return schema

    def handle_call_schema(self, schema: core_schema.CallSchema, f: Walk) -> core_schema.CoreSchema:
        schema['arguments_schema'] = self.walk(schema['arguments_schema'], f)
        if 'return_schema' in schema:
            schema['return_schema'] = self.walk(schema['return_schema'], f)
        return schema


_dispatch = _WalkCoreSchema().walk


def walk_core_schema(schema: core_schema.CoreSchema, f: Walk) -> core_schema.CoreSchema:
    """Recursively traverse a CoreSchema.

    Args:
        schema (core_schema.CoreSchema): The CoreSchema to process, it will not be modified.
        f (Walk): A function to apply. This function takes two arguments:
            1. The current CoreSchema that is being processed
               (not the same one you passed into this function, one level down).
            2. The "next" `f` to call. This lets you for example use `f=functools.partial(some_method, some_context)`
               to pass data down the recursive calls without using globals or other mutable state.

    Returns:
        core_schema.CoreSchema: A processed CoreSchema.
    """
    return f(schema.copy(), _dispatch)


def simplify_schema_references(schema: core_schema.CoreSchema) -> core_schema.CoreSchema:  # noqa: C901
    definitions: dict[str, core_schema.CoreSchema] = {}
    ref_counts: dict[str, int] = defaultdict(int)
    involved_in_recursion: dict[str, bool] = {}
    current_recursion_ref_count: dict[str, int] = defaultdict(int)

    def collect_refs(s: core_schema.CoreSchema, recurse: Recurse) -> core_schema.CoreSchema:
        if s['type'] == 'definitions':
            for definition in s['definitions']:
                ref = get_ref(definition)
                assert ref is not None
                if ref not in definitions:
                    definitions[ref] = definition
                recurse(definition, collect_refs)
            return recurse(s['schema'], collect_refs)
        else:
            ref = get_ref(s)
            if ref is not None:
                new = recurse(s, collect_refs)
                new_ref = get_ref(new)
                if new_ref:
                    definitions[new_ref] = new
                return core_schema.definition_reference_schema(schema_ref=ref)
            else:
                return recurse(s, collect_refs)

    schema = walk_core_schema(schema, collect_refs)

    def count_refs(s: core_schema.CoreSchema, recurse: Recurse) -> core_schema.CoreSchema:
        if s['type'] != 'definition-ref':
            return recurse(s, count_refs)
        ref = s['schema_ref']
        ref_counts[ref] += 1

        if ref_counts[ref] >= 2:
            # If this model is involved in a recursion this should be detected
            # on its second encounter, we can safely stop the walk here.
            if current_recursion_ref_count[ref] != 0:
                involved_in_recursion[ref] = True
            return s

        current_recursion_ref_count[ref] += 1
        recurse(definitions[ref], count_refs)
        current_recursion_ref_count[ref] -= 1
        return s

    schema = walk_core_schema(schema, count_refs)

    assert all(c == 0 for c in current_recursion_ref_count.values()), 'this is a bug! please report it'

    def can_be_inlined(s: core_schema.DefinitionReferenceSchema, ref: str) -> bool:
        if ref_counts[ref] > 1:
            return False
        if involved_in_recursion.get(ref, False):
            return False
        if 'serialization' in s:
            return False
        if 'metadata' in s:
            metadata = s['metadata']
            for k in (
                'pydantic_js_functions',
                'pydantic_js_annotation_functions',
                'pydantic.internal.union_discriminator',
            ):
                if k in metadata:
                    # we need to keep this as a ref
                    return False
        return True

    def inline_refs(s: core_schema.CoreSchema, recurse: Recurse) -> core_schema.CoreSchema:
        if s['type'] == 'definition-ref':
            ref = s['schema_ref']
            # Check if the reference is only used once, not involved in recursion and does not have
            # any extra keys (like 'serialization')
            if can_be_inlined(s, ref):
                # Inline the reference by replacing the reference with the actual schema
                new = definitions.pop(ref)
                ref_counts[ref] -= 1  # because we just replaced it!
                # put all other keys that were on the def-ref schema into the inlined version
                # in particular this is needed for `serialization`
                if 'serialization' in s:
                    new['serialization'] = s['serialization']
                s = recurse(new, inline_refs)
                return s
            else:
                return recurse(s, inline_refs)
        else:
            return recurse(s, inline_refs)

    schema = walk_core_schema(schema, inline_refs)

    def_values = [v for v in definitions.values() if ref_counts[v['ref']] > 0]  # type: ignore

    if def_values:
        schema = core_schema.definitions_schema(schema=schema, definitions=def_values)
    return schema


def _strip_metadata(schema: CoreSchema) -> CoreSchema:
    def strip_metadata(s: CoreSchema, recurse: Recurse) -> CoreSchema:
        s = s.copy()
        s.pop('metadata', None)
        if s['type'] == 'model-fields':
            s = s.copy()
            s['fields'] = {k: v.copy() for k, v in s['fields'].items()}
            for field_name, field_schema in s['fields'].items():
                field_schema.pop('metadata', None)
                s['fields'][field_name] = field_schema
            computed_fields = s.get('computed_fields', None)
            if computed_fields:
                s['computed_fields'] = [cf.copy() for cf in computed_fields]
                for cf in computed_fields:
                    cf.pop('metadata', None)
            else:
                s.pop('computed_fields', None)
        elif s['type'] == 'model':
            # remove some defaults
            if s.get('custom_init', True) is False:
                s.pop('custom_init')
            if s.get('root_model', True) is False:
                s.pop('root_model')
            if {'title'}.issuperset(s.get('config', {}).keys()):
                s.pop('config', None)

        return recurse(s, strip_metadata)

    return walk_core_schema(schema, strip_metadata)


def pretty_print_core_schema(
    schema: CoreSchema,
    include_metadata: bool = False,
) -> None:
    """Pretty print a CoreSchema using rich.
    This is intended for debugging purposes.

    Args:
        schema: The CoreSchema to print.
        include_metadata: Whether to include metadata in the output. Defaults to `False`.
    """
    from rich import print  # type: ignore  # install it manually in your dev env

    if not include_metadata:
        schema = _strip_metadata(schema)

    return print(schema)


def validate_core_schema(schema: CoreSchema) -> CoreSchema:
    if 'PYDANTIC_SKIP_VALIDATING_CORE_SCHEMAS' in os.environ:
        return schema
    return _validate_core_schema(schema)
```
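As an illustration (not in the diff), a small callback run through `walk_core_schema`, assuming upstream pydantic v2 import paths; `record_types` is an invented helper. A set is used because the walker may visit the root node more than once:

```python
from pydantic_core import core_schema

from pydantic._internal._core_utils import Recurse, walk_core_schema

schema = core_schema.list_schema(core_schema.int_schema())
seen: set[str] = set()

def record_types(s: core_schema.CoreSchema, recurse: Recurse) -> core_schema.CoreSchema:
    seen.add(s['type'])          # inspect the current node
    return recurse(s, record_types)  # then descend into sub-schemas

walk_core_schema(schema, record_types)
print(sorted(seen))  # ['int', 'list']
```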
lib/pydantic/_internal/_dataclasses.py (new file, 225 lines)
````python
"""Private logic for creating pydantic dataclasses."""
from __future__ import annotations as _annotations

import dataclasses
import typing
import warnings
from functools import partial, wraps
from typing import Any, Callable, ClassVar

from pydantic_core import (
    ArgsKwargs,
    SchemaSerializer,
    SchemaValidator,
    core_schema,
)
from typing_extensions import TypeGuard

from ..errors import PydanticUndefinedAnnotation
from ..fields import FieldInfo
from ..plugin._schema_validator import create_schema_validator
from ..warnings import PydanticDeprecatedSince20
from . import _config, _decorators, _typing_extra
from ._fields import collect_dataclass_fields
from ._generate_schema import GenerateSchema
from ._generics import get_standard_typevars_map
from ._mock_val_ser import set_dataclass_mocks
from ._schema_generation_shared import CallbackGetCoreSchemaHandler
from ._signature import generate_pydantic_signature

if typing.TYPE_CHECKING:
    from ..config import ConfigDict

    class StandardDataclass(typing.Protocol):
        __dataclass_fields__: ClassVar[dict[str, Any]]
        __dataclass_params__: ClassVar[Any]  # in reality `dataclasses._DataclassParams`
        __post_init__: ClassVar[Callable[..., None]]

        def __init__(self, *args: object, **kwargs: object) -> None:
            pass

    class PydanticDataclass(StandardDataclass, typing.Protocol):
        """A protocol containing attributes only available once a class has been decorated as a Pydantic dataclass.

        Attributes:
            __pydantic_config__: Pydantic-specific configuration settings for the dataclass.
            __pydantic_complete__: Whether dataclass building is completed, or if there are still undefined fields.
            __pydantic_core_schema__: The pydantic-core schema used to build the SchemaValidator and SchemaSerializer.
            __pydantic_decorators__: Metadata containing the decorators defined on the dataclass.
            __pydantic_fields__: Metadata about the fields defined on the dataclass.
            __pydantic_serializer__: The pydantic-core SchemaSerializer used to dump instances of the dataclass.
            __pydantic_validator__: The pydantic-core SchemaValidator used to validate instances of the dataclass.
        """

        __pydantic_config__: ClassVar[ConfigDict]
        __pydantic_complete__: ClassVar[bool]
        __pydantic_core_schema__: ClassVar[core_schema.CoreSchema]
        __pydantic_decorators__: ClassVar[_decorators.DecoratorInfos]
        __pydantic_fields__: ClassVar[dict[str, FieldInfo]]
        __pydantic_serializer__: ClassVar[SchemaSerializer]
        __pydantic_validator__: ClassVar[SchemaValidator]

else:
    # See PyCharm issues https://youtrack.jetbrains.com/issue/PY-21915
    # and https://youtrack.jetbrains.com/issue/PY-51428
    DeprecationWarning = PydanticDeprecatedSince20


def set_dataclass_fields(cls: type[StandardDataclass], types_namespace: dict[str, Any] | None = None) -> None:
    """Collect and set `cls.__pydantic_fields__`.

    Args:
        cls: The class.
        types_namespace: The types namespace, defaults to `None`.
    """
    typevars_map = get_standard_typevars_map(cls)
    fields = collect_dataclass_fields(cls, types_namespace, typevars_map=typevars_map)

    cls.__pydantic_fields__ = fields  # type: ignore


def complete_dataclass(
    cls: type[Any],
    config_wrapper: _config.ConfigWrapper,
    *,
    raise_errors: bool = True,
    types_namespace: dict[str, Any] | None,
) -> bool:
    """Finish building a pydantic dataclass.

    This logic is called on a class which has already been wrapped in `dataclasses.dataclass()`.

    This is somewhat analogous to `pydantic._internal._model_construction.complete_model_class`.

    Args:
        cls: The class.
        config_wrapper: The config wrapper instance.
        raise_errors: Whether to raise errors, defaults to `True`.
        types_namespace: The types namespace.

    Returns:
        `True` if building a pydantic dataclass is successfully completed, `False` otherwise.

    Raises:
        PydanticUndefinedAnnotation: If `raise_errors` is `True` and there is an undefined annotation.
    """
    if hasattr(cls, '__post_init_post_parse__'):
        warnings.warn(
            'Support for `__post_init_post_parse__` has been dropped, the method will not be called', DeprecationWarning
        )

    if types_namespace is None:
        types_namespace = _typing_extra.get_cls_types_namespace(cls)

    set_dataclass_fields(cls, types_namespace)

    typevars_map = get_standard_typevars_map(cls)
    gen_schema = GenerateSchema(
        config_wrapper,
        types_namespace,
        typevars_map,
    )

    # This needs to be called before we change the __init__
    sig = generate_pydantic_signature(
        init=cls.__init__,
        fields=cls.__pydantic_fields__,  # type: ignore
        config_wrapper=config_wrapper,
        is_dataclass=True,
    )

    # dataclass.__init__ must be defined here so its `__qualname__` can be changed since functions can't be copied.
    def __init__(__dataclass_self__: PydanticDataclass, *args: Any, **kwargs: Any) -> None:
        __tracebackhide__ = True
        s = __dataclass_self__
        s.__pydantic_validator__.validate_python(ArgsKwargs(args, kwargs), self_instance=s)

    __init__.__qualname__ = f'{cls.__qualname__}.__init__'

    cls.__init__ = __init__  # type: ignore
    cls.__pydantic_config__ = config_wrapper.config_dict  # type: ignore
    cls.__signature__ = sig  # type: ignore
    get_core_schema = getattr(cls, '__get_pydantic_core_schema__', None)
    try:
        if get_core_schema:
            schema = get_core_schema(
                cls,
                CallbackGetCoreSchemaHandler(
                    partial(gen_schema.generate_schema, from_dunder_get_core_schema=False),
                    gen_schema,
                    ref_mode='unpack',
                ),
            )
        else:
            schema = gen_schema.generate_schema(cls, from_dunder_get_core_schema=False)
    except PydanticUndefinedAnnotation as e:
        if raise_errors:
            raise
        set_dataclass_mocks(cls, cls.__name__, f'`{e.name}`')
        return False

    core_config = config_wrapper.core_config(cls)

    try:
        schema = gen_schema.clean_schema(schema)
    except gen_schema.CollectedInvalid:
        set_dataclass_mocks(cls, cls.__name__, 'all referenced types')
        return False

    # We are about to set all the remaining required properties expected for this cast;
    # __pydantic_decorators__ and __pydantic_fields__ should already be set
    cls = typing.cast('type[PydanticDataclass]', cls)
    # debug(schema)

    cls.__pydantic_core_schema__ = schema
    cls.__pydantic_validator__ = validator = create_schema_validator(
        schema, cls, cls.__module__, cls.__qualname__, 'dataclass', core_config, config_wrapper.plugin_settings
    )
    cls.__pydantic_serializer__ = SchemaSerializer(schema, core_config)

    if config_wrapper.validate_assignment:

        @wraps(cls.__setattr__)
        def validated_setattr(instance: Any, __field: str, __value: str) -> None:
            validator.validate_assignment(instance, __field, __value)

        cls.__setattr__ = validated_setattr.__get__(None, cls)  # type: ignore

    return True


def is_builtin_dataclass(_cls: type[Any]) -> TypeGuard[type[StandardDataclass]]:
    """Returns True if a class is a stdlib dataclass and *not* a pydantic dataclass.

    We check that
    - `_cls` is a dataclass
    - `_cls` does not inherit from a processed pydantic dataclass (and thus have a `__pydantic_validator__`)
    - `_cls` does not have any annotations that are not dataclass fields
    e.g.
    ```py
    import dataclasses

    import pydantic.dataclasses

    @dataclasses.dataclass
    class A:
        x: int

    @pydantic.dataclasses.dataclass
    class B(A):
        y: int
    ```
    In this case, when we first check `B`, we make an extra check and look at the annotations ('y'),
    which won't be a superset of all the dataclass fields (only the stdlib fields i.e. 'x')

    Args:
        _cls: The class.

    Returns:
        `True` if the class is a stdlib dataclass, `False` otherwise.
    """
    return (
        dataclasses.is_dataclass(_cls)
        and not hasattr(_cls, '__pydantic_validator__')
        and set(_cls.__dataclass_fields__).issuperset(set(getattr(_cls, '__annotations__', {})))
    )
````
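For context (not in the diff), a sketch of the public decorator that drives `complete_dataclass` under the hood, assuming a pydantic v2 install; `Point` and `ValidatedPoint` are invented example classes:

```python
import dataclasses

import pydantic.dataclasses
from pydantic._internal._dataclasses import is_builtin_dataclass

@dataclasses.dataclass
class Point:
    x: int
    y: int

@pydantic.dataclasses.dataclass
class ValidatedPoint:
    x: int
    y: int

print(is_builtin_dataclass(Point))           # True: stdlib only
print(is_builtin_dataclass(ValidatedPoint))  # False: has __pydantic_validator__
print(ValidatedPoint(x='1', y=2))            # '1' coerced to 1 by the generated __init__
```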
lib/pydantic/_internal/_decorators.py (new file, 791 lines; listing truncated)
|
|||
"""Logic related to validators applied to models etc. via the `@field_validator` and `@model_validator` decorators."""
|
||||
from __future__ import annotations as _annotations
|
||||
|
||||
from collections import deque
|
||||
from dataclasses import dataclass, field
|
||||
from functools import cached_property, partial, partialmethod
|
||||
from inspect import Parameter, Signature, isdatadescriptor, ismethoddescriptor, signature
|
||||
from itertools import islice
|
||||
from typing import TYPE_CHECKING, Any, Callable, ClassVar, Generic, Iterable, TypeVar, Union
|
||||
|
||||
from pydantic_core import PydanticUndefined, core_schema
|
||||
from typing_extensions import Literal, TypeAlias, is_typeddict
|
||||
|
||||
from ..errors import PydanticUserError
|
||||
from ._core_utils import get_type_ref
|
||||
from ._internal_dataclass import slots_true
|
||||
from ._typing_extra import get_function_type_hints
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ..fields import ComputedFieldInfo
|
||||
from ..functional_validators import FieldValidatorModes
|
||||
|
||||
|
||||
@dataclass(**slots_true)
|
||||
class ValidatorDecoratorInfo:
|
||||
"""A container for data from `@validator` so that we can access it
|
||||
while building the pydantic-core schema.
|
||||
|
||||
Attributes:
|
||||
decorator_repr: A class variable representing the decorator string, '@validator'.
|
||||
fields: A tuple of field names the validator should be called on.
|
||||
mode: The proposed validator mode.
|
||||
each_item: For complex objects (sets, lists etc.) whether to validate individual
|
||||
elements rather than the whole object.
|
||||
always: Whether this method and other validators should be called even if the value is missing.
|
||||
check_fields: Whether to check that the fields actually exist on the model.
|
||||
"""
|
||||
|
||||
decorator_repr: ClassVar[str] = '@validator'
|
||||
|
||||
fields: tuple[str, ...]
|
||||
mode: Literal['before', 'after']
|
||||
each_item: bool
|
||||
always: bool
|
||||
check_fields: bool | None
|
||||
|
||||
|
||||
@dataclass(**slots_true)
|
||||
class FieldValidatorDecoratorInfo:
|
||||
"""A container for data from `@field_validator` so that we can access it
|
||||
while building the pydantic-core schema.
|
||||
|
||||
Attributes:
|
||||
decorator_repr: A class variable representing the decorator string, '@field_validator'.
|
||||
fields: A tuple of field names the validator should be called on.
|
||||
mode: The proposed validator mode.
|
||||
check_fields: Whether to check that the fields actually exist on the model.
|
||||
"""
|
||||
|
||||
decorator_repr: ClassVar[str] = '@field_validator'
|
||||
|
||||
fields: tuple[str, ...]
|
||||
mode: FieldValidatorModes
|
||||
check_fields: bool | None
|
||||
|
||||
|
||||
@dataclass(**slots_true)
|
||||
class RootValidatorDecoratorInfo:
|
||||
"""A container for data from `@root_validator` so that we can access it
|
||||
while building the pydantic-core schema.
|
||||
|
||||
Attributes:
|
||||
decorator_repr: A class variable representing the decorator string, '@root_validator'.
|
||||
mode: The proposed validator mode.
|
||||
"""
|
||||
|
||||
decorator_repr: ClassVar[str] = '@root_validator'
|
||||
mode: Literal['before', 'after']
|
||||
|
||||
|
||||
@dataclass(**slots_true)
|
||||
class FieldSerializerDecoratorInfo:
|
||||
"""A container for data from `@field_serializer` so that we can access it
|
||||
while building the pydantic-core schema.
|
||||
|
||||
Attributes:
|
||||
decorator_repr: A class variable representing the decorator string, '@field_serializer'.
|
||||
fields: A tuple of field names the serializer should be called on.
|
||||
mode: The proposed serializer mode.
|
||||
return_type: The type of the serializer's return value.
|
||||
when_used: The serialization condition. Accepts a string with values `'always'`, `'unless-none'`, `'json'`,
|
||||
and `'json-unless-none'`.
|
||||
check_fields: Whether to check that the fields actually exist on the model.
|
||||
"""
|
||||
|
||||
decorator_repr: ClassVar[str] = '@field_serializer'
|
||||
fields: tuple[str, ...]
|
||||
mode: Literal['plain', 'wrap']
|
||||
return_type: Any
|
||||
when_used: core_schema.WhenUsed
|
||||
check_fields: bool | None
|
||||
|
||||
|
||||
@dataclass(**slots_true)
|
||||
class ModelSerializerDecoratorInfo:
|
||||
"""A container for data from `@model_serializer` so that we can access it
|
||||
while building the pydantic-core schema.
|
||||
|
||||
Attributes:
|
||||
decorator_repr: A class variable representing the decorator string, '@model_serializer'.
|
||||
mode: The proposed serializer mode.
|
||||
return_type: The type of the serializer's return value.
|
||||
when_used: The serialization condition. Accepts a string with values `'always'`, `'unless-none'`, `'json'`,
|
||||
and `'json-unless-none'`.
|
||||
"""
|
||||
|
||||
decorator_repr: ClassVar[str] = '@model_serializer'
|
||||
mode: Literal['plain', 'wrap']
|
||||
return_type: Any
|
||||
when_used: core_schema.WhenUsed
|
||||
|
||||
|
||||
@dataclass(**slots_true)
|
||||
class ModelValidatorDecoratorInfo:
|
||||
"""A container for data from `@model_validator` so that we can access it
|
||||
while building the pydantic-core schema.
|
||||
|
||||
Attributes:
|
||||
decorator_repr: A class variable representing the decorator string, '@model_serializer'.
|
||||
mode: The proposed serializer mode.
|
||||
"""
|
||||
|
||||
decorator_repr: ClassVar[str] = '@model_validator'
|
||||
mode: Literal['wrap', 'before', 'after']


DecoratorInfo: TypeAlias = """Union[
    ValidatorDecoratorInfo,
    FieldValidatorDecoratorInfo,
    RootValidatorDecoratorInfo,
    FieldSerializerDecoratorInfo,
    ModelSerializerDecoratorInfo,
    ModelValidatorDecoratorInfo,
    ComputedFieldInfo,
]"""

ReturnType = TypeVar('ReturnType')
DecoratedType: TypeAlias = (
    'Union[classmethod[Any, Any, ReturnType], staticmethod[Any, ReturnType], Callable[..., ReturnType], property]'
)


@dataclass  # can't use slots here since we set attributes on `__post_init__`
class PydanticDescriptorProxy(Generic[ReturnType]):
    """Wrap a classmethod, staticmethod, property or unbound function
    and act as a descriptor that allows us to detect decorated items
    from the class' attributes.

    This class' `__get__` returns the wrapped item's `__get__` result,
    which makes it transparent for classmethods and staticmethods.

    Attributes:
        wrapped: The decorator that has to be wrapped.
        decorator_info: The decorator info.
        shim: A wrapper function to wrap V1 style function.
    """

    wrapped: DecoratedType[ReturnType]
    decorator_info: DecoratorInfo
    shim: Callable[[Callable[..., Any]], Callable[..., Any]] | None = None

    def __post_init__(self):
        for attr in 'setter', 'deleter':
            if hasattr(self.wrapped, attr):
                f = partial(self._call_wrapped_attr, name=attr)
                setattr(self, attr, f)

    def _call_wrapped_attr(self, func: Callable[[Any], None], *, name: str) -> PydanticDescriptorProxy[ReturnType]:
        self.wrapped = getattr(self.wrapped, name)(func)
        return self

    def __get__(self, obj: object | None, obj_type: type[object] | None = None) -> PydanticDescriptorProxy[ReturnType]:
        try:
            return self.wrapped.__get__(obj, obj_type)
        except AttributeError:
            # not a descriptor, e.g. a partial object
            return self.wrapped  # type: ignore[return-value]

    def __set_name__(self, instance: Any, name: str) -> None:
        if hasattr(self.wrapped, '__set_name__'):
            self.wrapped.__set_name__(instance, name)  # pyright: ignore[reportFunctionMemberAccess]

    def __getattr__(self, __name: str) -> Any:
        """Forward checks for __isabstractmethod__ and such."""
        return getattr(self.wrapped, __name)


DecoratorInfoType = TypeVar('DecoratorInfoType', bound=DecoratorInfo)


@dataclass(**slots_true)
class Decorator(Generic[DecoratorInfoType]):
    """A generic container class to join together the decorator metadata
    (metadata from the decorator itself, which we have when the
    decorator is called but not when we are building the core-schema)
    and the bound function (which we have after the class itself is created).

    Attributes:
        cls_ref: The class ref.
        cls_var_name: The decorated function name.
        func: The decorated function.
        shim: A wrapper function to wrap V1 style function.
        info: The decorator info.
    """

    cls_ref: str
    cls_var_name: str
    func: Callable[..., Any]
    shim: Callable[[Any], Any] | None
    info: DecoratorInfoType

    @staticmethod
    def build(
        cls_: Any,
        *,
        cls_var_name: str,
        shim: Callable[[Any], Any] | None,
        info: DecoratorInfoType,
    ) -> Decorator[DecoratorInfoType]:
        """Build a new decorator.

        Args:
            cls_: The class.
            cls_var_name: The decorated function name.
            shim: A wrapper function to wrap V1 style function.
            info: The decorator info.

        Returns:
            The new decorator instance.
        """
        func = get_attribute_from_bases(cls_, cls_var_name)
        if shim is not None:
            func = shim(func)
        func = unwrap_wrapped_function(func, unwrap_partial=False)
        if not callable(func):
            # This branch will get hit for classmethod properties
            attribute = get_attribute_from_base_dicts(cls_, cls_var_name)  # prevents the binding call to `__get__`
            if isinstance(attribute, PydanticDescriptorProxy):
                func = unwrap_wrapped_function(attribute.wrapped)
        return Decorator(
            cls_ref=get_type_ref(cls_),
            cls_var_name=cls_var_name,
            func=func,
            shim=shim,
            info=info,
        )

    def bind_to_cls(self, cls: Any) -> Decorator[DecoratorInfoType]:
        """Bind the decorator to a class.

        Args:
            cls: the class.

        Returns:
            The new decorator instance.
        """
        return self.build(
            cls,
            cls_var_name=self.cls_var_name,
            shim=self.shim,
            info=self.info,
        )


def get_bases(tp: type[Any]) -> tuple[type[Any], ...]:
    """Get the base classes of a class or typeddict.

    Args:
        tp: The type or class to get the bases from.

    Returns:
        The base classes.
    """
    if is_typeddict(tp):
        return tp.__orig_bases__  # type: ignore
    try:
        return tp.__bases__
    except AttributeError:
        return ()


def mro(tp: type[Any]) -> tuple[type[Any], ...]:
    """Calculate the Method Resolution Order of bases using the C3 algorithm.

    See https://www.python.org/download/releases/2.3/mro/
    """
    # try to use the existing mro, for performance mainly
    # but also because it helps verify the implementation below
    if not is_typeddict(tp):
        try:
            return tp.__mro__
        except AttributeError:
            # GenericAlias and some other cases
            pass

    bases = get_bases(tp)
    return (tp,) + mro_for_bases(bases)


def mro_for_bases(bases: tuple[type[Any], ...]) -> tuple[type[Any], ...]:
    def merge_seqs(seqs: list[deque[type[Any]]]) -> Iterable[type[Any]]:
        while True:
            non_empty = [seq for seq in seqs if seq]
            if not non_empty:
                # Nothing left to process, we're done.
                return
            candidate: type[Any] | None = None
            for seq in non_empty:  # Find merge candidates among seq heads.
                candidate = seq[0]
                not_head = [s for s in non_empty if candidate in islice(s, 1, None)]
                if not_head:
                    # Reject the candidate.
                    candidate = None
                else:
                    break
            if not candidate:
                raise TypeError('Inconsistent hierarchy, no C3 MRO is possible')
            yield candidate
            for seq in non_empty:
                # Remove candidate.
                if seq[0] == candidate:
                    seq.popleft()

    seqs = [deque(mro(base)) for base in bases] + [deque(bases)]
    return tuple(merge_seqs(seqs))
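
# --- Illustrative sketch (editorial addition, not part of the upstream file) ---
# A minimal check of the C3 merge above on a diamond hierarchy; for ordinary
# classes `mro()` and `mro_for_bases()` should agree with the interpreter's
# own linearization:
#
#     class A: ...
#     class B(A): ...
#     class C(A): ...
#     class D(B, C): ...
#
#     assert mro(D) == D.__mro__ == (D, B, C, A, object)
#     assert mro_for_bases(D.__bases__) == (B, C, A, object)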


_sentinel = object()


def get_attribute_from_bases(tp: type[Any] | tuple[type[Any], ...], name: str) -> Any:
    """Get the attribute from the next class in the MRO that has it,
    aiming to simulate calling the method on the actual class.

    The reason for iterating over the mro instead of just getting
    the attribute (which would do that for us) is to support TypedDict,
    which lacks a real __mro__, but can have a virtual one constructed
    from its bases (as done here).

    Args:
        tp: The type or class to search for the attribute. If a tuple, this is treated as a set of base classes.
        name: The name of the attribute to retrieve.

    Returns:
        Any: The attribute value, if found.

    Raises:
        AttributeError: If the attribute is not found in any class in the MRO.
    """
    if isinstance(tp, tuple):
        for base in mro_for_bases(tp):
            attribute = base.__dict__.get(name, _sentinel)
            if attribute is not _sentinel:
                attribute_get = getattr(attribute, '__get__', None)
                if attribute_get is not None:
                    return attribute_get(None, tp)
                return attribute
        raise AttributeError(f'{name} not found in {tp}')
    else:
        try:
            return getattr(tp, name)
        except AttributeError:
            return get_attribute_from_bases(mro(tp), name)
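
# --- Illustrative sketch (editorial addition, not part of the upstream file) ---
# The `attribute_get(None, tp)` call above reproduces normal attribute access:
# a classmethod found on a base class is bound to the class the lookup started
# from, rather than returned as a raw descriptor.
#
#     class Base:
#         @classmethod
#         def make(cls):
#             return cls
#
#     class Child(Base): ...
#
#     bound = Base.__dict__['make'].__get__(None, Child)
#     assert bound() is Child
#     # get_attribute_from_bases(Child, 'make')() would likewise return Child.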


def get_attribute_from_base_dicts(tp: type[Any], name: str) -> Any:
    """Get an attribute out of the `__dict__` following the MRO.
    This prevents the call to `__get__` on the descriptor, and allows
    us to get the original function for classmethod properties.

    Args:
        tp: The type or class to search for the attribute.
        name: The name of the attribute to retrieve.

    Returns:
        Any: The attribute value, if found.

    Raises:
        KeyError: If the attribute is not found in any class's `__dict__` in the MRO.
    """
    for base in reversed(mro(tp)):
        if name in base.__dict__:
            return base.__dict__[name]
    return tp.__dict__[name]  # raise the error


@dataclass(**slots_true)
class DecoratorInfos:
    """Mapping of name in the class namespace to decorator info.

    Note that the name in the class namespace is the function or attribute name,
    not the field name!
    """

    validators: dict[str, Decorator[ValidatorDecoratorInfo]] = field(default_factory=dict)
    field_validators: dict[str, Decorator[FieldValidatorDecoratorInfo]] = field(default_factory=dict)
    root_validators: dict[str, Decorator[RootValidatorDecoratorInfo]] = field(default_factory=dict)
    field_serializers: dict[str, Decorator[FieldSerializerDecoratorInfo]] = field(default_factory=dict)
    model_serializers: dict[str, Decorator[ModelSerializerDecoratorInfo]] = field(default_factory=dict)
    model_validators: dict[str, Decorator[ModelValidatorDecoratorInfo]] = field(default_factory=dict)
    computed_fields: dict[str, Decorator[ComputedFieldInfo]] = field(default_factory=dict)

    @staticmethod
    def build(model_dc: type[Any]) -> DecoratorInfos:  # noqa: C901 (ignore complexity)
        """We want to collect all DecFunc instances that exist as
        attributes in the namespace of the class (a BaseModel or dataclass)
        that called us.
        But we want to collect these in the order of the bases,
        so instead of getting them all from the leaf class (the class that called us),
        we traverse the bases from root (the oldest ancestor class) to leaf
        and collect all of the instances as we go, taking care to replace
        any duplicate ones with the last one we see to mimic how function overriding
        works with inheritance.
        If we do replace any functions we put the replacement into the position
        the replaced function was in; that is, we maintain the order.
        """
        # reminder: dicts are ordered and replacement does not alter the order
        res = DecoratorInfos()
        for base in reversed(mro(model_dc)[1:]):
            existing: DecoratorInfos | None = base.__dict__.get('__pydantic_decorators__')
            if existing is None:
                existing = DecoratorInfos.build(base)
            res.validators.update({k: v.bind_to_cls(model_dc) for k, v in existing.validators.items()})
            res.field_validators.update({k: v.bind_to_cls(model_dc) for k, v in existing.field_validators.items()})
            res.root_validators.update({k: v.bind_to_cls(model_dc) for k, v in existing.root_validators.items()})
            res.field_serializers.update({k: v.bind_to_cls(model_dc) for k, v in existing.field_serializers.items()})
            res.model_serializers.update({k: v.bind_to_cls(model_dc) for k, v in existing.model_serializers.items()})
            res.model_validators.update({k: v.bind_to_cls(model_dc) for k, v in existing.model_validators.items()})
            res.computed_fields.update({k: v.bind_to_cls(model_dc) for k, v in existing.computed_fields.items()})

        to_replace: list[tuple[str, Any]] = []

        for var_name, var_value in vars(model_dc).items():
            if isinstance(var_value, PydanticDescriptorProxy):
                info = var_value.decorator_info
                if isinstance(info, ValidatorDecoratorInfo):
                    res.validators[var_name] = Decorator.build(
                        model_dc, cls_var_name=var_name, shim=var_value.shim, info=info
                    )
                elif isinstance(info, FieldValidatorDecoratorInfo):
                    res.field_validators[var_name] = Decorator.build(
                        model_dc, cls_var_name=var_name, shim=var_value.shim, info=info
                    )
                elif isinstance(info, RootValidatorDecoratorInfo):
                    res.root_validators[var_name] = Decorator.build(
                        model_dc, cls_var_name=var_name, shim=var_value.shim, info=info
                    )
                elif isinstance(info, FieldSerializerDecoratorInfo):
                    # check whether a serializer function is already registered for fields
                    for field_serializer_decorator in res.field_serializers.values():
                        # check that each field has at most one serializer function.
                        # serializer functions for the same field in subclasses are allowed,
                        # and are treated as overrides
                        if field_serializer_decorator.cls_var_name == var_name:
                            continue
                        for f in info.fields:
                            if f in field_serializer_decorator.info.fields:
                                raise PydanticUserError(
                                    'Multiple field serializer functions were defined '
                                    f'for field {f!r}, this is not allowed.',
                                    code='multiple-field-serializers',
                                )
                    res.field_serializers[var_name] = Decorator.build(
                        model_dc, cls_var_name=var_name, shim=var_value.shim, info=info
                    )
                elif isinstance(info, ModelValidatorDecoratorInfo):
                    res.model_validators[var_name] = Decorator.build(
                        model_dc, cls_var_name=var_name, shim=var_value.shim, info=info
                    )
                elif isinstance(info, ModelSerializerDecoratorInfo):
                    res.model_serializers[var_name] = Decorator.build(
                        model_dc, cls_var_name=var_name, shim=var_value.shim, info=info
                    )
                else:
                    from ..fields import ComputedFieldInfo

                    assert isinstance(info, ComputedFieldInfo)  # the only remaining DecoratorInfo variant
                    res.computed_fields[var_name] = Decorator.build(
                        model_dc, cls_var_name=var_name, shim=None, info=info
                    )
                to_replace.append((var_name, var_value.wrapped))
        if to_replace:
            # If we can save `__pydantic_decorators__` on the class we'll be able to check for it above
            # so then we don't need to re-process the type, which means we can discard our descriptor wrappers
            # and replace them with the thing they are wrapping (see the other setattr call below)
            # which allows validator class methods to also function as regular class methods
            setattr(model_dc, '__pydantic_decorators__', res)
            for name, value in to_replace:
                setattr(model_dc, name, value)
        return res
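
# --- Illustrative sketch (editorial addition, not part of the upstream file) ---
# The override behaviour described in the docstring above, assuming pydantic v2
# is importable; a subclass validator with the same attribute name replaces the
# base one while keeping its position:
#
#     from pydantic import BaseModel, field_validator
#
#     class Parent(BaseModel):
#         x: int
#
#         @field_validator('x')
#         @classmethod
#         def check_x(cls, v: int) -> int:
#             return v
#
#     class Child(Parent):
#         @field_validator('x')
#         @classmethod
#         def check_x(cls, v: int) -> int:  # overrides Parent.check_x by name
#             return v + 1
#
#     assert list(Child.__pydantic_decorators__.field_validators) == ['check_x']
#     assert Child(x=1).x == 2  # only the override runs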


def inspect_validator(validator: Callable[..., Any], mode: FieldValidatorModes) -> bool:
    """Look at a field or model validator function and determine whether it takes an info argument.

    An error is raised if the function has an invalid signature.

    Args:
        validator: The validator function to inspect.
        mode: The proposed validator mode.

    Returns:
        Whether the validator takes an info argument.
    """
    try:
        sig = signature(validator)
    except ValueError:
        # builtins and some C extensions don't have signatures
        # assume that they don't take an info argument and only take a single argument
        # e.g. `str.strip` or `datetime.datetime`
        return False
    n_positional = count_positional_params(sig)
    if mode == 'wrap':
        if n_positional == 3:
            return True
        elif n_positional == 2:
            return False
    else:
        assert mode in {'before', 'after', 'plain'}, f"invalid mode: {mode!r}, expected 'before', 'after' or 'plain'"
        if n_positional == 2:
            return True
        elif n_positional == 1:
            return False

    raise PydanticUserError(
        f'Unrecognized field_validator function signature for {validator} with `mode={mode}`:{sig}',
        code='validator-signature',
    )
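
# --- Illustrative sketch (editorial addition, not part of the upstream file) ---
# How the positional-parameter count above maps to the result:
#
#     def before_no_info(value): ...                 # inspect_validator(..., 'before') -> False
#     def before_with_info(value, info): ...         # inspect_validator(..., 'before') -> True
#     def wrap_no_info(value, handler): ...          # inspect_validator(..., 'wrap')   -> False
#     def wrap_with_info(value, handler, info): ...  # inspect_validator(..., 'wrap')   -> True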


def inspect_field_serializer(
    serializer: Callable[..., Any], mode: Literal['plain', 'wrap'], computed_field: bool = False
) -> tuple[bool, bool]:
    """Look at a field serializer function and determine if it is a field serializer,
    and whether it takes an info argument.

    An error is raised if the function has an invalid signature.

    Args:
        serializer: The serializer function to inspect.
        mode: The serializer mode, either 'plain' or 'wrap'.
        computed_field: When True, the serializer is applied to a computed field, which
            must not use an info argument.

    Returns:
        Tuple of (is_field_serializer, info_arg).
    """
    sig = signature(serializer)

    first = next(iter(sig.parameters.values()), None)
    is_field_serializer = first is not None and first.name == 'self'

    n_positional = count_positional_params(sig)
    if is_field_serializer:
        # -1 to correct for self parameter
        info_arg = _serializer_info_arg(mode, n_positional - 1)
    else:
        info_arg = _serializer_info_arg(mode, n_positional)

    if info_arg is None:
        raise PydanticUserError(
            f'Unrecognized field_serializer function signature for {serializer} with `mode={mode}`:{sig}',
            code='field-serializer-signature',
        )
    if info_arg and computed_field:
        raise PydanticUserError(
            'field_serializer on computed_field does not use info signature', code='field-serializer-signature'
        )
    else:
        return is_field_serializer, info_arg


def inspect_annotated_serializer(serializer: Callable[..., Any], mode: Literal['plain', 'wrap']) -> bool:
    """Look at a serializer function used via `Annotated` and determine whether it takes an info argument.

    An error is raised if the function has an invalid signature.

    Args:
        serializer: The serializer function to check.
        mode: The serializer mode, either 'plain' or 'wrap'.

    Returns:
        info_arg
    """
    sig = signature(serializer)
    info_arg = _serializer_info_arg(mode, count_positional_params(sig))
    if info_arg is None:
        raise PydanticUserError(
            f'Unrecognized field_serializer function signature for {serializer} with `mode={mode}`:{sig}',
            code='field-serializer-signature',
        )
    else:
        return info_arg


def inspect_model_serializer(serializer: Callable[..., Any], mode: Literal['plain', 'wrap']) -> bool:
    """Look at a model serializer function and determine whether it takes an info argument.

    An error is raised if the function has an invalid signature.

    Args:
        serializer: The serializer function to check.
        mode: The serializer mode, either 'plain' or 'wrap'.

    Returns:
        `info_arg` - whether the function expects an info argument.
    """
    if isinstance(serializer, (staticmethod, classmethod)) or not is_instance_method_from_sig(serializer):
        raise PydanticUserError(
            '`@model_serializer` must be applied to instance methods', code='model-serializer-instance-method'
        )

    sig = signature(serializer)
    info_arg = _serializer_info_arg(mode, count_positional_params(sig))
    if info_arg is None:
        raise PydanticUserError(
            f'Unrecognized model_serializer function signature for {serializer} with `mode={mode}`:{sig}',
            code='model-serializer-signature',
        )
    else:
        return info_arg


def _serializer_info_arg(mode: Literal['plain', 'wrap'], n_positional: int) -> bool | None:
    if mode == 'plain':
        if n_positional == 1:
            # (__input_value: Any) -> Any
            return False
        elif n_positional == 2:
            # (__model: Any, __input_value: Any) -> Any
            return True
    else:
        assert mode == 'wrap', f"invalid mode: {mode!r}, expected 'plain' or 'wrap'"
        if n_positional == 2:
            # (__input_value: Any, __serializer: SerializerFunctionWrapHandler) -> Any
            return False
        elif n_positional == 3:
            # (__input_value: Any, __serializer: SerializerFunctionWrapHandler, __info: SerializationInfo) -> Any
            return True

    return None
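
# --- Illustrative sketch (editorial addition, not part of the upstream file) ---
# The full arity table implemented above:
#
#     _serializer_info_arg('plain', 1)  -> False  # (value,)
#     _serializer_info_arg('plain', 2)  -> True   # (value, info)
#     _serializer_info_arg('wrap', 2)   -> False  # (value, handler)
#     _serializer_info_arg('wrap', 3)   -> True   # (value, handler, info)
#     _serializer_info_arg('plain', 3)  -> None   # rejected by the callers above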


AnyDecoratorCallable: TypeAlias = (
    'Union[classmethod[Any, Any, Any], staticmethod[Any, Any], partialmethod[Any], Callable[..., Any]]'
)


def is_instance_method_from_sig(function: AnyDecoratorCallable) -> bool:
    """Whether the function is an instance method.

    It will consider a function to be an instance method if the first parameter of
    the function is `self`.

    Args:
        function: The function to check.

    Returns:
        `True` if the function is an instance method, `False` otherwise.
    """
    sig = signature(unwrap_wrapped_function(function))
    first = next(iter(sig.parameters.values()), None)
    if first and first.name == 'self':
        return True
    return False


def ensure_classmethod_based_on_signature(function: AnyDecoratorCallable) -> Any:
    """Apply the `@classmethod` decorator on the function.

    Args:
        function: The function to apply the decorator on.

    Returns:
        The `@classmethod` decorator applied function.
    """
    if not isinstance(
        unwrap_wrapped_function(function, unwrap_class_static_method=False), classmethod
    ) and _is_classmethod_from_sig(function):
        return classmethod(function)  # type: ignore[arg-type]
    return function


def _is_classmethod_from_sig(function: AnyDecoratorCallable) -> bool:
    sig = signature(unwrap_wrapped_function(function))
    first = next(iter(sig.parameters.values()), None)
    if first and first.name == 'cls':
        return True
    return False
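
# --- Illustrative sketch (editorial addition, not part of the upstream file) ---
# Promotion to classmethod is driven purely by the first parameter's name:
#
#     def by_convention(cls, value): ...
#     def regular(self, value): ...
#
#     assert isinstance(ensure_classmethod_based_on_signature(by_convention), classmethod)
#     assert ensure_classmethod_based_on_signature(regular) is regular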


def unwrap_wrapped_function(
    func: Any,
    *,
    unwrap_partial: bool = True,
    unwrap_class_static_method: bool = True,
) -> Any:
    """Recursively unwraps a wrapped function until the underlying function is reached.
    This handles property, functools.partial, functools.partialmethod, staticmethod and classmethod.

    Args:
        func: The function to unwrap.
        unwrap_partial: If True (default), unwrap partial and partialmethod decorators, otherwise don't.
        unwrap_class_static_method: If True (default), also unwrap classmethod and staticmethod
            decorators. If False, only unwrap partial and partialmethod decorators.

    Returns:
        The underlying function of the wrapped function.
    """
    unwrap_types: set[Any] = {property, cached_property}

    if unwrap_partial:
        unwrap_types.update({partial, partialmethod})

    if unwrap_class_static_method:
        unwrap_types.update({staticmethod, classmethod})

    while isinstance(func, tuple(unwrap_types)):
        if unwrap_class_static_method and isinstance(func, (classmethod, staticmethod)):
            func = func.__func__
        elif isinstance(func, (partial, partialmethod)):
            func = func.func
        elif isinstance(func, property):
            func = func.fget  # arbitrary choice, convenient for computed fields
        else:
            # Make coverage happy as it can only get here in the last possible case
            assert isinstance(func, cached_property)
            func = func.func  # type: ignore

    return func
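
# --- Illustrative sketch (editorial addition, not part of the upstream file) ---
# Peeling nested wrappers down to the raw function:
#
#     from functools import partial
#
#     def add(a, b):
#         return a + b
#
#     wrapped = classmethod(partial(add, 1))
#     assert unwrap_wrapped_function(wrapped) is add
#     # With unwrap_partial=False only the classmethod layer is removed:
#     assert isinstance(unwrap_wrapped_function(wrapped, unwrap_partial=False), partial)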


def get_function_return_type(
    func: Any, explicit_return_type: Any, types_namespace: dict[str, Any] | None = None
) -> Any:
    """Get the function return type.

    It gets the return type from the type annotation if `explicit_return_type` is
    `PydanticUndefined`. Otherwise, it returns `explicit_return_type`.

    Args:
        func: The function to get its return type.
        explicit_return_type: The explicit return type.
        types_namespace: The types namespace, defaults to `None`.

    Returns:
        The function return type.
    """
    if explicit_return_type is PydanticUndefined:
        # try to get it from the type annotation
        hints = get_function_type_hints(
            unwrap_wrapped_function(func), include_keys={'return'}, types_namespace=types_namespace
        )
        return hints.get('return', PydanticUndefined)
    else:
        return explicit_return_type


def count_positional_params(sig: Signature) -> int:
    return sum(1 for param in sig.parameters.values() if can_be_positional(param))


def can_be_positional(param: Parameter) -> bool:
    return param.kind in (Parameter.POSITIONAL_ONLY, Parameter.POSITIONAL_OR_KEYWORD)


def ensure_property(f: Any) -> Any:
    """Ensure that a function is a `property` or `cached_property`, or is a valid descriptor.

    Args:
        f: The function to check.

    Returns:
        The function, or a `property` or `cached_property` instance wrapping the function.
    """
    if ismethoddescriptor(f) or isdatadescriptor(f):
        return f
    else:
        return property(f)

181
lib/pydantic/_internal/_decorators_v1.py
Normal file

@@ -0,0 +1,181 @@
"""Logic for V1 validators, e.g. `@validator` and `@root_validator`."""
|
||||
from __future__ import annotations as _annotations
|
||||
|
||||
from inspect import Parameter, signature
|
||||
from typing import Any, Dict, Tuple, Union, cast
|
||||
|
||||
from pydantic_core import core_schema
|
||||
from typing_extensions import Protocol
|
||||
|
||||
from ..errors import PydanticUserError
|
||||
from ._decorators import can_be_positional
|
||||
|
||||
|
||||
class V1OnlyValueValidator(Protocol):
|
||||
"""A simple validator, supported for V1 validators and V2 validators."""
|
||||
|
||||
def __call__(self, __value: Any) -> Any:
|
||||
...
|
||||
|
||||
|
||||
class V1ValidatorWithValues(Protocol):
|
||||
"""A validator with `values` argument, supported for V1 validators and V2 validators."""
|
||||
|
||||
def __call__(self, __value: Any, values: dict[str, Any]) -> Any:
|
||||
...
|
||||
|
||||
|
||||
class V1ValidatorWithValuesKwOnly(Protocol):
|
||||
"""A validator with keyword only `values` argument, supported for V1 validators and V2 validators."""
|
||||
|
||||
def __call__(self, __value: Any, *, values: dict[str, Any]) -> Any:
|
||||
...
|
||||
|
||||
|
||||
class V1ValidatorWithKwargs(Protocol):
|
||||
"""A validator with `kwargs` argument, supported for V1 validators and V2 validators."""
|
||||
|
||||
def __call__(self, __value: Any, **kwargs: Any) -> Any:
|
||||
...
|
||||
|
||||
|
||||
class V1ValidatorWithValuesAndKwargs(Protocol):
|
||||
"""A validator with `values` and `kwargs` arguments, supported for V1 validators and V2 validators."""
|
||||
|
||||
def __call__(self, __value: Any, values: dict[str, Any], **kwargs: Any) -> Any:
|
||||
...
|
||||
|
||||
|
||||
V1Validator = Union[
|
||||
V1ValidatorWithValues, V1ValidatorWithValuesKwOnly, V1ValidatorWithKwargs, V1ValidatorWithValuesAndKwargs
|
||||
]
|
||||
|
||||
|
||||
def can_be_keyword(param: Parameter) -> bool:
|
||||
return param.kind in (Parameter.POSITIONAL_OR_KEYWORD, Parameter.KEYWORD_ONLY)
|
||||
|
||||
|
||||
def make_generic_v1_field_validator(validator: V1Validator) -> core_schema.WithInfoValidatorFunction:
|
||||
"""Wrap a V1 style field validator for V2 compatibility.
|
||||
|
||||
Args:
|
||||
validator: The V1 style field validator.
|
||||
|
||||
Returns:
|
||||
A wrapped V2 style field validator.
|
||||
|
||||
Raises:
|
||||
PydanticUserError: If the signature is not supported or the parameters are
|
||||
not available in Pydantic V2.
|
||||
"""
|
||||
sig = signature(validator)
|
||||
|
||||
needs_values_kw = False
|
||||
|
||||
for param_num, (param_name, parameter) in enumerate(sig.parameters.items()):
|
||||
if can_be_keyword(parameter) and param_name in ('field', 'config'):
|
||||
raise PydanticUserError(
|
||||
'The `field` and `config` parameters are not available in Pydantic V2, '
|
||||
'please use the `info` parameter instead.',
|
||||
code='validator-field-config-info',
|
||||
)
|
||||
if parameter.kind is Parameter.VAR_KEYWORD:
|
||||
needs_values_kw = True
|
||||
elif can_be_keyword(parameter) and param_name == 'values':
|
||||
needs_values_kw = True
|
||||
elif can_be_positional(parameter) and param_num == 0:
|
||||
# value
|
||||
continue
|
||||
elif parameter.default is Parameter.empty: # ignore params with defaults e.g. bound by functools.partial
|
||||
raise PydanticUserError(
|
||||
f'Unsupported signature for V1 style validator {validator}: {sig} is not supported.',
|
||||
code='validator-v1-signature',
|
||||
)
|
||||
|
||||
if needs_values_kw:
|
||||
# (v, **kwargs), (v, values, **kwargs), (v, *, values, **kwargs) or (v, *, values)
|
||||
val1 = cast(V1ValidatorWithValues, validator)
|
||||
|
||||
def wrapper1(value: Any, info: core_schema.ValidationInfo) -> Any:
|
||||
return val1(value, values=info.data)
|
||||
|
||||
return wrapper1
|
||||
else:
|
||||
val2 = cast(V1OnlyValueValidator, validator)
|
||||
|
||||
def wrapper2(value: Any, _: core_schema.ValidationInfo) -> Any:
|
||||
return val2(value)
|
||||
|
||||
return wrapper2
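
# --- Illustrative sketch (editorial addition, not part of the upstream file) ---
# Adapting a V1-style validator to the V2 `(value, info)` calling convention:
#
#     def v1_style(value, values):  # V1 validators received prior fields as `values`
#         return value
#
#     wrapped = make_generic_v1_field_validator(v1_style)
#     # pydantic-core now calls wrapped(value, info); the shim forwards
#     # info.data as the old `values` argument.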


RootValidatorValues = Dict[str, Any]
# technically tuple[model_dict, model_extra, fields_set] | tuple[dataclass_dict, init_vars]
RootValidatorFieldsTuple = Tuple[Any, ...]


class V1RootValidatorFunction(Protocol):
    """A simple root validator, supported for V1 validators and V2 validators."""

    def __call__(self, __values: RootValidatorValues) -> RootValidatorValues:
        ...


class V2CoreBeforeRootValidator(Protocol):
    """V2 validator with mode='before'."""

    def __call__(self, __values: RootValidatorValues, __info: core_schema.ValidationInfo) -> RootValidatorValues:
        ...


class V2CoreAfterRootValidator(Protocol):
    """V2 validator with mode='after'."""

    def __call__(
        self, __fields_tuple: RootValidatorFieldsTuple, __info: core_schema.ValidationInfo
    ) -> RootValidatorFieldsTuple:
        ...


def make_v1_generic_root_validator(
    validator: V1RootValidatorFunction, pre: bool
) -> V2CoreBeforeRootValidator | V2CoreAfterRootValidator:
    """Wrap a V1 style root validator for V2 compatibility.

    Args:
        validator: The V1 style root validator.
        pre: Whether the validator is a pre validator.

    Returns:
        A wrapped V2 style validator.
    """
    if pre is True:
        # mode='before' for pydantic-core
        def _wrapper1(values: RootValidatorValues, _: core_schema.ValidationInfo) -> RootValidatorValues:
            return validator(values)

        return _wrapper1

    # mode='after' for pydantic-core
    def _wrapper2(fields_tuple: RootValidatorFieldsTuple, _: core_schema.ValidationInfo) -> RootValidatorFieldsTuple:
        if len(fields_tuple) == 2:
            # dataclass, this is easy
            values, init_vars = fields_tuple
            values = validator(values)
            return values, init_vars
        else:
            # ugly hack: to match v1 behaviour, we merge values and model_extra, then split them up based on fields
            # afterwards
            model_dict, model_extra, fields_set = fields_tuple
            if model_extra:
                fields = set(model_dict.keys())
                model_dict.update(model_extra)
                model_dict_new = validator(model_dict)
                for k in list(model_dict_new.keys()):
                    if k not in fields:
                        model_extra[k] = model_dict_new.pop(k)
            else:
                model_dict_new = validator(model_dict)
            return model_dict_new, model_extra, fields_set

    return _wrapper2
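
# --- Illustrative sketch (editorial addition, not part of the upstream file) ---
# A V1 pre root validator sees the raw input dict, so the mode='before' shim
# simply drops the info argument:
#
#     def v1_root(values):
#         values.setdefault('name', 'unknown')
#         return values
#
#     before = make_v1_generic_root_validator(v1_root, pre=True)
#     # pydantic-core calls before({'age': 3}, info) -> {'age': 3, 'name': 'unknown'}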

506
lib/pydantic/_internal/_discriminated_union.py
Normal file

@@ -0,0 +1,506 @@
from __future__ import annotations as _annotations

from typing import TYPE_CHECKING, Any, Hashable, Sequence

from pydantic_core import CoreSchema, core_schema

from ..errors import PydanticUserError
from . import _core_utils
from ._core_utils import (
    CoreSchemaField,
    collect_definitions,
    simplify_schema_references,
)

if TYPE_CHECKING:
    from ..types import Discriminator

CORE_SCHEMA_METADATA_DISCRIMINATOR_PLACEHOLDER_KEY = 'pydantic.internal.union_discriminator'


class MissingDefinitionForUnionRef(Exception):
    """Raised when applying a discriminated union discriminator to a schema
    requires a definition that is not yet defined.
    """

    def __init__(self, ref: str) -> None:
        self.ref = ref
        super().__init__(f'Missing definition for ref {self.ref!r}')


def set_discriminator_in_metadata(schema: CoreSchema, discriminator: Any) -> None:
    schema.setdefault('metadata', {})
    metadata = schema.get('metadata')
    assert metadata is not None
    metadata[CORE_SCHEMA_METADATA_DISCRIMINATOR_PLACEHOLDER_KEY] = discriminator


def apply_discriminators(schema: core_schema.CoreSchema) -> core_schema.CoreSchema:
    definitions: dict[str, CoreSchema] | None = None

    def inner(s: core_schema.CoreSchema, recurse: _core_utils.Recurse) -> core_schema.CoreSchema:
        nonlocal definitions

        s = recurse(s, inner)
        if s['type'] == 'tagged-union':
            return s

        metadata = s.get('metadata', {})
        discriminator = metadata.pop(CORE_SCHEMA_METADATA_DISCRIMINATOR_PLACEHOLDER_KEY, None)
        if discriminator is not None:
            if definitions is None:
                definitions = collect_definitions(schema)
            s = apply_discriminator(s, discriminator, definitions)
        return s

    return simplify_schema_references(_core_utils.walk_core_schema(schema, inner))


def apply_discriminator(
    schema: core_schema.CoreSchema,
    discriminator: str | Discriminator,
    definitions: dict[str, core_schema.CoreSchema] | None = None,
) -> core_schema.CoreSchema:
    """Applies the discriminator and returns a new core schema.

    Args:
        schema: The input schema.
        discriminator: The name of the field which will serve as the discriminator.
        definitions: A mapping of schema ref to schema.

    Returns:
        The new core schema.

    Raises:
        TypeError:
            - If `discriminator` is used with an invalid union variant.
            - If `discriminator` is used with a `Union` type with one variant.
            - If a `discriminator` value is mapped to multiple choices.
        MissingDefinitionForUnionRef:
            If the definition for a ref is missing.
        PydanticUserError:
            - If a model in the union doesn't have a discriminator field.
            - If the discriminator field has a non-string alias.
            - If the discriminator fields have different aliases.
            - If the discriminator field is not of type `Literal`.
    """
    from ..types import Discriminator

    if isinstance(discriminator, Discriminator):
        if isinstance(discriminator.discriminator, str):
            discriminator = discriminator.discriminator
        else:
            return discriminator._convert_schema(schema)

    return _ApplyInferredDiscriminator(discriminator, definitions or {}).apply(schema)
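
# --- Illustrative sketch (editorial addition, not part of the upstream file) ---
# The user-facing feature this implements, assuming pydantic v2 is importable:
#
#     from typing import Literal, Union
#     from pydantic import BaseModel, Field
#
#     class Cat(BaseModel):
#         pet_type: Literal['cat']
#
#     class Dog(BaseModel):
#         pet_type: Literal['dog']
#
#     class Owner(BaseModel):
#         pet: Union[Cat, Dog] = Field(discriminator='pet_type')
#
#     # The plain union schema for `pet` is rewritten into a tagged-union
#     # keyed on the 'pet_type' literal values.
#     assert isinstance(Owner(pet={'pet_type': 'dog'}).pet, Dog)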


class _ApplyInferredDiscriminator:
    """This class is used to convert an input schema containing a union schema into one where that union is
    replaced with a tagged-union, with all the associated debugging and performance benefits.

    This is done by:
    * Validating that the input schema is compatible with the provided discriminator
    * Introspecting the schema to determine which discriminator values should map to which union choices
    * Handling various edge cases such as 'definitions', 'default', 'nullable' schemas, and more

    I have chosen to implement the conversion algorithm in this class, rather than a function,
    to make it easier to maintain state while recursively walking the provided CoreSchema.
    """

    def __init__(self, discriminator: str, definitions: dict[str, core_schema.CoreSchema]):
        # `discriminator` should be the name of the field which will serve as the discriminator.
        # It must be the python name of the field, and *not* the field's alias. Note that as of now,
        # all members of a discriminated union _must_ use a field with the same name as the discriminator.
        # This may change if/when we expose a way to manually specify the TaggedUnionSchema's choices.
        self.discriminator = discriminator

        # `definitions` should contain a mapping of schema ref to schema for all schemas which might
        # be referenced by some choice
        self.definitions = definitions

        # `_discriminator_alias` will hold the value, if present, of the alias for the discriminator
        #
        # Note: following the v1 implementation, we currently disallow the use of different aliases
        # for different choices. This is not a limitation of pydantic_core, but if we try to handle
        # this, the inference logic gets complicated very quickly, and could result in confusing
        # debugging challenges for users making subtle mistakes.
        #
        # Rather than trying to do the most powerful inference possible, I think we should eventually
        # expose a way to more-manually control the way the TaggedUnionSchema is constructed through
        # the use of a new type which would be placed as an Annotation on the Union type. This would
        # provide the full flexibility/power of pydantic_core's TaggedUnionSchema where necessary for
        # more complex cases, without over-complicating the inference logic for the common cases.
        self._discriminator_alias: str | None = None

        # `_should_be_nullable` indicates whether the converted union has `None` as an allowed value.
        # If `None` is an acceptable value of the (possibly-wrapped) union, we ignore it while
        # constructing the TaggedUnionSchema, but set the `_should_be_nullable` attribute to True.
        # Once we have constructed the TaggedUnionSchema, if `_should_be_nullable` is True, we ensure
        # that the final schema gets wrapped as a NullableSchema. This has the same semantics on the
        # python side, but resolves the issue that `None` cannot correspond to any discriminator values.
        self._should_be_nullable = False

        # `_is_nullable` is used to track if the final produced schema will definitely be nullable;
        # we set it to True if the input schema is wrapped in a nullable schema that we know will be preserved
        # as an indication that, even if None is discovered as one of the union choices, we will not need to wrap
        # the final value in another nullable schema.
        #
        # This is more complicated than just checking for the final outermost schema having type 'nullable' thanks
        # to the possible presence of other wrapper schemas such as DefinitionsSchema, WithDefaultSchema, etc.
        self._is_nullable = False

        # `_choices_to_handle` serves as a stack of choices to add to the tagged union. Initially, choices
        # from the union in the wrapped schema will be appended to this list, and the recursive choice-handling
        # algorithm may add more choices to this stack as (nested) unions are encountered.
        self._choices_to_handle: list[core_schema.CoreSchema] = []

        # `_tagged_union_choices` is built during the call to `apply`, and will hold the choices to be included
        # in the output TaggedUnionSchema that will replace the union from the input schema
        self._tagged_union_choices: dict[Hashable, core_schema.CoreSchema] = {}

        # `_used` is changed to True after applying the discriminator to prevent accidental re-use
        self._used = False

    def apply(self, schema: core_schema.CoreSchema) -> core_schema.CoreSchema:
        """Return a new CoreSchema based on `schema` that uses a tagged-union with the discriminator provided
        to this class.

        Args:
            schema: The input schema.

        Returns:
            The new core schema.

        Raises:
            TypeError:
                - If `discriminator` is used with an invalid union variant.
                - If `discriminator` is used with a `Union` type with one variant.
                - If a `discriminator` value is mapped to multiple choices.
            MissingDefinitionForUnionRef:
                If the definition for a ref is missing.
            PydanticUserError:
                - If a model in the union doesn't have a discriminator field.
                - If the discriminator field has a non-string alias.
                - If the discriminator fields have different aliases.
                - If the discriminator field is not of type `Literal`.
        """
        self.definitions.update(collect_definitions(schema))
        assert not self._used
        schema = self._apply_to_root(schema)
        if self._should_be_nullable and not self._is_nullable:
            schema = core_schema.nullable_schema(schema)
        self._used = True
        new_defs = collect_definitions(schema)
        missing_defs = self.definitions.keys() - new_defs.keys()
        if missing_defs:
            schema = core_schema.definitions_schema(schema, [self.definitions[ref] for ref in missing_defs])
        return schema

    def _apply_to_root(self, schema: core_schema.CoreSchema) -> core_schema.CoreSchema:
        """This method handles the outer-most stage of recursion over the input schema:
        unwrapping nullable or definitions schemas, and calling the `_handle_choice`
        method iteratively on the choices extracted (recursively) from the possibly-wrapped union.
        """
        if schema['type'] == 'nullable':
            self._is_nullable = True
            wrapped = self._apply_to_root(schema['schema'])
            nullable_wrapper = schema.copy()
            nullable_wrapper['schema'] = wrapped
            return nullable_wrapper

        if schema['type'] == 'definitions':
            wrapped = self._apply_to_root(schema['schema'])
            definitions_wrapper = schema.copy()
            definitions_wrapper['schema'] = wrapped
            return definitions_wrapper

        if schema['type'] != 'union':
            # If the schema is not a union, it probably means it just had a single member and
            # was flattened by pydantic_core.
            # However, it still may make sense to apply the discriminator to this schema,
            # as a way to get discriminated-union-style error messages, so we allow this here.
            schema = core_schema.union_schema([schema])

        # Reverse the choices list before extending the stack so that they get handled in the order they occur
        choices_schemas = [v[0] if isinstance(v, tuple) else v for v in schema['choices'][::-1]]
        self._choices_to_handle.extend(choices_schemas)
        while self._choices_to_handle:
            choice = self._choices_to_handle.pop()
            self._handle_choice(choice)

        if self._discriminator_alias is not None and self._discriminator_alias != self.discriminator:
            # * We need to annotate `discriminator` as a union here to handle both branches of this conditional
            # * We need to annotate `discriminator` as list[list[str | int]] and not list[list[str]] due to the
            #   invariance of list, and because list[list[str | int]] is the type of the discriminator argument
            #   to tagged_union_schema below
            # * See the docstring of pydantic_core.core_schema.tagged_union_schema for more details about how to
            #   interpret the value of the discriminator argument to tagged_union_schema. (The list[list[str]] here
            #   is the appropriate way to provide a list of fallback attributes to check for a discriminator value.)
            discriminator: str | list[list[str | int]] = [[self.discriminator], [self._discriminator_alias]]
        else:
            discriminator = self.discriminator
        return core_schema.tagged_union_schema(
            choices=self._tagged_union_choices,
            discriminator=discriminator,
            custom_error_type=schema.get('custom_error_type'),
            custom_error_message=schema.get('custom_error_message'),
            custom_error_context=schema.get('custom_error_context'),
            strict=False,
            from_attributes=True,
            ref=schema.get('ref'),
            metadata=schema.get('metadata'),
            serialization=schema.get('serialization'),
        )

    def _handle_choice(self, choice: core_schema.CoreSchema) -> None:
        """This method handles the "middle" stage of recursion over the input schema.
        Specifically, it is responsible for handling each choice of the outermost union
        (and any "coalesced" choices obtained from inner unions).

        Here, "handling" entails:
        * Coalescing nested unions and compatible tagged-unions
        * Tracking the presence of 'none' and 'nullable' schemas occurring as choices
        * Validating that each allowed discriminator value maps to a unique choice
        * Updating the _tagged_union_choices mapping that will ultimately be used to build the TaggedUnionSchema.
        """
        if choice['type'] == 'definition-ref':
            if choice['schema_ref'] not in self.definitions:
                raise MissingDefinitionForUnionRef(choice['schema_ref'])

        if choice['type'] == 'none':
            self._should_be_nullable = True
        elif choice['type'] == 'definitions':
            self._handle_choice(choice['schema'])
        elif choice['type'] == 'nullable':
            self._should_be_nullable = True
            self._handle_choice(choice['schema'])  # unwrap the nullable schema
        elif choice['type'] == 'union':
            # Reverse the choices list before extending the stack so that they get handled in the order they occur
            choices_schemas = [v[0] if isinstance(v, tuple) else v for v in choice['choices'][::-1]]
            self._choices_to_handle.extend(choices_schemas)
        elif choice['type'] not in {
            'model',
            'typed-dict',
            'tagged-union',
            'lax-or-strict',
            'dataclass',
            'dataclass-args',
            'definition-ref',
        } and not _core_utils.is_function_with_inner_schema(choice):
            # We should eventually handle 'definition-ref' as well
            raise TypeError(
                f'{choice["type"]!r} is not a valid discriminated union variant;'
                ' should be a `BaseModel` or `dataclass`'
            )
        else:
            if choice['type'] == 'tagged-union' and self._is_discriminator_shared(choice):
                # In this case, this inner tagged-union is compatible with the outer tagged-union,
                # and its choices can be coalesced into the outer TaggedUnionSchema.
                subchoices = [x for x in choice['choices'].values() if not isinstance(x, (str, int))]
                # Reverse the choices list before extending the stack so that they get handled in the order they occur
                self._choices_to_handle.extend(subchoices[::-1])
                return

            inferred_discriminator_values = self._infer_discriminator_values_for_choice(choice, source_name=None)
            self._set_unique_choice_for_values(choice, inferred_discriminator_values)

    def _is_discriminator_shared(self, choice: core_schema.TaggedUnionSchema) -> bool:
        """This method returns a boolean indicating whether the discriminator for the `choice`
        is the same as that being used for the outermost tagged union. This is used to
        determine whether this TaggedUnionSchema choice should be "coalesced" into the top level,
        or whether it should be treated as a separate (nested) choice.
        """
        inner_discriminator = choice['discriminator']
        return inner_discriminator == self.discriminator or (
            isinstance(inner_discriminator, list)
            and (self.discriminator in inner_discriminator or [self.discriminator] in inner_discriminator)
        )

    def _infer_discriminator_values_for_choice(  # noqa: C901
        self, choice: core_schema.CoreSchema, source_name: str | None
    ) -> list[str | int]:
        """This function recurses over `choice`, extracting all discriminator values that should map to this choice.

        `source_name` is accepted for the purpose of producing useful error messages.
        """
        if choice['type'] == 'definitions':
            return self._infer_discriminator_values_for_choice(choice['schema'], source_name=source_name)
        elif choice['type'] == 'function-plain':
            raise TypeError(
                f'{choice["type"]!r} is not a valid discriminated union variant;'
                ' should be a `BaseModel` or `dataclass`'
            )
        elif _core_utils.is_function_with_inner_schema(choice):
            return self._infer_discriminator_values_for_choice(choice['schema'], source_name=source_name)
        elif choice['type'] == 'lax-or-strict':
            return sorted(
                set(
                    self._infer_discriminator_values_for_choice(choice['lax_schema'], source_name=None)
                    + self._infer_discriminator_values_for_choice(choice['strict_schema'], source_name=None)
                )
            )

        elif choice['type'] == 'tagged-union':
            values: list[str | int] = []
            # Ignore str/int "choices" since these are just references to other choices
            subchoices = [x for x in choice['choices'].values() if not isinstance(x, (str, int))]
            for subchoice in subchoices:
                subchoice_values = self._infer_discriminator_values_for_choice(subchoice, source_name=None)
                values.extend(subchoice_values)
            return values

        elif choice['type'] == 'union':
            values = []
            for subchoice in choice['choices']:
                subchoice_schema = subchoice[0] if isinstance(subchoice, tuple) else subchoice
                subchoice_values = self._infer_discriminator_values_for_choice(subchoice_schema, source_name=None)
                values.extend(subchoice_values)
            return values

        elif choice['type'] == 'nullable':
            self._should_be_nullable = True
            return self._infer_discriminator_values_for_choice(choice['schema'], source_name=None)

        elif choice['type'] == 'model':
            return self._infer_discriminator_values_for_choice(choice['schema'], source_name=choice['cls'].__name__)

        elif choice['type'] == 'dataclass':
            return self._infer_discriminator_values_for_choice(choice['schema'], source_name=choice['cls'].__name__)

        elif choice['type'] == 'model-fields':
            return self._infer_discriminator_values_for_model_choice(choice, source_name=source_name)

        elif choice['type'] == 'dataclass-args':
            return self._infer_discriminator_values_for_dataclass_choice(choice, source_name=source_name)

        elif choice['type'] == 'typed-dict':
            return self._infer_discriminator_values_for_typed_dict_choice(choice, source_name=source_name)

        elif choice['type'] == 'definition-ref':
            schema_ref = choice['schema_ref']
            if schema_ref not in self.definitions:
                raise MissingDefinitionForUnionRef(schema_ref)
            return self._infer_discriminator_values_for_choice(self.definitions[schema_ref], source_name=source_name)
        else:
            raise TypeError(
                f'{choice["type"]!r} is not a valid discriminated union variant;'
                ' should be a `BaseModel` or `dataclass`'
            )

    def _infer_discriminator_values_for_typed_dict_choice(
        self, choice: core_schema.TypedDictSchema, source_name: str | None = None
    ) -> list[str | int]:
        """This method just extracts the _infer_discriminator_values_for_choice logic specific to TypedDictSchema
        for the sake of readability.
        """
        source = 'TypedDict' if source_name is None else f'TypedDict {source_name!r}'
        field = choice['fields'].get(self.discriminator)
        if field is None:
            raise PydanticUserError(
                f'{source} needs a discriminator field for key {self.discriminator!r}', code='discriminator-no-field'
            )
        return self._infer_discriminator_values_for_field(field, source)

    def _infer_discriminator_values_for_model_choice(
        self, choice: core_schema.ModelFieldsSchema, source_name: str | None = None
    ) -> list[str | int]:
        source = 'ModelFields' if source_name is None else f'Model {source_name!r}'
        field = choice['fields'].get(self.discriminator)
        if field is None:
            raise PydanticUserError(
                f'{source} needs a discriminator field for key {self.discriminator!r}', code='discriminator-no-field'
            )
        return self._infer_discriminator_values_for_field(field, source)

    def _infer_discriminator_values_for_dataclass_choice(
        self, choice: core_schema.DataclassArgsSchema, source_name: str | None = None
    ) -> list[str | int]:
        source = 'DataclassArgs' if source_name is None else f'Dataclass {source_name!r}'
        for field in choice['fields']:
            if field['name'] == self.discriminator:
                break
        else:
            raise PydanticUserError(
                f'{source} needs a discriminator field for key {self.discriminator!r}', code='discriminator-no-field'
            )
        return self._infer_discriminator_values_for_field(field, source)

    def _infer_discriminator_values_for_field(self, field: CoreSchemaField, source: str) -> list[str | int]:
        if field['type'] == 'computed-field':
            # This should never occur as a discriminator, as it is only relevant to serialization
            return []
        alias = field.get('validation_alias', self.discriminator)
        if not isinstance(alias, str):
            raise PydanticUserError(
                f'Alias {alias!r} is not supported in a discriminated union', code='discriminator-alias-type'
            )
        if self._discriminator_alias is None:
            self._discriminator_alias = alias
        elif self._discriminator_alias != alias:
            raise PydanticUserError(
                f'Aliases for discriminator {self.discriminator!r} must be the same '
                f'(got {alias}, {self._discriminator_alias})',
                code='discriminator-alias',
            )
        return self._infer_discriminator_values_for_inner_schema(field['schema'], source)

    def _infer_discriminator_values_for_inner_schema(
        self, schema: core_schema.CoreSchema, source: str
    ) -> list[str | int]:
        """When inferring discriminator values for a field, we typically extract the expected values from a literal
        schema. This function does that, but also handles nested unions and defaults.
        """
        if schema['type'] == 'literal':
            return schema['expected']

        elif schema['type'] == 'union':
            # Generally when multiple values are allowed they should be placed in a single `Literal`, but
            # we add this case to handle the situation where a field is annotated as a `Union` of `Literal`s.
            # For example, this lets us handle `Union[Literal['key'], Union[Literal['Key'], Literal['KEY']]]`
            values: list[Any] = []
            for choice in schema['choices']:
                choice_schema = choice[0] if isinstance(choice, tuple) else choice
                choice_values = self._infer_discriminator_values_for_inner_schema(choice_schema, source)
                values.extend(choice_values)
            return values

        elif schema['type'] == 'default':
            # This will happen if the field has a default value; we ignore it while extracting the discriminator values
            return self._infer_discriminator_values_for_inner_schema(schema['schema'], source)

        elif schema['type'] == 'function-after':
            # After validators don't affect the discriminator values
            return self._infer_discriminator_values_for_inner_schema(schema['schema'], source)

        elif schema['type'] in {'function-before', 'function-wrap', 'function-plain'}:
            validator_type = repr(schema['type'].split('-')[1])
            raise PydanticUserError(
                f'Cannot use a mode={validator_type} validator in the'
                f' discriminator field {self.discriminator!r} of {source}',
                code='discriminator-validator',
            )

        else:
            raise PydanticUserError(
                f'{source} needs field {self.discriminator!r} to be of type `Literal`',
                code='discriminator-needs-literal',
            )

    def _set_unique_choice_for_values(self, choice: core_schema.CoreSchema, values: Sequence[str | int]) -> None:
        """This method updates `self._tagged_union_choices` so that all provided (discriminator) `values` map to the
        provided `choice`, validating that none of these values already map to another (different) choice.
        """
        for discriminator_value in values:
            if discriminator_value in self._tagged_union_choices:
                # It is okay if `value` is already in tagged_union_choices as long as it maps to the same value.
                # Because tagged_union_choices may map values to other values, we need to walk the choices dict
                # until we get to a "real" choice, and confirm that is equal to the one assigned.
                existing_choice = self._tagged_union_choices[discriminator_value]
                if existing_choice != choice:
                    raise TypeError(
                        f'Value {discriminator_value!r} for discriminator '
                        f'{self.discriminator!r} mapped to multiple choices'
                    )
            else:
                self._tagged_union_choices[discriminator_value] = choice
|
319
lib/pydantic/_internal/_fields.py
Normal file
@ -0,0 +1,319 @@
"""Private logic related to fields (the `Field()` function and `FieldInfo` class), and arguments to `Annotated`."""
|
||||
from __future__ import annotations as _annotations
|
||||
|
||||
import dataclasses
|
||||
import sys
|
||||
import warnings
|
||||
from copy import copy
|
||||
from functools import lru_cache
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
from pydantic_core import PydanticUndefined
|
||||
|
||||
from pydantic.errors import PydanticUserError
|
||||
|
||||
from . import _typing_extra
|
||||
from ._config import ConfigWrapper
|
||||
from ._repr import Representation
|
||||
from ._typing_extra import get_cls_type_hints_lenient, get_type_hints, is_classvar, is_finalvar
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from annotated_types import BaseMetadata
|
||||
|
||||
from ..fields import FieldInfo
|
||||
from ..main import BaseModel
|
||||
from ._dataclasses import StandardDataclass
|
||||
from ._decorators import DecoratorInfos
|
||||
|
||||
|
||||
def get_type_hints_infer_globalns(
    obj: Any,
    localns: dict[str, Any] | None = None,
    include_extras: bool = False,
) -> dict[str, Any]:
    """Gets type hints for an object by inferring the global namespace.

    It uses `typing.get_type_hints`; the only addition here is fetching the
    global namespace from `obj.__module__` when it is not `None`.

    Args:
        obj: The object to get its type hints.
        localns: The local namespaces.
        include_extras: Whether to recursively include annotation metadata.

    Returns:
        The object type hints.
    """
    module_name = getattr(obj, '__module__', None)
    globalns: dict[str, Any] | None = None
    if module_name:
        try:
            globalns = sys.modules[module_name].__dict__
        except KeyError:
            # happens occasionally, see https://github.com/pydantic/pydantic/issues/2363
            pass
    return get_type_hints(obj, globalns=globalns, localns=localns, include_extras=include_extras)

class PydanticMetadata(Representation):
    """Base class for annotation markers like `Strict`."""

    __slots__ = ()


def pydantic_general_metadata(**metadata: Any) -> BaseMetadata:
    """Create a new `_PydanticGeneralMetadata` class with the given metadata.

    Args:
        **metadata: The metadata to add.

    Returns:
        The new `_PydanticGeneralMetadata` class.
    """
    return _general_metadata_cls()(metadata)  # type: ignore


@lru_cache(maxsize=None)
def _general_metadata_cls() -> type[BaseMetadata]:
    """Do it this way to avoid importing `annotated_types` at import time."""
    from annotated_types import BaseMetadata

    class _PydanticGeneralMetadata(PydanticMetadata, BaseMetadata):
        """Pydantic general metadata like `max_digits`."""

        def __init__(self, metadata: Any):
            self.__dict__ = metadata

    return _PydanticGeneralMetadata  # type: ignore

def collect_model_fields(  # noqa: C901
    cls: type[BaseModel],
    bases: tuple[type[Any], ...],
    config_wrapper: ConfigWrapper,
    types_namespace: dict[str, Any] | None,
    *,
    typevars_map: dict[Any, Any] | None = None,
) -> tuple[dict[str, FieldInfo], set[str]]:
    """Collect the fields of a nascent pydantic model.

    Also collect the names of any ClassVars present in the type hints.

    The returned value is a tuple of two items: the fields dict, and the set of ClassVar names.

    Args:
        cls: BaseModel or dataclass.
        bases: Parents of the class, generally `cls.__bases__`.
        config_wrapper: The config wrapper instance.
        types_namespace: Optional extra namespace to look for types in.
        typevars_map: A dictionary mapping type variables to their concrete types.

    Returns:
        A tuple containing fields and class variables.

    Raises:
        NameError:
            - If there is a conflict between a field name and protected namespaces.
            - If there is a field other than `root` in `RootModel`.
            - If a field shadows an attribute in the parent model.
    """
    from ..fields import FieldInfo

    type_hints = get_cls_type_hints_lenient(cls, types_namespace)

    # https://docs.python.org/3/howto/annotations.html#accessing-the-annotations-dict-of-an-object-in-python-3-9-and-older
    # annotations is only used for finding fields in parent classes
    annotations = cls.__dict__.get('__annotations__', {})
    fields: dict[str, FieldInfo] = {}

    class_vars: set[str] = set()
    for ann_name, ann_type in type_hints.items():
        if ann_name == 'model_config':
            # We never want to treat `model_config` as a field
            # Note: we may need to change this logic if/when we introduce a `BareModel` class with no
            # protected namespaces (where `model_config` might be allowed as a field name)
            continue
        for protected_namespace in config_wrapper.protected_namespaces:
            if ann_name.startswith(protected_namespace):
                for b in bases:
                    if hasattr(b, ann_name):
                        from ..main import BaseModel

                        if not (issubclass(b, BaseModel) and ann_name in b.model_fields):
                            raise NameError(
                                f'Field "{ann_name}" conflicts with member {getattr(b, ann_name)}'
                                f' of protected namespace "{protected_namespace}".'
                            )
                else:
                    valid_namespaces = tuple(
                        x for x in config_wrapper.protected_namespaces if not ann_name.startswith(x)
                    )
                    warnings.warn(
                        f'Field "{ann_name}" has conflict with protected namespace "{protected_namespace}".'
                        '\n\nYou may be able to resolve this warning by setting'
                        f" `model_config['protected_namespaces'] = {valid_namespaces}`.",
                        UserWarning,
                    )
        if is_classvar(ann_type):
            class_vars.add(ann_name)
            continue
        if _is_finalvar_with_default_val(ann_type, getattr(cls, ann_name, PydanticUndefined)):
            class_vars.add(ann_name)
            continue
        if not is_valid_field_name(ann_name):
            continue
        if cls.__pydantic_root_model__ and ann_name != 'root':
            raise NameError(
                f"Unexpected field with name {ann_name!r}; only 'root' is allowed as a field of a `RootModel`"
            )

        # when building a generic model with `MyModel[int]`, the generic_origin check makes sure we don't get
        # "... shadows an attribute" errors
        generic_origin = getattr(cls, '__pydantic_generic_metadata__', {}).get('origin')
        for base in bases:
            dataclass_fields = {
                field.name for field in (dataclasses.fields(base) if dataclasses.is_dataclass(base) else ())
            }
            if hasattr(base, ann_name):
                if base is generic_origin:
                    # Don't error when "shadowing" of attributes in parametrized generics
                    continue

                if ann_name in dataclass_fields:
                    # Don't error when inheriting stdlib dataclasses whose fields are "shadowed" by defaults being set
                    # on the class instance.
                    continue
                warnings.warn(
                    f'Field name "{ann_name}" shadows an attribute in parent "{base.__qualname__}"; ',
                    UserWarning,
                )

        try:
            default = getattr(cls, ann_name, PydanticUndefined)
            if default is PydanticUndefined:
                raise AttributeError
        except AttributeError:
            if ann_name in annotations:
                field_info = FieldInfo.from_annotation(ann_type)
            else:
                # if field has no default value and is not in __annotations__ this means that it is
                # defined in a base class and we can take it from there
                model_fields_lookup: dict[str, FieldInfo] = {}
                for x in cls.__bases__[::-1]:
                    model_fields_lookup.update(getattr(x, 'model_fields', {}))
                if ann_name in model_fields_lookup:
                    # The field was present on one of the (possibly multiple) base classes
                    # copy the field to make sure typevar substitutions don't cause issues with the base classes
                    field_info = copy(model_fields_lookup[ann_name])
                else:
                    # The field was not found on any base classes; this seems to be caused by fields not getting
                    # generated thanks to models not being fully defined while initializing recursive models.
                    # Nothing stops us from just creating a new FieldInfo for this type hint, so we do this.
                    field_info = FieldInfo.from_annotation(ann_type)
        else:
            field_info = FieldInfo.from_annotated_attribute(ann_type, default)
            # attributes which are fields are removed from the class namespace:
            # 1. To match the behaviour of annotation-only fields
            # 2. To avoid false positives in the NameError check above
            try:
                delattr(cls, ann_name)
            except AttributeError:
                pass  # indicates the attribute was on a parent class

        # Use cls.__dict__['__pydantic_decorators__'] instead of cls.__pydantic_decorators__
        # to make sure the decorators have already been built for this exact class
        decorators: DecoratorInfos = cls.__dict__['__pydantic_decorators__']
        if ann_name in decorators.computed_fields:
            raise ValueError("you can't override a field with a computed field")
        fields[ann_name] = field_info

    if typevars_map:
        for field in fields.values():
            field.apply_typevars_map(typevars_map, types_namespace)

    return fields, class_vars

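For context, the protected-namespace branch above is what produces the familiar `model_`-prefix warning. A short sketch of what a user sees, assuming pydantic v2 is installed (`Settings` is just an illustrative name):

```python
import warnings

from pydantic import BaseModel

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter('always')

    class Settings(BaseModel):
        model_id: int  # starts with the default protected namespace 'model_'

# The first line of the warning issued by collect_model_fields:
print(str(caught[0].message).splitlines()[0])
#> Field "model_id" has conflict with protected namespace "model_".
```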
def _is_finalvar_with_default_val(type_: type[Any], val: Any) -> bool:
    from ..fields import FieldInfo

    if not is_finalvar(type_):
        return False
    elif val is PydanticUndefined:
        return False
    elif isinstance(val, FieldInfo) and (val.default is PydanticUndefined and val.default_factory is None):
        return False
    else:
        return True

def collect_dataclass_fields(
    cls: type[StandardDataclass], types_namespace: dict[str, Any] | None, *, typevars_map: dict[Any, Any] | None = None
) -> dict[str, FieldInfo]:
    """Collect the fields of a dataclass.

    Args:
        cls: dataclass.
        types_namespace: Optional extra namespace to look for types in.
        typevars_map: A dictionary mapping type variables to their concrete types.

    Returns:
        The dataclass fields.
    """
    from ..fields import FieldInfo

    fields: dict[str, FieldInfo] = {}
    dataclass_fields: dict[str, dataclasses.Field] = cls.__dataclass_fields__
    cls_localns = dict(vars(cls))  # this matches get_cls_type_hints_lenient, but all tests pass with `= None` instead

    source_module = sys.modules.get(cls.__module__)
    if source_module is not None:
        types_namespace = {**source_module.__dict__, **(types_namespace or {})}

    for ann_name, dataclass_field in dataclass_fields.items():
        ann_type = _typing_extra.eval_type_lenient(dataclass_field.type, types_namespace, cls_localns)
        if is_classvar(ann_type):
            continue

        if (
            not dataclass_field.init
            and dataclass_field.default == dataclasses.MISSING
            and dataclass_field.default_factory == dataclasses.MISSING
        ):
            # TODO: We should probably do something with this so that validate_assignment behaves properly
            #   Issue: https://github.com/pydantic/pydantic/issues/5470
            continue

        if isinstance(dataclass_field.default, FieldInfo):
            if dataclass_field.default.init_var:
                if dataclass_field.default.init is False:
                    raise PydanticUserError(
                        f'Dataclass field {ann_name} has init=False and init_var=True, but these are mutually exclusive.',
                        code='clashing-init-and-init-var',
                    )

                # TODO: same note as above re validate_assignment
                continue
            field_info = FieldInfo.from_annotated_attribute(ann_type, dataclass_field.default)
        else:
            field_info = FieldInfo.from_annotated_attribute(ann_type, dataclass_field)

        fields[ann_name] = field_info

        if field_info.default is not PydanticUndefined and isinstance(getattr(cls, ann_name, field_info), FieldInfo):
            # We need this to fix the default when the "default" from __dataclass_fields__ is a pydantic.FieldInfo
            setattr(cls, ann_name, field_info.default)

    if typevars_map:
        for field in fields.values():
            field.apply_typevars_map(typevars_map, types_namespace)

    return fields


def is_valid_field_name(name: str) -> bool:
    return not name.startswith('_')


def is_valid_privateattr_name(name: str) -> bool:
    return name.startswith('_') and not name.startswith('__')
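The `clashing-init-and-init-var` branch in `collect_dataclass_fields` can be triggered directly; a sketch, assuming this pydantic v2 version exposes `init` and `init_var` on `Field` as the code above implies:

```python
import pydantic
import pydantic.dataclasses

try:

    @pydantic.dataclasses.dataclass
    class Foo:
        # init=False and init_var=True on the same field are contradictory
        bar: int = pydantic.Field(default=0, init=False, init_var=True)

except pydantic.PydanticUserError as e:
    print(e.code)
    #> clashing-init-and-init-var
```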
23
lib/pydantic/_internal/_forward_ref.py
Normal file
@ -0,0 +1,23 @@
from __future__ import annotations as _annotations

from dataclasses import dataclass
from typing import Union


@dataclass
class PydanticRecursiveRef:
    type_ref: str

    __name__ = 'PydanticRecursiveRef'
    __hash__ = object.__hash__

    def __call__(self) -> None:
        """Defining __call__ is necessary for the `typing` module to let you use an instance of
        this class as the result of resolving a standard ForwardRef.
        """

    def __or__(self, other):
        return Union[self, other]  # type: ignore

    def __ror__(self, other):
        return Union[other, self]  # type: ignore
2231
lib/pydantic/_internal/_generate_schema.py
Normal file
File diff suppressed because it is too large
517
lib/pydantic/_internal/_generics.py
Normal file
@ -0,0 +1,517 @@
from __future__ import annotations

import sys
import types
import typing
from collections import ChainMap
from contextlib import contextmanager
from contextvars import ContextVar
from types import prepare_class
from typing import TYPE_CHECKING, Any, Iterator, List, Mapping, MutableMapping, Tuple, TypeVar
from weakref import WeakValueDictionary

import typing_extensions

from ._core_utils import get_type_ref
from ._forward_ref import PydanticRecursiveRef
from ._typing_extra import TypeVarType, typing_base
from ._utils import all_identical, is_model_class

if sys.version_info >= (3, 10):
    from typing import _UnionGenericAlias  # type: ignore[attr-defined]

if TYPE_CHECKING:
    from ..main import BaseModel

GenericTypesCacheKey = Tuple[Any, Any, Tuple[Any, ...]]

# Note: We want to remove LimitedDict, but to do this, we'd need to improve the handling of generics caching.
# Right now, to handle recursive generics, some types must remain cached for brief periods without references.
# By chaining the WeakValuesDict with a LimitedDict, we have a way to retain caching for all types with references,
# while also retaining a limited number of types even without references. This is generally enough to build
# specific recursive generic models without losing required items out of the cache.

KT = TypeVar('KT')
VT = TypeVar('VT')
_LIMITED_DICT_SIZE = 100
if TYPE_CHECKING:

    class LimitedDict(dict, MutableMapping[KT, VT]):
        def __init__(self, size_limit: int = _LIMITED_DICT_SIZE):
            ...

else:

    class LimitedDict(dict):
        """Limit the size/length of a dict used for caching to avoid unlimited increase in memory usage.

        Since the dict is ordered, and we always remove elements from the beginning, this is effectively a FIFO cache.
        """

        def __init__(self, size_limit: int = _LIMITED_DICT_SIZE):
            self.size_limit = size_limit
            super().__init__()

        def __setitem__(self, __key: Any, __value: Any) -> None:
            super().__setitem__(__key, __value)
            if len(self) > self.size_limit:
                excess = len(self) - self.size_limit + self.size_limit // 10
                to_remove = list(self.keys())[:excess]
                for key in to_remove:
                    del self[key]

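A standalone sketch of the eviction arithmetic above (the names here are illustrative, not part of pydantic): once the dict grows past its limit, the overflow plus roughly 10% of the limit is evicted from the front, making it a FIFO cache.

```python
class FifoDict(dict):
    """Toy re-implementation of the LimitedDict eviction policy above."""

    def __init__(self, size_limit: int = 5):
        self.size_limit = size_limit
        super().__init__()

    def __setitem__(self, key, value):
        super().__setitem__(key, value)
        if len(self) > self.size_limit:
            # evict the overflow plus 10% of the limit, oldest keys first
            excess = len(self) - self.size_limit + self.size_limit // 10
            for k in list(self.keys())[:excess]:
                del self[k]


d = FifoDict(size_limit=5)
for i in range(7):
    d[i] = i
print(list(d))  # the two oldest keys have been evicted
#> [2, 3, 4, 5, 6]
```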
# weak dictionaries allow the dynamically created parametrized versions of generic models to get collected
# once they are no longer referenced by the caller.
if sys.version_info >= (3, 9):  # Typing for weak dictionaries available at 3.9
    GenericTypesCache = WeakValueDictionary[GenericTypesCacheKey, 'type[BaseModel]']
else:
    GenericTypesCache = WeakValueDictionary

if TYPE_CHECKING:

    class DeepChainMap(ChainMap[KT, VT]):  # type: ignore
        ...

else:

    class DeepChainMap(ChainMap):
        """Variant of ChainMap that allows direct updates to inner scopes.

        Taken from https://docs.python.org/3/library/collections.html#collections.ChainMap,
        with some light modifications for this use case.
        """

        def clear(self) -> None:
            for mapping in self.maps:
                mapping.clear()

        def __setitem__(self, key: KT, value: VT) -> None:
            for mapping in self.maps:
                mapping[key] = value

        def __delitem__(self, key: KT) -> None:
            hit = False
            for mapping in self.maps:
                if key in mapping:
                    del mapping[key]
                    hit = True
            if not hit:
                raise KeyError(key)


# Despite the fact that LimitedDict _seems_ no longer necessary, I'm very nervous to actually remove it
# and discover later on that we need to re-add all this infrastructure...
# _GENERIC_TYPES_CACHE = DeepChainMap(GenericTypesCache(), LimitedDict())

_GENERIC_TYPES_CACHE = GenericTypesCache()

class PydanticGenericMetadata(typing_extensions.TypedDict):
    origin: type[BaseModel] | None  # analogous to typing._GenericAlias.__origin__
    args: tuple[Any, ...]  # analogous to typing._GenericAlias.__args__
    parameters: tuple[type[Any], ...]  # analogous to typing.Generic.__parameters__


def create_generic_submodel(
    model_name: str, origin: type[BaseModel], args: tuple[Any, ...], params: tuple[Any, ...]
) -> type[BaseModel]:
    """Dynamically create a submodel of a provided (generic) BaseModel.

    This is used when producing concrete parametrizations of generic models. This function
    only *creates* the new subclass; the schema/validators/serialization must be updated to
    reflect a concrete parametrization elsewhere.

    Args:
        model_name: The name of the newly created model.
        origin: The base class for the new model to inherit from.
        args: A tuple of generic metadata arguments.
        params: A tuple of generic metadata parameters.

    Returns:
        The created submodel.
    """
    namespace: dict[str, Any] = {'__module__': origin.__module__}
    bases = (origin,)
    meta, ns, kwds = prepare_class(model_name, bases)
    namespace.update(ns)
    created_model = meta(
        model_name,
        bases,
        namespace,
        __pydantic_generic_metadata__={
            'origin': origin,
            'args': args,
            'parameters': params,
        },
        __pydantic_reset_parent_namespace__=False,
        **kwds,
    )

    model_module, called_globally = _get_caller_frame_info(depth=3)
    if called_globally:  # create global reference and therefore allow pickling
        object_by_reference = None
        reference_name = model_name
        reference_module_globals = sys.modules[created_model.__module__].__dict__
        while object_by_reference is not created_model:
            object_by_reference = reference_module_globals.setdefault(reference_name, created_model)
            reference_name += '_'

    return created_model

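What `create_generic_submodel` ultimately produces can be inspected from user code; a sketch assuming pydantic v2 (`Box` is an illustrative model name):

```python
from typing import Generic, TypeVar

from pydantic import BaseModel

T = TypeVar('T')


class Box(BaseModel, Generic[T]):
    content: T


# Parametrizing the model creates a submodel carrying the generic metadata:
print(Box[int].__pydantic_generic_metadata__)
#> {'origin': <class '__main__.Box'>, 'args': (<class 'int'>,), 'parameters': ()}
```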
def _get_caller_frame_info(depth: int = 2) -> tuple[str | None, bool]:
    """Used inside a function to check whether it was called globally.

    Args:
        depth: The depth to get the frame.

    Returns:
        A tuple containing `module_name` and `called_globally`.

    Raises:
        RuntimeError: If the function is not called inside a function.
    """
    try:
        previous_caller_frame = sys._getframe(depth)
    except ValueError as e:
        raise RuntimeError('This function must be used inside another function') from e
    except AttributeError:  # sys module does not have _getframe function, so there's nothing we can do about it
        return None, False
    frame_globals = previous_caller_frame.f_globals
    return frame_globals.get('__name__'), previous_caller_frame.f_locals is frame_globals

DictValues: type[Any] = {}.values().__class__


def iter_contained_typevars(v: Any) -> Iterator[TypeVarType]:
    """Recursively iterate through all subtypes and type args of `v` and yield any typevars that are found.

    This is intended as an alternative to directly accessing the `__parameters__` attribute of a GenericAlias,
    since __parameters__ of (nested) generic BaseModel subclasses won't show up in that list.
    """
    if isinstance(v, TypeVar):
        yield v
    elif is_model_class(v):
        yield from v.__pydantic_generic_metadata__['parameters']
    elif isinstance(v, (DictValues, list)):
        for var in v:
            yield from iter_contained_typevars(var)
    else:
        args = get_args(v)
        for arg in args:
            yield from iter_contained_typevars(arg)

def get_args(v: Any) -> Any:
    pydantic_generic_metadata: PydanticGenericMetadata | None = getattr(v, '__pydantic_generic_metadata__', None)
    if pydantic_generic_metadata:
        return pydantic_generic_metadata.get('args')
    return typing_extensions.get_args(v)


def get_origin(v: Any) -> Any:
    pydantic_generic_metadata: PydanticGenericMetadata | None = getattr(v, '__pydantic_generic_metadata__', None)
    if pydantic_generic_metadata:
        return pydantic_generic_metadata.get('origin')
    return typing_extensions.get_origin(v)

def get_standard_typevars_map(cls: type[Any]) -> dict[TypeVarType, Any] | None:
    """Package a generic type's typevars and parametrization (if present) into a dictionary compatible with the
    `replace_types` function. Specifically, this works with standard typing generics and typing._GenericAlias.
    """
    origin = get_origin(cls)
    if origin is None:
        return None
    if not hasattr(origin, '__parameters__'):
        return None

    # In this case, we know that cls is a _GenericAlias, and origin is the generic type
    # So it is safe to access cls.__args__ and origin.__parameters__
    args: tuple[Any, ...] = cls.__args__  # type: ignore
    parameters: tuple[TypeVarType, ...] = origin.__parameters__
    return dict(zip(parameters, args))


def get_model_typevars_map(cls: type[BaseModel]) -> dict[TypeVarType, Any] | None:
    """Package a generic BaseModel's typevars and concrete parametrization (if present) into a dictionary compatible
    with the `replace_types` function.

    Since BaseModel.__class_getitem__ does not produce a typing._GenericAlias, and the BaseModel generic info is
    stored in the __pydantic_generic_metadata__ attribute, we need special handling here.
    """
    # TODO: This could be unified with `get_standard_typevars_map` if we stored the generic metadata
    #   in the __origin__, __args__, and __parameters__ attributes of the model.
    generic_metadata = cls.__pydantic_generic_metadata__
    origin = generic_metadata['origin']
    args = generic_metadata['args']
    return dict(zip(iter_contained_typevars(origin), args))

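A quick sketch of what `get_standard_typevars_map` returns for a standard `typing` generic alias (importing from pydantic internals here is for illustration only):

```python
from typing import Generic, TypeVar

from pydantic._internal._generics import get_standard_typevars_map

T = TypeVar('T')


class Pair(Generic[T]):
    ...


# Maps each declared TypeVar to the concrete argument of the alias:
print(get_standard_typevars_map(Pair[int]))
#> {~T: <class 'int'>}
```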
def replace_types(type_: Any, type_map: Mapping[Any, Any] | None) -> Any:
    """Return type with all occurrences of `type_map` keys recursively replaced with their values.

    Args:
        type_: The class or generic alias.
        type_map: Mapping from `TypeVar` instance to concrete types.

    Returns:
        A new type representing the basic structure of `type_` with all
        `typevar_map` keys recursively replaced.

    Example:
        ```py
        from typing import List, Tuple, Union

        from pydantic._internal._generics import replace_types

        replace_types(Tuple[str, Union[List[str], float]], {str: int})
        #> Tuple[int, Union[List[int], float]]
        ```
    """
    if not type_map:
        return type_

    type_args = get_args(type_)
    origin_type = get_origin(type_)

    if origin_type is typing_extensions.Annotated:
        annotated_type, *annotations = type_args
        annotated = replace_types(annotated_type, type_map)
        for annotation in annotations:
            annotated = typing_extensions.Annotated[annotated, annotation]
        return annotated

    # Having type args is a good indicator that this is a typing module
    # class instantiation or a generic alias of some sort.
    if type_args:
        resolved_type_args = tuple(replace_types(arg, type_map) for arg in type_args)
        if all_identical(type_args, resolved_type_args):
            # If all arguments are the same, there is no need to modify the
            # type or create a new object at all
            return type_
        if (
            origin_type is not None
            and isinstance(type_, typing_base)
            and not isinstance(origin_type, typing_base)
            and getattr(type_, '_name', None) is not None
        ):
            # In python < 3.9 generic aliases don't exist so any of these like `list`,
            # `type` or `collections.abc.Callable` need to be translated.
            # See: https://www.python.org/dev/peps/pep-0585
            origin_type = getattr(typing, type_._name)
        assert origin_type is not None
        # PEP-604 syntax (Ex.: list | str) is represented with a types.UnionType object that does not have __getitem__.
        # We also cannot use isinstance() since we have to compare types.
        if sys.version_info >= (3, 10) and origin_type is types.UnionType:
            return _UnionGenericAlias(origin_type, resolved_type_args)
        # NotRequired[T] and Required[T] don't support tuple type resolved_type_args, hence the condition below
        return origin_type[resolved_type_args[0] if len(resolved_type_args) == 1 else resolved_type_args]

    # We handle pydantic generic models separately as they don't have the same
    # semantics as "typing" classes or generic aliases

    if not origin_type and is_model_class(type_):
        parameters = type_.__pydantic_generic_metadata__['parameters']
        if not parameters:
            return type_
        resolved_type_args = tuple(replace_types(t, type_map) for t in parameters)
        if all_identical(parameters, resolved_type_args):
            return type_
        return type_[resolved_type_args]

    # Handle special case for typehints that can have lists as arguments.
    # `typing.Callable[[int, str], int]` is an example for this.
    if isinstance(type_, (List, list)):
        resolved_list = list(replace_types(element, type_map) for element in type_)
        if all_identical(type_, resolved_list):
            return type_
        return resolved_list

    # If all else fails, we try to resolve the type directly and otherwise just
    # return the input with no modifications.
    return type_map.get(type_, type_)

def has_instance_in_type(type_: Any, isinstance_target: Any) -> bool:
    """Checks if the type, or any of its arbitrary nested args, satisfy
    `isinstance(<type>, isinstance_target)`.
    """
    if isinstance(type_, isinstance_target):
        return True

    type_args = get_args(type_)
    origin_type = get_origin(type_)

    if origin_type is typing_extensions.Annotated:
        annotated_type, *annotations = type_args
        return has_instance_in_type(annotated_type, isinstance_target)

    # Having type args is a good indicator that this is a typing module
    # class instantiation or a generic alias of some sort.
    if any(has_instance_in_type(a, isinstance_target) for a in type_args):
        return True

    # Handle special case for typehints that can have lists as arguments.
    # `typing.Callable[[int, str], int]` is an example for this.
    if isinstance(type_, (List, list)) and not isinstance(type_, typing_extensions.ParamSpec):
        if any(has_instance_in_type(element, isinstance_target) for element in type_):
            return True

    return False

def check_parameters_count(cls: type[BaseModel], parameters: tuple[Any, ...]) -> None:
    """Check that the number of parameters passed to the generic model matches the number it declares.

    Args:
        cls: The generic model.
        parameters: A tuple of passed parameters to the generic model.

    Raises:
        TypeError: If the passed parameters count is not equal to generic model parameters count.
    """
    actual = len(parameters)
    expected = len(cls.__pydantic_generic_metadata__['parameters'])
    if actual != expected:
        description = 'many' if actual > expected else 'few'
        raise TypeError(f'Too {description} parameters for {cls}; actual {actual}, expected {expected}')

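`check_parameters_count` is what surfaces as a `TypeError` when a generic model is parametrized with the wrong arity; a sketch assuming pydantic v2 (`Wrap` is illustrative):

```python
from typing import Generic, TypeVar

from pydantic import BaseModel

T = TypeVar('T')


class Wrap(BaseModel, Generic[T]):
    value: T


try:
    Wrap[int, str]  # two arguments for a single-parameter model
except TypeError as e:
    print(e)
    #> Too many parameters for <class '__main__.Wrap'>; actual 2, expected 1
```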
_generic_recursion_cache: ContextVar[set[str] | None] = ContextVar('_generic_recursion_cache', default=None)


@contextmanager
def generic_recursion_self_type(
    origin: type[BaseModel], args: tuple[Any, ...]
) -> Iterator[PydanticRecursiveRef | None]:
    """This contextmanager should be placed around the recursive calls used to build a generic type,
    and accept as arguments the generic origin type and the type arguments being passed to it.

    If the same origin and arguments are observed twice, it implies that a self-reference placeholder
    can be used while building the core schema, and will produce a schema_ref that will be valid in the
    final parent schema.
    """
    previously_seen_type_refs = _generic_recursion_cache.get()
    if previously_seen_type_refs is None:
        previously_seen_type_refs = set()
        token = _generic_recursion_cache.set(previously_seen_type_refs)
    else:
        token = None

    try:
        type_ref = get_type_ref(origin, args_override=args)
        if type_ref in previously_seen_type_refs:
            self_type = PydanticRecursiveRef(type_ref=type_ref)
            yield self_type
        else:
            previously_seen_type_refs.add(type_ref)
            yield None
    finally:
        if token:
            _generic_recursion_cache.reset(token)


def recursively_defined_type_refs() -> set[str]:
    visited = _generic_recursion_cache.get()
    if not visited:
        return set()  # not in a generic recursion, so there are no types

    return visited.copy()  # don't allow modifications

def get_cached_generic_type_early(parent: type[BaseModel], typevar_values: Any) -> type[BaseModel] | None:
    """The use of a two-stage cache lookup approach was necessary to have the highest performance possible for
    repeated calls to `__class_getitem__` on generic types (which may happen in tighter loops during runtime),
    while still ensuring that certain alternative parametrizations ultimately resolve to the same type.

    As a concrete example, this approach was necessary to make Model[List[T]][int] equal to Model[List[int]].
    The approach could be modified to not use two different cache keys at different points, but the
    _early_cache_key is optimized to be as quick to compute as possible (for repeated-access speed), and the
    _late_cache_key is optimized to be as "correct" as possible, so that two types that will ultimately be the
    same after resolving the type arguments will always produce cache hits.

    If we wanted to move to only using a single cache key per type, we would either need to always use the
    slower/more computationally intensive logic associated with _late_cache_key, or would need to accept
    that Model[List[T]][int] is a different type than Model[List[int]]. Because we rely on subclass relationships
    during validation, I think it is worthwhile to ensure that types that are functionally equivalent are actually
    equal.
    """
    return _GENERIC_TYPES_CACHE.get(_early_cache_key(parent, typevar_values))


def get_cached_generic_type_late(
    parent: type[BaseModel], typevar_values: Any, origin: type[BaseModel], args: tuple[Any, ...]
) -> type[BaseModel] | None:
    """See the docstring of `get_cached_generic_type_early` for more information about the two-stage cache lookup."""
    cached = _GENERIC_TYPES_CACHE.get(_late_cache_key(origin, args, typevar_values))
    if cached is not None:
        set_cached_generic_type(parent, typevar_values, cached, origin, args)
    return cached


def set_cached_generic_type(
    parent: type[BaseModel],
    typevar_values: tuple[Any, ...],
    type_: type[BaseModel],
    origin: type[BaseModel] | None = None,
    args: tuple[Any, ...] | None = None,
) -> None:
    """See the docstring of `get_cached_generic_type_early` for more information about why items are cached with
    two different keys.
    """
    _GENERIC_TYPES_CACHE[_early_cache_key(parent, typevar_values)] = type_
    if len(typevar_values) == 1:
        _GENERIC_TYPES_CACHE[_early_cache_key(parent, typevar_values[0])] = type_
    if origin and args:
        _GENERIC_TYPES_CACHE[_late_cache_key(origin, args, typevar_values)] = type_

def _union_orderings_key(typevar_values: Any) -> Any:
    """This is intended to help differentiate between Union types with the same arguments in different order.

    Thanks to caching internal to the `typing` module, it is not possible to distinguish between
    List[Union[int, float]] and List[Union[float, int]] (and similarly for other "parent" origins besides List)
    because `typing` considers Union[int, float] to be equal to Union[float, int].

    However, you _can_ distinguish between (top-level) Union[int, float] vs. Union[float, int].
    Because we parse items as the first Union type that is successful, we get slightly more consistent behavior
    if we make an effort to distinguish the ordering of items in a union. It would be best if we could _always_
    get the exact-correct order of items in the union, but that would require a change to the `typing` module itself.
    (See https://github.com/python/cpython/issues/86483 for reference.)
    """
    if isinstance(typevar_values, tuple):
        args_data = []
        for value in typevar_values:
            args_data.append(_union_orderings_key(value))
        return tuple(args_data)
    elif typing_extensions.get_origin(typevar_values) is typing.Union:
        return get_args(typevar_values)
    else:
        return ()


def _early_cache_key(cls: type[BaseModel], typevar_values: Any) -> GenericTypesCacheKey:
    """This is intended for minimal computational overhead during lookups of cached types.

    Note that this is overly simplistic, and it's possible that two different cls/typevar_values
    inputs would ultimately result in the same type being created in BaseModel.__class_getitem__.
    To handle this, we have a fallback _late_cache_key that is checked later if the _early_cache_key
    lookup fails, and should result in a cache hit _precisely_ when the inputs to __class_getitem__
    would result in the same type.
    """
    return cls, typevar_values, _union_orderings_key(typevar_values)


def _late_cache_key(origin: type[BaseModel], args: tuple[Any, ...], typevar_values: Any) -> GenericTypesCacheKey:
    """This is intended for use later in the process of creating a new type, when we have more information
    about the exact args that will be passed. If it turns out that a different set of inputs to
    __class_getitem__ resulted in the same inputs to the generic type creation process, we can still
    return the cached type, and update the cache with the _early_cache_key as well.
    """
    # The _union_orderings_key is placed at the start here to ensure there cannot be a collision with an
    # _early_cache_key, as that function will always produce a BaseModel subclass as the first item in the key,
    # whereas this function will always produce a tuple as the first item in the key.
    return _union_orderings_key(typevar_values), origin, args
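The `typing`-module behavior that `_union_orderings_key` works around can be seen directly:

```python
from typing import List, Union

# `typing` considers reordered unions equal (and caches them), so the
# parametrized aliases are indistinguishable:
print(List[Union[int, float]] == List[Union[float, int]])
#> True
```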
26
lib/pydantic/_internal/_git.py
Normal file
@ -0,0 +1,26 @@
"""Git utilities, adopted from mypy's git utilities (https://github.com/python/mypy/blob/master/mypy/git.py)."""
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import subprocess
|
||||
|
||||
|
||||
def is_git_repo(dir: str) -> bool:
|
||||
"""Is the given directory version-controlled with git?"""
|
||||
return os.path.exists(os.path.join(dir, '.git'))
|
||||
|
||||
|
||||
def have_git() -> bool:
|
||||
"""Can we run the git executable?"""
|
||||
try:
|
||||
subprocess.check_output(['git', '--help'])
|
||||
return True
|
||||
except subprocess.CalledProcessError:
|
||||
return False
|
||||
except OSError:
|
||||
return False
|
||||
|
||||
|
||||
def git_revision(dir: str) -> str:
|
||||
"""Get the SHA-1 of the HEAD of a git repository."""
|
||||
return subprocess.check_output(['git', 'rev-parse', '--short', 'HEAD'], cwd=dir).decode('utf-8').strip()
|
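Usage of these helpers is straightforward; a sketch (run from any git checkout, output SHA is illustrative):

```python
from pydantic._internal import _git

if _git.have_git() and _git.is_git_repo('.'):
    print(_git.git_revision('.'))  # short SHA of HEAD, e.g. '51196a7'
```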
10
lib/pydantic/_internal/_internal_dataclass.py
Normal file
@ -0,0 +1,10 @@
import sys
from typing import Any, Dict

dataclass_kwargs: Dict[str, Any]

# `slots` is available on Python >= 3.10
if sys.version_info >= (3, 10):
    slots_true = {'slots': True}
else:
    slots_true = {}
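A sketch of how `slots_true` is meant to be consumed: spread into a `dataclass(...)` call so `slots=True` is only passed on Python >= 3.10.

```python
import dataclasses

from pydantic._internal._internal_dataclass import slots_true


@dataclasses.dataclass(**slots_true)
class Point:
    x: int
    y: int


print(Point(1, 2))
#> Point(x=1, y=2)
```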
410
lib/pydantic/_internal/_known_annotated_metadata.py
Normal file
@ -0,0 +1,410 @@
from __future__ import annotations

from collections import defaultdict
from copy import copy
from functools import partial
from typing import TYPE_CHECKING, Any, Callable, Iterable

from pydantic_core import CoreSchema, PydanticCustomError, to_jsonable_python
from pydantic_core import core_schema as cs

from ._fields import PydanticMetadata

if TYPE_CHECKING:
    from ..annotated_handlers import GetJsonSchemaHandler

STRICT = {'strict'}
SEQUENCE_CONSTRAINTS = {'min_length', 'max_length'}
INEQUALITY = {'le', 'ge', 'lt', 'gt'}
NUMERIC_CONSTRAINTS = {'multiple_of', 'allow_inf_nan', *INEQUALITY}

STR_CONSTRAINTS = {*SEQUENCE_CONSTRAINTS, *STRICT, 'strip_whitespace', 'to_lower', 'to_upper', 'pattern'}
BYTES_CONSTRAINTS = {*SEQUENCE_CONSTRAINTS, *STRICT}

LIST_CONSTRAINTS = {*SEQUENCE_CONSTRAINTS, *STRICT}
TUPLE_CONSTRAINTS = {*SEQUENCE_CONSTRAINTS, *STRICT}
SET_CONSTRAINTS = {*SEQUENCE_CONSTRAINTS, *STRICT}
DICT_CONSTRAINTS = {*SEQUENCE_CONSTRAINTS, *STRICT}
GENERATOR_CONSTRAINTS = {*SEQUENCE_CONSTRAINTS, *STRICT}

FLOAT_CONSTRAINTS = {*NUMERIC_CONSTRAINTS, *STRICT}
INT_CONSTRAINTS = {*NUMERIC_CONSTRAINTS, *STRICT}
BOOL_CONSTRAINTS = STRICT
UUID_CONSTRAINTS = STRICT

DATE_TIME_CONSTRAINTS = {*NUMERIC_CONSTRAINTS, *STRICT}
TIMEDELTA_CONSTRAINTS = {*NUMERIC_CONSTRAINTS, *STRICT}
TIME_CONSTRAINTS = {*NUMERIC_CONSTRAINTS, *STRICT}
LAX_OR_STRICT_CONSTRAINTS = STRICT

UNION_CONSTRAINTS = {'union_mode'}
URL_CONSTRAINTS = {
    'max_length',
    'allowed_schemes',
    'host_required',
    'default_host',
    'default_port',
    'default_path',
}

TEXT_SCHEMA_TYPES = ('str', 'bytes', 'url', 'multi-host-url')
SEQUENCE_SCHEMA_TYPES = ('list', 'tuple', 'set', 'frozenset', 'generator', *TEXT_SCHEMA_TYPES)
NUMERIC_SCHEMA_TYPES = ('float', 'int', 'date', 'time', 'timedelta', 'datetime')

CONSTRAINTS_TO_ALLOWED_SCHEMAS: dict[str, set[str]] = defaultdict(set)
for constraint in STR_CONSTRAINTS:
    CONSTRAINTS_TO_ALLOWED_SCHEMAS[constraint].update(TEXT_SCHEMA_TYPES)
for constraint in BYTES_CONSTRAINTS:
    CONSTRAINTS_TO_ALLOWED_SCHEMAS[constraint].update(('bytes',))
for constraint in LIST_CONSTRAINTS:
    CONSTRAINTS_TO_ALLOWED_SCHEMAS[constraint].update(('list',))
for constraint in TUPLE_CONSTRAINTS:
    CONSTRAINTS_TO_ALLOWED_SCHEMAS[constraint].update(('tuple',))
for constraint in SET_CONSTRAINTS:
    CONSTRAINTS_TO_ALLOWED_SCHEMAS[constraint].update(('set', 'frozenset'))
for constraint in DICT_CONSTRAINTS:
    CONSTRAINTS_TO_ALLOWED_SCHEMAS[constraint].update(('dict',))
for constraint in GENERATOR_CONSTRAINTS:
    CONSTRAINTS_TO_ALLOWED_SCHEMAS[constraint].update(('generator',))
for constraint in FLOAT_CONSTRAINTS:
    CONSTRAINTS_TO_ALLOWED_SCHEMAS[constraint].update(('float',))
for constraint in INT_CONSTRAINTS:
    CONSTRAINTS_TO_ALLOWED_SCHEMAS[constraint].update(('int',))
for constraint in DATE_TIME_CONSTRAINTS:
    CONSTRAINTS_TO_ALLOWED_SCHEMAS[constraint].update(('date', 'time', 'datetime'))
for constraint in TIMEDELTA_CONSTRAINTS:
    CONSTRAINTS_TO_ALLOWED_SCHEMAS[constraint].update(('timedelta',))
for constraint in TIME_CONSTRAINTS:
    CONSTRAINTS_TO_ALLOWED_SCHEMAS[constraint].update(('time',))
for schema_type in (*TEXT_SCHEMA_TYPES, *SEQUENCE_SCHEMA_TYPES, *NUMERIC_SCHEMA_TYPES, 'typed-dict', 'model'):
    CONSTRAINTS_TO_ALLOWED_SCHEMAS['strict'].add(schema_type)
for constraint in UNION_CONSTRAINTS:
    CONSTRAINTS_TO_ALLOWED_SCHEMAS[constraint].update(('union',))
for constraint in URL_CONSTRAINTS:
    CONSTRAINTS_TO_ALLOWED_SCHEMAS[constraint].update(('url', 'multi-host-url'))
for constraint in BOOL_CONSTRAINTS:
    CONSTRAINTS_TO_ALLOWED_SCHEMAS[constraint].update(('bool',))
for constraint in UUID_CONSTRAINTS:
    CONSTRAINTS_TO_ALLOWED_SCHEMAS[constraint].update(('uuid',))
for constraint in LAX_OR_STRICT_CONSTRAINTS:
    CONSTRAINTS_TO_ALLOWED_SCHEMAS[constraint].update(('lax-or-strict',))

def add_js_update_schema(s: cs.CoreSchema, f: Callable[[], dict[str, Any]]) -> None:
    def update_js_schema(s: cs.CoreSchema, handler: GetJsonSchemaHandler) -> dict[str, Any]:
        js_schema = handler(s)
        js_schema.update(f())
        return js_schema

    if 'metadata' in s:
        metadata = s['metadata']
        # check the metadata dict, not the schema itself, for existing js functions
        if 'pydantic_js_functions' in metadata:
            metadata['pydantic_js_functions'].append(update_js_schema)
        else:
            metadata['pydantic_js_functions'] = [update_js_schema]
    else:
        s['metadata'] = {'pydantic_js_functions': [update_js_schema]}


def as_jsonable_value(v: Any) -> Any:
    if type(v) not in (int, str, float, bytes, bool, type(None)):
        return to_jsonable_python(v)
    return v

def expand_grouped_metadata(annotations: Iterable[Any]) -> Iterable[Any]:
    """Expand the annotations.

    Args:
        annotations: An iterable of annotations.

    Returns:
        An iterable of expanded annotations.

    Example:
        ```py
        from annotated_types import Ge, Len

        from pydantic._internal._known_annotated_metadata import expand_grouped_metadata

        print(list(expand_grouped_metadata([Ge(4), Len(5)])))
        #> [Ge(ge=4), MinLen(min_length=5)]
        ```
    """
    import annotated_types as at

    from pydantic.fields import FieldInfo  # circular import

    for annotation in annotations:
        if isinstance(annotation, at.GroupedMetadata):
            yield from annotation
        elif isinstance(annotation, FieldInfo):
            yield from annotation.metadata
            # this is a bit problematic in that it results in duplicate metadata
            # all of our "consumers" can handle it, but it is not ideal
            # we probably should split up FieldInfo into:
            # - annotated types metadata
            # - individual metadata known only to Pydantic
            annotation = copy(annotation)
            annotation.metadata = []
            yield annotation
        else:
            yield annotation

def apply_known_metadata(annotation: Any, schema: CoreSchema) -> CoreSchema | None:  # noqa: C901
    """Apply `annotation` to `schema` if it is an annotation we know about (Gt, Le, etc.).
    Otherwise return `None`.

    This does not handle all known annotations. If / when it does, it can always
    return a CoreSchema and return the unmodified schema if the annotation should be ignored.

    Assumes that GroupedMetadata has already been expanded via `expand_grouped_metadata`.

    Args:
        annotation: The annotation.
        schema: The schema.

    Returns:
        An updated schema with annotation if it is an annotation we know about, `None` otherwise.

    Raises:
        PydanticCustomError: If `Predicate` fails.
    """
    import annotated_types as at

    from . import _validators

    schema = schema.copy()
    schema_update, other_metadata = collect_known_metadata([annotation])
    schema_type = schema['type']
    for constraint, value in schema_update.items():
        if constraint not in CONSTRAINTS_TO_ALLOWED_SCHEMAS:
            raise ValueError(f'Unknown constraint {constraint}')
        allowed_schemas = CONSTRAINTS_TO_ALLOWED_SCHEMAS[constraint]

        if schema_type in allowed_schemas:
            if constraint == 'union_mode' and schema_type == 'union':
                schema['mode'] = value  # type: ignore  # schema is UnionSchema
            else:
                schema[constraint] = value
            continue

        if constraint == 'allow_inf_nan' and value is False:
            return cs.no_info_after_validator_function(
                _validators.forbid_inf_nan_check,
                schema,
            )
        elif constraint == 'pattern':
            # insert a str schema to make sure the regex engine matches
            return cs.chain_schema(
                [
                    schema,
                    cs.str_schema(pattern=value),
                ]
            )
        elif constraint == 'gt':
            s = cs.no_info_after_validator_function(
                partial(_validators.greater_than_validator, gt=value),
                schema,
            )
            add_js_update_schema(s, lambda: {'gt': as_jsonable_value(value)})
            return s
        elif constraint == 'ge':
            return cs.no_info_after_validator_function(
                partial(_validators.greater_than_or_equal_validator, ge=value),
                schema,
            )
        elif constraint == 'lt':
            return cs.no_info_after_validator_function(
                partial(_validators.less_than_validator, lt=value),
                schema,
            )
        elif constraint == 'le':
            return cs.no_info_after_validator_function(
                partial(_validators.less_than_or_equal_validator, le=value),
                schema,
            )
        elif constraint == 'multiple_of':
            return cs.no_info_after_validator_function(
                partial(_validators.multiple_of_validator, multiple_of=value),
                schema,
            )
        elif constraint == 'min_length':
            s = cs.no_info_after_validator_function(
                partial(_validators.min_length_validator, min_length=value),
                schema,
            )
            add_js_update_schema(s, lambda: {'minLength': (as_jsonable_value(value))})
            return s
        elif constraint == 'max_length':
            s = cs.no_info_after_validator_function(
                partial(_validators.max_length_validator, max_length=value),
                schema,
            )
            add_js_update_schema(s, lambda: {'maxLength': (as_jsonable_value(value))})
            return s
        elif constraint == 'strip_whitespace':
            return cs.chain_schema(
                [
                    schema,
                    cs.str_schema(strip_whitespace=True),
                ]
            )
        elif constraint == 'to_lower':
            return cs.chain_schema(
                [
                    schema,
                    cs.str_schema(to_lower=True),
                ]
            )
        elif constraint == 'to_upper':
            return cs.chain_schema(
                [
                    schema,
                    cs.str_schema(to_upper=True),
                ]
            )
        elif constraint == 'min_length':
            return cs.no_info_after_validator_function(
                partial(_validators.min_length_validator, min_length=annotation.min_length),
                schema,
            )
        elif constraint == 'max_length':
            return cs.no_info_after_validator_function(
                partial(_validators.max_length_validator, max_length=annotation.max_length),
                schema,
            )
        else:
            raise RuntimeError(f'Unable to apply constraint {constraint} to schema {schema_type}')

    for annotation in other_metadata:
        if isinstance(annotation, at.Gt):
            return cs.no_info_after_validator_function(
                partial(_validators.greater_than_validator, gt=annotation.gt),
                schema,
            )
        elif isinstance(annotation, at.Ge):
            return cs.no_info_after_validator_function(
                partial(_validators.greater_than_or_equal_validator, ge=annotation.ge),
                schema,
            )
        elif isinstance(annotation, at.Lt):
            return cs.no_info_after_validator_function(
                partial(_validators.less_than_validator, lt=annotation.lt),
                schema,
            )
        elif isinstance(annotation, at.Le):
            return cs.no_info_after_validator_function(
                partial(_validators.less_than_or_equal_validator, le=annotation.le),
                schema,
            )
        elif isinstance(annotation, at.MultipleOf):
            return cs.no_info_after_validator_function(
                partial(_validators.multiple_of_validator, multiple_of=annotation.multiple_of),
                schema,
            )
        elif isinstance(annotation, at.MinLen):
            return cs.no_info_after_validator_function(
                partial(_validators.min_length_validator, min_length=annotation.min_length),
                schema,
            )
        elif isinstance(annotation, at.MaxLen):
            return cs.no_info_after_validator_function(
                partial(_validators.max_length_validator, max_length=annotation.max_length),
                schema,
            )
        elif isinstance(annotation, at.Predicate):
            predicate_name = f'{annotation.func.__qualname__} ' if hasattr(annotation.func, '__qualname__') else ''

            def val_func(v: Any) -> Any:
                # annotation.func may also raise an exception, let it pass through
                if not annotation.func(v):
                    raise PydanticCustomError(
                        'predicate_failed',
                        f'Predicate {predicate_name}failed',  # type: ignore
                    )
                return v

            return cs.no_info_after_validator_function(val_func, schema)
        # ignore any other unknown metadata
        return None

    return schema

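The net effect of `apply_known_metadata` from the user's side, assuming pydantic v2 and annotated-types are installed:

```python
from typing import Annotated

from annotated_types import Gt
from pydantic import TypeAdapter, ValidationError

# `Gt(5)` is known metadata, so it is folded into the core schema as a constraint:
ta = TypeAdapter(Annotated[int, Gt(5)])
print(ta.validate_python(6))
#> 6
try:
    ta.validate_python(4)
except ValidationError as e:
    print(e.errors()[0]['type'])
    #> greater_than
```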
def collect_known_metadata(annotations: Iterable[Any]) -> tuple[dict[str, Any], list[Any]]:
    """Split `annotations` into known metadata and unknown annotations.

    Args:
        annotations: An iterable of annotations.

    Returns:
        A tuple containing a dict of known metadata and a list of unknown annotations.

    Example:
        ```py
        from annotated_types import Gt, Len

        from pydantic._internal._known_annotated_metadata import collect_known_metadata

        print(collect_known_metadata([Gt(1), Len(42), ...]))
        #> ({'gt': 1, 'min_length': 42}, [Ellipsis])
        ```
    """
    import annotated_types as at

    annotations = expand_grouped_metadata(annotations)

    res: dict[str, Any] = {}
    remaining: list[Any] = []
    for annotation in annotations:
        # isinstance(annotation, PydanticMetadata) also covers ._fields:_PydanticGeneralMetadata
        if isinstance(annotation, PydanticMetadata):
            res.update(annotation.__dict__)
        # we don't use dataclasses.asdict because that recursively calls asdict on the field values
        elif isinstance(annotation, at.MinLen):
            res.update({'min_length': annotation.min_length})
        elif isinstance(annotation, at.MaxLen):
            res.update({'max_length': annotation.max_length})
        elif isinstance(annotation, at.Gt):
            res.update({'gt': annotation.gt})
        elif isinstance(annotation, at.Ge):
            res.update({'ge': annotation.ge})
        elif isinstance(annotation, at.Lt):
            res.update({'lt': annotation.lt})
        elif isinstance(annotation, at.Le):
            res.update({'le': annotation.le})
        elif isinstance(annotation, at.MultipleOf):
            res.update({'multiple_of': annotation.multiple_of})
        elif isinstance(annotation, type) and issubclass(annotation, PydanticMetadata):
            # also support PydanticMetadata classes being used without initialisation,
            # e.g. `Annotated[int, Strict]` as well as `Annotated[int, Strict()]`
            res.update({k: v for k, v in vars(annotation).items() if not k.startswith('_')})
        else:
            remaining.append(annotation)
    # Nones can sneak in but pydantic-core will reject them
    # it'd be nice to clean things up so we don't put in None (we probably don't _need_ to, it was just easier)
    # but this is simple enough to kick that can down the road
    res = {k: v for k, v in res.items() if v is not None}
    return res, remaining


def check_metadata(metadata: dict[str, Any], allowed: Iterable[str], source_type: Any) -> None:
    """A small utility function to validate that the given metadata can be applied to the target.
    More than saving lines of code, this gives us a consistent error message for all of our internal implementations.

    Args:
        metadata: A dict of metadata.
        allowed: An iterable of allowed metadata.
        source_type: The source type.

    Raises:
        TypeError: If any metadata cannot be applied to the source type.
    """
    unknown = metadata.keys() - set(allowed)
    if unknown:
        raise TypeError(
            f'The following constraints cannot be applied to {source_type!r}: {", ".join([f"{k!r}" for k in unknown])}'
        )
140
lib/pydantic/_internal/_mock_val_ser.py
Normal file
@ -0,0 +1,140 @@
from __future__ import annotations

from typing import TYPE_CHECKING, Callable, Generic, TypeVar

from pydantic_core import SchemaSerializer, SchemaValidator
from typing_extensions import Literal

from ..errors import PydanticErrorCodes, PydanticUserError

if TYPE_CHECKING:
    from ..dataclasses import PydanticDataclass
    from ..main import BaseModel


ValSer = TypeVar('ValSer', SchemaValidator, SchemaSerializer)


class MockValSer(Generic[ValSer]):
    """Mocker for `pydantic_core.SchemaValidator` or `pydantic_core.SchemaSerializer` which optionally attempts to
    rebuild the thing it's mocking when one of its methods is accessed and raises an error if that fails.
    """

    __slots__ = '_error_message', '_code', '_val_or_ser', '_attempt_rebuild'

    def __init__(
        self,
        error_message: str,
        *,
        code: PydanticErrorCodes,
        val_or_ser: Literal['validator', 'serializer'],
        attempt_rebuild: Callable[[], ValSer | None] | None = None,
    ) -> None:
        self._error_message = error_message
        self._val_or_ser = SchemaValidator if val_or_ser == 'validator' else SchemaSerializer
        self._code: PydanticErrorCodes = code
        self._attempt_rebuild = attempt_rebuild

    def __getattr__(self, item: str) -> None:
        __tracebackhide__ = True
        if self._attempt_rebuild:
            val_ser = self._attempt_rebuild()
            if val_ser is not None:
                return getattr(val_ser, item)

        # raise an AttributeError if `item` doesn't exist
        getattr(self._val_or_ser, item)
        raise PydanticUserError(self._error_message, code=self._code)

    def rebuild(self) -> ValSer | None:
        if self._attempt_rebuild:
            val_ser = self._attempt_rebuild()
            if val_ser is not None:
                return val_ser
            else:
                raise PydanticUserError(self._error_message, code=self._code)
        return None

def set_model_mocks(cls: type[BaseModel], cls_name: str, undefined_name: str = 'all referenced types') -> None:
|
||||
"""Set `__pydantic_validator__` and `__pydantic_serializer__` to `MockValSer`s on a model.
|
||||
|
||||
Args:
|
||||
cls: The model class to set the mocks on
|
||||
cls_name: Name of the model class, used in error messages
|
||||
undefined_name: Name of the undefined thing, used in error messages
|
||||
"""
|
||||
undefined_type_error_message = (
|
||||
f'`{cls_name}` is not fully defined; you should define {undefined_name},'
|
||||
f' then call `{cls_name}.model_rebuild()`.'
|
||||
)
|
||||
|
||||
def attempt_rebuild_validator() -> SchemaValidator | None:
|
||||
if cls.model_rebuild(raise_errors=False, _parent_namespace_depth=5) is not False:
|
||||
return cls.__pydantic_validator__
|
||||
else:
|
||||
return None
|
||||
|
||||
cls.__pydantic_validator__ = MockValSer( # type: ignore[assignment]
|
||||
undefined_type_error_message,
|
||||
code='class-not-fully-defined',
|
||||
val_or_ser='validator',
|
||||
attempt_rebuild=attempt_rebuild_validator,
|
||||
)
|
||||
|
||||
def attempt_rebuild_serializer() -> SchemaSerializer | None:
|
||||
if cls.model_rebuild(raise_errors=False, _parent_namespace_depth=5) is not False:
|
||||
return cls.__pydantic_serializer__
|
||||
else:
|
||||
return None
|
||||
|
||||
cls.__pydantic_serializer__ = MockValSer( # type: ignore[assignment]
|
||||
undefined_type_error_message,
|
||||
code='class-not-fully-defined',
|
||||
val_or_ser='serializer',
|
||||
attempt_rebuild=attempt_rebuild_serializer,
|
||||
)
|
||||
|
||||
|
||||
def set_dataclass_mocks(
|
||||
cls: type[PydanticDataclass], cls_name: str, undefined_name: str = 'all referenced types'
|
||||
) -> None:
|
||||
"""Set `__pydantic_validator__` and `__pydantic_serializer__` to `MockValSer`s on a dataclass.
|
||||
|
||||
Args:
|
||||
cls: The model class to set the mocks on
|
||||
cls_name: Name of the model class, used in error messages
|
||||
undefined_name: Name of the undefined thing, used in error messages
|
||||
"""
|
||||
from ..dataclasses import rebuild_dataclass
|
||||
|
||||
undefined_type_error_message = (
|
||||
f'`{cls_name}` is not fully defined; you should define {undefined_name},'
|
||||
f' then call `pydantic.dataclasses.rebuild_dataclass({cls_name})`.'
|
||||
)
|
||||
|
||||
def attempt_rebuild_validator() -> SchemaValidator | None:
|
||||
if rebuild_dataclass(cls, raise_errors=False, _parent_namespace_depth=5) is not False:
|
||||
return cls.__pydantic_validator__
|
||||
else:
|
||||
return None
|
||||
|
||||
cls.__pydantic_validator__ = MockValSer( # type: ignore[assignment]
|
||||
undefined_type_error_message,
|
||||
code='class-not-fully-defined',
|
||||
val_or_ser='validator',
|
||||
attempt_rebuild=attempt_rebuild_validator,
|
||||
)
|
||||
|
||||
def attempt_rebuild_serializer() -> SchemaSerializer | None:
|
||||
if rebuild_dataclass(cls, raise_errors=False, _parent_namespace_depth=5) is not False:
|
||||
return cls.__pydantic_serializer__
|
||||
else:
|
||||
return None
|
||||
|
||||
cls.__pydantic_serializer__ = MockValSer( # type: ignore[assignment]
|
||||
undefined_type_error_message,
|
||||
code='class-not-fully-defined',
|
||||
val_or_ser='validator',
|
||||
attempt_rebuild=attempt_rebuild_serializer,
|
||||
)
|
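
Downstream, these mocks are what make `model_rebuild()` ergonomics work: a model created with an unresolved forward reference gets a `MockValSer` in place of its real validator, and either rebuilds transparently on first use or raises `PydanticUserError` with the message constructed above. A minimal sketch (not part of the diff; module-level code assumed, and the `Node`/`Missing` names are invented for illustration):

from pydantic import BaseModel, PydanticUserError

class Node(BaseModel):
    child: 'Missing | None' = None  # 'Missing' is not defined yet

try:
    Node(child=None)  # hits MockValSer.__getattr__, rebuild fails, error raised
except PydanticUserError as exc:
    assert exc.code == 'class-not-fully-defined'

Missing = int  # define the missing name, then rebuild
Node.model_rebuild()
assert Node(child=3).child == 3
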
637
lib/pydantic/_internal/_model_construction.py
Normal file
@@ -0,0 +1,637 @@
"""Private logic for creating models."""
|
||||
from __future__ import annotations as _annotations
|
||||
|
||||
import operator
|
||||
import typing
|
||||
import warnings
|
||||
import weakref
|
||||
from abc import ABCMeta
|
||||
from functools import partial
|
||||
from types import FunctionType
|
||||
from typing import Any, Callable, Generic
|
||||
|
||||
import typing_extensions
|
||||
from pydantic_core import PydanticUndefined, SchemaSerializer
|
||||
from typing_extensions import dataclass_transform, deprecated
|
||||
|
||||
from ..errors import PydanticUndefinedAnnotation, PydanticUserError
|
||||
from ..plugin._schema_validator import create_schema_validator
|
||||
from ..warnings import GenericBeforeBaseModelWarning, PydanticDeprecatedSince20
|
||||
from ._config import ConfigWrapper
|
||||
from ._decorators import DecoratorInfos, PydanticDescriptorProxy, get_attribute_from_bases
|
||||
from ._fields import collect_model_fields, is_valid_field_name, is_valid_privateattr_name
|
||||
from ._generate_schema import GenerateSchema
|
||||
from ._generics import PydanticGenericMetadata, get_model_typevars_map
|
||||
from ._mock_val_ser import MockValSer, set_model_mocks
|
||||
from ._schema_generation_shared import CallbackGetCoreSchemaHandler
|
||||
from ._signature import generate_pydantic_signature
|
||||
from ._typing_extra import get_cls_types_namespace, is_annotated, is_classvar, parent_frame_namespace
|
||||
from ._utils import ClassAttribute, SafeGetItemProxy
|
||||
from ._validate_call import ValidateCallWrapper
|
||||
|
||||
if typing.TYPE_CHECKING:
|
||||
from ..fields import Field as PydanticModelField
|
||||
from ..fields import FieldInfo, ModelPrivateAttr
|
||||
from ..main import BaseModel
|
||||
else:
|
||||
# See PyCharm issues https://youtrack.jetbrains.com/issue/PY-21915
|
||||
# and https://youtrack.jetbrains.com/issue/PY-51428
|
||||
DeprecationWarning = PydanticDeprecatedSince20
|
||||
PydanticModelField = object()
|
||||
|
||||
object_setattr = object.__setattr__
|
||||
|
||||
|
||||
class _ModelNamespaceDict(dict):
|
||||
"""A dictionary subclass that intercepts attribute setting on model classes and
|
||||
warns about overriding of decorators.
|
||||
"""
|
||||
|
||||
def __setitem__(self, k: str, v: object) -> None:
|
||||
existing: Any = self.get(k, None)
|
||||
if existing and v is not existing and isinstance(existing, PydanticDescriptorProxy):
|
||||
warnings.warn(f'`{k}` overrides an existing Pydantic `{existing.decorator_info.decorator_repr}` decorator')
|
||||
|
||||
return super().__setitem__(k, v)
|
||||
|
||||
|
||||
@dataclass_transform(kw_only_default=True, field_specifiers=(PydanticModelField,))
|
||||
class ModelMetaclass(ABCMeta):
|
||||
def __new__(
|
||||
mcs,
|
||||
cls_name: str,
|
||||
bases: tuple[type[Any], ...],
|
||||
namespace: dict[str, Any],
|
||||
__pydantic_generic_metadata__: PydanticGenericMetadata | None = None,
|
||||
__pydantic_reset_parent_namespace__: bool = True,
|
||||
_create_model_module: str | None = None,
|
||||
**kwargs: Any,
|
||||
) -> type:
|
||||
"""Metaclass for creating Pydantic models.
|
||||
|
||||
Args:
|
||||
cls_name: The name of the class to be created.
|
||||
bases: The base classes of the class to be created.
|
||||
namespace: The attribute dictionary of the class to be created.
|
||||
__pydantic_generic_metadata__: Metadata for generic models.
|
||||
__pydantic_reset_parent_namespace__: Reset parent namespace.
|
||||
_create_model_module: The module of the class to be created, if created by `create_model`.
|
||||
**kwargs: Catch-all for any other keyword arguments.
|
||||
|
||||
Returns:
|
||||
The new class created by the metaclass.
|
||||
"""
|
||||
# Note `ModelMetaclass` refers to `BaseModel`, but is also used to *create* `BaseModel`, so we rely on the fact
|
||||
# that `BaseModel` itself won't have any bases, but any subclass of it will, to determine whether the `__new__`
|
||||
# call we're in the middle of is for the `BaseModel` class.
|
||||
if bases:
|
||||
base_field_names, class_vars, base_private_attributes = mcs._collect_bases_data(bases)
|
||||
|
||||
config_wrapper = ConfigWrapper.for_model(bases, namespace, kwargs)
|
||||
namespace['model_config'] = config_wrapper.config_dict
|
||||
private_attributes = inspect_namespace(
|
||||
namespace, config_wrapper.ignored_types, class_vars, base_field_names
|
||||
)
|
||||
if private_attributes:
|
||||
original_model_post_init = get_model_post_init(namespace, bases)
|
||||
if original_model_post_init is not None:
|
||||
# if there are private_attributes and a model_post_init function, we handle both
|
||||
|
||||
def wrapped_model_post_init(self: BaseModel, __context: Any) -> None:
|
||||
"""We need to both initialize private attributes and call the user-defined model_post_init
|
||||
method.
|
||||
"""
|
||||
init_private_attributes(self, __context)
|
||||
original_model_post_init(self, __context)
|
||||
|
||||
namespace['model_post_init'] = wrapped_model_post_init
|
||||
else:
|
||||
namespace['model_post_init'] = init_private_attributes
|
||||
|
||||
namespace['__class_vars__'] = class_vars
|
||||
namespace['__private_attributes__'] = {**base_private_attributes, **private_attributes}
|
||||
|
||||
cls: type[BaseModel] = super().__new__(mcs, cls_name, bases, namespace, **kwargs) # type: ignore
|
||||
|
||||
from ..main import BaseModel
|
||||
|
||||
mro = cls.__mro__
|
||||
if Generic in mro and mro.index(Generic) < mro.index(BaseModel):
|
||||
warnings.warn(
|
||||
GenericBeforeBaseModelWarning(
|
||||
'Classes should inherit from `BaseModel` before generic classes (e.g. `typing.Generic[T]`) '
|
||||
'for pydantic generics to work properly.'
|
||||
),
|
||||
stacklevel=2,
|
||||
)
|
||||
|
||||
cls.__pydantic_custom_init__ = not getattr(cls.__init__, '__pydantic_base_init__', False)
|
||||
cls.__pydantic_post_init__ = None if cls.model_post_init is BaseModel.model_post_init else 'model_post_init'
|
||||
|
||||
cls.__pydantic_decorators__ = DecoratorInfos.build(cls)
|
||||
|
||||
# Use the getattr below to grab the __parameters__ from the `typing.Generic` parent class
|
||||
if __pydantic_generic_metadata__:
|
||||
cls.__pydantic_generic_metadata__ = __pydantic_generic_metadata__
|
||||
else:
|
||||
parent_parameters = getattr(cls, '__pydantic_generic_metadata__', {}).get('parameters', ())
|
||||
parameters = getattr(cls, '__parameters__', None) or parent_parameters
|
||||
if parameters and parent_parameters and not all(x in parameters for x in parent_parameters):
|
||||
combined_parameters = parent_parameters + tuple(x for x in parameters if x not in parent_parameters)
|
||||
parameters_str = ', '.join([str(x) for x in combined_parameters])
|
||||
generic_type_label = f'typing.Generic[{parameters_str}]'
|
||||
error_message = (
|
||||
f'All parameters must be present on typing.Generic;'
|
||||
f' you should inherit from {generic_type_label}.'
|
||||
)
|
||||
if Generic not in bases: # pragma: no cover
|
||||
# We raise an error here not because it is desirable, but because some cases are mishandled.
|
||||
# It would be nice to remove this error and still have things behave as expected, it's just
|
||||
# challenging because we are using a custom `__class_getitem__` to parametrize generic models,
|
||||
# and not returning a typing._GenericAlias from it.
|
||||
bases_str = ', '.join([x.__name__ for x in bases] + [generic_type_label])
|
||||
error_message += (
|
||||
f' Note: `typing.Generic` must go last: `class {cls.__name__}({bases_str}): ...`)'
|
||||
)
|
||||
raise TypeError(error_message)
|
||||
|
||||
cls.__pydantic_generic_metadata__ = {
|
||||
'origin': None,
|
||||
'args': (),
|
||||
'parameters': parameters,
|
||||
}
|
||||
|
||||
cls.__pydantic_complete__ = False # Ensure this specific class gets completed
|
||||
|
||||
# preserve `__set_name__` protocol defined in https://peps.python.org/pep-0487
|
||||
# for attributes not in `new_namespace` (e.g. private attributes)
|
||||
for name, obj in private_attributes.items():
|
||||
obj.__set_name__(cls, name)
|
||||
|
||||
if __pydantic_reset_parent_namespace__:
|
||||
cls.__pydantic_parent_namespace__ = build_lenient_weakvaluedict(parent_frame_namespace())
|
||||
parent_namespace = getattr(cls, '__pydantic_parent_namespace__', None)
|
||||
if isinstance(parent_namespace, dict):
|
||||
parent_namespace = unpack_lenient_weakvaluedict(parent_namespace)
|
||||
|
||||
types_namespace = get_cls_types_namespace(cls, parent_namespace)
|
||||
set_model_fields(cls, bases, config_wrapper, types_namespace)
|
||||
|
||||
if config_wrapper.frozen and '__hash__' not in namespace:
|
||||
set_default_hash_func(cls, bases)
|
||||
|
||||
complete_model_class(
|
||||
cls,
|
||||
cls_name,
|
||||
config_wrapper,
|
||||
raise_errors=False,
|
||||
types_namespace=types_namespace,
|
||||
create_model_module=_create_model_module,
|
||||
)
|
||||
|
||||
# If this is placed before the complete_model_class call above,
|
||||
# the generic computed fields return type is set to PydanticUndefined
|
||||
cls.model_computed_fields = {k: v.info for k, v in cls.__pydantic_decorators__.computed_fields.items()}
|
||||
|
||||
# using super(cls, cls) on the next line ensures we only call the parent class's __pydantic_init_subclass__
|
||||
# I believe the `type: ignore` is only necessary because mypy doesn't realize that this code branch is
|
||||
# only hit for _proper_ subclasses of BaseModel
|
||||
super(cls, cls).__pydantic_init_subclass__(**kwargs) # type: ignore[misc]
|
||||
return cls
|
||||
else:
|
||||
# this is the BaseModel class itself being created, no logic required
|
||||
return super().__new__(mcs, cls_name, bases, namespace, **kwargs)
|
||||
|
||||
if not typing.TYPE_CHECKING: # pragma: no branch
|
||||
# We put `__getattr__` in a non-TYPE_CHECKING block because otherwise, mypy allows arbitrary attribute access
|
||||
|
||||
def __getattr__(self, item: str) -> Any:
|
||||
"""This is necessary to keep attribute access working for class attribute access."""
|
||||
private_attributes = self.__dict__.get('__private_attributes__')
|
||||
if private_attributes and item in private_attributes:
|
||||
return private_attributes[item]
|
||||
if item == '__pydantic_core_schema__':
|
||||
# This means the class didn't get a schema generated for it, likely because there was an undefined reference
|
||||
maybe_mock_validator = getattr(self, '__pydantic_validator__', None)
|
||||
if isinstance(maybe_mock_validator, MockValSer):
|
||||
rebuilt_validator = maybe_mock_validator.rebuild()
|
||||
if rebuilt_validator is not None:
|
||||
# In this case, a validator was built, and so `__pydantic_core_schema__` should now be set
|
||||
return getattr(self, '__pydantic_core_schema__')
|
||||
raise AttributeError(item)
|
||||
|
||||
@classmethod
|
||||
def __prepare__(cls, *args: Any, **kwargs: Any) -> dict[str, object]:
|
||||
return _ModelNamespaceDict()
|
||||
|
||||
def __instancecheck__(self, instance: Any) -> bool:
|
||||
"""Avoid calling ABC _abc_subclasscheck unless we're pretty sure.
|
||||
|
||||
See #3829 and python/cpython#92810
|
||||
"""
|
||||
return hasattr(instance, '__pydantic_validator__') and super().__instancecheck__(instance)
|
||||
|
||||
@staticmethod
|
||||
def _collect_bases_data(bases: tuple[type[Any], ...]) -> tuple[set[str], set[str], dict[str, ModelPrivateAttr]]:
|
||||
from ..main import BaseModel
|
||||
|
||||
field_names: set[str] = set()
|
||||
class_vars: set[str] = set()
|
||||
private_attributes: dict[str, ModelPrivateAttr] = {}
|
||||
for base in bases:
|
||||
if issubclass(base, BaseModel) and base is not BaseModel:
|
||||
# model_fields might not be defined yet in the case of generics, so we use getattr here:
|
||||
field_names.update(getattr(base, 'model_fields', {}).keys())
|
||||
class_vars.update(base.__class_vars__)
|
||||
private_attributes.update(base.__private_attributes__)
|
||||
return field_names, class_vars, private_attributes
|
||||
|
||||
@property
|
||||
@deprecated('The `__fields__` attribute is deprecated, use `model_fields` instead.', category=None)
|
||||
def __fields__(self) -> dict[str, FieldInfo]:
|
||||
warnings.warn(
|
||||
'The `__fields__` attribute is deprecated, use `model_fields` instead.', PydanticDeprecatedSince20
|
||||
)
|
||||
return self.model_fields # type: ignore
|
||||
|
||||
def __dir__(self) -> list[str]:
|
||||
attributes = list(super().__dir__())
|
||||
if '__fields__' in attributes:
|
||||
attributes.remove('__fields__')
|
||||
return attributes
|
||||
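
# Reviewer sketch (not part of the vendored file): the metaclass wiring above is
# what makes private attributes and `model_post_init` compose; the model below is
# illustrative.
#
#     from pydantic import BaseModel, PrivateAttr
#
#     class Timed(BaseModel):
#         _created: float = PrivateAttr(default=0.0)
#
#         def model_post_init(self, __context):
#             # runs *after* init_private_attributes, via wrapped_model_post_init above
#             self._created = 123.0
#
#     assert Timed()._created == 123.0
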

def init_private_attributes(self: BaseModel, __context: Any) -> None:
    """This function is meant to behave like a BaseModel method to initialise private attributes.

    It takes context as an argument since that's what pydantic-core passes when calling it.

    Args:
        self: The BaseModel instance.
        __context: The context.
    """
    if getattr(self, '__pydantic_private__', None) is None:
        pydantic_private = {}
        for name, private_attr in self.__private_attributes__.items():
            default = private_attr.get_default()
            if default is not PydanticUndefined:
                pydantic_private[name] = default
        object_setattr(self, '__pydantic_private__', pydantic_private)


def get_model_post_init(namespace: dict[str, Any], bases: tuple[type[Any], ...]) -> Callable[..., Any] | None:
    """Get the `model_post_init` method from the namespace or the class bases, or `None` if not defined."""
    if 'model_post_init' in namespace:
        return namespace['model_post_init']

    from ..main import BaseModel

    model_post_init = get_attribute_from_bases(bases, 'model_post_init')
    if model_post_init is not BaseModel.model_post_init:
        return model_post_init


def inspect_namespace(  # noqa C901
    namespace: dict[str, Any],
    ignored_types: tuple[type[Any], ...],
    base_class_vars: set[str],
    base_class_fields: set[str],
) -> dict[str, ModelPrivateAttr]:
    """Iterate over the namespace and:
    * gather private attributes
    * check for items which look like fields but are not (e.g. have no annotation) and warn.

    Args:
        namespace: The attribute dictionary of the class to be created.
        ignored_types: A tuple of ignored types.
        base_class_vars: A set of base class class variables.
        base_class_fields: A set of base class fields.

    Returns:
        A dict containing private attribute info.

    Raises:
        TypeError: If there is a `__root__` field in the model.
        NameError: If a private attribute name is invalid.
        PydanticUserError:
            - If a field does not have a type annotation.
            - If a field on a base class was overridden by a non-annotated attribute.
    """
    from ..fields import FieldInfo, ModelPrivateAttr, PrivateAttr

    all_ignored_types = ignored_types + default_ignored_types()

    private_attributes: dict[str, ModelPrivateAttr] = {}
    raw_annotations = namespace.get('__annotations__', {})

    if '__root__' in raw_annotations or '__root__' in namespace:
        raise TypeError("To define root models, use `pydantic.RootModel` rather than a field called '__root__'")

    ignored_names: set[str] = set()
    for var_name, value in list(namespace.items()):
        if var_name == 'model_config':
            continue
        elif (
            isinstance(value, type)
            and value.__module__ == namespace['__module__']
            and value.__qualname__.startswith(namespace['__qualname__'])
        ):
            # `value` is a nested type defined in this namespace; don't error
            continue
        elif isinstance(value, all_ignored_types) or value.__class__.__module__ == 'functools':
            ignored_names.add(var_name)
            continue
        elif isinstance(value, ModelPrivateAttr):
            if var_name.startswith('__'):
                raise NameError(
                    'Private attributes must not use dunder names;'
                    f' use a single underscore prefix instead of {var_name!r}.'
                )
            elif is_valid_field_name(var_name):
                raise NameError(
                    'Private attributes must not use valid field names;'
                    f' use sunder names, e.g. {"_" + var_name!r} instead of {var_name!r}.'
                )
            private_attributes[var_name] = value
            del namespace[var_name]
        elif isinstance(value, FieldInfo) and not is_valid_field_name(var_name):
            suggested_name = var_name.lstrip('_') or 'my_field'  # don't suggest '' for all-underscore name
            raise NameError(
                f'Fields must not use names with leading underscores;'
                f' e.g., use {suggested_name!r} instead of {var_name!r}.'
            )

        elif var_name.startswith('__'):
            continue
        elif is_valid_privateattr_name(var_name):
            if var_name not in raw_annotations or not is_classvar(raw_annotations[var_name]):
                private_attributes[var_name] = PrivateAttr(default=value)
                del namespace[var_name]
        elif var_name in base_class_vars:
            continue
        elif var_name not in raw_annotations:
            if var_name in base_class_fields:
                raise PydanticUserError(
                    f'Field {var_name!r} defined on a base class was overridden by a non-annotated attribute. '
                    f'All field definitions, including overrides, require a type annotation.',
                    code='model-field-overridden',
                )
            elif isinstance(value, FieldInfo):
                raise PydanticUserError(
                    f'Field {var_name!r} requires a type annotation', code='model-field-missing-annotation'
                )
            else:
                raise PydanticUserError(
                    f'A non-annotated attribute was detected: `{var_name} = {value!r}`. All model fields require a '
                    f'type annotation; if `{var_name}` is not meant to be a field, you may be able to resolve this '
                    f"error by annotating it as a `ClassVar` or updating `model_config['ignored_types']`.",
                    code='model-field-missing-annotation',
                )

    for ann_name, ann_type in raw_annotations.items():
        if (
            is_valid_privateattr_name(ann_name)
            and ann_name not in private_attributes
            and ann_name not in ignored_names
            and not is_classvar(ann_type)
            and ann_type not in all_ignored_types
            and getattr(ann_type, '__module__', None) != 'functools'
        ):
            if is_annotated(ann_type):
                _, *metadata = typing_extensions.get_args(ann_type)
                private_attr = next((v for v in metadata if isinstance(v, ModelPrivateAttr)), None)
                if private_attr is not None:
                    private_attributes[ann_name] = private_attr
                    continue
            private_attributes[ann_name] = PrivateAttr()

    return private_attributes


def set_default_hash_func(cls: type[BaseModel], bases: tuple[type[Any], ...]) -> None:
    base_hash_func = get_attribute_from_bases(bases, '__hash__')
    new_hash_func = make_hash_func(cls)
    if base_hash_func in {None, object.__hash__} or getattr(base_hash_func, '__code__', None) == new_hash_func.__code__:
        # If `__hash__` is some default, we generate a hash function.
        # It will be `None` if not overridden from BaseModel.
        # It may be `object.__hash__` if there is another
        # parent class earlier in the bases which doesn't override `__hash__` (e.g. `typing.Generic`).
        # It may be a value set by `set_default_hash_func` if `cls` is a subclass of another frozen model.
        # In the last case we still need a new hash function to account for new `model_fields`.
        cls.__hash__ = new_hash_func


def make_hash_func(cls: type[BaseModel]) -> Any:
    getter = operator.itemgetter(*cls.model_fields.keys()) if cls.model_fields else lambda _: 0

    def hash_func(self: Any) -> int:
        try:
            return hash(getter(self.__dict__))
        except KeyError:
            # In rare cases (such as when using the deprecated copy method), the __dict__ may not contain
            # all model fields, which is how we can get here.
            # getter(self.__dict__) is much faster than any 'safe' method that accounts for missing keys,
            # and wrapping it in a `try` doesn't slow things down much in the common case.
            return hash(getter(SafeGetItemProxy(self.__dict__)))

    return hash_func
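
# Reviewer sketch (not part of the vendored file): the observable effect of the
# two hash helpers above on a frozen model; values are illustrative.
#
#     from pydantic import BaseModel, ConfigDict
#
#     class Point(BaseModel):
#         model_config = ConfigDict(frozen=True)
#         x: int
#         y: int
#
#     # set_default_hash_func installs make_hash_func's field-based hash, so
#     # equal field values hash equally and instances are usable in sets:
#     assert hash(Point(x=1, y=2)) == hash(Point(x=1, y=2))
#     assert len({Point(x=1, y=2), Point(x=1, y=2)}) == 1
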

def set_model_fields(
    cls: type[BaseModel], bases: tuple[type[Any], ...], config_wrapper: ConfigWrapper, types_namespace: dict[str, Any]
) -> None:
    """Collect and set `cls.model_fields` and `cls.__class_vars__`.

    Args:
        cls: BaseModel or dataclass.
        bases: Parents of the class, generally `cls.__bases__`.
        config_wrapper: The config wrapper instance.
        types_namespace: Optional extra namespace to look for types in.
    """
    typevars_map = get_model_typevars_map(cls)
    fields, class_vars = collect_model_fields(cls, bases, config_wrapper, types_namespace, typevars_map=typevars_map)

    cls.model_fields = fields
    cls.__class_vars__.update(class_vars)

    for k in class_vars:
        # Class vars should not be private attributes
        # We remove them _here_ and not earlier because we rely on inspecting the class to determine its classvars,
        # but private attributes are determined by inspecting the namespace _prior_ to class creation.
        # In the case that a classvar with a leading-'_' is defined via a ForwardRef (e.g., when using
        # `__future__.annotations`), we want to remove the private attribute which was detected _before_ we knew it
        # evaluated to a classvar

        value = cls.__private_attributes__.pop(k, None)
        if value is not None and value.default is not PydanticUndefined:
            setattr(cls, k, value.default)


def complete_model_class(
    cls: type[BaseModel],
    cls_name: str,
    config_wrapper: ConfigWrapper,
    *,
    raise_errors: bool = True,
    types_namespace: dict[str, Any] | None,
    create_model_module: str | None = None,
) -> bool:
    """Finish building a model class.

    This logic must be called after class has been created since validation functions must be bound
    and `get_type_hints` requires a class object.

    Args:
        cls: BaseModel or dataclass.
        cls_name: The model or dataclass name.
        config_wrapper: The config wrapper instance.
        raise_errors: Whether to raise errors.
        types_namespace: Optional extra namespace to look for types in.
        create_model_module: The module of the class to be created, if created by `create_model`.

    Returns:
        `True` if the model is successfully completed, else `False`.

    Raises:
        PydanticUndefinedAnnotation: If `PydanticUndefinedAnnotation` occurs in `__get_pydantic_core_schema__`
            and `raise_errors=True`.
    """
    typevars_map = get_model_typevars_map(cls)
    gen_schema = GenerateSchema(
        config_wrapper,
        types_namespace,
        typevars_map,
    )

    handler = CallbackGetCoreSchemaHandler(
        partial(gen_schema.generate_schema, from_dunder_get_core_schema=False),
        gen_schema,
        ref_mode='unpack',
    )

    if config_wrapper.defer_build:
        set_model_mocks(cls, cls_name)
        return False

    try:
        schema = cls.__get_pydantic_core_schema__(cls, handler)
    except PydanticUndefinedAnnotation as e:
        if raise_errors:
            raise
        set_model_mocks(cls, cls_name, f'`{e.name}`')
        return False

    core_config = config_wrapper.core_config(cls)

    try:
        schema = gen_schema.clean_schema(schema)
    except gen_schema.CollectedInvalid:
        set_model_mocks(cls, cls_name)
        return False

    # debug(schema)
    cls.__pydantic_core_schema__ = schema

    cls.__pydantic_validator__ = create_schema_validator(
        schema,
        cls,
        create_model_module or cls.__module__,
        cls.__qualname__,
        'create_model' if create_model_module else 'BaseModel',
        core_config,
        config_wrapper.plugin_settings,
    )
    cls.__pydantic_serializer__ = SchemaSerializer(schema, core_config)
    cls.__pydantic_complete__ = True

    # set __signature__ attr only for model class, but not for its instances
    cls.__signature__ = ClassAttribute(
        '__signature__',
        generate_pydantic_signature(init=cls.__init__, fields=cls.model_fields, config_wrapper=config_wrapper),
    )
    return True


class _PydanticWeakRef:
    """Wrapper for `weakref.ref` that enables `pickle` serialization.

    Cloudpickle fails to serialize `weakref.ref` objects due to an arcane error related
    to abstract base classes (`abc.ABC`). This class works around the issue by wrapping
    `weakref.ref` instead of subclassing it.

    See https://github.com/pydantic/pydantic/issues/6763 for context.

    Semantics:
        - If not pickled, behaves the same as a `weakref.ref`.
        - If pickled along with the referenced object, the same `weakref.ref` behavior
          will be maintained between them after unpickling.
        - If pickled without the referenced object, after unpickling the underlying
          reference will be cleared (`__call__` will always return `None`).
    """

    def __init__(self, obj: Any):
        if obj is None:
            # The object will be `None` upon deserialization if the serialized weakref
            # had lost its underlying object.
            self._wr = None
        else:
            self._wr = weakref.ref(obj)

    def __call__(self) -> Any:
        if self._wr is None:
            return None
        else:
            return self._wr()

    def __reduce__(self) -> tuple[Callable, tuple[weakref.ReferenceType | None]]:
        return _PydanticWeakRef, (self(),)


def build_lenient_weakvaluedict(d: dict[str, Any] | None) -> dict[str, Any] | None:
    """Takes an input dictionary, and produces a new value that (invertibly) replaces the values with weakrefs.

    We can't just use a WeakValueDictionary because many types (including int, str, etc.) can't be stored as values
    in a WeakValueDictionary.

    The `unpack_lenient_weakvaluedict` function can be used to reverse this operation.
    """
    if d is None:
        return None
    result = {}
    for k, v in d.items():
        try:
            proxy = _PydanticWeakRef(v)
        except TypeError:
            proxy = v
        result[k] = proxy
    return result


def unpack_lenient_weakvaluedict(d: dict[str, Any] | None) -> dict[str, Any] | None:
    """Inverts the transform performed by `build_lenient_weakvaluedict`."""
    if d is None:
        return None

    result = {}
    for k, v in d.items():
        if isinstance(v, _PydanticWeakRef):
            v = v()
            if v is not None:
                result[k] = v
        else:
            result[k] = v
    return result


def default_ignored_types() -> tuple[type[Any], ...]:
    from ..fields import ComputedFieldInfo

    return (
        FunctionType,
        property,
        classmethod,
        staticmethod,
        PydanticDescriptorProxy,
        ComputedFieldInfo,
        ValidateCallWrapper,
    )
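
A quick sanity check of the weak-value helpers above (not part of the diff; the `Anchor` class and values are illustrative): weakref-able values survive the round trip, primitives that can't be weak-referenced are passed through untouched, and dead references are dropped on unpacking.

from pydantic._internal._model_construction import (
    build_lenient_weakvaluedict,
    unpack_lenient_weakvaluedict,
)

class Anchor:  # an ordinary, weakref-able object
    pass

anchor = Anchor()
packed = build_lenient_weakvaluedict({'obj': anchor, 'n': 42})  # int can't be weakref'd; stored as-is
assert unpack_lenient_weakvaluedict(packed) == {'obj': anchor, 'n': 42}

del anchor  # once the referent dies (immediate under CPython refcounting), unpacking drops the dead entry
assert unpack_lenient_weakvaluedict(packed) == {'n': 42}
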
117
lib/pydantic/_internal/_repr.py
Normal file
@@ -0,0 +1,117 @@
"""Tools to provide pretty/human-readable display of objects."""
|
||||
from __future__ import annotations as _annotations
|
||||
|
||||
import types
|
||||
import typing
|
||||
from typing import Any
|
||||
|
||||
import typing_extensions
|
||||
|
||||
from . import _typing_extra
|
||||
|
||||
if typing.TYPE_CHECKING:
|
||||
ReprArgs: typing_extensions.TypeAlias = 'typing.Iterable[tuple[str | None, Any]]'
|
||||
RichReprResult: typing_extensions.TypeAlias = (
|
||||
'typing.Iterable[Any | tuple[Any] | tuple[str, Any] | tuple[str, Any, Any]]'
|
||||
)
|
||||
|
||||
|
||||
class PlainRepr(str):
|
||||
"""String class where repr doesn't include quotes. Useful with Representation when you want to return a string
|
||||
representation of something that is valid (or pseudo-valid) python.
|
||||
"""
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return str(self)
|
||||
|
||||
|
||||
class Representation:
|
||||
# Mixin to provide `__str__`, `__repr__`, and `__pretty__` and `__rich_repr__` methods.
|
||||
# `__pretty__` is used by [devtools](https://python-devtools.helpmanual.io/).
|
||||
# `__rich_repr__` is used by [rich](https://rich.readthedocs.io/en/stable/pretty.html).
|
||||
# (this is not a docstring to avoid adding a docstring to classes which inherit from Representation)
|
||||
|
||||
# we don't want to use a type annotation here as it can break get_type_hints
|
||||
__slots__ = tuple() # type: typing.Collection[str]
|
||||
|
||||
def __repr_args__(self) -> ReprArgs:
|
||||
"""Returns the attributes to show in __str__, __repr__, and __pretty__ this is generally overridden.
|
||||
|
||||
Can either return:
|
||||
* name - value pairs, e.g.: `[('foo_name', 'foo'), ('bar_name', ['b', 'a', 'r'])]`
|
||||
* or, just values, e.g.: `[(None, 'foo'), (None, ['b', 'a', 'r'])]`
|
||||
"""
|
||||
attrs_names = self.__slots__
|
||||
if not attrs_names and hasattr(self, '__dict__'):
|
||||
attrs_names = self.__dict__.keys()
|
||||
attrs = ((s, getattr(self, s)) for s in attrs_names)
|
||||
return [(a, v) for a, v in attrs if v is not None]
|
||||
|
||||
def __repr_name__(self) -> str:
|
||||
"""Name of the instance's class, used in __repr__."""
|
||||
return self.__class__.__name__
|
||||
|
||||
def __repr_str__(self, join_str: str) -> str:
|
||||
return join_str.join(repr(v) if a is None else f'{a}={v!r}' for a, v in self.__repr_args__())
|
||||
|
||||
def __pretty__(self, fmt: typing.Callable[[Any], Any], **kwargs: Any) -> typing.Generator[Any, None, None]:
|
||||
"""Used by devtools (https://python-devtools.helpmanual.io/) to pretty print objects."""
|
||||
yield self.__repr_name__() + '('
|
||||
yield 1
|
||||
for name, value in self.__repr_args__():
|
||||
if name is not None:
|
||||
yield name + '='
|
||||
yield fmt(value)
|
||||
yield ','
|
||||
yield 0
|
||||
yield -1
|
||||
yield ')'
|
||||
|
||||
def __rich_repr__(self) -> RichReprResult:
|
||||
"""Used by Rich (https://rich.readthedocs.io/en/stable/pretty.html) to pretty print objects."""
|
||||
for name, field_repr in self.__repr_args__():
|
||||
if name is None:
|
||||
yield field_repr
|
||||
else:
|
||||
yield name, field_repr
|
||||
|
||||
def __str__(self) -> str:
|
||||
return self.__repr_str__(' ')
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return f'{self.__repr_name__()}({self.__repr_str__(", ")})'
|
||||
|
||||
|
||||
def display_as_type(obj: Any) -> str:
|
||||
"""Pretty representation of a type, should be as close as possible to the original type definition string.
|
||||
|
||||
Takes some logic from `typing._type_repr`.
|
||||
"""
|
||||
if isinstance(obj, types.FunctionType):
|
||||
return obj.__name__
|
||||
elif obj is ...:
|
||||
return '...'
|
||||
elif isinstance(obj, Representation):
|
||||
return repr(obj)
|
||||
elif isinstance(obj, typing_extensions.TypeAliasType):
|
||||
return str(obj)
|
||||
|
||||
if not isinstance(obj, (_typing_extra.typing_base, _typing_extra.WithArgsTypes, type)):
|
||||
obj = obj.__class__
|
||||
|
||||
if _typing_extra.origin_is_union(typing_extensions.get_origin(obj)):
|
||||
args = ', '.join(map(display_as_type, typing_extensions.get_args(obj)))
|
||||
return f'Union[{args}]'
|
||||
elif isinstance(obj, _typing_extra.WithArgsTypes):
|
||||
if typing_extensions.get_origin(obj) == typing_extensions.Literal:
|
||||
args = ', '.join(map(repr, typing_extensions.get_args(obj)))
|
||||
else:
|
||||
args = ', '.join(map(display_as_type, typing_extensions.get_args(obj)))
|
||||
try:
|
||||
return f'{obj.__qualname__}[{args}]'
|
||||
except AttributeError:
|
||||
return str(obj) # handles TypeAliasType in 3.12
|
||||
elif isinstance(obj, type):
|
||||
return obj.__qualname__
|
||||
else:
|
||||
return repr(obj).replace('typing.', '').replace('typing_extensions.', '')
|
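
A short illustration of the mixin above (not part of the diff; `Pair` is an invented class): `__repr_args__` drives `__repr__`, `__str__`, devtools, and rich output from one place, and `None`-valued attributes are omitted by default.

from typing import Optional

from pydantic._internal._repr import Representation, display_as_type

class Pair(Representation):
    def __init__(self, left: int, right: Optional[int] = None):
        self.left = left
        self.right = right

assert repr(Pair(1)) == 'Pair(left=1)'    # right=None is filtered out
assert str(Pair(1, 2)) == 'left=1 right=2'
assert display_as_type(Optional[int]) == 'Union[int, NoneType]'
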
124
lib/pydantic/_internal/_schema_generation_shared.py
Normal file
@@ -0,0 +1,124 @@
"""Types and utility functions used by various other internal tools."""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING, Any, Callable
|
||||
|
||||
from pydantic_core import core_schema
|
||||
from typing_extensions import Literal
|
||||
|
||||
from ..annotated_handlers import GetCoreSchemaHandler, GetJsonSchemaHandler
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ..json_schema import GenerateJsonSchema, JsonSchemaValue
|
||||
from ._core_utils import CoreSchemaOrField
|
||||
from ._generate_schema import GenerateSchema
|
||||
|
||||
GetJsonSchemaFunction = Callable[[CoreSchemaOrField, GetJsonSchemaHandler], JsonSchemaValue]
|
||||
HandlerOverride = Callable[[CoreSchemaOrField], JsonSchemaValue]
|
||||
|
||||
|
||||
class GenerateJsonSchemaHandler(GetJsonSchemaHandler):
|
||||
"""JsonSchemaHandler implementation that doesn't do ref unwrapping by default.
|
||||
|
||||
This is used for any Annotated metadata so that we don't end up with conflicting
|
||||
modifications to the definition schema.
|
||||
|
||||
Used internally by Pydantic, please do not rely on this implementation.
|
||||
See `GetJsonSchemaHandler` for the handler API.
|
||||
"""
|
||||
|
||||
def __init__(self, generate_json_schema: GenerateJsonSchema, handler_override: HandlerOverride | None) -> None:
|
||||
self.generate_json_schema = generate_json_schema
|
||||
self.handler = handler_override or generate_json_schema.generate_inner
|
||||
self.mode = generate_json_schema.mode
|
||||
|
||||
def __call__(self, __core_schema: CoreSchemaOrField) -> JsonSchemaValue:
|
||||
return self.handler(__core_schema)
|
||||
|
||||
def resolve_ref_schema(self, maybe_ref_json_schema: JsonSchemaValue) -> JsonSchemaValue:
|
||||
"""Resolves `$ref` in the json schema.
|
||||
|
||||
This returns the input json schema if there is no `$ref` in json schema.
|
||||
|
||||
Args:
|
||||
maybe_ref_json_schema: The input json schema that may contains `$ref`.
|
||||
|
||||
Returns:
|
||||
Resolved json schema.
|
||||
|
||||
Raises:
|
||||
LookupError: If it can't find the definition for `$ref`.
|
||||
"""
|
||||
if '$ref' not in maybe_ref_json_schema:
|
||||
return maybe_ref_json_schema
|
||||
ref = maybe_ref_json_schema['$ref']
|
||||
json_schema = self.generate_json_schema.get_schema_from_definitions(ref)
|
||||
if json_schema is None:
|
||||
raise LookupError(
|
||||
f'Could not find a ref for {ref}.'
|
||||
' Maybe you tried to call resolve_ref_schema from within a recursive model?'
|
||||
)
|
||||
return json_schema
|
||||
|
||||
|
||||
class CallbackGetCoreSchemaHandler(GetCoreSchemaHandler):
|
||||
"""Wrapper to use an arbitrary function as a `GetCoreSchemaHandler`.
|
||||
|
||||
Used internally by Pydantic, please do not rely on this implementation.
|
||||
See `GetCoreSchemaHandler` for the handler API.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
handler: Callable[[Any], core_schema.CoreSchema],
|
||||
generate_schema: GenerateSchema,
|
||||
ref_mode: Literal['to-def', 'unpack'] = 'to-def',
|
||||
) -> None:
|
||||
self._handler = handler
|
||||
self._generate_schema = generate_schema
|
||||
self._ref_mode = ref_mode
|
||||
|
||||
def __call__(self, __source_type: Any) -> core_schema.CoreSchema:
|
||||
schema = self._handler(__source_type)
|
||||
ref = schema.get('ref')
|
||||
if self._ref_mode == 'to-def':
|
||||
if ref is not None:
|
||||
self._generate_schema.defs.definitions[ref] = schema
|
||||
return core_schema.definition_reference_schema(ref)
|
||||
return schema
|
||||
else: # ref_mode = 'unpack
|
||||
return self.resolve_ref_schema(schema)
|
||||
|
||||
def _get_types_namespace(self) -> dict[str, Any] | None:
|
||||
return self._generate_schema._types_namespace
|
||||
|
||||
def generate_schema(self, __source_type: Any) -> core_schema.CoreSchema:
|
||||
return self._generate_schema.generate_schema(__source_type)
|
||||
|
||||
@property
|
||||
def field_name(self) -> str | None:
|
||||
return self._generate_schema.field_name_stack.get()
|
||||
|
||||
def resolve_ref_schema(self, maybe_ref_schema: core_schema.CoreSchema) -> core_schema.CoreSchema:
|
||||
"""Resolves reference in the core schema.
|
||||
|
||||
Args:
|
||||
maybe_ref_schema: The input core schema that may contains reference.
|
||||
|
||||
Returns:
|
||||
Resolved core schema.
|
||||
|
||||
Raises:
|
||||
LookupError: If it can't find the definition for reference.
|
||||
"""
|
||||
if maybe_ref_schema['type'] == 'definition-ref':
|
||||
ref = maybe_ref_schema['schema_ref']
|
||||
if ref not in self._generate_schema.defs.definitions:
|
||||
raise LookupError(
|
||||
f'Could not find a ref for {ref}.'
|
||||
' Maybe you tried to call resolve_ref_schema from within a recursive model?'
|
||||
)
|
||||
return self._generate_schema.defs.definitions[ref]
|
||||
elif maybe_ref_schema['type'] == 'definitions':
|
||||
return self.resolve_ref_schema(maybe_ref_schema['schema'])
|
||||
return maybe_ref_schema
|
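
These two wrappers are the concrete handler objects passed to `__get_pydantic_core_schema__` / `__get_pydantic_json_schema__` hooks. A sketch of the public pattern they serve (not part of the diff; the `Celsius`/`Reading` types are invented for illustration):

from typing import Any

from pydantic import BaseModel, GetCoreSchemaHandler
from pydantic_core import core_schema

class Celsius(float):
    @classmethod
    def __get_pydantic_core_schema__(cls, source_type: Any, handler: GetCoreSchemaHandler) -> core_schema.CoreSchema:
        # `handler` here is a CallbackGetCoreSchemaHandler; delegating to it
        # yields the schema pydantic would otherwise build for `float`.
        return core_schema.no_info_after_validator_function(cls, handler(float))

class Reading(BaseModel):
    temp: Celsius

assert isinstance(Reading(temp='21.5').temp, Celsius)  # lax mode coerces the string first
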
164
lib/pydantic/_internal/_signature.py
Normal file
@@ -0,0 +1,164 @@
from __future__ import annotations

import dataclasses
from inspect import Parameter, Signature, signature
from typing import TYPE_CHECKING, Any, Callable

from pydantic_core import PydanticUndefined

from ._config import ConfigWrapper
from ._utils import is_valid_identifier

if TYPE_CHECKING:
    from ..fields import FieldInfo


def _field_name_for_signature(field_name: str, field_info: FieldInfo) -> str:
    """Extract the correct name to use for the field when generating a signature.

    Assuming the field has a valid alias, this will return the alias. Otherwise, it will return the field name.
    First priority is given to the alias, then the validation_alias, then the field name.

    Args:
        field_name: The name of the field
        field_info: The corresponding FieldInfo object.

    Returns:
        The correct name to use when generating a signature.
    """

    def _alias_if_valid(x: Any) -> str | None:
        """Return the alias if it is a valid alias and identifier, else None."""
        return x if isinstance(x, str) and is_valid_identifier(x) else None

    return _alias_if_valid(field_info.alias) or _alias_if_valid(field_info.validation_alias) or field_name


def _process_param_defaults(param: Parameter) -> Parameter:
    """Modify the signature for a parameter in a dataclass where the default value is a FieldInfo instance.

    Args:
        param (Parameter): The parameter

    Returns:
        Parameter: The custom processed parameter
    """
    from ..fields import FieldInfo

    param_default = param.default
    if isinstance(param_default, FieldInfo):
        annotation = param.annotation
        # Replace the annotation if appropriate
        # inspect does "clever" things to show annotations as strings because we have
        # `from __future__ import annotations` in main, we don't want that
        if annotation == 'Any':
            annotation = Any

        # Replace the field default
        default = param_default.default
        if default is PydanticUndefined:
            if param_default.default_factory is PydanticUndefined:
                default = Signature.empty
            else:
                # this is used by dataclasses to indicate a factory exists:
                default = dataclasses._HAS_DEFAULT_FACTORY  # type: ignore
        return param.replace(
            annotation=annotation, name=_field_name_for_signature(param.name, param_default), default=default
        )
    return param


def _generate_signature_parameters(  # noqa: C901 (ignore complexity, could use a refactor)
    init: Callable[..., None],
    fields: dict[str, FieldInfo],
    config_wrapper: ConfigWrapper,
) -> dict[str, Parameter]:
    """Generate a mapping of parameter names to Parameter objects for a pydantic BaseModel or dataclass."""
    from itertools import islice

    present_params = signature(init).parameters.values()
    merged_params: dict[str, Parameter] = {}
    var_kw = None
    use_var_kw = False

    for param in islice(present_params, 1, None):  # skip self arg
        # inspect does "clever" things to show annotations as strings because we have
        # `from __future__ import annotations` in main, we don't want that
        if fields.get(param.name):
            # exclude params with init=False
            if getattr(fields[param.name], 'init', True) is False:
                continue
            param = param.replace(name=_field_name_for_signature(param.name, fields[param.name]))
        if param.annotation == 'Any':
            param = param.replace(annotation=Any)
        if param.kind is param.VAR_KEYWORD:
            var_kw = param
            continue
        merged_params[param.name] = param

    if var_kw:  # if custom init has no var_kw, fields which are not declared in it cannot be passed through
        allow_names = config_wrapper.populate_by_name
        for field_name, field in fields.items():
            # when alias is a str it should be used for signature generation
            param_name = _field_name_for_signature(field_name, field)

            if field_name in merged_params or param_name in merged_params:
                continue

            if not is_valid_identifier(param_name):
                if allow_names:
                    param_name = field_name
                else:
                    use_var_kw = True
                    continue

            kwargs = {} if field.is_required() else {'default': field.get_default(call_default_factory=False)}
            merged_params[param_name] = Parameter(
                param_name, Parameter.KEYWORD_ONLY, annotation=field.rebuild_annotation(), **kwargs
            )

    if config_wrapper.extra == 'allow':
        use_var_kw = True

    if var_kw and use_var_kw:
        # Make sure the parameter for extra kwargs
        # does not have the same name as a field
        default_model_signature = [
            ('self', Parameter.POSITIONAL_ONLY),
            ('data', Parameter.VAR_KEYWORD),
        ]
        if [(p.name, p.kind) for p in present_params] == default_model_signature:
            # if this is the standard model signature, use extra_data as the extra args name
            var_kw_name = 'extra_data'
        else:
            # else start from var_kw
            var_kw_name = var_kw.name

        # generate a name that's definitely unique
        while var_kw_name in fields:
            var_kw_name += '_'
        merged_params[var_kw_name] = var_kw.replace(name=var_kw_name)

    return merged_params


def generate_pydantic_signature(
    init: Callable[..., None], fields: dict[str, FieldInfo], config_wrapper: ConfigWrapper, is_dataclass: bool = False
) -> Signature:
    """Generate signature for a pydantic BaseModel or dataclass.

    Args:
        init: The class init.
        fields: The model fields.
        config_wrapper: The config wrapper instance.
        is_dataclass: Whether the model is a dataclass.

    Returns:
        The dataclass/BaseModel subclass signature.
    """
    merged_params = _generate_signature_parameters(init, fields, config_wrapper)

    if is_dataclass:
        merged_params = {k: _process_param_defaults(v) for k, v in merged_params.items()}

    return Signature(parameters=list(merged_params.values()), return_annotation=None)
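
The net effect of this module is visible through `inspect.signature` on any model (not part of the diff; `User` is an invented class): an alias wins as the parameter name when it is a valid identifier, and `extra='allow'` adds a var-keyword parameter.

import inspect

from pydantic import BaseModel, ConfigDict, Field

class User(BaseModel):
    model_config = ConfigDict(extra='allow')
    user_id: int = Field(alias='id')

params = inspect.signature(User).parameters
assert 'id' in params          # alias used as the parameter name
assert 'user_id' not in params
assert any(p.kind is inspect.Parameter.VAR_KEYWORD for p in params.values())
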
714
lib/pydantic/_internal/_std_types_schema.py
Normal file
@@ -0,0 +1,714 @@
"""Logic for generating pydantic-core schemas for standard library types.
|
||||
|
||||
Import of this module is deferred since it contains imports of many standard library modules.
|
||||
"""
|
||||
from __future__ import annotations as _annotations
|
||||
|
||||
import collections
|
||||
import collections.abc
|
||||
import dataclasses
|
||||
import decimal
|
||||
import inspect
|
||||
import os
|
||||
import typing
|
||||
from enum import Enum
|
||||
from functools import partial
|
||||
from ipaddress import IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network
|
||||
from typing import Any, Callable, Iterable, TypeVar
|
||||
|
||||
import typing_extensions
|
||||
from pydantic_core import (
|
||||
CoreSchema,
|
||||
MultiHostUrl,
|
||||
PydanticCustomError,
|
||||
PydanticOmit,
|
||||
Url,
|
||||
core_schema,
|
||||
)
|
||||
from typing_extensions import get_args, get_origin
|
||||
|
||||
from pydantic.errors import PydanticSchemaGenerationError
|
||||
from pydantic.fields import FieldInfo
|
||||
from pydantic.types import Strict
|
||||
|
||||
from ..config import ConfigDict
|
||||
from ..json_schema import JsonSchemaValue, update_json_schema
|
||||
from . import _known_annotated_metadata, _typing_extra, _validators
|
||||
from ._core_utils import get_type_ref
|
||||
from ._internal_dataclass import slots_true
|
||||
from ._schema_generation_shared import GetCoreSchemaHandler, GetJsonSchemaHandler
|
||||
|
||||
if typing.TYPE_CHECKING:
|
||||
from ._generate_schema import GenerateSchema
|
||||
|
||||
StdSchemaFunction = Callable[[GenerateSchema, type[Any]], core_schema.CoreSchema]
|
||||
|
||||
|
||||
@dataclasses.dataclass(**slots_true)
|
||||
class SchemaTransformer:
|
||||
get_core_schema: Callable[[Any, GetCoreSchemaHandler], CoreSchema]
|
||||
get_json_schema: Callable[[CoreSchema, GetJsonSchemaHandler], JsonSchemaValue]
|
||||
|
||||
def __get_pydantic_core_schema__(self, source_type: Any, handler: GetCoreSchemaHandler) -> CoreSchema:
|
||||
return self.get_core_schema(source_type, handler)
|
||||
|
||||
def __get_pydantic_json_schema__(self, schema: CoreSchema, handler: GetJsonSchemaHandler) -> JsonSchemaValue:
|
||||
return self.get_json_schema(schema, handler)
|
||||
|
||||
|
||||
def get_enum_core_schema(enum_type: type[Enum], config: ConfigDict) -> CoreSchema:
|
||||
cases: list[Any] = list(enum_type.__members__.values())
|
||||
|
||||
enum_ref = get_type_ref(enum_type)
|
||||
description = None if not enum_type.__doc__ else inspect.cleandoc(enum_type.__doc__)
|
||||
if description == 'An enumeration.': # This is the default value provided by enum.EnumMeta.__new__; don't use it
|
||||
description = None
|
||||
updates = {'title': enum_type.__name__, 'description': description}
|
||||
updates = {k: v for k, v in updates.items() if v is not None}
|
||||
|
||||
def get_json_schema(_, handler: GetJsonSchemaHandler) -> JsonSchemaValue:
|
||||
json_schema = handler(core_schema.literal_schema([x.value for x in cases], ref=enum_ref))
|
||||
original_schema = handler.resolve_ref_schema(json_schema)
|
||||
update_json_schema(original_schema, updates)
|
||||
return json_schema
|
||||
|
||||
if not cases:
|
||||
# Use an isinstance check for enums with no cases.
|
||||
# The most important use case for this is creating TypeVar bounds for generics that should
|
||||
# be restricted to enums. This is more consistent than it might seem at first, since you can only
|
||||
# subclass enum.Enum (or subclasses of enum.Enum) if all parent classes have no cases.
|
||||
# We use the get_json_schema function when an Enum subclass has been declared with no cases
|
||||
# so that we can still generate a valid json schema.
|
||||
return core_schema.is_instance_schema(enum_type, metadata={'pydantic_js_functions': [get_json_schema]})
|
||||
|
||||
use_enum_values = config.get('use_enum_values', False)
|
||||
|
||||
if len(cases) == 1:
|
||||
expected = repr(cases[0].value)
|
||||
else:
|
||||
expected = ', '.join([repr(case.value) for case in cases[:-1]]) + f' or {cases[-1].value!r}'
|
||||
|
||||
def to_enum(__input_value: Any) -> Enum:
|
||||
try:
|
||||
enum_field = enum_type(__input_value)
|
||||
if use_enum_values:
|
||||
return enum_field.value
|
||||
return enum_field
|
||||
except ValueError:
|
||||
# The type: ignore on the next line is to ignore the requirement of LiteralString
|
||||
raise PydanticCustomError('enum', f'Input should be {expected}', {'expected': expected}) # type: ignore
|
||||
|
||||
strict_python_schema = core_schema.is_instance_schema(enum_type)
|
||||
if use_enum_values:
|
||||
strict_python_schema = core_schema.chain_schema(
|
||||
[strict_python_schema, core_schema.no_info_plain_validator_function(lambda x: x.value)]
|
||||
)
|
||||
|
||||
to_enum_validator = core_schema.no_info_plain_validator_function(to_enum)
|
||||
if issubclass(enum_type, int):
|
||||
# this handles `IntEnum`, and also `Foobar(int, Enum)`
|
||||
updates['type'] = 'integer'
|
||||
lax = core_schema.chain_schema([core_schema.int_schema(), to_enum_validator])
|
||||
# Disallow float from JSON due to strict mode
|
||||
strict = core_schema.json_or_python_schema(
|
||||
json_schema=core_schema.no_info_after_validator_function(to_enum, core_schema.int_schema()),
|
||||
python_schema=strict_python_schema,
|
||||
)
|
||||
elif issubclass(enum_type, str):
|
||||
# this handles `StrEnum` (3.11 only), and also `Foobar(str, Enum)`
|
||||
updates['type'] = 'string'
|
||||
lax = core_schema.chain_schema([core_schema.str_schema(), to_enum_validator])
|
||||
strict = core_schema.json_or_python_schema(
|
||||
json_schema=core_schema.no_info_after_validator_function(to_enum, core_schema.str_schema()),
|
||||
python_schema=strict_python_schema,
|
||||
)
|
||||
elif issubclass(enum_type, float):
|
||||
updates['type'] = 'numeric'
|
||||
lax = core_schema.chain_schema([core_schema.float_schema(), to_enum_validator])
|
||||
strict = core_schema.json_or_python_schema(
|
||||
json_schema=core_schema.no_info_after_validator_function(to_enum, core_schema.float_schema()),
|
||||
python_schema=strict_python_schema,
|
||||
)
|
||||
else:
|
||||
lax = to_enum_validator
|
||||
strict = core_schema.json_or_python_schema(json_schema=to_enum_validator, python_schema=strict_python_schema)
|
||||
return core_schema.lax_or_strict_schema(
|
||||
lax_schema=lax, strict_schema=strict, ref=enum_ref, metadata={'pydantic_js_functions': [get_json_schema]}
|
||||
)
|
||||
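
# Reviewer sketch (not part of the vendored file): observable behavior of the
# enum schema built above; the enum itself is illustrative.
#
#     from enum import IntEnum
#     from pydantic import TypeAdapter
#
#     class Color(IntEnum):
#         RED = 1
#         GREEN = 2
#
#     ta = TypeAdapter(Color)
#     assert ta.validate_python(1) is Color.RED    # lax path: int schema -> to_enum chain
#     assert ta.validate_json('2') is Color.GREEN  # JSON ints validate too
#     # Invalid members fail with the "Input should be 1 or 2" message built above.
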
|
||||
|
||||
@dataclasses.dataclass(**slots_true)
|
||||
class InnerSchemaValidator:
|
||||
"""Use a fixed CoreSchema, avoiding interference from outward annotations."""
|
||||
|
||||
core_schema: CoreSchema
|
||||
js_schema: JsonSchemaValue | None = None
|
||||
js_core_schema: CoreSchema | None = None
|
||||
js_schema_update: JsonSchemaValue | None = None
|
||||
|
||||
def __get_pydantic_json_schema__(self, _schema: CoreSchema, handler: GetJsonSchemaHandler) -> JsonSchemaValue:
|
||||
if self.js_schema is not None:
|
||||
return self.js_schema
|
||||
js_schema = handler(self.js_core_schema or self.core_schema)
|
||||
if self.js_schema_update is not None:
|
||||
js_schema.update(self.js_schema_update)
|
||||
return js_schema
|
||||
|
||||
def __get_pydantic_core_schema__(self, _source_type: Any, _handler: GetCoreSchemaHandler) -> CoreSchema:
|
||||
return self.core_schema
|
||||
|
||||
|
||||
def decimal_prepare_pydantic_annotations(
|
||||
source: Any, annotations: Iterable[Any], config: ConfigDict
|
||||
) -> tuple[Any, list[Any]] | None:
|
||||
if source is not decimal.Decimal:
|
||||
return None
|
||||
|
||||
metadata, remaining_annotations = _known_annotated_metadata.collect_known_metadata(annotations)
|
||||
|
||||
config_allow_inf_nan = config.get('allow_inf_nan')
|
||||
if config_allow_inf_nan is not None:
|
||||
metadata.setdefault('allow_inf_nan', config_allow_inf_nan)
|
||||
|
||||
_known_annotated_metadata.check_metadata(
|
||||
metadata, {*_known_annotated_metadata.FLOAT_CONSTRAINTS, 'max_digits', 'decimal_places'}, decimal.Decimal
|
||||
)
|
||||
return source, [InnerSchemaValidator(core_schema.decimal_schema(**metadata)), *remaining_annotations]
|
||||
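
# A sketch of how the metadata collected above surfaces through the public API:
# Field constraints on a Decimal become arguments to core_schema.decimal_schema.
from decimal import Decimal
from typing import Annotated

from pydantic import Field, TypeAdapter

Money = Annotated[Decimal, Field(max_digits=5, decimal_places=2)]
assert TypeAdapter(Money).validate_python('123.45') == Decimal('123.45')
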
|
||||
|
||||
def datetime_prepare_pydantic_annotations(
|
||||
source_type: Any, annotations: Iterable[Any], _config: ConfigDict
|
||||
) -> tuple[Any, list[Any]] | None:
|
||||
import datetime
|
||||
|
||||
metadata, remaining_annotations = _known_annotated_metadata.collect_known_metadata(annotations)
|
||||
if source_type is datetime.date:
|
||||
sv = InnerSchemaValidator(core_schema.date_schema(**metadata))
|
||||
elif source_type is datetime.datetime:
|
||||
sv = InnerSchemaValidator(core_schema.datetime_schema(**metadata))
|
||||
elif source_type is datetime.time:
|
||||
sv = InnerSchemaValidator(core_schema.time_schema(**metadata))
|
||||
elif source_type is datetime.timedelta:
|
||||
sv = InnerSchemaValidator(core_schema.timedelta_schema(**metadata))
|
||||
else:
|
||||
return None
|
||||
# check now that we know the source type is correct
|
||||
_known_annotated_metadata.check_metadata(metadata, _known_annotated_metadata.DATE_TIME_CONSTRAINTS, source_type)
|
||||
return (source_type, [sv, *remaining_annotations])
|
||||
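
# A sketch of the same flow for dates, assuming annotated-types metadata (Gt) is
# among the known DATE_TIME_CONSTRAINTS collected above:
import datetime
from typing import Annotated

from annotated_types import Gt
from pydantic import TypeAdapter, ValidationError

after_y2k = TypeAdapter(Annotated[datetime.date, Gt(datetime.date(2000, 1, 1))])
assert after_y2k.validate_python('2024-06-01') == datetime.date(2024, 6, 1)
try:
    after_y2k.validate_python('1999-12-31')
except ValidationError:
    pass  # dates on or before 2000-01-01 fail the gt constraint
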
|
||||
|
||||
def uuid_prepare_pydantic_annotations(
|
||||
source_type: Any, annotations: Iterable[Any], _config: ConfigDict
|
||||
) -> tuple[Any, list[Any]] | None:
|
||||
# UUIDs have no constraints - they are fixed length, constructing a UUID instance checks the length
|
||||
|
||||
from uuid import UUID
|
||||
|
||||
if source_type is not UUID:
|
||||
return None
|
||||
|
||||
return (source_type, [InnerSchemaValidator(core_schema.uuid_schema()), *annotations])
|
||||
|
||||
|
||||
def path_schema_prepare_pydantic_annotations(
|
||||
source_type: Any, annotations: Iterable[Any], _config: ConfigDict
|
||||
) -> tuple[Any, list[Any]] | None:
|
||||
import pathlib
|
||||
|
||||
if source_type not in {
|
||||
os.PathLike,
|
||||
pathlib.Path,
|
||||
pathlib.PurePath,
|
||||
pathlib.PosixPath,
|
||||
pathlib.PurePosixPath,
|
||||
pathlib.PureWindowsPath,
|
||||
}:
|
||||
return None
|
||||
|
||||
metadata, remaining_annotations = _known_annotated_metadata.collect_known_metadata(annotations)
|
||||
_known_annotated_metadata.check_metadata(metadata, _known_annotated_metadata.STR_CONSTRAINTS, source_type)
|
||||
|
||||
construct_path = pathlib.PurePath if source_type is os.PathLike else source_type
|
||||
|
||||
def path_validator(input_value: str) -> os.PathLike[Any]:
|
||||
try:
|
||||
return construct_path(input_value)
|
||||
except TypeError as e:
|
||||
raise PydanticCustomError('path_type', 'Input is not a valid path') from e
|
||||
|
||||
constrained_str_schema = core_schema.str_schema(**metadata)
|
||||
|
||||
instance_schema = core_schema.json_or_python_schema(
|
||||
json_schema=core_schema.no_info_after_validator_function(path_validator, constrained_str_schema),
|
||||
python_schema=core_schema.is_instance_schema(source_type),
|
||||
)
|
||||
|
||||
strict: bool | None = None
|
||||
for annotation in annotations:
|
||||
if isinstance(annotation, Strict):
|
||||
strict = annotation.strict
|
||||
|
||||
schema = core_schema.lax_or_strict_schema(
|
||||
lax_schema=core_schema.union_schema(
|
||||
[
|
||||
instance_schema,
|
||||
core_schema.no_info_after_validator_function(path_validator, constrained_str_schema),
|
||||
],
|
||||
custom_error_type='path_type',
|
||||
custom_error_message='Input is not a valid path',
|
||||
strict=True,
|
||||
),
|
||||
strict_schema=instance_schema,
|
||||
serialization=core_schema.to_string_ser_schema(),
|
||||
strict=strict,
|
||||
)
|
||||
|
||||
return (
|
||||
source_type,
|
||||
[
|
||||
InnerSchemaValidator(schema, js_core_schema=constrained_str_schema, js_schema_update={'format': 'path'}),
|
||||
*remaining_annotations,
|
||||
],
|
||||
)
|
||||
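
# Sketch of the resulting lax vs strict behaviour for paths: lax mode coerces a
# string through path_validator, while strict mode only accepts real instances.
import pathlib

from pydantic import TypeAdapter

path_adapter = TypeAdapter(pathlib.Path)
assert path_adapter.validate_python('/tmp/example') == pathlib.Path('/tmp/example')
assert path_adapter.validate_python(pathlib.Path('/tmp/example'), strict=True) == pathlib.Path('/tmp/example')
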
|
||||
|
||||
def dequeue_validator(
|
||||
input_value: Any, handler: core_schema.ValidatorFunctionWrapHandler, maxlen: None | int
|
||||
) -> collections.deque[Any]:
|
||||
if isinstance(input_value, collections.deque):
|
||||
maxlens = [v for v in (input_value.maxlen, maxlen) if v is not None]
|
||||
if maxlens:
|
||||
maxlen = min(maxlens)
|
||||
return collections.deque(handler(input_value), maxlen=maxlen)
|
||||
else:
|
||||
return collections.deque(handler(input_value), maxlen=maxlen)
|
||||
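
# Sketch of what the validator above enables: a MaxLen annotation doubles as the
# default maxlen of the resulting deque, so later appends roll old items off.
from typing import Annotated, Deque

from annotated_types import MaxLen
from pydantic import TypeAdapter

dq = TypeAdapter(Annotated[Deque[int], MaxLen(3)]).validate_python([1, 2, 3])
assert dq.maxlen == 3
dq.append(4)
assert list(dq) == [2, 3, 4]  # the annotation's max length carried over as maxlen
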
|
||||
|
||||
@dataclasses.dataclass(**slots_true)
|
||||
class SequenceValidator:
|
||||
mapped_origin: type[Any]
|
||||
item_source_type: type[Any]
|
||||
min_length: int | None = None
|
||||
max_length: int | None = None
|
||||
strict: bool = False
|
||||
|
||||
def serialize_sequence_via_list(
|
||||
self, v: Any, handler: core_schema.SerializerFunctionWrapHandler, info: core_schema.SerializationInfo
|
||||
) -> Any:
|
||||
items: list[Any] = []
|
||||
for index, item in enumerate(v):
|
||||
try:
|
||||
v = handler(item, index)
|
||||
except PydanticOmit:
|
||||
pass
|
||||
else:
|
||||
items.append(v)
|
||||
|
||||
if info.mode_is_json():
|
||||
return items
|
||||
else:
|
||||
return self.mapped_origin(items)
|
||||
|
||||
def __get_pydantic_core_schema__(self, source_type: Any, handler: GetCoreSchemaHandler) -> CoreSchema:
|
||||
if self.item_source_type is Any:
|
||||
items_schema = None
|
||||
else:
|
||||
items_schema = handler.generate_schema(self.item_source_type)
|
||||
|
||||
metadata = {'min_length': self.min_length, 'max_length': self.max_length, 'strict': self.strict}
|
||||
|
||||
if self.mapped_origin in (list, set, frozenset):
|
||||
if self.mapped_origin is list:
|
||||
constrained_schema = core_schema.list_schema(items_schema, **metadata)
|
||||
elif self.mapped_origin is set:
|
||||
constrained_schema = core_schema.set_schema(items_schema, **metadata)
|
||||
else:
|
||||
assert self.mapped_origin is frozenset # safety check in case we forget to add a case
|
||||
constrained_schema = core_schema.frozenset_schema(items_schema, **metadata)
|
||||
|
||||
schema = constrained_schema
|
||||
else:
|
||||
# safety check in case we forget to add a case
|
||||
assert self.mapped_origin in (collections.deque, collections.Counter)
|
||||
|
||||
if self.mapped_origin is collections.deque:
|
||||
# if we have a MaxLen annotation, we might as well set it as the default maxlen on the deque
|
||||
# this lets us re-use existing metadata annotations to let users set the maxlen on a deque
|
||||
# that e.g. comes from JSON
|
||||
coerce_instance_wrap = partial(
|
||||
core_schema.no_info_wrap_validator_function,
|
||||
partial(dequeue_validator, maxlen=metadata.get('max_length', None)),
|
||||
)
|
||||
else:
|
||||
coerce_instance_wrap = partial(core_schema.no_info_after_validator_function, self.mapped_origin)
|
||||
|
||||
constrained_schema = core_schema.list_schema(items_schema, **metadata)
|
||||
|
||||
check_instance = core_schema.json_or_python_schema(
|
||||
json_schema=core_schema.list_schema(),
|
||||
python_schema=core_schema.is_instance_schema(self.mapped_origin),
|
||||
)
|
||||
|
||||
serialization = core_schema.wrap_serializer_function_ser_schema(
|
||||
self.serialize_sequence_via_list, schema=items_schema or core_schema.any_schema(), info_arg=True
|
||||
)
|
||||
|
||||
strict = core_schema.chain_schema([check_instance, coerce_instance_wrap(constrained_schema)])
|
||||
|
||||
if metadata.get('strict', False):
|
||||
schema = strict
|
||||
else:
|
||||
lax = coerce_instance_wrap(constrained_schema)
|
||||
schema = core_schema.lax_or_strict_schema(lax_schema=lax, strict_schema=strict)
|
||||
schema['serialization'] = serialization
|
||||
|
||||
return schema
|
||||
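
# Quick sketch of the lax path above: other sequences are coerced into the mapped
# origin, so a list validates into a set (with lax item coercion along the way).
from typing import Set

from pydantic import TypeAdapter

assert TypeAdapter(Set[int]).validate_python([1, 2, 2, '3']) == {1, 2, 3}
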
|
||||
|
||||
SEQUENCE_ORIGIN_MAP: dict[Any, Any] = {
|
||||
typing.Deque: collections.deque,
|
||||
collections.deque: collections.deque,
|
||||
list: list,
|
||||
typing.List: list,
|
||||
set: set,
|
||||
typing.AbstractSet: set,
|
||||
typing.Set: set,
|
||||
frozenset: frozenset,
|
||||
typing.FrozenSet: frozenset,
|
||||
typing.Sequence: list,
|
||||
typing.MutableSequence: list,
|
||||
typing.MutableSet: set,
|
||||
# this doesn't handle subclasses of these
|
||||
# parametrized typing.Set creates one of these
|
||||
collections.abc.MutableSet: set,
|
||||
collections.abc.Set: frozenset,
|
||||
}
|
||||
|
||||
|
||||
def identity(s: CoreSchema) -> CoreSchema:
|
||||
return s
|
||||
|
||||
|
||||
def sequence_like_prepare_pydantic_annotations(
|
||||
source_type: Any, annotations: Iterable[Any], _config: ConfigDict
|
||||
) -> tuple[Any, list[Any]] | None:
|
||||
origin: Any = get_origin(source_type)
|
||||
|
||||
mapped_origin = SEQUENCE_ORIGIN_MAP.get(origin, None) if origin else SEQUENCE_ORIGIN_MAP.get(source_type, None)
|
||||
if mapped_origin is None:
|
||||
return None
|
||||
|
||||
args = get_args(source_type)
|
||||
|
||||
if not args:
|
||||
args = (Any,)
|
||||
elif len(args) != 1:
|
||||
raise ValueError('Expected sequence to have exactly 1 generic parameter')
|
||||
|
||||
item_source_type = args[0]
|
||||
|
||||
metadata, remaining_annotations = _known_annotated_metadata.collect_known_metadata(annotations)
|
||||
_known_annotated_metadata.check_metadata(metadata, _known_annotated_metadata.SEQUENCE_CONSTRAINTS, source_type)
|
||||
|
||||
return (source_type, [SequenceValidator(mapped_origin, item_source_type, **metadata), *remaining_annotations])
|
||||
|
||||
|
||||
MAPPING_ORIGIN_MAP: dict[Any, Any] = {
|
||||
typing.DefaultDict: collections.defaultdict,
|
||||
collections.defaultdict: collections.defaultdict,
|
||||
collections.OrderedDict: collections.OrderedDict,
|
||||
typing_extensions.OrderedDict: collections.OrderedDict,
|
||||
dict: dict,
|
||||
typing.Dict: dict,
|
||||
collections.Counter: collections.Counter,
|
||||
typing.Counter: collections.Counter,
|
||||
# this doesn't handle subclasses of these
|
||||
typing.Mapping: dict,
|
||||
typing.MutableMapping: dict,
|
||||
# parametrized typing.{Mutable}Mapping creates one of these
|
||||
collections.abc.MutableMapping: dict,
|
||||
collections.abc.Mapping: dict,
|
||||
}
|
||||
|
||||
|
||||
def defaultdict_validator(
|
||||
input_value: Any, handler: core_schema.ValidatorFunctionWrapHandler, default_default_factory: Callable[[], Any]
|
||||
) -> collections.defaultdict[Any, Any]:
|
||||
if isinstance(input_value, collections.defaultdict):
|
||||
default_factory = input_value.default_factory
|
||||
return collections.defaultdict(default_factory, handler(input_value))
|
||||
else:
|
||||
return collections.defaultdict(default_default_factory, handler(input_value))
|
||||
|
||||
|
||||
def get_defaultdict_default_default_factory(values_source_type: Any) -> Callable[[], Any]:
|
||||
def infer_default() -> Callable[[], Any]:
|
||||
allowed_default_types: dict[Any, Any] = {
|
||||
typing.Tuple: tuple,
|
||||
tuple: tuple,
|
||||
collections.abc.Sequence: tuple,
|
||||
collections.abc.MutableSequence: list,
|
||||
typing.List: list,
|
||||
list: list,
|
||||
typing.Sequence: list,
|
||||
typing.Set: set,
|
||||
set: set,
|
||||
typing.MutableSet: set,
|
||||
collections.abc.MutableSet: set,
|
||||
collections.abc.Set: frozenset,
|
||||
typing.MutableMapping: dict,
|
||||
typing.Mapping: dict,
|
||||
collections.abc.Mapping: dict,
|
||||
collections.abc.MutableMapping: dict,
|
||||
float: float,
|
||||
int: int,
|
||||
str: str,
|
||||
bool: bool,
|
||||
}
|
||||
values_type_origin = get_origin(values_source_type) or values_source_type
|
||||
instructions = 'set using `DefaultDict[..., Annotated[..., Field(default_factory=...)]]`'
|
||||
if isinstance(values_type_origin, TypeVar):
|
||||
|
||||
def type_var_default_factory() -> None:
|
||||
raise RuntimeError(
|
||||
'Generic defaultdict cannot be used without a concrete value type or an'
|
||||
' explicit default factory, ' + instructions
|
||||
)
|
||||
|
||||
return type_var_default_factory
|
||||
elif values_type_origin not in allowed_default_types:
|
||||
# a somewhat subjective set of types that have reasonable default values
|
||||
allowed_msg = ', '.join([t.__name__ for t in set(allowed_default_types.values())])
|
||||
raise PydanticSchemaGenerationError(
|
||||
f'Unable to infer a default factory for values of type {values_source_type}.'
|
||||
f' Only {allowed_msg} are supported, other types require an explicit default factory'
|
||||
' ' + instructions
|
||||
)
|
||||
return allowed_default_types[values_type_origin]
|
||||
|
||||
# Assume Annotated[..., Field(...)]
|
||||
if _typing_extra.is_annotated(values_source_type):
|
||||
field_info = next((v for v in get_args(values_source_type) if isinstance(v, FieldInfo)), None)
|
||||
else:
|
||||
field_info = None
|
||||
if field_info and field_info.default_factory:
|
||||
default_default_factory = field_info.default_factory
|
||||
else:
|
||||
default_default_factory = infer_default()
|
||||
return default_default_factory
|
||||
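
# Sketch of the inferred factory in action through the public API: for list
# values, missing keys of the validated defaultdict default to [].
from typing import DefaultDict, List

from pydantic import TypeAdapter

dd = TypeAdapter(DefaultDict[str, List[int]]).validate_python({'a': [1]})
assert dd['missing'] == []  # list was inferred as the default factory
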
|
||||
|
||||
@dataclasses.dataclass(**slots_true)
|
||||
class MappingValidator:
|
||||
mapped_origin: type[Any]
|
||||
keys_source_type: type[Any]
|
||||
values_source_type: type[Any]
|
||||
min_length: int | None = None
|
||||
max_length: int | None = None
|
||||
strict: bool = False
|
||||
|
||||
def serialize_mapping_via_dict(self, v: Any, handler: core_schema.SerializerFunctionWrapHandler) -> Any:
|
||||
return handler(v)
|
||||
|
||||
def __get_pydantic_core_schema__(self, source_type: Any, handler: GetCoreSchemaHandler) -> CoreSchema:
|
||||
if self.keys_source_type is Any:
|
||||
keys_schema = None
|
||||
else:
|
||||
keys_schema = handler.generate_schema(self.keys_source_type)
|
||||
if self.values_source_type is Any:
|
||||
values_schema = None
|
||||
else:
|
||||
values_schema = handler.generate_schema(self.values_source_type)
|
||||
|
||||
metadata = {'min_length': self.min_length, 'max_length': self.max_length, 'strict': self.strict}
|
||||
|
||||
if self.mapped_origin is dict:
|
||||
schema = core_schema.dict_schema(keys_schema, values_schema, **metadata)
|
||||
else:
|
||||
constrained_schema = core_schema.dict_schema(keys_schema, values_schema, **metadata)
|
||||
check_instance = core_schema.json_or_python_schema(
|
||||
json_schema=core_schema.dict_schema(),
|
||||
python_schema=core_schema.is_instance_schema(self.mapped_origin),
|
||||
)
|
||||
|
||||
if self.mapped_origin is collections.defaultdict:
|
||||
default_default_factory = get_defaultdict_default_default_factory(self.values_source_type)
|
||||
coerce_instance_wrap = partial(
|
||||
core_schema.no_info_wrap_validator_function,
|
||||
partial(defaultdict_validator, default_default_factory=default_default_factory),
|
||||
)
|
||||
else:
|
||||
coerce_instance_wrap = partial(core_schema.no_info_after_validator_function, self.mapped_origin)
|
||||
|
||||
serialization = core_schema.wrap_serializer_function_ser_schema(
|
||||
self.serialize_mapping_via_dict,
|
||||
schema=core_schema.dict_schema(
|
||||
keys_schema or core_schema.any_schema(), values_schema or core_schema.any_schema()
|
||||
),
|
||||
info_arg=False,
|
||||
)
|
||||
|
||||
strict = core_schema.chain_schema([check_instance, coerce_instance_wrap(constrained_schema)])
|
||||
|
||||
if metadata.get('strict', False):
|
||||
schema = strict
|
||||
else:
|
||||
lax = coerce_instance_wrap(constrained_schema)
|
||||
schema = core_schema.lax_or_strict_schema(lax_schema=lax, strict_schema=strict)
|
||||
schema['serialization'] = serialization
|
||||
|
||||
return schema
|
||||
|
||||
|
||||
def mapping_like_prepare_pydantic_annotations(
|
||||
source_type: Any, annotations: Iterable[Any], _config: ConfigDict
|
||||
) -> tuple[Any, list[Any]] | None:
|
||||
origin: Any = get_origin(source_type)
|
||||
|
||||
mapped_origin = MAPPING_ORIGIN_MAP.get(origin, None) if origin else MAPPING_ORIGIN_MAP.get(source_type, None)
|
||||
if mapped_origin is None:
|
||||
return None
|
||||
|
||||
args = get_args(source_type)
|
||||
|
||||
if not args:
|
||||
args = (Any, Any)
|
||||
elif mapped_origin is collections.Counter:
|
||||
# a single generic
|
||||
if len(args) != 1:
|
||||
raise ValueError('Expected Counter to have exactly 1 generic parameter')
|
||||
args = (args[0], int)  # values are always an int
|
||||
elif len(args) != 2:
|
||||
raise ValueError('Expected mapping to have exactly 2 generic parameters')
|
||||
|
||||
keys_source_type, values_source_type = args
|
||||
|
||||
metadata, remaining_annotations = _known_annotated_metadata.collect_known_metadata(annotations)
|
||||
_known_annotated_metadata.check_metadata(metadata, _known_annotated_metadata.SEQUENCE_CONSTRAINTS, source_type)
|
||||
|
||||
return (
|
||||
source_type,
|
||||
[
|
||||
MappingValidator(mapped_origin, keys_source_type, values_source_type, **metadata),
|
||||
*remaining_annotations,
|
||||
],
|
||||
)
|
||||
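
# Sketch for the Counter special case above: one generic parameter, int values.
from typing import Counter

from pydantic import TypeAdapter

counts = TypeAdapter(Counter[str]).validate_python({'a': '2'})
assert counts['a'] == 2 and counts['missing'] == 0  # Counter semantics preserved
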
|
||||
|
||||
def ip_prepare_pydantic_annotations(
|
||||
source_type: Any, annotations: Iterable[Any], _config: ConfigDict
|
||||
) -> tuple[Any, list[Any]] | None:
|
||||
def make_strict_ip_schema(tp: type[Any]) -> CoreSchema:
|
||||
return core_schema.json_or_python_schema(
|
||||
json_schema=core_schema.no_info_after_validator_function(tp, core_schema.str_schema()),
|
||||
python_schema=core_schema.is_instance_schema(tp),
|
||||
)
|
||||
|
||||
if source_type is IPv4Address:
|
||||
return source_type, [
|
||||
SchemaTransformer(
|
||||
lambda _1, _2: core_schema.lax_or_strict_schema(
|
||||
lax_schema=core_schema.no_info_plain_validator_function(_validators.ip_v4_address_validator),
|
||||
strict_schema=make_strict_ip_schema(IPv4Address),
|
||||
serialization=core_schema.to_string_ser_schema(),
|
||||
),
|
||||
lambda _1, _2: {'type': 'string', 'format': 'ipv4'},
|
||||
),
|
||||
*annotations,
|
||||
]
|
||||
if source_type is IPv4Network:
|
||||
return source_type, [
|
||||
SchemaTransformer(
|
||||
lambda _1, _2: core_schema.lax_or_strict_schema(
|
||||
lax_schema=core_schema.no_info_plain_validator_function(_validators.ip_v4_network_validator),
|
||||
strict_schema=make_strict_ip_schema(IPv4Network),
|
||||
serialization=core_schema.to_string_ser_schema(),
|
||||
),
|
||||
lambda _1, _2: {'type': 'string', 'format': 'ipv4network'},
|
||||
),
|
||||
*annotations,
|
||||
]
|
||||
if source_type is IPv4Interface:
|
||||
return source_type, [
|
||||
SchemaTransformer(
|
||||
lambda _1, _2: core_schema.lax_or_strict_schema(
|
||||
lax_schema=core_schema.no_info_plain_validator_function(_validators.ip_v4_interface_validator),
|
||||
strict_schema=make_strict_ip_schema(IPv4Interface),
|
||||
serialization=core_schema.to_string_ser_schema(),
|
||||
),
|
||||
lambda _1, _2: {'type': 'string', 'format': 'ipv4interface'},
|
||||
),
|
||||
*annotations,
|
||||
]
|
||||
|
||||
if source_type is IPv6Address:
|
||||
return source_type, [
|
||||
SchemaTransformer(
|
||||
lambda _1, _2: core_schema.lax_or_strict_schema(
|
||||
lax_schema=core_schema.no_info_plain_validator_function(_validators.ip_v6_address_validator),
|
||||
strict_schema=make_strict_ip_schema(IPv6Address),
|
||||
serialization=core_schema.to_string_ser_schema(),
|
||||
),
|
||||
lambda _1, _2: {'type': 'string', 'format': 'ipv6'},
|
||||
),
|
||||
*annotations,
|
||||
]
|
||||
if source_type is IPv6Network:
|
||||
return source_type, [
|
||||
SchemaTransformer(
|
||||
lambda _1, _2: core_schema.lax_or_strict_schema(
|
||||
lax_schema=core_schema.no_info_plain_validator_function(_validators.ip_v6_network_validator),
|
||||
strict_schema=make_strict_ip_schema(IPv6Network),
|
||||
serialization=core_schema.to_string_ser_schema(),
|
||||
),
|
||||
lambda _1, _2: {'type': 'string', 'format': 'ipv6network'},
|
||||
),
|
||||
*annotations,
|
||||
]
|
||||
if source_type is IPv6Interface:
|
||||
return source_type, [
|
||||
SchemaTransformer(
|
||||
lambda _1, _2: core_schema.lax_or_strict_schema(
|
||||
lax_schema=core_schema.no_info_plain_validator_function(_validators.ip_v6_interface_validator),
|
||||
strict_schema=make_strict_ip_schema(IPv6Interface),
|
||||
serialization=core_schema.to_string_ser_schema(),
|
||||
),
|
||||
lambda _1, _2: {'type': 'string', 'format': 'ipv6interface'},
|
||||
),
|
||||
*annotations,
|
||||
]
|
||||
|
||||
return None
|
||||
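
# Sketch of the lax/strict split above: strings validate in lax mode, while
# strict mode requires an actual ipaddress instance.
from ipaddress import IPv4Address

from pydantic import TypeAdapter

ip_adapter = TypeAdapter(IPv4Address)
assert ip_adapter.validate_python('127.0.0.1') == IPv4Address('127.0.0.1')
assert ip_adapter.validate_python(IPv4Address('10.0.0.1'), strict=True) == IPv4Address('10.0.0.1')
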
|
||||
|
||||
def url_prepare_pydantic_annotations(
|
||||
source_type: Any, annotations: Iterable[Any], _config: ConfigDict
|
||||
) -> tuple[Any, list[Any]] | None:
|
||||
if source_type is Url:
|
||||
return source_type, [
|
||||
SchemaTransformer(
|
||||
lambda _1, _2: core_schema.url_schema(),
|
||||
lambda cs, handler: handler(cs),
|
||||
),
|
||||
*annotations,
|
||||
]
|
||||
if source_type is MultiHostUrl:
|
||||
return source_type, [
|
||||
SchemaTransformer(
|
||||
lambda _1, _2: core_schema.multi_host_url_schema(),
|
||||
lambda cs, handler: handler(cs),
|
||||
),
|
||||
*annotations,
|
||||
]
|
||||
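
# Sketch for the Url branch, assuming pydantic_core's Url type is used directly
# as the annotation:
from pydantic import TypeAdapter
from pydantic_core import Url

url = TypeAdapter(Url).validate_python('https://example.com/path')
assert url.host == 'example.com'
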
|
||||
|
||||
PREPARE_METHODS: tuple[Callable[[Any, Iterable[Any], ConfigDict], tuple[Any, list[Any]] | None], ...] = (
|
||||
decimal_prepare_pydantic_annotations,
|
||||
sequence_like_prepare_pydantic_annotations,
|
||||
datetime_prepare_pydantic_annotations,
|
||||
uuid_prepare_pydantic_annotations,
|
||||
path_schema_prepare_pydantic_annotations,
|
||||
mapping_like_prepare_pydantic_annotations,
|
||||
ip_prepare_pydantic_annotations,
|
||||
url_prepare_pydantic_annotations,
|
||||
)
|
469
lib/pydantic/_internal/_typing_extra.py
Normal file
|
@ -0,0 +1,469 @@
|
|||
"""Logic for interacting with type annotations, mostly extensions, shims and hacks to wrap python's typing module."""
|
||||
from __future__ import annotations as _annotations
|
||||
|
||||
import dataclasses
|
||||
import sys
|
||||
import types
|
||||
import typing
|
||||
from collections.abc import Callable
|
||||
from functools import partial
|
||||
from types import GetSetDescriptorType
|
||||
from typing import TYPE_CHECKING, Any, Final
|
||||
|
||||
from typing_extensions import Annotated, Literal, TypeAliasType, TypeGuard, get_args, get_origin
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ._dataclasses import StandardDataclass
|
||||
|
||||
try:
|
||||
from typing import _TypingBase # type: ignore[attr-defined]
|
||||
except ImportError:
|
||||
from typing import _Final as _TypingBase # type: ignore[attr-defined]
|
||||
|
||||
typing_base = _TypingBase
|
||||
|
||||
|
||||
if sys.version_info < (3, 9):
|
||||
# python < 3.9 does not have GenericAlias (list[int], tuple[str, ...] and so on)
|
||||
TypingGenericAlias = ()
|
||||
else:
|
||||
from typing import GenericAlias as TypingGenericAlias # type: ignore
|
||||
|
||||
|
||||
if sys.version_info < (3, 11):
|
||||
from typing_extensions import NotRequired, Required
|
||||
else:
|
||||
from typing import NotRequired, Required # noqa: F401
|
||||
|
||||
|
||||
if sys.version_info < (3, 10):
|
||||
|
||||
def origin_is_union(tp: type[Any] | None) -> bool:
|
||||
return tp is typing.Union
|
||||
|
||||
WithArgsTypes = (TypingGenericAlias,)
|
||||
|
||||
else:
|
||||
|
||||
def origin_is_union(tp: type[Any] | None) -> bool:
|
||||
return tp is typing.Union or tp is types.UnionType
|
||||
|
||||
WithArgsTypes = typing._GenericAlias, types.GenericAlias, types.UnionType # type: ignore[attr-defined]
|
||||
|
||||
|
||||
if sys.version_info < (3, 10):
|
||||
NoneType = type(None)
|
||||
EllipsisType = type(Ellipsis)
|
||||
else:
|
||||
from types import NoneType as NoneType
|
||||
|
||||
|
||||
LITERAL_TYPES: set[Any] = {Literal}
|
||||
if hasattr(typing, 'Literal'):
|
||||
LITERAL_TYPES.add(typing.Literal) # type: ignore
|
||||
|
||||
NONE_TYPES: tuple[Any, ...] = (None, NoneType, *(tp[None] for tp in LITERAL_TYPES))
|
||||
|
||||
|
||||
TypeVarType = Any # since mypy doesn't allow the use of TypeVar as a type
|
||||
|
||||
|
||||
def is_none_type(type_: Any) -> bool:
|
||||
return type_ in NONE_TYPES
|
||||
|
||||
|
||||
def is_callable_type(type_: type[Any]) -> bool:
|
||||
return type_ is Callable or get_origin(type_) is Callable
|
||||
|
||||
|
||||
def is_literal_type(type_: type[Any]) -> bool:
|
||||
return Literal is not None and get_origin(type_) in LITERAL_TYPES
|
||||
|
||||
|
||||
def literal_values(type_: type[Any]) -> tuple[Any, ...]:
|
||||
return get_args(type_)
|
||||
|
||||
|
||||
def all_literal_values(type_: type[Any]) -> list[Any]:
|
||||
"""This method is used to retrieve all Literal values as
|
||||
Literal can be used recursively (see https://www.python.org/dev/peps/pep-0586)
|
||||
e.g. `Literal[Literal[Literal[1, 2, 3], "foo"], 5, None]`.
|
||||
"""
|
||||
if not is_literal_type(type_):
|
||||
return [type_]
|
||||
|
||||
values = literal_values(type_)
|
||||
return list(x for value in values for x in all_literal_values(value))
|
||||
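
# A quick sketch of the flattening described above (calling this internal helper
# directly is an assumption about usage; typing itself also flattens eagerly):
assert all_literal_values(Literal[Literal[1, 2], 3]) == [1, 2, 3]
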
|
||||
|
||||
def is_annotated(ann_type: Any) -> bool:
|
||||
from ._utils import lenient_issubclass
|
||||
|
||||
origin = get_origin(ann_type)
|
||||
return origin is not None and lenient_issubclass(origin, Annotated)
|
||||
|
||||
|
||||
def is_namedtuple(type_: type[Any]) -> bool:
|
||||
"""Check if a given class is a named tuple.
|
||||
It can be either a `typing.NamedTuple` or `collections.namedtuple`.
|
||||
"""
|
||||
from ._utils import lenient_issubclass
|
||||
|
||||
return lenient_issubclass(type_, tuple) and hasattr(type_, '_fields')
|
||||
|
||||
|
||||
test_new_type = typing.NewType('test_new_type', str)
|
||||
|
||||
|
||||
def is_new_type(type_: type[Any]) -> bool:
|
||||
"""Check whether type_ was created using typing.NewType.
|
||||
|
||||
Can't use isinstance because it fails on Python < 3.10.
|
||||
"""
|
||||
return isinstance(type_, test_new_type.__class__) and hasattr(type_, '__supertype__') # type: ignore[arg-type]
|
||||
|
||||
|
||||
def _check_classvar(v: type[Any] | None) -> bool:
|
||||
if v is None:
|
||||
return False
|
||||
|
||||
return v.__class__ == typing.ClassVar.__class__ and getattr(v, '_name', None) == 'ClassVar'
|
||||
|
||||
|
||||
def is_classvar(ann_type: type[Any]) -> bool:
|
||||
if _check_classvar(ann_type) or _check_classvar(get_origin(ann_type)):
|
||||
return True
|
||||
|
||||
# this is an ugly workaround for class vars that contain forward references and are therefore themselves
|
||||
# forward references, see #3679
|
||||
if ann_type.__class__ == typing.ForwardRef and ann_type.__forward_arg__.startswith('ClassVar['): # type: ignore
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
|
||||
def _check_finalvar(v: type[Any] | None) -> bool:
|
||||
"""Check if a given type is a `typing.Final` type."""
|
||||
if v is None:
|
||||
return False
|
||||
|
||||
return v.__class__ == Final.__class__ and (sys.version_info < (3, 8) or getattr(v, '_name', None) == 'Final')
|
||||
|
||||
|
||||
def is_finalvar(ann_type: Any) -> bool:
|
||||
return _check_finalvar(ann_type) or _check_finalvar(get_origin(ann_type))
|
||||
|
||||
|
||||
def parent_frame_namespace(*, parent_depth: int = 2) -> dict[str, Any] | None:
|
||||
"""We allow use of items in parent namespace to get around the issue with `get_type_hints` only looking in the
|
||||
global module namespace. See https://github.com/pydantic/pydantic/issues/2678#issuecomment-1008139014 -> Scope
|
||||
and suggestion at the end of the next comment by @gvanrossum.
|
||||
|
||||
WARNING 1: it matters exactly where this is called. By default, this function will build a namespace from the
|
||||
parent of where it is called.
|
||||
|
||||
WARNING 2: this only looks in the parent namespace, not other parents since (AFAIK) there's no way to collect a
|
||||
dict of exactly what's in scope. Using `f_back` would work sometimes but would be very wrong and confusing in many
|
||||
other cases. See https://discuss.python.org/t/is-there-a-way-to-access-parent-nested-namespaces/20659.
|
||||
"""
|
||||
frame = sys._getframe(parent_depth)
|
||||
# if f_back is None, it's the global module namespace and we don't need to include it here
|
||||
if frame.f_back is None:
|
||||
return None
|
||||
else:
|
||||
return frame.f_locals
|
||||
|
||||
|
||||
def add_module_globals(obj: Any, globalns: dict[str, Any] | None = None) -> dict[str, Any]:
|
||||
module_name = getattr(obj, '__module__', None)
|
||||
if module_name:
|
||||
try:
|
||||
module_globalns = sys.modules[module_name].__dict__
|
||||
except KeyError:
|
||||
# happens occasionally, see https://github.com/pydantic/pydantic/issues/2363
|
||||
pass
|
||||
else:
|
||||
if globalns:
|
||||
return {**module_globalns, **globalns}
|
||||
else:
|
||||
# copy module globals to make sure it can't be updated later
|
||||
return module_globalns.copy()
|
||||
|
||||
return globalns or {}
|
||||
|
||||
|
||||
def get_cls_types_namespace(cls: type[Any], parent_namespace: dict[str, Any] | None = None) -> dict[str, Any]:
|
||||
ns = add_module_globals(cls, parent_namespace)
|
||||
ns[cls.__name__] = cls
|
||||
return ns
|
||||
|
||||
|
||||
def get_cls_type_hints_lenient(obj: Any, globalns: dict[str, Any] | None = None) -> dict[str, Any]:
|
||||
"""Collect annotations from a class, including those from parent classes.
|
||||
|
||||
Unlike `typing.get_type_hints`, this function will not error if a forward reference is not resolvable.
|
||||
"""
|
||||
hints = {}
|
||||
for base in reversed(obj.__mro__):
|
||||
ann = base.__dict__.get('__annotations__')
|
||||
localns = dict(vars(base))
|
||||
if ann is not None and ann is not GetSetDescriptorType:
|
||||
for name, value in ann.items():
|
||||
hints[name] = eval_type_lenient(value, globalns, localns)
|
||||
return hints
|
||||
|
||||
|
||||
def eval_type_lenient(value: Any, globalns: dict[str, Any] | None = None, localns: dict[str, Any] | None = None) -> Any:
|
||||
"""Behaves like typing._eval_type, except it won't raise an error if a forward reference can't be resolved."""
|
||||
if value is None:
|
||||
value = NoneType
|
||||
elif isinstance(value, str):
|
||||
value = _make_forward_ref(value, is_argument=False, is_class=True)
|
||||
|
||||
try:
|
||||
return eval_type_backport(value, globalns, localns)
|
||||
except NameError:
|
||||
# the point of this function is to be tolerant to this case
|
||||
return value
|
||||
|
||||
|
||||
def eval_type_backport(
|
||||
value: Any, globalns: dict[str, Any] | None = None, localns: dict[str, Any] | None = None
|
||||
) -> Any:
|
||||
"""Like `typing._eval_type`, but falls back to the `eval_type_backport` package if it's
|
||||
installed to let older Python versions use newer typing features.
|
||||
Specifically, this transforms `X | Y` into `typing.Union[X, Y]`
|
||||
and `list[X]` into `typing.List[X]` etc. (for all the types made generic in PEP 585)
|
||||
if the original syntax is not supported in the current Python version.
|
||||
"""
|
||||
try:
|
||||
return typing._eval_type( # type: ignore
|
||||
value, globalns, localns
|
||||
)
|
||||
except TypeError as e:
|
||||
if not (isinstance(value, typing.ForwardRef) and is_backport_fixable_error(e)):
|
||||
raise
|
||||
try:
|
||||
from eval_type_backport import eval_type_backport
|
||||
except ImportError:
|
||||
raise TypeError(
|
||||
f'You have a type annotation {value.__forward_arg__!r} '
|
||||
f'which makes use of newer typing features than are supported in your version of Python. '
|
||||
f'To handle this error, you should either remove the use of new syntax '
|
||||
f'or install the `eval_type_backport` package.'
|
||||
) from e
|
||||
|
||||
return eval_type_backport(value, globalns, localns, try_default=False)
|
||||
|
||||
|
||||
def is_backport_fixable_error(e: TypeError) -> bool:
|
||||
msg = str(e)
|
||||
return msg.startswith('unsupported operand type(s) for |: ') or "' object is not subscriptable" in msg
|
||||
|
||||
|
||||
def get_function_type_hints(
|
||||
function: Callable[..., Any], *, include_keys: set[str] | None = None, types_namespace: dict[str, Any] | None = None
|
||||
) -> dict[str, Any]:
|
||||
"""Like `typing.get_type_hints`, but doesn't convert `X` to `Optional[X]` if the default value is `None`, also
|
||||
copes with `partial`.
|
||||
"""
|
||||
if isinstance(function, partial):
|
||||
annotations = function.func.__annotations__
|
||||
else:
|
||||
annotations = function.__annotations__
|
||||
|
||||
globalns = add_module_globals(function)
|
||||
type_hints = {}
|
||||
for name, value in annotations.items():
|
||||
if include_keys is not None and name not in include_keys:
|
||||
continue
|
||||
if value is None:
|
||||
value = NoneType
|
||||
elif isinstance(value, str):
|
||||
value = _make_forward_ref(value)
|
||||
|
||||
type_hints[name] = eval_type_backport(value, globalns, types_namespace)
|
||||
|
||||
return type_hints
|
||||
|
||||
|
||||
if sys.version_info < (3, 9, 8) or (3, 10) <= sys.version_info < (3, 10, 1):
|
||||
|
||||
def _make_forward_ref(
|
||||
arg: Any,
|
||||
is_argument: bool = True,
|
||||
*,
|
||||
is_class: bool = False,
|
||||
) -> typing.ForwardRef:
|
||||
"""Wrapper for ForwardRef that accounts for the `is_class` argument missing in older versions.
|
||||
The `module` argument is omitted as it breaks <3.9.8, =3.10.0 and isn't used in the calls below.
|
||||
|
||||
See https://github.com/python/cpython/pull/28560 for some background.
|
||||
The backport happened on 3.9.8, see:
|
||||
https://github.com/pydantic/pydantic/discussions/6244#discussioncomment-6275458,
|
||||
and on 3.10.1 for the 3.10 branch, see:
|
||||
https://github.com/pydantic/pydantic/issues/6912
|
||||
|
||||
Implemented as EAFP with memory.
|
||||
"""
|
||||
return typing.ForwardRef(arg, is_argument)
|
||||
|
||||
else:
|
||||
_make_forward_ref = typing.ForwardRef
|
||||
|
||||
|
||||
if sys.version_info >= (3, 10):
|
||||
get_type_hints = typing.get_type_hints
|
||||
|
||||
else:
|
||||
"""
|
||||
For older versions of python, we have a custom implementation of `get_type_hints` which is as close as possible to
|
||||
the implementation in CPython 3.10.8.
|
||||
"""
|
||||
|
||||
@typing.no_type_check
|
||||
def get_type_hints( # noqa: C901
|
||||
obj: Any,
|
||||
globalns: dict[str, Any] | None = None,
|
||||
localns: dict[str, Any] | None = None,
|
||||
include_extras: bool = False,
|
||||
) -> dict[str, Any]: # pragma: no cover
|
||||
"""Taken verbatim from python 3.10.8 unchanged, except:
|
||||
* type annotations of the function definition above.
|
||||
* prefixing `typing.` where appropriate
|
||||
* Use `_make_forward_ref` instead of `typing.ForwardRef` to handle the `is_class` argument.
|
||||
|
||||
https://github.com/python/cpython/blob/aaaf5174241496afca7ce4d4584570190ff972fe/Lib/typing.py#L1773-L1875
|
||||
|
||||
DO NOT CHANGE THIS METHOD UNLESS ABSOLUTELY NECESSARY.
|
||||
======================================================
|
||||
|
||||
Return type hints for an object.
|
||||
|
||||
This is often the same as obj.__annotations__, but it handles
|
||||
forward references encoded as string literals, adds Optional[t] if a
|
||||
default value equal to None is set and recursively replaces all
|
||||
'Annotated[T, ...]' with 'T' (unless 'include_extras=True').
|
||||
|
||||
The argument may be a module, class, method, or function. The annotations
|
||||
are returned as a dictionary. For classes, annotations include also
|
||||
inherited members.
|
||||
|
||||
TypeError is raised if the argument is not of a type that can contain
|
||||
annotations, and an empty dictionary is returned if no annotations are
|
||||
present.
|
||||
|
||||
BEWARE -- the behavior of globalns and localns is counterintuitive
|
||||
(unless you are familiar with how eval() and exec() work). The
|
||||
search order is locals first, then globals.
|
||||
|
||||
- If no dict arguments are passed, an attempt is made to use the
|
||||
globals from obj (or the respective module's globals for classes),
|
||||
and these are also used as the locals. If the object does not appear
|
||||
to have globals, an empty dictionary is used. For classes, the search
|
||||
order is globals first then locals.
|
||||
|
||||
- If one dict argument is passed, it is used for both globals and
|
||||
locals.
|
||||
|
||||
- If two dict arguments are passed, they specify globals and
|
||||
locals, respectively.
|
||||
"""
|
||||
if getattr(obj, '__no_type_check__', None):
|
||||
return {}
|
||||
# Classes require a special treatment.
|
||||
if isinstance(obj, type):
|
||||
hints = {}
|
||||
for base in reversed(obj.__mro__):
|
||||
if globalns is None:
|
||||
base_globals = getattr(sys.modules.get(base.__module__, None), '__dict__', {})
|
||||
else:
|
||||
base_globals = globalns
|
||||
ann = base.__dict__.get('__annotations__', {})
|
||||
if isinstance(ann, types.GetSetDescriptorType):
|
||||
ann = {}
|
||||
base_locals = dict(vars(base)) if localns is None else localns
|
||||
if localns is None and globalns is None:
|
||||
# This is surprising, but required. Before Python 3.10,
|
||||
# get_type_hints only evaluated the globalns of
|
||||
# a class. To maintain backwards compatibility, we reverse
|
||||
# the globalns and localns order so that eval() looks into
|
||||
# *base_globals* first rather than *base_locals*.
|
||||
# This only affects ForwardRefs.
|
||||
base_globals, base_locals = base_locals, base_globals
|
||||
for name, value in ann.items():
|
||||
if value is None:
|
||||
value = type(None)
|
||||
if isinstance(value, str):
|
||||
value = _make_forward_ref(value, is_argument=False, is_class=True)
|
||||
|
||||
value = eval_type_backport(value, base_globals, base_locals)
|
||||
hints[name] = value
|
||||
if not include_extras and hasattr(typing, '_strip_annotations'):
|
||||
return {
|
||||
k: typing._strip_annotations(t) # type: ignore
|
||||
for k, t in hints.items()
|
||||
}
|
||||
else:
|
||||
return hints
|
||||
|
||||
if globalns is None:
|
||||
if isinstance(obj, types.ModuleType):
|
||||
globalns = obj.__dict__
|
||||
else:
|
||||
nsobj = obj
|
||||
# Find globalns for the unwrapped object.
|
||||
while hasattr(nsobj, '__wrapped__'):
|
||||
nsobj = nsobj.__wrapped__
|
||||
globalns = getattr(nsobj, '__globals__', {})
|
||||
if localns is None:
|
||||
localns = globalns
|
||||
elif localns is None:
|
||||
localns = globalns
|
||||
hints = getattr(obj, '__annotations__', None)
|
||||
if hints is None:
|
||||
# Return empty annotations for something that _could_ have them.
|
||||
if isinstance(obj, typing._allowed_types): # type: ignore
|
||||
return {}
|
||||
else:
|
||||
raise TypeError(f'{obj!r} is not a module, class, method, ' 'or function.')
|
||||
defaults = typing._get_defaults(obj) # type: ignore
|
||||
hints = dict(hints)
|
||||
for name, value in hints.items():
|
||||
if value is None:
|
||||
value = type(None)
|
||||
if isinstance(value, str):
|
||||
# class-level forward refs were handled above, this must be either
|
||||
# a module-level annotation or a function argument annotation
|
||||
|
||||
value = _make_forward_ref(
|
||||
value,
|
||||
is_argument=not isinstance(obj, types.ModuleType),
|
||||
is_class=False,
|
||||
)
|
||||
value = eval_type_backport(value, globalns, localns)
|
||||
if name in defaults and defaults[name] is None:
|
||||
value = typing.Optional[value]
|
||||
hints[name] = value
|
||||
return hints if include_extras else {k: typing._strip_annotations(t) for k, t in hints.items()} # type: ignore
|
||||
|
||||
|
||||
def is_dataclass(_cls: type[Any]) -> TypeGuard[type[StandardDataclass]]:
|
||||
# The dataclasses.is_dataclass function doesn't seem to provide TypeGuard functionality,
|
||||
# so I created this convenience function
|
||||
return dataclasses.is_dataclass(_cls)
|
||||
|
||||
|
||||
def origin_is_type_alias_type(origin: Any) -> TypeGuard[TypeAliasType]:
|
||||
return isinstance(origin, TypeAliasType)
|
||||
|
||||
|
||||
if sys.version_info >= (3, 10):
|
||||
|
||||
def is_generic_alias(type_: type[Any]) -> bool:
|
||||
return isinstance(type_, (types.GenericAlias, typing._GenericAlias)) # type: ignore[attr-defined]
|
||||
|
||||
else:
|
||||
|
||||
def is_generic_alias(type_: type[Any]) -> bool:
|
||||
return isinstance(type_, typing._GenericAlias) # type: ignore
|
362
lib/pydantic/_internal/_utils.py
Normal file
|
@ -0,0 +1,362 @@
|
|||
"""Bucket of reusable internal utilities.
|
||||
|
||||
This should be reduced as much as possible: functions used in only one place should be moved to that place.
|
||||
"""
|
||||
from __future__ import annotations as _annotations
|
||||
|
||||
import dataclasses
|
||||
import keyword
|
||||
import typing
|
||||
import weakref
|
||||
from collections import OrderedDict, defaultdict, deque
|
||||
from copy import deepcopy
|
||||
from itertools import zip_longest
|
||||
from types import BuiltinFunctionType, CodeType, FunctionType, GeneratorType, LambdaType, ModuleType
|
||||
from typing import Any, Mapping, TypeVar
|
||||
|
||||
from typing_extensions import TypeAlias, TypeGuard
|
||||
|
||||
from . import _repr, _typing_extra
|
||||
|
||||
if typing.TYPE_CHECKING:
|
||||
MappingIntStrAny: TypeAlias = 'typing.Mapping[int, Any] | typing.Mapping[str, Any]'
|
||||
AbstractSetIntStr: TypeAlias = 'typing.AbstractSet[int] | typing.AbstractSet[str]'
|
||||
from ..main import BaseModel
|
||||
|
||||
|
||||
# these are types that are returned unchanged by deepcopy
|
||||
IMMUTABLE_NON_COLLECTIONS_TYPES: set[type[Any]] = {
|
||||
int,
|
||||
float,
|
||||
complex,
|
||||
str,
|
||||
bool,
|
||||
bytes,
|
||||
type,
|
||||
_typing_extra.NoneType,
|
||||
FunctionType,
|
||||
BuiltinFunctionType,
|
||||
LambdaType,
|
||||
weakref.ref,
|
||||
CodeType,
|
||||
# note: including ModuleType will differ from the behaviour of deepcopy by not producing an error.
|
||||
# It might not be a good idea in general, but considering that this function is only used internally
|
||||
# against default values of fields, this allows a field to actually have a module as its default value
|
||||
ModuleType,
|
||||
NotImplemented.__class__,
|
||||
Ellipsis.__class__,
|
||||
}
|
||||
|
||||
# these are types that if empty, might be copied with simple copy() instead of deepcopy()
|
||||
BUILTIN_COLLECTIONS: set[type[Any]] = {
|
||||
list,
|
||||
set,
|
||||
tuple,
|
||||
frozenset,
|
||||
dict,
|
||||
OrderedDict,
|
||||
defaultdict,
|
||||
deque,
|
||||
}
|
||||
|
||||
|
||||
def sequence_like(v: Any) -> bool:
|
||||
return isinstance(v, (list, tuple, set, frozenset, GeneratorType, deque))
|
||||
|
||||
|
||||
def lenient_isinstance(o: Any, class_or_tuple: type[Any] | tuple[type[Any], ...] | None) -> bool: # pragma: no cover
|
||||
try:
|
||||
return isinstance(o, class_or_tuple) # type: ignore[arg-type]
|
||||
except TypeError:
|
||||
return False
|
||||
|
||||
|
||||
def lenient_issubclass(cls: Any, class_or_tuple: Any) -> bool: # pragma: no cover
|
||||
try:
|
||||
return isinstance(cls, type) and issubclass(cls, class_or_tuple)
|
||||
except TypeError:
|
||||
if isinstance(cls, _typing_extra.WithArgsTypes):
|
||||
return False
|
||||
raise # pragma: no cover
|
||||
|
||||
|
||||
def is_model_class(cls: Any) -> TypeGuard[type[BaseModel]]:
|
||||
"""Returns true if cls is a _proper_ subclass of BaseModel, and provides proper type-checking,
|
||||
unlike raw calls to lenient_issubclass.
|
||||
"""
|
||||
from ..main import BaseModel
|
||||
|
||||
return lenient_issubclass(cls, BaseModel) and cls is not BaseModel
|
||||
|
||||
|
||||
def is_valid_identifier(identifier: str) -> bool:
|
||||
"""Checks that a string is a valid identifier and not a Python keyword.
|
||||
:param identifier: The identifier to test.
|
||||
:return: True if the identifier is valid.
|
||||
"""
|
||||
return identifier.isidentifier() and not keyword.iskeyword(identifier)
|
||||
|
||||
|
||||
KeyType = TypeVar('KeyType')
|
||||
|
||||
|
||||
def deep_update(mapping: dict[KeyType, Any], *updating_mappings: dict[KeyType, Any]) -> dict[KeyType, Any]:
|
||||
updated_mapping = mapping.copy()
|
||||
for updating_mapping in updating_mappings:
|
||||
for k, v in updating_mapping.items():
|
||||
if k in updated_mapping and isinstance(updated_mapping[k], dict) and isinstance(v, dict):
|
||||
updated_mapping[k] = deep_update(updated_mapping[k], v)
|
||||
else:
|
||||
updated_mapping[k] = v
|
||||
return updated_mapping
|
||||
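
# Sketch: nested dicts are merged recursively, unlike dict.update, which would
# replace the whole nested mapping.
assert deep_update({'a': {'x': 1}, 'b': 1}, {'a': {'y': 2}}) == {'a': {'x': 1, 'y': 2}, 'b': 1}
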
|
||||
|
||||
def update_not_none(mapping: dict[Any, Any], **update: Any) -> None:
|
||||
mapping.update({k: v for k, v in update.items() if v is not None})
|
||||
|
||||
|
||||
T = TypeVar('T')
|
||||
|
||||
|
||||
def unique_list(
|
||||
input_list: list[T] | tuple[T, ...],
|
||||
*,
|
||||
name_factory: typing.Callable[[T], str] = str,
|
||||
) -> list[T]:
|
||||
"""Make a list unique while maintaining order.
|
||||
We replace an earlier item if a later one with the same name is added
|
||||
(e.g. model validator overridden in subclass).
|
||||
"""
|
||||
result: list[T] = []
|
||||
result_names: list[str] = []
|
||||
for v in input_list:
|
||||
v_name = name_factory(v)
|
||||
if v_name not in result_names:
|
||||
result_names.append(v_name)
|
||||
result.append(v)
|
||||
else:
|
||||
result[result_names.index(v_name)] = v
|
||||
|
||||
return result
|
||||
|
||||
|
||||
class ValueItems(_repr.Representation):
|
||||
"""Class for more convenient calculation of excluded or included fields on values."""
|
||||
|
||||
__slots__ = ('_items', '_type')
|
||||
|
||||
def __init__(self, value: Any, items: AbstractSetIntStr | MappingIntStrAny) -> None:
|
||||
items = self._coerce_items(items)
|
||||
|
||||
if isinstance(value, (list, tuple)):
|
||||
items = self._normalize_indexes(items, len(value)) # type: ignore
|
||||
|
||||
self._items: MappingIntStrAny = items # type: ignore
|
||||
|
||||
def is_excluded(self, item: Any) -> bool:
|
||||
"""Check if item is fully excluded.
|
||||
|
||||
:param item: key or index of a value
|
||||
"""
|
||||
return self.is_true(self._items.get(item))
|
||||
|
||||
def is_included(self, item: Any) -> bool:
|
||||
"""Check if value is contained in self._items.
|
||||
|
||||
:param item: key or index of value
|
||||
"""
|
||||
return item in self._items
|
||||
|
||||
def for_element(self, e: int | str) -> AbstractSetIntStr | MappingIntStrAny | None:
|
||||
""":param e: key or index of element on value
|
||||
:return: raw values for element if self._items is dict and contain needed element
|
||||
"""
|
||||
item = self._items.get(e) # type: ignore
|
||||
return item if not self.is_true(item) else None
|
||||
|
||||
def _normalize_indexes(self, items: MappingIntStrAny, v_length: int) -> dict[int | str, Any]:
|
||||
""":param items: dict or set of indexes which will be normalized
|
||||
:param v_length: length of the sequence whose indexes will be normalized
|
||||
|
||||
>>> self._normalize_indexes({0: True, -2: True, -1: True}, 4)
|
||||
{0: True, 2: True, 3: True}
|
||||
>>> self._normalize_indexes({'__all__': True}, 4)
|
||||
{0: True, 1: True, 2: True, 3: True}
|
||||
"""
|
||||
normalized_items: dict[int | str, Any] = {}
|
||||
all_items = None
|
||||
for i, v in items.items():
|
||||
if not (isinstance(v, typing.Mapping) or isinstance(v, typing.AbstractSet) or self.is_true(v)):
|
||||
raise TypeError(f'Unexpected type of exclude value for index "{i}" {v.__class__}')
|
||||
if i == '__all__':
|
||||
all_items = self._coerce_value(v)
|
||||
continue
|
||||
if not isinstance(i, int):
|
||||
raise TypeError(
|
||||
'Excluding fields from a sequence of sub-models or dicts must be performed index-wise: '
|
||||
'expected integer keys or keyword "__all__"'
|
||||
)
|
||||
normalized_i = v_length + i if i < 0 else i
|
||||
normalized_items[normalized_i] = self.merge(v, normalized_items.get(normalized_i))
|
||||
|
||||
if not all_items:
|
||||
return normalized_items
|
||||
if self.is_true(all_items):
|
||||
for i in range(v_length):
|
||||
normalized_items.setdefault(i, ...)
|
||||
return normalized_items
|
||||
for i in range(v_length):
|
||||
normalized_item = normalized_items.setdefault(i, {})
|
||||
if not self.is_true(normalized_item):
|
||||
normalized_items[i] = self.merge(all_items, normalized_item)
|
||||
return normalized_items
|
||||
|
||||
@classmethod
|
||||
def merge(cls, base: Any, override: Any, intersect: bool = False) -> Any:
|
||||
"""Merge a `base` item with an `override` item.
|
||||
|
||||
Both `base` and `override` are converted to dictionaries if possible.
|
||||
Sets are converted to dictionaries with the sets entries as keys and
|
||||
Ellipsis as values.
|
||||
|
||||
Each key-value pair existing in `base` is merged with `override`,
|
||||
while the rest of the key-value pairs are updated recursively with this function.
|
||||
|
||||
Merging takes place based on the "union" of keys if `intersect` is
|
||||
set to `False` (default) and on the intersection of keys if
|
||||
`intersect` is set to `True`.
|
||||
"""
|
||||
override = cls._coerce_value(override)
|
||||
base = cls._coerce_value(base)
|
||||
if override is None:
|
||||
return base
|
||||
if cls.is_true(base) or base is None:
|
||||
return override
|
||||
if cls.is_true(override):
|
||||
return base if intersect else override
|
||||
|
||||
# intersection or union of keys while preserving ordering:
|
||||
if intersect:
|
||||
merge_keys = [k for k in base if k in override] + [k for k in override if k in base]
|
||||
else:
|
||||
merge_keys = list(base) + [k for k in override if k not in base]
|
||||
|
||||
merged: dict[int | str, Any] = {}
|
||||
for k in merge_keys:
|
||||
merged_item = cls.merge(base.get(k), override.get(k), intersect=intersect)
|
||||
if merged_item is not None:
|
||||
merged[k] = merged_item
|
||||
|
||||
return merged
|
||||
|
||||
@staticmethod
|
||||
def _coerce_items(items: AbstractSetIntStr | MappingIntStrAny) -> MappingIntStrAny:
|
||||
if isinstance(items, typing.Mapping):
|
||||
pass
|
||||
elif isinstance(items, typing.AbstractSet):
|
||||
items = dict.fromkeys(items, ...) # type: ignore
|
||||
else:
|
||||
class_name = getattr(items, '__class__', '???')
|
||||
raise TypeError(f'Unexpected type of exclude value {class_name}')
|
||||
return items # type: ignore
|
||||
|
||||
@classmethod
|
||||
def _coerce_value(cls, value: Any) -> Any:
|
||||
if value is None or cls.is_true(value):
|
||||
return value
|
||||
return cls._coerce_items(value)
|
||||
|
||||
@staticmethod
|
||||
def is_true(v: Any) -> bool:
|
||||
return v is True or v is ...
|
||||
|
||||
def __repr_args__(self) -> _repr.ReprArgs:
|
||||
return [(None, self._items)]
|
||||
|
||||
|
||||
if typing.TYPE_CHECKING:
|
||||
|
||||
def ClassAttribute(name: str, value: T) -> T:
|
||||
...
|
||||
|
||||
else:
|
||||
|
||||
class ClassAttribute:
|
||||
"""Hide class attribute from its instances."""
|
||||
|
||||
__slots__ = 'name', 'value'
|
||||
|
||||
def __init__(self, name: str, value: Any) -> None:
|
||||
self.name = name
|
||||
self.value = value
|
||||
|
||||
def __get__(self, instance: Any, owner: type[Any]) -> Any:
|
||||
if instance is None:
|
||||
return self.value
|
||||
raise AttributeError(f'{self.name!r} attribute of {owner.__name__!r} is class-only')
|
||||
|
||||
|
||||
Obj = TypeVar('Obj')
|
||||
|
||||
|
||||
def smart_deepcopy(obj: Obj) -> Obj:
|
||||
"""Return type as is for immutable built-in types
|
||||
Use obj.copy() for built-in empty collections
|
||||
Use copy.deepcopy() for non-empty collections and unknown objects.
|
||||
"""
|
||||
obj_type = obj.__class__
|
||||
if obj_type in IMMUTABLE_NON_COLLECTIONS_TYPES:
|
||||
return obj # fastest case: obj is immutable and not collection therefore will not be copied anyway
|
||||
try:
|
||||
if not obj and obj_type in BUILTIN_COLLECTIONS:
|
||||
# faster way for empty collections, no need to copy its members
|
||||
return obj if obj_type is tuple else obj.copy() # tuple doesn't have copy method # type: ignore
|
||||
except (TypeError, ValueError, RuntimeError):
|
||||
# do we really dare to catch ALL errors? Seems a bit risky
|
||||
pass
|
||||
|
||||
return deepcopy(obj) # slowest way when we actually might need a deepcopy
|
||||
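
# Sketch of the three tiers described in the docstring above:
value = 'immutable'
assert smart_deepcopy(value) is value  # immutables are returned as-is
empty = []
assert smart_deepcopy(empty) is not empty  # empty builtins take the cheap .copy() path
nested = [{'k': 1}]
assert smart_deepcopy(nested)[0] is not nested[0]  # everything else is deepcopied
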
|
||||
|
||||
_SENTINEL = object()
|
||||
|
||||
|
||||
def all_identical(left: typing.Iterable[Any], right: typing.Iterable[Any]) -> bool:
|
||||
"""Check that the items of `left` are the same objects as those in `right`.
|
||||
|
||||
>>> a, b = object(), object()
|
||||
>>> all_identical([a, b, a], [a, b, a])
|
||||
True
|
||||
>>> all_identical([a, b, [a]], [a, b, [a]]) # new list object, while "equal" is not "identical"
|
||||
False
|
||||
"""
|
||||
for left_item, right_item in zip_longest(left, right, fillvalue=_SENTINEL):
|
||||
if left_item is not right_item:
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
|
||||
class SafeGetItemProxy:
|
||||
"""Wrapper redirecting `__getitem__` to `get` with a sentinel value as default
|
||||
|
||||
This makes it safe to use in `operator.itemgetter` when some keys may be missing
|
||||
"""
|
||||
|
||||
# Define __slots__ manually for performance
|
||||
# @dataclasses.dataclass() only supports slots=True in python>=3.10
|
||||
__slots__ = ('wrapped',)
|
||||
|
||||
wrapped: Mapping[str, Any]
|
||||
|
||||
def __getitem__(self, __key: str) -> Any:
|
||||
return self.wrapped.get(__key, _SENTINEL)
|
||||
|
||||
# required to pass the object to operator.itemgetter() instances due to a quirk of typeshed
|
||||
# https://github.com/python/mypy/issues/13713
|
||||
# https://github.com/python/typeshed/pull/8785
|
||||
# Since this is typing-only, hide it in a typing.TYPE_CHECKING block
|
||||
if typing.TYPE_CHECKING:
|
||||
|
||||
def __contains__(self, __key: str) -> bool:
|
||||
return self.wrapped.__contains__(__key)
|
84
lib/pydantic/_internal/_validate_call.py
Normal file
|
@ -0,0 +1,84 @@
|
|||
from __future__ import annotations as _annotations
|
||||
|
||||
import inspect
|
||||
from functools import partial
|
||||
from typing import Any, Awaitable, Callable
|
||||
|
||||
import pydantic_core
|
||||
|
||||
from ..config import ConfigDict
|
||||
from ..plugin._schema_validator import create_schema_validator
|
||||
from . import _generate_schema, _typing_extra
|
||||
from ._config import ConfigWrapper
|
||||
|
||||
|
||||
class ValidateCallWrapper:
|
||||
"""This is a wrapper around a function that validates the arguments passed to it, and optionally the return value."""
|
||||
|
||||
__slots__ = (
|
||||
'__pydantic_validator__',
|
||||
'__name__',
|
||||
'__qualname__',
|
||||
'__annotations__',
|
||||
'__dict__', # required for __module__
|
||||
)
|
||||
|
||||
def __init__(self, function: Callable[..., Any], config: ConfigDict | None, validate_return: bool):
|
||||
if isinstance(function, partial):
|
||||
func = function.func
|
||||
schema_type = func
|
||||
self.__name__ = f'partial({func.__name__})'
|
||||
self.__qualname__ = f'partial({func.__qualname__})'
|
||||
self.__module__ = func.__module__
|
||||
else:
|
||||
schema_type = function
|
||||
self.__name__ = function.__name__
|
||||
self.__qualname__ = function.__qualname__
|
||||
self.__module__ = function.__module__
|
||||
|
||||
namespace = _typing_extra.add_module_globals(function, None)
|
||||
config_wrapper = ConfigWrapper(config)
|
||||
gen_schema = _generate_schema.GenerateSchema(config_wrapper, namespace)
|
||||
schema = gen_schema.clean_schema(gen_schema.generate_schema(function))
|
||||
core_config = config_wrapper.core_config(self)
|
||||
|
||||
self.__pydantic_validator__ = create_schema_validator(
|
||||
schema,
|
||||
schema_type,
|
||||
self.__module__,
|
||||
self.__qualname__,
|
||||
'validate_call',
|
||||
core_config,
|
||||
config_wrapper.plugin_settings,
|
||||
)
|
||||
|
||||
if validate_return:
|
||||
signature = inspect.signature(function)
|
||||
return_type = signature.return_annotation if signature.return_annotation is not signature.empty else Any
|
||||
gen_schema = _generate_schema.GenerateSchema(config_wrapper, namespace)
|
||||
schema = gen_schema.clean_schema(gen_schema.generate_schema(return_type))
|
||||
validator = create_schema_validator(
|
||||
schema,
|
||||
schema_type,
|
||||
self.__module__,
|
||||
self.__qualname__,
|
||||
'validate_call',
|
||||
core_config,
|
||||
config_wrapper.plugin_settings,
|
||||
)
|
||||
if inspect.iscoroutinefunction(function):
|
||||
|
||||
async def return_val_wrapper(aw: Awaitable[Any]) -> Any:
|
||||
return validator.validate_python(await aw)
|
||||
|
||||
self.__return_pydantic_validator__ = return_val_wrapper
|
||||
else:
|
||||
self.__return_pydantic_validator__ = validator.validate_python
|
||||
else:
|
||||
self.__return_pydantic_validator__ = None
|
||||
|
||||
def __call__(self, *args: Any, **kwargs: Any) -> Any:
|
||||
res = self.__pydantic_validator__.validate_python(pydantic_core.ArgsKwargs(args, kwargs))
|
||||
if self.__return_pydantic_validator__:
|
||||
return self.__return_pydantic_validator__(res)
|
||||
return res
|
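
# Public-API sketch of what this wrapper powers: arguments, and optionally the
# return value, are validated against the function's annotations.
from pydantic import validate_call


@validate_call(validate_return=True)
def double(x: int) -> int:
    return x * 2


assert double('2') == 4  # '2' is coerced to int before the call
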
278
lib/pydantic/_internal/_validators.py
Normal file
|
@ -0,0 +1,278 @@
|
|||
"""Validator functions for standard library types.
|
||||
|
||||
Import of this module is deferred since it contains imports of many standard library modules.
|
||||
"""
|
||||
|
||||
from __future__ import annotations as _annotations
|
||||
|
||||
import math
|
||||
import re
|
||||
import typing
|
||||
from ipaddress import IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network
|
||||
from typing import Any
|
||||
|
||||
from pydantic_core import PydanticCustomError, core_schema
|
||||
from pydantic_core._pydantic_core import PydanticKnownError
|
||||
|
||||
|
||||
def sequence_validator(
|
||||
__input_value: typing.Sequence[Any],
|
||||
validator: core_schema.ValidatorFunctionWrapHandler,
|
||||
) -> typing.Sequence[Any]:
|
||||
"""Validator for `Sequence` types, isinstance(v, Sequence) has already been called."""
|
||||
value_type = type(__input_value)
|
||||
|
||||
# We don't accept any plain string as a sequence
|
||||
# Relevant issue: https://github.com/pydantic/pydantic/issues/5595
|
||||
if issubclass(value_type, (str, bytes)):
|
||||
raise PydanticCustomError(
|
||||
'sequence_str',
|
||||
"'{type_name}' instances are not allowed as a Sequence value",
|
||||
{'type_name': value_type.__name__},
|
||||
)
|
||||
|
||||
v_list = validator(__input_value)
|
||||
|
||||
# the rest of the logic is just re-creating the original type from `v_list`
|
||||
if value_type == list:
|
||||
return v_list
|
||||
elif issubclass(value_type, range):
|
||||
# return the list as we probably can't re-create the range
|
||||
return v_list
|
||||
else:
|
||||
# best guess at how to re-create the original type, more custom construction logic might be required
|
||||
return value_type(v_list) # type: ignore[call-arg]
|
||||
|
||||
|
||||
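To make the re-creation logic above concrete, a small sketch; `identity_handler` is a hypothetical stand-in for the wrap handler that pydantic-core normally supplies:

def identity_handler(v):
    # stand-in for core_schema.ValidatorFunctionWrapHandler (assumption, not the real handler)
    return list(v)

sequence_validator((1, 2, 3), identity_handler)  # -> (1, 2, 3): rebuilt as a tuple
sequence_validator(range(3), identity_handler)   # -> [0, 1, 2]: ranges come back as lists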
def import_string(value: Any) -> Any:
    if isinstance(value, str):
        try:
            return _import_string_logic(value)
        except ImportError as e:
            raise PydanticCustomError('import_error', 'Invalid python path: {error}', {'error': str(e)}) from e
    else:
        # otherwise we just return the value and let the next validator do the rest of the work
        return value


def _import_string_logic(dotted_path: str) -> Any:
    """Inspired by uvicorn: dotted paths should include a colon before the final item if that item is not a module.
    (This is necessary to distinguish between a submodule and an attribute when there is a conflict.)

    If the dotted path does not include a colon and the final item is not a valid module, importing as an attribute
    rather than a submodule will be attempted automatically.

    So, for example, the following values of `dotted_path` result in the following returned values:
    * 'collections': <module 'collections'>
    * 'collections.abc': <module 'collections.abc'>
    * 'collections.abc:Mapping': <class 'collections.abc.Mapping'>
    * 'collections.abc.Mapping': <class 'collections.abc.Mapping'> (though this is a bit slower than the previous line)

    An error will be raised under any of the following scenarios:
    * `dotted_path` contains more than one colon (e.g., 'collections:abc:Mapping')
    * the substring of `dotted_path` before the colon is not a valid module in the environment (e.g., '123:Mapping')
    * the substring of `dotted_path` after the colon is not an attribute of the module (e.g., 'collections:abc123')
    """
    from importlib import import_module

    components = dotted_path.strip().split(':')
    if len(components) > 2:
        raise ImportError(f"Import strings should have at most one ':'; received {dotted_path!r}")

    module_path = components[0]
    if not module_path:
        raise ImportError(f'Import strings should have a nonempty module name; received {dotted_path!r}')

    try:
        module = import_module(module_path)
    except ModuleNotFoundError as e:
        if '.' in module_path:
            # Check if it would be valid if the final item was separated from its module with a `:`
            maybe_module_path, maybe_attribute = dotted_path.strip().rsplit('.', 1)
            try:
                return _import_string_logic(f'{maybe_module_path}:{maybe_attribute}')
            except ImportError:
                pass
            raise ImportError(f'No module named {module_path!r}') from e
        raise e

    if len(components) > 1:
        attribute = components[1]
        try:
            return getattr(module, attribute)
        except AttributeError as e:
            raise ImportError(f'cannot import name {attribute!r} from {module_path!r}') from e
    else:
        return module
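Assuming a standard environment, the behaviour documented in the docstring plays out as follows (a sketch, not part of the module):

import_string('collections.abc:Mapping')  # -> <class 'collections.abc.Mapping'>
import_string('collections.abc.Mapping')  # same result via the automatic ':' fallback, slightly slower
import_string(42)                         # non-strings pass through unchanged -> 42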
def pattern_either_validator(__input_value: Any) -> typing.Pattern[Any]:
    if isinstance(__input_value, typing.Pattern):
        return __input_value
    elif isinstance(__input_value, (str, bytes)):
        # TODO: strict mode
        return compile_pattern(__input_value)  # type: ignore
    else:
        raise PydanticCustomError('pattern_type', 'Input should be a valid pattern')


def pattern_str_validator(__input_value: Any) -> typing.Pattern[str]:
    if isinstance(__input_value, typing.Pattern):
        if isinstance(__input_value.pattern, str):
            return __input_value
        else:
            raise PydanticCustomError('pattern_str_type', 'Input should be a string pattern')
    elif isinstance(__input_value, str):
        return compile_pattern(__input_value)
    elif isinstance(__input_value, bytes):
        raise PydanticCustomError('pattern_str_type', 'Input should be a string pattern')
    else:
        raise PydanticCustomError('pattern_type', 'Input should be a valid pattern')


def pattern_bytes_validator(__input_value: Any) -> typing.Pattern[bytes]:
    if isinstance(__input_value, typing.Pattern):
        if isinstance(__input_value.pattern, bytes):
            return __input_value
        else:
            raise PydanticCustomError('pattern_bytes_type', 'Input should be a bytes pattern')
    elif isinstance(__input_value, bytes):
        return compile_pattern(__input_value)
    elif isinstance(__input_value, str):
        raise PydanticCustomError('pattern_bytes_type', 'Input should be a bytes pattern')
    else:
        raise PydanticCustomError('pattern_type', 'Input should be a valid pattern')


PatternType = typing.TypeVar('PatternType', str, bytes)


def compile_pattern(pattern: PatternType) -> typing.Pattern[PatternType]:
    try:
        return re.compile(pattern)
    except re.error:
        raise PydanticCustomError('pattern_regex', 'Input should be a valid regular expression')
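A short sketch of the three entry points above:

pattern_str_validator('^a+$')     # -> re.compile('^a+$')
pattern_bytes_validator(b'^a+$')  # -> re.compile(b'^a+$')
pattern_str_validator(b'^a+$')    # raises PydanticCustomError('pattern_str_type', ...)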
def ip_v4_address_validator(__input_value: Any) -> IPv4Address:
    if isinstance(__input_value, IPv4Address):
        return __input_value

    try:
        return IPv4Address(__input_value)
    except ValueError:
        raise PydanticCustomError('ip_v4_address', 'Input is not a valid IPv4 address')


def ip_v6_address_validator(__input_value: Any) -> IPv6Address:
    if isinstance(__input_value, IPv6Address):
        return __input_value

    try:
        return IPv6Address(__input_value)
    except ValueError:
        raise PydanticCustomError('ip_v6_address', 'Input is not a valid IPv6 address')


def ip_v4_network_validator(__input_value: Any) -> IPv4Network:
    """Assume `IPv4Network` is initialised with the default `strict` argument.

    See more:
    https://docs.python.org/library/ipaddress.html#ipaddress.IPv4Network
    """
    if isinstance(__input_value, IPv4Network):
        return __input_value

    try:
        return IPv4Network(__input_value)
    except ValueError:
        raise PydanticCustomError('ip_v4_network', 'Input is not a valid IPv4 network')


def ip_v6_network_validator(__input_value: Any) -> IPv6Network:
    """Assume `IPv6Network` is initialised with the default `strict` argument.

    See more:
    https://docs.python.org/library/ipaddress.html#ipaddress.IPv6Network
    """
    if isinstance(__input_value, IPv6Network):
        return __input_value

    try:
        return IPv6Network(__input_value)
    except ValueError:
        raise PydanticCustomError('ip_v6_network', 'Input is not a valid IPv6 network')


def ip_v4_interface_validator(__input_value: Any) -> IPv4Interface:
    if isinstance(__input_value, IPv4Interface):
        return __input_value

    try:
        return IPv4Interface(__input_value)
    except ValueError:
        raise PydanticCustomError('ip_v4_interface', 'Input is not a valid IPv4 interface')


def ip_v6_interface_validator(__input_value: Any) -> IPv6Interface:
    if isinstance(__input_value, IPv6Interface):
        return __input_value

    try:
        return IPv6Interface(__input_value)
    except ValueError:
        raise PydanticCustomError('ip_v6_interface', 'Input is not a valid IPv6 interface')
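The IP validators all follow the same shape: instances pass through untouched, anything else is handed to the ipaddress constructor. For example (a sketch):

from ipaddress import IPv4Address

ip_v4_address_validator('127.0.0.1')              # -> IPv4Address('127.0.0.1')
ip_v4_address_validator(IPv4Address('10.0.0.1'))  # instance returned unchanged
ip_v4_network_validator('192.168.0.0/24')         # -> IPv4Network('192.168.0.0/24')
ip_v4_network_validator('192.168.0.1/24')         # raises: host bits set under the default strict mode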
def greater_than_validator(x: Any, gt: Any) -> Any:
    if not (x > gt):
        raise PydanticKnownError('greater_than', {'gt': gt})
    return x


def greater_than_or_equal_validator(x: Any, ge: Any) -> Any:
    if not (x >= ge):
        raise PydanticKnownError('greater_than_equal', {'ge': ge})
    return x


def less_than_validator(x: Any, lt: Any) -> Any:
    if not (x < lt):
        raise PydanticKnownError('less_than', {'lt': lt})
    return x


def less_than_or_equal_validator(x: Any, le: Any) -> Any:
    if not (x <= le):
        raise PydanticKnownError('less_than_equal', {'le': le})
    return x


def multiple_of_validator(x: Any, multiple_of: Any) -> Any:
    if not (x % multiple_of == 0):
        raise PydanticKnownError('multiple_of', {'multiple_of': multiple_of})
    return x


def min_length_validator(x: Any, min_length: Any) -> Any:
    if not (len(x) >= min_length):
        raise PydanticKnownError(
            'too_short',
            {'field_type': 'Value', 'min_length': min_length, 'actual_length': len(x)},
        )
    return x


def max_length_validator(x: Any, max_length: Any) -> Any:
    if len(x) > max_length:
        raise PydanticKnownError(
            'too_long',
            {'field_type': 'Value', 'max_length': max_length, 'actual_length': len(x)},
        )
    return x


def forbid_inf_nan_check(x: Any) -> Any:
    if not math.isfinite(x):
        raise PydanticKnownError('finite_number')
    return x
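These numeric and length guards are deliberately thin; pydantic-core attaches them wherever a schema carries the matching constraint. A sketch of their behaviour:

greater_than_validator(5, 3)        # -> 5
multiple_of_validator(10, 5)        # -> 10
less_than_validator(5, 3)           # raises PydanticKnownError('less_than', {'lt': 3})
forbid_inf_nan_check(float('nan'))  # raises PydanticKnownError('finite_number')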