# Compatibility layer between Pydantic v1 and Pydantic v2.
  1. from collections import deque
  2. from copy import copy
  3. from dataclasses import dataclass, is_dataclass
  4. from enum import Enum
  5. from functools import lru_cache
  6. from typing import (
  7. Any,
  8. Callable,
  9. Deque,
  10. Dict,
  11. FrozenSet,
  12. List,
  13. Mapping,
  14. Sequence,
  15. Set,
  16. Tuple,
  17. Type,
  18. Union,
  19. cast,
  20. )
  21. from fastapi.exceptions import RequestErrorModel
  22. from fastapi.types import IncEx, ModelNameMap, UnionType
  23. from pydantic import BaseModel, create_model
  24. from pydantic.version import VERSION as PYDANTIC_VERSION
  25. from starlette.datastructures import UploadFile
  26. from typing_extensions import Annotated, Literal, get_args, get_origin
# Parsed (major, minor) of the installed Pydantic, e.g. (2, 7).
PYDANTIC_VERSION_MINOR_TUPLE = tuple(int(x) for x in PYDANTIC_VERSION.split(".")[:2])
# True when Pydantic v2.x is installed; selects which branch below is active.
PYDANTIC_V2 = PYDANTIC_VERSION_MINOR_TUPLE[0] == 2

# Map from a sequence-like annotation (typing alias or builtin) to the
# concrete type used to materialize validated values.
sequence_annotation_to_type = {
    Sequence: list,
    List: list,
    list: list,
    Tuple: tuple,
    tuple: tuple,
    Set: set,
    set: set,
    FrozenSet: frozenset,
    frozenset: frozenset,
    Deque: deque,
    deque: deque,
}

sequence_types = tuple(sequence_annotation_to_type.keys())

# Bound below: pydantic_core.Url on Pydantic v2, pydantic.AnyUrl on v1.
Url: Type[Any]
if PYDANTIC_V2:
    # Pydantic v2: import the real implementations; the "X as X" form marks
    # them as intentional re-exports for type checkers.
    from pydantic import PydanticSchemaGenerationError as PydanticSchemaGenerationError
    from pydantic import TypeAdapter
    from pydantic import ValidationError as ValidationError
    from pydantic._internal._schema_generation_shared import (  # type: ignore[attr-defined]
        GetJsonSchemaHandler as GetJsonSchemaHandler,
    )
    from pydantic._internal._typing_extra import eval_type_lenient
    from pydantic._internal._utils import lenient_issubclass as lenient_issubclass
    from pydantic.fields import FieldInfo
    from pydantic.json_schema import GenerateJsonSchema as GenerateJsonSchema
    from pydantic.json_schema import JsonSchemaValue as JsonSchemaValue
    from pydantic_core import CoreSchema as CoreSchema
    from pydantic_core import PydanticUndefined, PydanticUndefinedType
    from pydantic_core import Url as Url

    try:
        # Newer pydantic-core releases renamed this helper; prefer the new name.
        from pydantic_core.core_schema import (
            with_info_plain_validator_function as with_info_plain_validator_function,
        )
    except ImportError:  # pragma: no cover
        # Older pydantic-core releases only expose the previous name.
        from pydantic_core.core_schema import (
            general_plain_validator_function as with_info_plain_validator_function,  # noqa: F401
        )

    # Version-agnostic aliases used by the rest of the codebase.
    RequiredParam = PydanticUndefined
    Undefined = PydanticUndefined
    UndefinedType = PydanticUndefinedType
    evaluate_forwardref = eval_type_lenient
    Validator = Any
    class BaseConfig:
        """Placeholder for Pydantic v1's ``BaseConfig``; unused under v2."""

        pass

    class ErrorWrapper(Exception):
        """Placeholder for Pydantic v1's ``ErrorWrapper``; unused under v2."""

        pass

    @dataclass
    class ModelField:
        """Minimal stand-in for Pydantic v1's ``ModelField``, backed by a v2 ``FieldInfo``.

        Wraps the field's annotation plus its metadata in a ``TypeAdapter`` so
        FastAPI can validate and serialize values without v1 internals.
        """

        field_info: FieldInfo
        name: str
        mode: Literal["validation", "serialization"] = "validation"

        @property
        def alias(self) -> str:
            # Fall back to the attribute name when no explicit alias is set.
            a = self.field_info.alias
            return a if a is not None else self.name

        @property
        def required(self) -> bool:
            return self.field_info.is_required()

        @property
        def default(self) -> Any:
            return self.get_default()

        @property
        def type_(self) -> Any:
            return self.field_info.annotation

        def __post_init__(self) -> None:
            # Build the validator once; Annotated re-attaches the FieldInfo
            # metadata (constraints, alias, etc.) to the bare annotation.
            self._type_adapter: TypeAdapter[Any] = TypeAdapter(
                Annotated[self.field_info.annotation, self.field_info]
            )

        def get_default(self) -> Any:
            # Required fields have no default; signal that with the Undefined
            # sentinel rather than None (None may be a valid default).
            if self.field_info.is_required():
                return Undefined
            return self.field_info.get_default(call_default_factory=True)

        def validate(
            self,
            value: Any,
            values: Dict[str, Any] = {},  # noqa: B006
            *,
            loc: Tuple[Union[int, str], ...] = (),
        ) -> Tuple[Any, Union[List[Dict[str, Any]], None]]:
            """Return ``(validated_value, None)``, or ``(None, errors)`` on failure.

            ``loc`` is prepended to each error's location so callers can report
            where in the request the field lives.
            """
            try:
                return (
                    self._type_adapter.validate_python(value, from_attributes=True),
                    None,
                )
            except ValidationError as exc:
                return None, _regenerate_error_with_loc(
                    errors=exc.errors(include_url=False), loc_prefix=loc
                )

        def serialize(
            self,
            value: Any,
            *,
            mode: Literal["json", "python"] = "json",
            include: Union[IncEx, None] = None,
            exclude: Union[IncEx, None] = None,
            by_alias: bool = True,
            exclude_unset: bool = False,
            exclude_defaults: bool = False,
            exclude_none: bool = False,
        ) -> Any:
            # What calls this code passes a value that already called
            # self._type_adapter.validate_python(value)
            return self._type_adapter.dump_python(
                value,
                mode=mode,
                include=include,
                exclude=exclude,
                by_alias=by_alias,
                exclude_unset=exclude_unset,
                exclude_defaults=exclude_defaults,
                exclude_none=exclude_none,
            )

        def __hash__(self) -> int:
            # Each ModelField is unique for our purposes, to allow making a dict from
            # ModelField to its JSON Schema.
            return id(self)
    def get_annotation_from_field_info(
        annotation: Any, field_info: FieldInfo, field_name: str
    ) -> Any:
        # No-op under v2: the constraint metadata already lives on the
        # FieldInfo (v1 had to merge field constraints into the annotation).
        return annotation

    def _normalize_errors(errors: Sequence[Any]) -> List[Dict[str, Any]]:
        # v2 error entries are already plain dicts; nothing to unwrap.
        return errors  # type: ignore[return-value]

    def _model_rebuild(model: Type[BaseModel]) -> None:
        # Resolve forward references (v2 spelling).
        model.model_rebuild()

    def _model_dump(
        model: BaseModel, mode: Literal["json", "python"] = "json", **kwargs: Any
    ) -> Any:
        return model.model_dump(mode=mode, **kwargs)

    def _get_model_config(model: BaseModel) -> Any:
        return model.model_config

    def get_schema_from_model_field(
        *,
        field: ModelField,
        schema_generator: GenerateJsonSchema,
        model_name_map: ModelNameMap,
        field_mapping: Dict[
            Tuple[ModelField, Literal["validation", "serialization"]], JsonSchemaValue
        ],
        separate_input_output_schemas: bool = True,
    ) -> Dict[str, Any]:
        """Look up the already-generated JSON Schema for ``field``."""
        # When input/output schemas are merged, always use the "validation" view.
        override_mode: Union[Literal["validation"], None] = (
            None if separate_input_output_schemas else "validation"
        )
        # This expects that GenerateJsonSchema was already used to generate the definitions
        json_schema = field_mapping[(field, override_mode or field.mode)]
        if "$ref" not in json_schema:
            # TODO remove when deprecating Pydantic v1
            # Ref: https://github.com/pydantic/pydantic/blob/d61792cc42c80b13b23e3ffa74bc37ec7c77f7d1/pydantic/schema.py#L207
            json_schema["title"] = (
                field.field_info.title or field.alias.title().replace("_", " ")
            )
        return json_schema

    def get_compat_model_name_map(fields: List[ModelField]) -> ModelNameMap:
        # Unused under v2 (the schema generator handles naming); kept for parity
        # with the v1 branch so callers are version-agnostic.
        return {}

    def get_definitions(
        *,
        fields: List[ModelField],
        schema_generator: GenerateJsonSchema,
        model_name_map: ModelNameMap,
        separate_input_output_schemas: bool = True,
    ) -> Tuple[
        Dict[
            Tuple[ModelField, Literal["validation", "serialization"]], JsonSchemaValue
        ],
        Dict[str, Dict[str, Any]],
    ]:
        """Generate per-field schemas and shared ``$defs`` for all fields."""
        override_mode: Union[Literal["validation"], None] = (
            None if separate_input_output_schemas else "validation"
        )
        inputs = [
            (field, override_mode or field.mode, field._type_adapter.core_schema)
            for field in fields
        ]
        field_mapping, definitions = schema_generator.generate_definitions(
            inputs=inputs
        )
        # Keep only the part of each description before "\f" — the form feed
        # marks the documentation cut-off point, as elsewhere in FastAPI.
        for item_def in cast(Dict[str, Dict[str, Any]], definitions).values():
            if "description" in item_def:
                item_description = cast(str, item_def["description"]).split("\f")[0]
                item_def["description"] = item_description
        return field_mapping, definitions  # type: ignore[return-value]

    def is_scalar_field(field: ModelField) -> bool:
        # Local import to avoid a circular import at module load time.
        from fastapi import params

        return field_annotation_is_scalar(
            field.field_info.annotation
        ) and not isinstance(field.field_info, params.Body)

    def is_sequence_field(field: ModelField) -> bool:
        return field_annotation_is_sequence(field.field_info.annotation)

    def is_scalar_sequence_field(field: ModelField) -> bool:
        return field_annotation_is_scalar_sequence(field.field_info.annotation)

    def is_bytes_field(field: ModelField) -> bool:
        return is_bytes_or_nonable_bytes_annotation(field.type_)

    def is_bytes_sequence_field(field: ModelField) -> bool:
        return is_bytes_sequence_annotation(field.type_)

    def copy_field_info(*, field_info: FieldInfo, annotation: Any) -> FieldInfo:
        """Copy ``field_info`` but re-derive metadata/annotation from ``annotation``."""
        cls = type(field_info)
        merged_field_info = cls.from_annotation(annotation)
        new_field_info = copy(field_info)
        new_field_info.metadata = merged_field_info.metadata
        new_field_info.annotation = merged_field_info.annotation
        return new_field_info

    def serialize_sequence_value(*, field: ModelField, value: Any) -> Sequence[Any]:
        # Rebuild the value as the concrete container type the annotation names
        # (e.g. List[int] -> list, Set[str] -> set).
        origin_type = (
            get_origin(field.field_info.annotation) or field.field_info.annotation
        )
        assert issubclass(origin_type, sequence_types)  # type: ignore[arg-type]
        return sequence_annotation_to_type[origin_type](value)  # type: ignore[no-any-return]

    def get_missing_field_error(loc: Tuple[str, ...]) -> Dict[str, Any]:
        """Build a v2-shaped "missing field" error dict for location ``loc``."""
        error = ValidationError.from_exception_data(
            "Field required", [{"type": "missing", "loc": loc, "input": {}}]
        ).errors(include_url=False)[0]
        error["input"] = None
        return error  # type: ignore[return-value]

    def create_body_model(
        *, fields: Sequence[ModelField], model_name: str
    ) -> Type[BaseModel]:
        """Create an ad-hoc BaseModel whose fields are the given ModelFields."""
        field_params = {f.name: (f.field_info.annotation, f.field_info) for f in fields}
        BodyModel: Type[BaseModel] = create_model(model_name, **field_params)  # type: ignore[call-overload]
        return BodyModel

    def get_model_fields(model: Type[BaseModel]) -> List[ModelField]:
        # Wrap each v2 FieldInfo in this module's compat ModelField.
        return [
            ModelField(field_info=field_info, name=name)
            for name, field_info in model.model_fields.items()
        ]
else:
    # Pydantic v1: import the v1 implementations under the same public names
    # the v2 branch defines, so the rest of FastAPI is version-agnostic.
    from fastapi.openapi.constants import REF_PREFIX as REF_PREFIX
    from pydantic import AnyUrl as Url  # noqa: F401
    from pydantic import (  # type: ignore[assignment]
        BaseConfig as BaseConfig,  # noqa: F401
    )
    from pydantic import ValidationError as ValidationError  # noqa: F401
    from pydantic.class_validators import (  # type: ignore[no-redef]
        Validator as Validator,  # noqa: F401
    )
    from pydantic.error_wrappers import (  # type: ignore[no-redef]
        ErrorWrapper as ErrorWrapper,  # noqa: F401
    )
    from pydantic.errors import MissingError
    from pydantic.fields import (  # type: ignore[attr-defined]
        SHAPE_FROZENSET,
        SHAPE_LIST,
        SHAPE_SEQUENCE,
        SHAPE_SET,
        SHAPE_SINGLETON,
        SHAPE_TUPLE,
        SHAPE_TUPLE_ELLIPSIS,
    )
    from pydantic.fields import FieldInfo as FieldInfo
    from pydantic.fields import (  # type: ignore[no-redef,attr-defined]
        ModelField as ModelField,  # noqa: F401
    )

    # Keeping old "Required" functionality from Pydantic V1, without
    # shadowing typing.Required.
    RequiredParam: Any = Ellipsis  # type: ignore[no-redef]
    from pydantic.fields import (  # type: ignore[no-redef,attr-defined]
        Undefined as Undefined,
    )
    from pydantic.fields import (  # type: ignore[no-redef, attr-defined]
        UndefinedType as UndefinedType,  # noqa: F401
    )
    from pydantic.schema import (
        field_schema,
        get_flat_models_from_fields,
        get_model_name_map,
        model_process_schema,
    )
    from pydantic.schema import (  # type: ignore[no-redef] # noqa: F401
        get_annotation_from_field_info as get_annotation_from_field_info,
    )
    from pydantic.typing import (  # type: ignore[no-redef]
        evaluate_forwardref as evaluate_forwardref,  # noqa: F401
    )
    from pydantic.utils import (  # type: ignore[no-redef]
        lenient_issubclass as lenient_issubclass,  # noqa: F401
    )

    # v1 has no JSON-schema generator API; fill the v2-only names with
    # inert stand-ins so annotations elsewhere keep working.
    GetJsonSchemaHandler = Any  # type: ignore[assignment,misc]
    JsonSchemaValue = Dict[str, Any]  # type: ignore[misc]
    CoreSchema = Any  # type: ignore[assignment,misc]

    # All v1 field "shapes" that represent a sequence container.
    sequence_shapes = {
        SHAPE_LIST,
        SHAPE_SET,
        SHAPE_FROZENSET,
        SHAPE_TUPLE,
        SHAPE_SEQUENCE,
        SHAPE_TUPLE_ELLIPSIS,
    }
    # Concrete type used to rebuild a serialized value for each sequence shape.
    sequence_shape_to_type = {
        SHAPE_LIST: list,
        SHAPE_SET: set,
        SHAPE_TUPLE: tuple,
        SHAPE_SEQUENCE: list,
        SHAPE_TUPLE_ELLIPSIS: list,
    }
    @dataclass
    class GenerateJsonSchema:  # type: ignore[no-redef]
        """Stub matching the v2 generator's constructor; unused on v1."""

        ref_template: str

    class PydanticSchemaGenerationError(Exception):  # type: ignore[no-redef]
        """Stub for the v2-only exception type; never raised on v1."""

        pass

    def with_info_plain_validator_function(  # type: ignore[misc]
        function: Callable[..., Any],
        *,
        ref: Union[str, None] = None,
        metadata: Any = None,
        serialization: Any = None,
    ) -> Any:
        # v1 has no core-schema machinery; return an empty schema placeholder.
        return {}

    def get_model_definitions(
        *,
        flat_models: Set[Union[Type[BaseModel], Type[Enum]]],
        model_name_map: Dict[Union[Type[BaseModel], Type[Enum]], str],
    ) -> Dict[str, Any]:
        """Build the OpenAPI ``definitions`` mapping for a set of v1 models."""
        definitions: Dict[str, Dict[str, Any]] = {}
        for model in flat_models:
            m_schema, m_definitions, m_nested_models = model_process_schema(
                model, model_name_map=model_name_map, ref_prefix=REF_PREFIX
            )
            definitions.update(m_definitions)
            model_name = model_name_map[model]
            if "description" in m_schema:
                # Truncate at "\f" — the documentation cut-off marker.
                m_schema["description"] = m_schema["description"].split("\f")[0]
            definitions[model_name] = m_schema
        return definitions

    def is_pv1_scalar_field(field: ModelField) -> bool:
        # Local import to avoid a circular import at module load time.
        from fastapi import params

        field_info = field.field_info
        # Scalar = singleton shape that is not a model/dict/sequence/dataclass
        # and not explicitly declared as a body parameter.
        if not (
            field.shape == SHAPE_SINGLETON  # type: ignore[attr-defined]
            and not lenient_issubclass(field.type_, BaseModel)
            and not lenient_issubclass(field.type_, dict)
            and not field_annotation_is_sequence(field.type_)
            and not is_dataclass(field.type_)
            and not isinstance(field_info, params.Body)
        ):
            return False
        if field.sub_fields:  # type: ignore[attr-defined]
            # e.g. Union members: every alternative must itself be scalar.
            if not all(
                is_pv1_scalar_field(f)
                for f in field.sub_fields  # type: ignore[attr-defined]
            ):
                return False
        return True

    def is_pv1_scalar_sequence_field(field: ModelField) -> bool:
        if (field.shape in sequence_shapes) and not lenient_issubclass(  # type: ignore[attr-defined]
            field.type_, BaseModel
        ):
            if field.sub_fields is not None:  # type: ignore[attr-defined]
                for sub_field in field.sub_fields:  # type: ignore[attr-defined]
                    if not is_pv1_scalar_field(sub_field):
                        return False
            return True
        if _annotation_is_sequence(field.type_):
            return True
        return False

    def _normalize_errors(errors: Sequence[Any]) -> List[Dict[str, Any]]:
        # Flatten ErrorWrapper instances and nested lists into plain error dicts.
        use_errors: List[Any] = []
        for error in errors:
            if isinstance(error, ErrorWrapper):
                new_errors = ValidationError(  # type: ignore[call-arg]
                    errors=[error], model=RequestErrorModel
                ).errors()
                use_errors.extend(new_errors)
            elif isinstance(error, list):
                use_errors.extend(_normalize_errors(error))
            else:
                use_errors.append(error)
        return use_errors

    def _model_rebuild(model: Type[BaseModel]) -> None:
        # Resolve forward references (v1 spelling).
        model.update_forward_refs()

    def _model_dump(
        model: BaseModel, mode: Literal["json", "python"] = "json", **kwargs: Any
    ) -> Any:
        # v1's .dict() has no "mode" concept; the parameter exists for parity.
        return model.dict(**kwargs)

    def _get_model_config(model: BaseModel) -> Any:
        return model.__config__  # type: ignore[attr-defined]

    def get_schema_from_model_field(
        *,
        field: ModelField,
        schema_generator: GenerateJsonSchema,
        model_name_map: ModelNameMap,
        field_mapping: Dict[
            Tuple[ModelField, Literal["validation", "serialization"]], JsonSchemaValue
        ],
        separate_input_output_schemas: bool = True,
    ) -> Dict[str, Any]:
        # This expects that GenerateJsonSchema was already used to generate the definitions
        return field_schema(  # type: ignore[no-any-return]
            field, model_name_map=model_name_map, ref_prefix=REF_PREFIX
        )[0]

    def get_compat_model_name_map(fields: List[ModelField]) -> ModelNameMap:
        models = get_flat_models_from_fields(fields, known_models=set())
        return get_model_name_map(models)  # type: ignore[no-any-return]

    def get_definitions(
        *,
        fields: List[ModelField],
        schema_generator: GenerateJsonSchema,
        model_name_map: ModelNameMap,
        separate_input_output_schemas: bool = True,
    ) -> Tuple[
        Dict[
            Tuple[ModelField, Literal["validation", "serialization"]], JsonSchemaValue
        ],
        Dict[str, Dict[str, Any]],
    ]:
        # v1 produces no per-field mapping; only the model definitions dict.
        models = get_flat_models_from_fields(fields, known_models=set())
        return {}, get_model_definitions(
            flat_models=models, model_name_map=model_name_map
        )

    def is_scalar_field(field: ModelField) -> bool:
        return is_pv1_scalar_field(field)

    def is_sequence_field(field: ModelField) -> bool:
        return field.shape in sequence_shapes or _annotation_is_sequence(field.type_)  # type: ignore[attr-defined]

    def is_scalar_sequence_field(field: ModelField) -> bool:
        return is_pv1_scalar_sequence_field(field)

    def is_bytes_field(field: ModelField) -> bool:
        return lenient_issubclass(field.type_, bytes)

    def is_bytes_sequence_field(field: ModelField) -> bool:
        return field.shape in sequence_shapes and lenient_issubclass(field.type_, bytes)  # type: ignore[attr-defined]

    def copy_field_info(*, field_info: FieldInfo, annotation: Any) -> FieldInfo:
        # v1 keeps constraints on the FieldInfo itself; a shallow copy suffices
        # (the annotation parameter only matters on v2).
        return copy(field_info)

    def serialize_sequence_value(*, field: ModelField, value: Any) -> Sequence[Any]:
        return sequence_shape_to_type[field.shape](value)  # type: ignore[no-any-return,attr-defined]

    def get_missing_field_error(loc: Tuple[str, ...]) -> Dict[str, Any]:
        """Build a v1-shaped "missing field" error dict for location ``loc``."""
        missing_field_error = ErrorWrapper(MissingError(), loc=loc)  # type: ignore[call-arg]
        new_error = ValidationError([missing_field_error], RequestErrorModel)
        return new_error.errors()[0]  # type: ignore[return-value]

    def create_body_model(
        *, fields: Sequence[ModelField], model_name: str
    ) -> Type[BaseModel]:
        # Build an empty model, then attach the already-built v1 ModelFields.
        BodyModel = create_model(model_name)
        for f in fields:
            BodyModel.__fields__[f.name] = f  # type: ignore[index]
        return BodyModel

    def get_model_fields(model: Type[BaseModel]) -> List[ModelField]:
        return list(model.__fields__.values())  # type: ignore[attr-defined]
  464. def _regenerate_error_with_loc(
  465. *, errors: Sequence[Any], loc_prefix: Tuple[Union[str, int], ...]
  466. ) -> List[Dict[str, Any]]:
  467. updated_loc_errors: List[Any] = [
  468. {**err, "loc": loc_prefix + err.get("loc", ())}
  469. for err in _normalize_errors(errors)
  470. ]
  471. return updated_loc_errors
  472. def _annotation_is_sequence(annotation: Union[Type[Any], None]) -> bool:
  473. if lenient_issubclass(annotation, (str, bytes)):
  474. return False
  475. return lenient_issubclass(annotation, sequence_types)
  476. def field_annotation_is_sequence(annotation: Union[Type[Any], None]) -> bool:
  477. origin = get_origin(annotation)
  478. if origin is Union or origin is UnionType:
  479. for arg in get_args(annotation):
  480. if field_annotation_is_sequence(arg):
  481. return True
  482. return False
  483. return _annotation_is_sequence(annotation) or _annotation_is_sequence(
  484. get_origin(annotation)
  485. )
  486. def value_is_sequence(value: Any) -> bool:
  487. return isinstance(value, sequence_types) and not isinstance(value, (str, bytes)) # type: ignore[arg-type]
  488. def _annotation_is_complex(annotation: Union[Type[Any], None]) -> bool:
  489. return (
  490. lenient_issubclass(annotation, (BaseModel, Mapping, UploadFile))
  491. or _annotation_is_sequence(annotation)
  492. or is_dataclass(annotation)
  493. )
  494. def field_annotation_is_complex(annotation: Union[Type[Any], None]) -> bool:
  495. origin = get_origin(annotation)
  496. if origin is Union or origin is UnionType:
  497. return any(field_annotation_is_complex(arg) for arg in get_args(annotation))
  498. return (
  499. _annotation_is_complex(annotation)
  500. or _annotation_is_complex(origin)
  501. or hasattr(origin, "__pydantic_core_schema__")
  502. or hasattr(origin, "__get_pydantic_core_schema__")
  503. )
  504. def field_annotation_is_scalar(annotation: Any) -> bool:
  505. # handle Ellipsis here to make tuple[int, ...] work nicely
  506. return annotation is Ellipsis or not field_annotation_is_complex(annotation)
  507. def field_annotation_is_scalar_sequence(annotation: Union[Type[Any], None]) -> bool:
  508. origin = get_origin(annotation)
  509. if origin is Union or origin is UnionType:
  510. at_least_one_scalar_sequence = False
  511. for arg in get_args(annotation):
  512. if field_annotation_is_scalar_sequence(arg):
  513. at_least_one_scalar_sequence = True
  514. continue
  515. elif not field_annotation_is_scalar(arg):
  516. return False
  517. return at_least_one_scalar_sequence
  518. return field_annotation_is_sequence(annotation) and all(
  519. field_annotation_is_scalar(sub_annotation)
  520. for sub_annotation in get_args(annotation)
  521. )
  522. def is_bytes_or_nonable_bytes_annotation(annotation: Any) -> bool:
  523. if lenient_issubclass(annotation, bytes):
  524. return True
  525. origin = get_origin(annotation)
  526. if origin is Union or origin is UnionType:
  527. for arg in get_args(annotation):
  528. if lenient_issubclass(arg, bytes):
  529. return True
  530. return False
  531. def is_uploadfile_or_nonable_uploadfile_annotation(annotation: Any) -> bool:
  532. if lenient_issubclass(annotation, UploadFile):
  533. return True
  534. origin = get_origin(annotation)
  535. if origin is Union or origin is UnionType:
  536. for arg in get_args(annotation):
  537. if lenient_issubclass(arg, UploadFile):
  538. return True
  539. return False
  540. def is_bytes_sequence_annotation(annotation: Any) -> bool:
  541. origin = get_origin(annotation)
  542. if origin is Union or origin is UnionType:
  543. at_least_one = False
  544. for arg in get_args(annotation):
  545. if is_bytes_sequence_annotation(arg):
  546. at_least_one = True
  547. continue
  548. return at_least_one
  549. return field_annotation_is_sequence(annotation) and all(
  550. is_bytes_or_nonable_bytes_annotation(sub_annotation)
  551. for sub_annotation in get_args(annotation)
  552. )
  553. def is_uploadfile_sequence_annotation(annotation: Any) -> bool:
  554. origin = get_origin(annotation)
  555. if origin is Union or origin is UnionType:
  556. at_least_one = False
  557. for arg in get_args(annotation):
  558. if is_uploadfile_sequence_annotation(arg):
  559. at_least_one = True
  560. continue
  561. return at_least_one
  562. return field_annotation_is_sequence(annotation) and all(
  563. is_uploadfile_or_nonable_uploadfile_annotation(sub_annotation)
  564. for sub_annotation in get_args(annotation)
  565. )
@lru_cache
def get_cached_model_fields(model: Type[BaseModel]) -> List[ModelField]:
    """Memoized ``get_model_fields``, keyed (unbounded) on the model class."""
    return get_model_fields(model)