From 32f90f8b1537539517866f5394219ddca2430e0e Mon Sep 17 00:00:00 2001 From: Konstantin Chupin Date: Thu, 9 Nov 2023 18:55:30 +0100 Subject: [PATCH] Add attrs model mapper --- lib/dl_attrs_model_mapper/LICENSE | 201 +++++++ lib/dl_attrs_model_mapper/README.md | 1 + .../dl_attrs_model_mapper/__init__.py | 0 .../dl_attrs_model_mapper/base.py | 254 +++++++++ .../dl_attrs_model_mapper/domain.py | 195 +++++++ .../dl_attrs_model_mapper/field_processor.py | 143 +++++ .../dl_attrs_model_mapper/marshmallow.py | 357 ++++++++++++ .../marshmallow_base_schemas.py | 129 +++++ .../marshmallow_fields.py | 62 +++ .../dl_attrs_model_mapper/pretty_repr.py | 199 +++++++ .../dl_attrs_model_mapper/py.typed | 0 .../dl_attrs_model_mapper/structs/__init__.py | 0 .../dl_attrs_model_mapper/structs/mappings.py | 67 +++ .../structs/singleormultistring.py | 53 ++ .../dl_attrs_model_mapper/utils.py | 130 +++++ .../dl_attrs_model_mapper_tests/__init__.py | 0 .../unit/__init__.py | 0 .../unit/conftest.py | 0 .../unit/test_attrs_field_processor.py | 163 ++++++ .../unit/test_attrs_model_mapper.py | 515 ++++++++++++++++++ .../unit/test_struct.py | 36 ++ lib/dl_attrs_model_mapper/pyproject.toml | 38 ++ lib/dl_attrs_model_mapper_doc_tools/LICENSE | 201 +++++++ lib/dl_attrs_model_mapper_doc_tools/README.md | 1 + .../__init__.py | 0 .../dl_attrs_model_mapper_doc_tools/domain.py | 31 ++ .../dl_attrs_model_mapper_doc_tools/main.py | 294 ++++++++++ .../md_link_extractor.py | 47 ++ .../operations_builder.py | 111 ++++ .../dl_attrs_model_mapper_doc_tools/py.typed | 0 .../render_units.py | 388 +++++++++++++ .../writer_utils.py | 51 ++ .../__init__.py | 0 .../unit/__init__.py | 0 .../unit/conftest.py | 0 .../pyproject.toml | 38 ++ metapkg/poetry.lock | 48 +- metapkg/pyproject.toml | 2 + 38 files changed, 3754 insertions(+), 1 deletion(-) create mode 100644 lib/dl_attrs_model_mapper/LICENSE create mode 100644 lib/dl_attrs_model_mapper/README.md create mode 100644 
lib/dl_attrs_model_mapper/dl_attrs_model_mapper/__init__.py create mode 100644 lib/dl_attrs_model_mapper/dl_attrs_model_mapper/base.py create mode 100644 lib/dl_attrs_model_mapper/dl_attrs_model_mapper/domain.py create mode 100644 lib/dl_attrs_model_mapper/dl_attrs_model_mapper/field_processor.py create mode 100644 lib/dl_attrs_model_mapper/dl_attrs_model_mapper/marshmallow.py create mode 100644 lib/dl_attrs_model_mapper/dl_attrs_model_mapper/marshmallow_base_schemas.py create mode 100644 lib/dl_attrs_model_mapper/dl_attrs_model_mapper/marshmallow_fields.py create mode 100644 lib/dl_attrs_model_mapper/dl_attrs_model_mapper/pretty_repr.py create mode 100644 lib/dl_attrs_model_mapper/dl_attrs_model_mapper/py.typed create mode 100644 lib/dl_attrs_model_mapper/dl_attrs_model_mapper/structs/__init__.py create mode 100644 lib/dl_attrs_model_mapper/dl_attrs_model_mapper/structs/mappings.py create mode 100644 lib/dl_attrs_model_mapper/dl_attrs_model_mapper/structs/singleormultistring.py create mode 100644 lib/dl_attrs_model_mapper/dl_attrs_model_mapper/utils.py create mode 100644 lib/dl_attrs_model_mapper/dl_attrs_model_mapper_tests/__init__.py create mode 100644 lib/dl_attrs_model_mapper/dl_attrs_model_mapper_tests/unit/__init__.py create mode 100644 lib/dl_attrs_model_mapper/dl_attrs_model_mapper_tests/unit/conftest.py create mode 100644 lib/dl_attrs_model_mapper/dl_attrs_model_mapper_tests/unit/test_attrs_field_processor.py create mode 100644 lib/dl_attrs_model_mapper/dl_attrs_model_mapper_tests/unit/test_attrs_model_mapper.py create mode 100644 lib/dl_attrs_model_mapper/dl_attrs_model_mapper_tests/unit/test_struct.py create mode 100644 lib/dl_attrs_model_mapper/pyproject.toml create mode 100644 lib/dl_attrs_model_mapper_doc_tools/LICENSE create mode 100644 lib/dl_attrs_model_mapper_doc_tools/README.md create mode 100644 lib/dl_attrs_model_mapper_doc_tools/dl_attrs_model_mapper_doc_tools/__init__.py create mode 100644 
lib/dl_attrs_model_mapper_doc_tools/dl_attrs_model_mapper_doc_tools/domain.py create mode 100644 lib/dl_attrs_model_mapper_doc_tools/dl_attrs_model_mapper_doc_tools/main.py create mode 100644 lib/dl_attrs_model_mapper_doc_tools/dl_attrs_model_mapper_doc_tools/md_link_extractor.py create mode 100644 lib/dl_attrs_model_mapper_doc_tools/dl_attrs_model_mapper_doc_tools/operations_builder.py create mode 100644 lib/dl_attrs_model_mapper_doc_tools/dl_attrs_model_mapper_doc_tools/py.typed create mode 100644 lib/dl_attrs_model_mapper_doc_tools/dl_attrs_model_mapper_doc_tools/render_units.py create mode 100644 lib/dl_attrs_model_mapper_doc_tools/dl_attrs_model_mapper_doc_tools/writer_utils.py create mode 100644 lib/dl_attrs_model_mapper_doc_tools/dl_attrs_model_mapper_doc_tools_tests/__init__.py create mode 100644 lib/dl_attrs_model_mapper_doc_tools/dl_attrs_model_mapper_doc_tools_tests/unit/__init__.py create mode 100644 lib/dl_attrs_model_mapper_doc_tools/dl_attrs_model_mapper_doc_tools_tests/unit/conftest.py create mode 100644 lib/dl_attrs_model_mapper_doc_tools/pyproject.toml diff --git a/lib/dl_attrs_model_mapper/LICENSE b/lib/dl_attrs_model_mapper/LICENSE new file mode 100644 index 000000000..74ba5f6c7 --- /dev/null +++ b/lib/dl_attrs_model_mapper/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2023 YANDEX LLC + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/lib/dl_attrs_model_mapper/README.md b/lib/dl_attrs_model_mapper/README.md new file mode 100644 index 000000000..3df4ac1b5 --- /dev/null +++ b/lib/dl_attrs_model_mapper/README.md @@ -0,0 +1 @@ +# dl_attrs_model_mapper diff --git a/lib/dl_attrs_model_mapper/dl_attrs_model_mapper/__init__.py b/lib/dl_attrs_model_mapper/dl_attrs_model_mapper/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/lib/dl_attrs_model_mapper/dl_attrs_model_mapper/base.py b/lib/dl_attrs_model_mapper/dl_attrs_model_mapper/base.py new file mode 100644 index 000000000..d1de86054 --- /dev/null +++ b/lib/dl_attrs_model_mapper/dl_attrs_model_mapper/base.py @@ -0,0 +1,254 @@ +from __future__ import annotations + +import abc +import enum +import inspect +from typing import ( + Any, + ClassVar, + List, + Optional, + Sequence, + Type, + TypeVar, + Union, + final, +) + +import attr +from dynamic_enum import DynamicEnum + +from dl_attrs_model_mapper.utils import MText + + +_CLS_T = TypeVar("_CLS_T", bound=type) +_DESCRIPTOR_T = TypeVar("_DESCRIPTOR_T", bound="BaseClassDescriptor") + + +@attr.s(kw_only=True) +class BaseClassDescriptor(metaclass=abc.ABCMeta): + _REGISTRY: ClassVar[dict[Type, BaseClassDescriptor]] + + _registered: bool = attr.ib(init=False, default=False) + _target_cls: Optional[Type] = attr.ib(init=False, default=None) + + def __init_subclass__(cls, **kwargs: Any) -> None: + # Override registry 
for each subclass + cls._REGISTRY = {} + + @classmethod + def has(cls, the_type: Type) -> bool: + return the_type in cls._REGISTRY + + @classmethod + def get_for_type(cls: Type[_DESCRIPTOR_T], the_type: Type) -> _DESCRIPTOR_T: + if not cls.has(the_type): + raise AssertionError(f"Class {the_type!r} has no a {cls.__name__}") + ret = cls._REGISTRY[the_type] + assert isinstance(ret, cls) + return ret + + @classmethod + def _register(cls, descriptor: _DESCRIPTOR_T) -> None: + the_type = descriptor._target_cls + + assert not descriptor._registered, f"Attempt to reuse {descriptor}" + assert the_type is not None + assert the_type not in cls._REGISTRY, f"Class {the_type!r} already associated with {cls.__name__}" + + cls._REGISTRY[the_type] = descriptor + + @abc.abstractmethod + def pre_registration_hook(self, cls: Type) -> None: + raise NotImplementedError() + + @final + def __call__(self, cls: _CLS_T) -> _CLS_T: + assert not self._registered, "Attempt to reuse model descriptor" + assert attr.has(cls) + self._target_cls = cls + self.pre_registration_hook(cls) + self._register(self) + return cls + + +@attr.s(kw_only=True) +class ModelDescriptor(BaseClassDescriptor): + # TODO FIX: Add on_setattr=attr.setters.frozen after MyPy upgrade in Arcadia + # At this moment: + # bi_external_api/attrs_model_mapper/base.py:16: error: Module has no attribute "setters" + """ + api_types: optional list of enum.Enum where model should be used, if no tag is provided model is used + for all environments + api_types_exclude: optional list of enum.Enum where model should NOT be used, if no tag is provided model is used + for all environments + currently we need specify env_tags, api_types_exclude explicitly, + to avoid registering two sub-models with same kind + + """ + is_abstract: bool = attr.ib(default=False) + type_discriminator: Optional[str] = attr.ib(default=None) + children_type_discriminator_attr_name: Optional[str] = attr.ib(default=None) + children_type_discriminator_aliases_attr_name: 
Optional[str] = attr.ib(default=None) + api_types: List[enum.Enum] = attr.ib(factory=list) + api_types_exclude: List[enum.Enum] = attr.ib(factory=list) + description: Optional[MText] = attr.ib(default=None) + + # Next fields will be filled during + _effective_type_discriminator: Optional[str] = attr.ib(init=False, default=None) + _effective_type_discriminator_aliases: Optional[tuple[str, ...]] = attr.ib(init=False, default=None) + + @property + def effective_type_discriminator(self) -> str: + ret = self._effective_type_discriminator + assert ret is not None, f"No type discriminator defined for {self._target_cls!r}" + return ret + + @property + def effective_type_discriminator_aliases(self) -> tuple[str, ...]: + ret = self._effective_type_discriminator_aliases + assert ret is not None, f"No type discriminator aliases defined for {self._target_cls!r}" + return ret + + @classmethod + def get_registered_parents_for(cls, the_type: Type) -> Sequence[Type]: + return [parent_cls for parent_cls in inspect.getmro(the_type) if parent_cls in cls._REGISTRY] + + @classmethod + def resolve_type_discriminator_attr_name(cls, model_cls: Type) -> Optional[str]: + registered_parents_mro = cls.get_registered_parents_for(model_cls) + parent_model_descriptors_with_children_type_discriminator_attr_name = [ + ModelDescriptor.get_for_type(parent_model_cls) + for parent_model_cls in registered_parents_mro + if ModelDescriptor.get_for_type(parent_model_cls).children_type_discriminator_attr_name is not None + ] + + assert len(parent_model_descriptors_with_children_type_discriminator_attr_name) < 2, ( + f"type_discriminator_attr_name for {cls} is set in more than one parent:" + f" {parent_model_descriptors_with_children_type_discriminator_attr_name}" + ) + + return next( + ( + md.children_type_discriminator_attr_name + for md in parent_model_descriptors_with_children_type_discriminator_attr_name + ), + None, + ) + + @classmethod + def resolve_type_discriminator_aliases_attr_name(cls, model_cls: 
Type) -> Optional[str]: + registered_parents_mro = cls.get_registered_parents_for(model_cls) + parent_model_descriptors_with_children_type_discriminator_attr_name = [ + ModelDescriptor.get_for_type(parent_model_cls) + for parent_model_cls in registered_parents_mro + if ModelDescriptor.get_for_type(parent_model_cls).children_type_discriminator_aliases_attr_name is not None + ] + + assert len(parent_model_descriptors_with_children_type_discriminator_attr_name) < 2, ( + f"type_discriminator_attr_name for {cls} is set in more than one parent:" + f" {parent_model_descriptors_with_children_type_discriminator_attr_name}" + ) + + return next( + ( + md.children_type_discriminator_aliases_attr_name + for md in parent_model_descriptors_with_children_type_discriminator_attr_name + ), + None, + ) + + def pre_registration_hook(self, cls: Type) -> None: + if self.is_abstract: + pass + + else: + self.set_effective_type_discriminator(cls) + self.set_effective_type_discriminator_aliases(cls) + + def set_effective_type_discriminator(self, cls: Type) -> None: + assert self.children_type_discriminator_attr_name is None + children_type_discriminator_attr_name = self.resolve_type_discriminator_attr_name(cls) + if children_type_discriminator_attr_name is not None: + assert self.type_discriminator is None, ( + f"Type discriminators should not be set for {cls!r} manually" + f" due to type_discriminator_attr_name set {children_type_discriminator_attr_name!r} in one of parents" + ) + + type_discriminator_candidate = getattr(cls, children_type_discriminator_attr_name) + + if isinstance(type_discriminator_candidate, enum.Enum): + self._effective_type_discriminator = type_discriminator_candidate.name + elif isinstance(type_discriminator_candidate, DynamicEnum): + self._effective_type_discriminator = type_discriminator_candidate.name + elif isinstance(type_discriminator_candidate, str): + self._effective_type_discriminator = type_discriminator_candidate + else: + raise AssertionError(f"Unknown 
type of type discriminator for {cls}: {type_discriminator_candidate}") + + def set_effective_type_discriminator_aliases(self, cls: Type) -> None: + ret: List[str] = list() + children_type_discriminator_aliases_attr_name = self.resolve_type_discriminator_aliases_attr_name(cls) + if children_type_discriminator_aliases_attr_name is not None: + type_discriminator_alias_candidate = ( + getattr( + cls, + children_type_discriminator_aliases_attr_name, + None, + ) + or tuple() + ) + + for alias in type_discriminator_alias_candidate: + if isinstance(alias, str): + ret.append(alias) + elif isinstance(alias, enum.Enum): + ret.append(alias.name) + else: + raise AssertionError(f"Unknown type of type discriminator alias for {cls}: {alias}") + + self._effective_type_discriminator_aliases = tuple(ret) + + +@attr.s(kw_only=True, frozen=True) +class AttribDescriptor: + METADATA_KEY = "ATTRS_MODEL_MAPPER_META_KEY" + + enum_by_value: bool = attr.ib(default=False) + serialization_key: Optional[str] = attr.ib(default=None) + tags: frozenset[enum.Enum] = attr.ib(default=frozenset()) + load_only: bool = attr.ib(default=False) + skip_none_on_dump: bool = attr.ib(default=False) + _description: Union[str, MText, None] = attr.ib(default=None) + + def to_meta(self) -> dict: + return {self.METADATA_KEY: self} + + @classmethod + def from_attrib(cls, attr_ib: attr.Attribute) -> Optional["AttribDescriptor"]: + meta = attr_ib.metadata + if cls.METADATA_KEY in meta: + may_be_attrib_descriptor = meta[cls.METADATA_KEY] + assert isinstance(may_be_attrib_descriptor, AttribDescriptor) + return may_be_attrib_descriptor + + return None + + @property + def description(self) -> Optional[MText]: + d = self._description + + if isinstance(d, str): + return MText(ru=d, en=None) + return d + + +# TODO FIX: Consider to do not use subclassing in favor of some flags in model descriptor +class MapperBaseModel(metaclass=abc.ABCMeta): # noqa: B024 + @classmethod + def pre_load(cls, data: dict[str, Any]) -> 
Optional[dict[str, Any]]: + return None + + @classmethod + def post_dump(cls, data: dict[str, Any]) -> Optional[dict[str, Any]]: + return None diff --git a/lib/dl_attrs_model_mapper/dl_attrs_model_mapper/domain.py b/lib/dl_attrs_model_mapper/dl_attrs_model_mapper/domain.py new file mode 100644 index 000000000..dd8257264 --- /dev/null +++ b/lib/dl_attrs_model_mapper/dl_attrs_model_mapper/domain.py @@ -0,0 +1,195 @@ +import abc +from typing import ( + Any, + ClassVar, + Collection, + Optional, + Sequence, + Type, +) + +import attr + +from dl_attrs_model_mapper.utils import ( + CommonAttributeProps, + MText, +) + + +@attr.s() +class AmmSchemaRegistry: + _map_type_schema: dict[Type, "AmmSchema"] = attr.ib(factory=dict) + _map_type_name: dict[Type, str] = attr.ib(factory=dict) + + def register(self, schema: "AmmSchema") -> None: + self._map_type_schema[schema.clz] = schema + self._map_type_name[schema.clz] = schema.clz.__name__ + + def is_registered(self, clz: Type) -> bool: + return clz in self._map_type_schema + + def get_ref_for_type(self, clz: Type) -> str: + return f"#/components/schemas/{self._map_type_name[clz]}" + + def get_generic_type_schema(self, clz: Type) -> "AmmGenericSchema": + ret = self._map_type_schema[clz] + assert isinstance(ret, AmmGenericSchema) + return ret + + def get_regular_type_schema(self, clz: Type) -> "AmmRegularSchema": + ret = self._map_type_schema[clz] + assert isinstance(ret, AmmRegularSchema) + return ret + + @classmethod + def from_schemas_collection(cls, schema_collection: Collection["AmmSchema"]) -> "AmmSchemaRegistry": + reg = cls() + for schema in schema_collection: + reg.register(schema) + return reg + + def dump_open_api_schemas(self) -> dict[str, dict[str, Any]]: + return { + self._map_type_name[schema.clz]: schema.to_openapi_dict(self) for schema in self._map_type_schema.values() + } + + +@attr.s() +class AmmField: + common_props: CommonAttributeProps = attr.ib() + + def to_openapi_dict(self, ref_resolver: AmmSchemaRegistry, 
*, is_root_prop: bool) -> dict[str, Any]: + ret: dict[str, Any] = dict(nullable=self.common_props.allow_none) + if self.common_props.load_only: + ret["writeOnly"] = True + return ret + + +@attr.s() +class AmmScalarField(AmmField): + scalar_type: Type = attr.ib() + scalar_type_identifier: Optional[str] = attr.ib(default=None) + + TYPE_MAP: ClassVar[dict[Type, str]] = { + int: "number", + str: "string", + float: "number", + bool: "boolean", + } + + def to_openapi_dict(self, ref_resolver: AmmSchemaRegistry, *, is_root_prop: bool) -> dict[str, Any]: + return { + "type": self.TYPE_MAP[self.scalar_type], + **super().to_openapi_dict(ref_resolver, is_root_prop=is_root_prop), + } + + +@attr.s() +class AmmEnumField(AmmScalarField): + values: Sequence[Any] = attr.ib(kw_only=True) + + def to_openapi_dict(self, ref_resolver: AmmSchemaRegistry, *, is_root_prop: bool) -> dict[str, Any]: + return dict( + **super().to_openapi_dict(ref_resolver, is_root_prop=is_root_prop), + enum=list(sorted(self.values)), + ) + + +@attr.s() +class AmmNestedField(AmmField): + item: "AmmSchema" = attr.ib() + + def to_openapi_dict(self, ref_resolver: AmmSchemaRegistry, *, is_root_prop: bool) -> dict[str, Any]: + return { + **super().to_openapi_dict(ref_resolver, is_root_prop=is_root_prop), + "allOf": [{"$ref": ref_resolver.get_ref_for_type(self.item.clz)}], + } + + +@attr.s() +class AmmListField(AmmField): + item: "AmmField" = attr.ib() + + def to_openapi_dict(self, ref_resolver: AmmSchemaRegistry, *, is_root_prop: bool) -> dict[str, Any]: + return dict(type="array", items=self.item.to_openapi_dict(ref_resolver, is_root_prop=False)) + + +@attr.s() +class AmmStringMappingField(AmmField): + value: "AmmField" = attr.ib() + + def to_openapi_dict(self, ref_resolver: AmmSchemaRegistry, *, is_root_prop: bool) -> dict[str, Any]: + return dict(type="object", additionalProperties=self.value.to_openapi_dict(ref_resolver, is_root_prop=False)) + + +@attr.s() +class AmmOneOfDescriptorField(AmmField): + """ + 
Workaround for GRPC oneof's with external/scalars that do not suit to OpenAPI like one-of/inheritance. + Assumed that will be used only for generating docs by protospecs. + """ + + field_names: list[str] = attr.ib() + + +@attr.s(kw_only=True) +class AmmEnumMemberDescriptor: + key: str = attr.ib() + description: Optional[MText] = attr.ib(default=None) + + +@attr.s(kw_only=True) +class AmmEnumDescriptor: + type_identifier: str = attr.ib() + description: Optional[MText] = attr.ib(default=None) + members: list[AmmEnumMemberDescriptor] = attr.ib() + + +# +# Schemas +# +@attr.s(kw_only=True) +class AmmSchema(metaclass=abc.ABCMeta): + clz: Type = attr.ib() + identifier: Optional[str] = attr.ib(default=None) + + @abc.abstractmethod + def to_openapi_dict(self, ref_resolver: AmmSchemaRegistry) -> dict[str, Any]: + raise NotImplementedError() + + +@attr.s() +class AmmRegularSchema(AmmSchema): + fields: dict[str, AmmField] = attr.ib() + description: Optional[MText] = attr.ib(default=None) + + def to_openapi_dict(self, ref_resolver: AmmSchemaRegistry) -> dict[str, Any]: + ret = { + "type": "object", + "properties": { + f_name: field.to_openapi_dict(ref_resolver, is_root_prop=True) for f_name, field in self.fields.items() + }, + } + required_names = [f_name for f_name, field in self.fields.items() if field.common_props.required] + if required_names: + ret.update(required=required_names) + return ret + + +@attr.s() +class AmmGenericSchema(AmmSchema): + discriminator_property_name: str = attr.ib() + mapping: dict[str, AmmRegularSchema] = attr.ib(factory=dict) + + def register_sub_schema(self, schema: AmmRegularSchema, discriminator: str) -> None: + assert discriminator not in self.mapping, f"discriminator f{discriminator} already registered" + self.mapping[discriminator] = schema + + def to_openapi_dict(self, ref_resolver: AmmSchemaRegistry) -> dict[str, Any]: + return { + "oneOf": [{"$ref": ref_resolver.get_ref_for_type(schema.clz)} for schema in self.mapping.values()], + 
"discriminator": { + "propertyName": self.discriminator_property_name, + "mapping": {discr: ref_resolver.get_ref_for_type(schema.clz) for discr, schema in self.mapping.items()}, + }, + } diff --git a/lib/dl_attrs_model_mapper/dl_attrs_model_mapper/field_processor.py b/lib/dl_attrs_model_mapper/dl_attrs_model_mapper/field_processor.py new file mode 100644 index 000000000..c4c919aec --- /dev/null +++ b/lib/dl_attrs_model_mapper/dl_attrs_model_mapper/field_processor.py @@ -0,0 +1,143 @@ +import abc +from typing import ( + Any, + Generic, + Optional, + Sequence, + Type, + TypeVar, + cast, +) + +import attr + +from dl_attrs_model_mapper.base import AttribDescriptor +from dl_attrs_model_mapper.utils import ( + is_sequence, + is_str_mapping, + unwrap_container_stack_with_single_type, +) + + +_TARGET_TV = TypeVar("_TARGET_TV") +_PROCESSING_OBJECT_TV = TypeVar("_PROCESSING_OBJECT_TV") + + +@attr.s(frozen=True) +class FieldMeta: + clz: Type = attr.ib() + attrib_name: str = attr.ib() + container_stack: Sequence[Any] = attr.ib() + attrib_descriptor: Optional[AttribDescriptor] = attr.ib() + + def pop_container(self) -> tuple[Any, "FieldMeta"]: + if len(self.container_stack) == 0: + return None, self + return self.container_stack[0], attr.evolve(self, container_stack=self.container_stack[1:]) + + +# TODO FIX: Split into planing & execution +class Processor(Generic[_PROCESSING_OBJECT_TV]): + """ + This generic is intended for creation of processor class, + with a `.process` method which recursively (w.r.t. class structure defined by attrs.ib) + processes all attributes. 
# TODO FIX: Split into planing & execution
class Processor(Generic[_PROCESSING_OBJECT_TV]):
    """
    This generic is intended for creation of processor class,
    with a `.process` method which recursively (w.r.t. class structure defined by attrs.ib)
    processes all attributes.

    `.process` checks attributes values and if `._should_process` evaluates true for attr meta
    and if `._process_single_object` returns a new value
    and evolves instance with replaced attr value

    Parametrized by:
     _PROCESSING_OBJECT_TV: type of the attribute value which should be processed
    """

    @abc.abstractmethod
    def _should_process(self, meta: FieldMeta) -> bool:
        # Predicate: does the attribute described by `meta` need processing at all?
        raise NotImplementedError()

    @abc.abstractmethod
    def _process_single_object(self, obj: _PROCESSING_OBJECT_TV, meta: FieldMeta) -> Optional[_PROCESSING_OBJECT_TV]:
        # Transform a single leaf value; may return the same object to signal "no change".
        raise NotImplementedError()

    @classmethod
    def _create_field_meta(cls, attr_ib: attr.Attribute) -> FieldMeta:
        # Unwrap e.g. Optional[Sequence[T]] into a container stack plus innermost type T.
        container_stack, effective_type = unwrap_container_stack_with_single_type(attr_ib.type)
        return FieldMeta(
            clz=effective_type,
            attrib_name=attr_ib.name,
            container_stack=container_stack,
            attrib_descriptor=AttribDescriptor.from_attrib(attr_ib),
        )

    @classmethod
    def _get_changes_key(cls, attr_ib: attr.Attribute) -> str:
        # attr.evolve() expects init-arg names, which drop the leading underscore
        # of private attrs attributes.
        return attr_ib.name.removeprefix("_")

    def _process_attr_ib_value(
        self,
        value: Any,
        meta: FieldMeta,
        do_processing: bool,
    ) -> Any:
        """Recursively process ``value``, peeling one container level per call."""
        container_type, target_meta = meta.pop_container()

        if container_type is None:
            # Innermost (non-container) level reached.
            if do_processing:
                return self._process_single_object(value, target_meta)
            else:
                effective_nested_type = target_meta.clz
                if isinstance(effective_nested_type, type) and attr.has(effective_nested_type):
                    if value is not None:
                        # Recursively call entry point for nested object
                        return self.process(value)

                return value

        elif container_type is Optional:
            # Optional affects only typing, not the runtime value: just unwrap.
            return self._process_attr_ib_value(value, target_meta, do_processing)

        elif is_sequence(container_type):
            if value is None:
                return None

            values_sequence = cast(Sequence, value)

            changed = False
            processed_values_sequence = []

            for single_value in values_sequence:
                processed_single_value = self._process_attr_ib_value(single_value, target_meta, do_processing)
                processed_values_sequence.append(processed_single_value)
                # Identity check: only a genuinely new object counts as a change.
                if processed_single_value is not single_value:
                    changed = True

            if changed:
                # NOTE(review): the rebuilt sequence is always a tuple, regardless
                # of the original concrete sequence type — presumably intentional
                # for immutability; confirm callers do not rely on list type.
                return tuple(processed_values_sequence)
            return value

        elif is_str_mapping(container_type):
            if do_processing:
                raise NotImplementedError("Processing of string mappings is not yet supported")
            # NOTE(review): nested attrs objects inside str mappings are returned
            # as-is (no recursion), unlike the sequence branch — confirm intended.
            return value

        raise AssertionError(f"Can not process container type {container_type}")

    def process(self, target: _TARGET_TV) -> _TARGET_TV:
        """Entry point: return ``target`` with processed attribute values.

        Returns ``target`` itself (same object) when nothing changed; otherwise
        an evolved copy with only the changed attributes replaced.
        """
        assert attr.has(type(target))
        changes = {}

        for attr_ib in attr.fields(type(target)):
            current_value = getattr(target, attr_ib.name)

            meta = self._create_field_meta(attr_ib)

            should_process = self._should_process(meta)
            processed_value = self._process_attr_ib_value(current_value, meta, should_process)

            if processed_value is not current_value:
                changes[self._get_changes_key(attr_ib)] = processed_value

        if changes:
            return attr.evolve(target, **changes)
        else:
            return target
    def handle_single_attr_ib(self, attr_ib: attr.Attribute) -> FieldBundle:
        """Build the marshmallow + AMM field pair for one attrs attribute."""
        attrib_descriptor = AttribDescriptor.from_attrib(attr_ib)
        return self.create_field_for_type(
            the_type=attr_ib.type,
            attrib_descriptor=attrib_descriptor,
            ma_attribute_name=attr_ib.name,
            # No attrs default means the value must be supplied on load.
            is_required=attr_ib.default is attr.NOTHING,
        )

    def create_field_for_type(
        self,
        the_type: Any,
        attrib_descriptor: Optional[AttribDescriptor],
        ma_attribute_name: Optional[str],
        is_required: bool,
        is_optional: bool = False,
    ) -> FieldBundle:
        """Build a field bundle for ``the_type``, recursing through type containers.

        Optional[...] is unwrapped via a recursive call that flips ``is_optional``;
        sequence and str-mapping containers recurse for their element field and
        wrap it in the corresponding list/dict marshmallow field.
        """
        container_type, effective_type = unwrap_typing_container_with_single_type(the_type)

        if container_type is Optional:
            # Nested Optional (Optional[Optional[...]]) is not expected.
            assert is_optional is False
            return self.create_field_for_type(
                the_type=effective_type,
                attrib_descriptor=attrib_descriptor,
                ma_attribute_name=ma_attribute_name,
                is_required=is_required,
                is_optional=True,
            )

        common_props = CommonAttributeProps(
            allow_none=is_optional,
            attribute_name=ma_attribute_name,
            required=is_required,
            load_only=attrib_descriptor.load_only if attrib_descriptor is not None else False,
            description=attrib_descriptor.description if attrib_descriptor is not None else None,
        )

        if container_type is None:
            # Plain (non-container) type.
            return self.create_field_for_unwrapped_type(effective_type, attrib_descriptor, common_props)
        elif is_sequence(container_type):
            nested_field_bundle = self.create_field_for_type(
                effective_type,
                attrib_descriptor,
                ma_attribute_name=None,
                is_required=False,
            )
            return FieldBundle(
                ma_field=fields.List(nested_field_bundle.ma_field, **common_props.to_common_ma_field_kwargs()),
                amm_field=AmmListField(common_props, nested_field_bundle.amm_field),
            )
        elif is_str_mapping(container_type):
            nested_field_bundle = self.create_field_for_type(
                effective_type,
                attrib_descriptor,
                ma_attribute_name=None,
                is_required=False,
            )
            return FieldBundle(
                ma_field=FrozenStrMappingField(
                    keys=fields.String(),
                    values=nested_field_bundle.ma_field,
                    **common_props.to_common_ma_field_kwargs(),
                ),
                amm_field=AmmStringMappingField(common_props, nested_field_bundle.amm_field),
            )

        else:
            raise AssertionError(
                f"Got unexpected container type from unwrap_typing_container_with_single_type(): {container_type!r}"
            )
    def create_field_for_unwrapped_type(
        self,
        the_type: Type,
        attrib_descriptor: Optional[AttribDescriptor],
        common_ma_field_kwargs: CommonAttributeProps,
    ) -> FieldBundle:
        """Build a field bundle for a plain (container-free) type.

        Dispatch order: attrs classes (nested schema) > registered scalar schemas
        > registered scalar fields > stdlib enums > DynamicEnum. Anything else
        raises TypeError.
        """
        if attr.has(the_type):
            schema_bundle = self.get_or_create_schema_bundle_for_attrs_class(the_type)
            return FieldBundle(
                ma_field=fields.Nested(schema_bundle.schema_cls, **common_ma_field_kwargs.to_common_ma_field_kwargs()),
                amm_field=AmmNestedField(common_ma_field_kwargs, schema_bundle.amm_schema),
            )

        elif the_type in self._map_scalar_type_schema_cls:
            return FieldBundle(
                ma_field=fields.Nested(
                    self._map_scalar_type_schema_cls[the_type],
                    **common_ma_field_kwargs.to_common_ma_field_kwargs(),
                ),
                amm_field=AmmScalarField(common_ma_field_kwargs, the_type),
            )

        elif the_type in self._map_scalar_type_field_cls:
            return FieldBundle(
                ma_field=self._map_scalar_type_field_cls[the_type](
                    **common_ma_field_kwargs.to_common_ma_field_kwargs(),
                ),
                amm_field=AmmScalarField(common_ma_field_kwargs, the_type),
            )

        elif isinstance(the_type, type) and issubclass(the_type, enum.Enum):
            enum_by_value = False if attrib_descriptor is None else attrib_descriptor.enum_by_value

            return FieldBundle(
                ma_field=fields.Enum(
                    the_type,
                    by_value=enum_by_value,
                    **common_ma_field_kwargs.to_common_ma_field_kwargs(),
                ),
                amm_field=AmmEnumField(
                    common_ma_field_kwargs,
                    str,
                    # Doc values mirror the wire format: raw values when by_value,
                    # member names otherwise.
                    values=[str(m.value) if enum_by_value else m.name for m in the_type],
                ),
            )

        elif isinstance(the_type, type) and issubclass(the_type, DynamicEnum):
            return FieldBundle(
                ma_field=DynamicEnumField(
                    the_type,
                    **common_ma_field_kwargs.to_common_ma_field_kwargs(),
                ),
                amm_field=AmmEnumField(
                    common_ma_field_kwargs,
                    str,
                    values=[m.value for m in the_type],
                ),
            )

        else:
            raise TypeError(f"Can not build field for {the_type!r}")

    def link_to_parents(self, the_type: Type, the_schema_bundle: RegularSchemaBundle) -> None:
        """Register a concrete type's schema in every already-known abstract ancestor."""
        the_type_model_descriptor = ModelDescriptor.get_for_type(the_type)
        assert not the_type_model_descriptor.is_abstract

        # Abstract ancestors that already have a generic (one-of) schema bundle.
        registered_parents_types_with_schema_bundle = [
            (candidate_type, candidate_schema_bundle)
            for candidate_type, candidate_schema_bundle in self._map_complex_type_schema_bundle.items()
            if issubclass(the_type, candidate_type) and ModelDescriptor.get_for_type(candidate_type).is_abstract
        ]

        for _, parent_schema_bundle in registered_parents_types_with_schema_bundle:
            assert isinstance(parent_schema_bundle, GenericSchemaBundle)

            parent_schema_bundle.schema_cls.register_type(
                the_schema_bundle.schema_cls,
                the_type_model_descriptor.effective_type_discriminator,
                the_type_model_descriptor.effective_type_discriminator_aliases,
            )
            parent_schema_bundle.amm_schema.register_sub_schema(
                the_schema_bundle.amm_schema, the_type_model_descriptor.effective_type_discriminator
            )

    def link_to_children(self, the_type: Type, generic_bundle: GenericSchemaBundle) -> None:
        """Register every already-known concrete descendant in an abstract type's schema."""
        the_type_model_descriptor = ModelDescriptor.get_for_type(the_type)
        assert the_type_model_descriptor.is_abstract

        registered_children_types_with_schema_bundle = [
            (candidate_type, candidate_schema_bundle)
            for candidate_type, candidate_schema_bundle in self._map_complex_type_schema_bundle.items()
            if issubclass(candidate_type, the_type) and not ModelDescriptor.get_for_type(candidate_type).is_abstract
        ]

        for child_type, child_schema_bundle in registered_children_types_with_schema_bundle:
            assert isinstance(child_schema_bundle, RegularSchemaBundle)

            child_type_discriminator = ModelDescriptor.get_for_type(child_type).effective_type_discriminator
            discriminator_aliases = ModelDescriptor.get_for_type(child_type).effective_type_discriminator_aliases

            generic_bundle.schema_cls.register_type(
                child_schema_bundle.schema_cls,
                child_type_discriminator,
                discriminator_aliases,
            )
            generic_bundle.amm_schema.register_sub_schema(
                child_schema_bundle.amm_schema,
                child_type_discriminator,
            )
field_name_override = attrib_descriptor.serialization_key + if field_name_override is not None: + return field_name_override + + return attr_ib.name + + def get_schema_for_attrs_class(self, target_type: Type) -> Type[marshmallow.Schema]: + if target_type in self._map_complex_type_schema_bundle: + return self._map_complex_type_schema_bundle[target_type].schema_cls + raise AssertionError(f"Schema for {type} was not created") + + def get_or_create_schema_for_attrs_class(self, target: Type) -> Type[marshmallow.Schema]: + return self.get_or_create_schema_bundle_for_attrs_class(target).schema_cls + + def get_or_create_schema_bundle_for_attrs_class(self, target: Type) -> SchemaBundle: + assert attr.has(target), f"Schema creation requested for non-attrs class: {target}" + target_model_descriptor = ModelDescriptor.get_for_type(target) + + if target in self._map_complex_type_schema_bundle: + return self._map_complex_type_schema_bundle[target] + + # TODO FIX: Determine why MyPy thinks "error: Module has no attribute "resolve_types" + attr.resolve_types(target) # type: ignore + schema_bundle: SchemaBundle + + if target_model_descriptor.is_abstract: + type_discriminator_field_name: str = target_model_descriptor.children_type_discriminator_attr_name or "type" + + abstract_schema_bundle = GenericSchemaBundle( + schema_cls=BaseOneOfSchema.generate_new_one_of_schema(type_discriminator_field_name), + amm_schema=AmmGenericSchema(clz=target, discriminator_property_name=type_discriminator_field_name), + ) + self.link_to_children(target, abstract_schema_bundle) + + schema_bundle = abstract_schema_bundle + else: + fields_bundle_map: dict[str, FieldBundle] = {} + fields_to_skip_on_none: list[str] = [] + for attr_ib in attr.fields(target): + fields_bundle_map[self.resolve_ma_field_name(attr_ib)] = self.handle_single_attr_ib(attr_ib) + attr_descriptor = AttribDescriptor.from_attrib(attr_ib) + if attr_descriptor and attr_descriptor.skip_none_on_dump: + 
    def register_models(self, models: Iterable[Any]) -> None:
        """Eagerly build (and cache) schema bundles for every given attrs model class."""
        for model in models:
            self.get_or_create_schema_for_attrs_class(model)

    def get_amm_schema_registry(self) -> AmmSchemaRegistry:
        """Snapshot all currently registered AMM schemas into a registry."""
        return AmmSchemaRegistry.from_schemas_collection(
            [bundle.amm_schema for bundle in self._map_complex_type_schema_bundle.values()]
        )

    def dump_external_model_to_str(self, model: Any) -> str:
        """Best-effort JSON dump of ``model``; never raises.

        Any failure (schema creation or serialization) is deliberately swallowed
        and replaced by a marker payload, so this is safe to call from
        logging/diagnostic code paths.
        """
        try:
            schema = self.get_or_create_schema_for_attrs_class(type(model))()
            return schema.dumps(model)
        except Exception:  # noqa
            return json.dumps({"kind": "serialization_error"})
class BaseSchema(marshmallow.Schema, Generic[_TARGET_OBJECT_BASE_TV]):
    """Base marshmallow schema that loads into / dumps from ``target_cls`` instances."""

    target_cls: ClassVar[Type[_TARGET_OBJECT_BASE_TV]]
    _fields_to_skip_on_none: ClassVar[set[str]]

    # TODO FIX: Make tests
    @pre_load(pass_many=False)
    def pre_load(self, data: Dict[str, Any], **_: Any) -> dict[str, Any]:
        """Give the target model a chance to rewrite raw input before validation."""
        if issubclass(self.target_cls, MapperBaseModel):
            preprocessed_data = self.target_cls.pre_load(data)
            if preprocessed_data is not None:
                return preprocessed_data

        return data

    @post_load(pass_many=False)
    def post_load(self, data: Dict[str, Any], **_: Any) -> _TARGET_OBJECT_BASE_TV:
        """Instantiate the target attrs class from the validated payload."""
        try:
            return self.target_cls(**data)  # type: ignore
        except Exception as exc:
            # BUG FIX: was `logging.exception(f"...")` — logged via the root
            # logger and formatted eagerly; use the module logger with lazy args.
            LOGGER.exception("Can not instantiate class %s: %s", self.target_cls, exc)
            raise

    @post_dump(pass_many=False)
    def post_dump(self, data: Union[Dict[str, Any], OrderedDict[str, Any]], **_: Any) -> dict[str, Any]:
        """Normalize dumped data: plain (ordered) dict, optional None-skipping, model hook."""
        # If Meta.ordered == False MA does not respect keys order at all
        # But ordered dict will break some contracts
        # So we use that in Py>3.7 any dict is ordered to do not break contracts
        ordered_data = {k: v for k, v in data.items()} if isinstance(data, collections.OrderedDict) else data

        if len(self._fields_to_skip_on_none):
            ordered_data = {
                k: v for k, v in ordered_data.items() if k not in self._fields_to_skip_on_none or v is not None
            }

        if issubclass(self.target_cls, MapperBaseModel):
            post_processed_data = self.target_cls.post_dump(ordered_data)
            if post_processed_data is not None:
                return post_processed_data

        return ordered_data

    @classmethod
    def generate_new_regular_schema(
        cls,
        generate_for: Type[_TARGET_OBJECT_GENERATED_TV],
        field_map: dict[str, fields.Field],
        fields_to_skip_on_none: Optional[set[str]] = None,
    ) -> Type[BaseSchema[_TARGET_OBJECT_GENERATED_TV]]:
        """Create a concrete schema class for ``generate_for`` from a field map."""
        # TODO FIX: Generate mnemonic class name
        class ResultingSchema(
            BaseSchema[_TARGET_OBJECT_GENERATED_TV], marshmallow.Schema.from_dict(field_map)  # type: ignore
        ):
            class Meta:
                ordered = True

            target_cls = generate_for
            _fields_to_skip_on_none = fields_to_skip_on_none or set()

        return ResultingSchema  # type: ignore


class BaseOneOfSchema(OneOfSchema):
    """Polymorphic schema dispatching on a discriminator field.

    NOTE: ``type_schemas`` / ``_map_cls_type_discriminator`` are class-level
    registries; generate_new_one_of_schema() gives each generated subclass
    fresh dicts so registrations do not leak between hierarchies.
    """

    type_field = "type"

    type_schemas: Dict[str, Type[BaseSchema]] = {}
    _map_cls_type_discriminator: ClassVar[dict[Type, str]] = {}

    @classmethod
    def register_type(cls, schema: Type[BaseSchema], discriminator: str, aliases: tuple[str, ...] = tuple()) -> None:
        """Register a concrete schema under its discriminator and optional aliases."""
        cls._map_cls_type_discriminator[schema.target_cls] = discriminator
        cls.type_schemas[discriminator] = schema
        for alias in aliases:
            cls.type_schemas[alias] = schema

    def get_obj_type(self, obj: Any) -> str:
        return self._map_cls_type_discriminator[type(obj)]

    def _dump(self, obj: Any, *, update_fields: bool = True, **kwargs: Any) -> Dict[str, Any]:
        ret = super()._dump(obj, update_fields=update_fields, **kwargs)
        type_val = ret.pop(self.type_field)
        # Placing type field on top
        return {
            self.type_field: type_val,
            **ret,
        }

    @classmethod
    def generate_new_one_of_schema(cls, type_discriminator_field_name: str) -> Type[BaseOneOfSchema]:
        """Create a fresh one-of schema class with its own (empty) registries."""
        # TODO FIX: Generate mnemonic class name
        class GeneratedOneOfSchema(BaseOneOfSchema):
            type_field = type_discriminator_field_name
            type_schemas: Dict[str, Type[BaseSchema]] = {}
            _map_cls_type_discriminator: ClassVar[dict[Type, str]] = {}

        return GeneratedOneOfSchema
# TODO FIX: BI-3005 Add verbosity to error messages or totally remove
class FrozenMappingStrToStrOrStrSeqField(fields.Field):
    """Marshmallow field for FrozenMappingStrToStrOrStrSeq (str -> str | list[str])."""

    def _serialize(self, value, attr, obj, **kwargs):  # type: ignore
        if value is None:
            return None

        assert isinstance(value, FrozenMappingStrToStrOrStrSeq)

        # Tuples become lists so the result is plain-JSON compatible.
        return {d_key: d_value if isinstance(d_value, str) else list(d_value) for d_key, d_value in value.items()}

    def _deserialize(self, value, attr, data, **kwargs):  # type: ignore
        try:
            assert isinstance(value, dict)
            return FrozenMappingStrToStrOrStrSeq(mapping=value)
        except Exception as error:
            raise ValidationError("Invalid mapping string to (string|list[string])") from error


# TODO FIX: BI-3005 Add verbosity to error messages or totally remove
class SingleOrMultiStringField(fields.Field):
    """Marshmallow field for SingleOrMultiString: wire format is str or list[str]."""

    def _serialize(self, value, attr, obj, **kwargs):  # type: ignore
        if value is None:
            return None

        assert isinstance(value, SingleOrMultiString)

        if value.is_single:
            return value.as_single()
        return value.as_sequence()

    def _deserialize(self, value, attr, data, **kwargs):  # type: ignore
        try:
            if isinstance(value, str):
                return SingleOrMultiString.from_string(value)
            elif isinstance(value, list):
                assert all(isinstance(s, str) for s in value)
                return SingleOrMultiString.from_sequence(value)
            # BUG FIX: previously a value of any other type (e.g. int, dict) fell
            # through the if/elif and was silently deserialized to None; reject
            # it explicitly instead (converted to ValidationError below).
            raise TypeError(f"Unsupported input type: {type(value)}")
        except Exception as error:
            raise ValidationError("Must be a string or list of strings") from error


class FrozenStrMappingField(fields.Dict):
    """Marshmallow Dict field that (de)serializes FrozenStrMapping values."""

    def _serialize(self, value, attr, obj, **kwargs):  # type: ignore
        # Unwrap to a plain dict so the stock Dict serialization applies.
        if isinstance(value, FrozenStrMapping):
            value = dict(value)
        return super()._serialize(value, attr, obj, **kwargs)

    def _deserialize(self, value, attr, data, **kwargs):  # type: ignore
        plain_dict = super()._deserialize(value, attr, data, **kwargs)
        return FrozenStrMapping(plain_dict)
def pretty_repr(
    model: Any,
    preferred_cls_name_prefixes: Union[Mapping[Any, Optional[str]], Sequence[Any], None] = None,
) -> str:
    """
    Generates string with prettily-formatted executable code that will create model passed in `model`.
    Classes that are declared as module-level vars in `preferred_cls_name_prefixes` will be prefixed with module name.
    Order matters. First appearance takes precedence. Take into account that imports are treated as module-level vars.

    :param model: object to render (attrs instance, enum, container or primitive).
    :param preferred_cls_name_prefixes: mapping of module -> prefix override
        (None means "use the module's own name"), or a plain sequence of modules.
    :return: multi-line source-code string that reconstructs ``model``.
    """
    effective_preferred_cls_name_prefixes: Mapping[Any, Optional[str]]

    # Normalize the three accepted shapes (dict / None / sequence) to a mapping.
    if isinstance(preferred_cls_name_prefixes, dict):
        effective_preferred_cls_name_prefixes = preferred_cls_name_prefixes
    elif preferred_cls_name_prefixes is None:
        effective_preferred_cls_name_prefixes = {}
    else:
        effective_preferred_cls_name_prefixes = {mod_obj: None for mod_obj in preferred_cls_name_prefixes}

    lines = Renderer(effective_preferred_cls_name_prefixes).get_lines(model)
    return "\n".join(lines)
+ @_get_lines_internal.register(int) + @_get_lines_internal.register(float) + @_get_lines_internal.register(str) + @_get_lines_internal.register(type(None)) + @_get_lines_internal.register(SingleOrMultiString) + def _get_lines_internal_primitive(self, model: Union[int, float, str, None]) -> list[str]: + return [repr(model)] + + @_get_lines_internal.register + def _get_lines_internal_enum(self, model: enum.Enum) -> list[str]: + return [f"{self.get_type_str(type(model))}.{model.name}"] + + @_get_lines_internal.register + def _get_lines_internal_list(self, model: list) -> list[str]: + return self._get_lines_for_simple_collection( + model, + start="[", + end="]", + ) + + @_get_lines_internal.register + def _get_lines_internal_tuple(self, model: tuple) -> list[str]: + return self._get_lines_for_simple_collection( + model, + start="(", + end=")", + trailing_comma_required=True, + ) + + @_get_lines_internal.register + def _get_lines_internal_set(self, model: set) -> list[str]: + return self._get_lines_for_simple_collection(model, start="{", end="}", empty_override="set()") + + @_get_lines_internal.register + def _get_lines_internal_dict(self, model: dict) -> list[str]: + return self._get_lines_for_simple_collection( + [_DictItem(key=key, value=value) for key, value in model.items()], + start="{", + end="}", + inline_single_element=False, + ) + + @_get_lines_internal.register + def _get_lines_internal_frozen_mapping_str_to_str_or_str_seq( + self, + model: FrozenMappingStrToStrOrStrSeq, + ) -> list[str]: + under_hood_dict_lines = self._get_lines_internal(dict(model)) + prefix = f"{self.get_type_str(type(model))}(" + suffix = ")" + + if len(under_hood_dict_lines) == 1: + return [f"{prefix}{under_hood_dict_lines[0]}{suffix}"] + + return [ + f"{prefix}{under_hood_dict_lines[0]}", + *under_hood_dict_lines[1:-1], + f"{under_hood_dict_lines[-1]}{suffix}", + ] + + def _get_lines_for_attrs_object(self, model: Any) -> list[str]: + the_type = type(model) + lines: list[str] = 
[f"{self.get_type_str(the_type)}("] + for field_name, _field in attr.fields_dict(the_type).items(): + nested_lines = self._get_lines_internal(getattr(model, field_name)) + assert len(nested_lines) > 0 + + first_line = f"{_INDENT}{field_name}={nested_lines[0]}" + + if len(nested_lines) == 1: + lines.append(first_line + ",") + elif len(nested_lines) == 2: + lines.append(first_line) + lines.append(f"{_INDENT}{nested_lines[1]},") + else: + lines.append(first_line) + lines.extend(f"{_INDENT}{the_nested_line}" for the_nested_line in nested_lines[1:-1]) + lines.append(f"{_INDENT}{nested_lines[-1]},") + lines.append(")") + return lines + + def _get_lines_for_simple_collection( + self, + model: Collection, + start: str, + end: str, + trailing_comma_required: bool = False, + empty_override: Optional[str] = None, + inline_single_element: bool = True, + ) -> list[str]: + if len(model) == 0: + if empty_override is not None: + return [empty_override] + + return [f"{start}{end}"] + + first_item_lines = self._get_lines_internal(next(iter(model))) + + if len(model) == 1 and len(first_item_lines) == 1 and inline_single_element: + return [f"{start}{first_item_lines[0]}{', ' if trailing_comma_required else ''}{end}"] + + all_lines = [start] + for seq_item in model: + seq_item_lines = self._get_lines_internal(seq_item) + all_lines.extend([f"{_INDENT}{the_seq_item_line}" for the_seq_item_line in seq_item_lines[:-1]]) + all_lines.append(f"{_INDENT}{seq_item_lines[-1]},") + + all_lines.append(end) + return all_lines + + def get_lines(self, model: Any) -> list[str]: + return self._get_lines_internal(model) diff --git a/lib/dl_attrs_model_mapper/dl_attrs_model_mapper/py.typed b/lib/dl_attrs_model_mapper/dl_attrs_model_mapper/py.typed new file mode 100644 index 000000000..e69de29bb diff --git a/lib/dl_attrs_model_mapper/dl_attrs_model_mapper/structs/__init__.py b/lib/dl_attrs_model_mapper/dl_attrs_model_mapper/structs/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git 
class FrozenMappingStrToStrOrStrSeq(Mapping[str, Union[str, Sequence[str]]], Hashable):
    """Immutable, hashable mapping from str to either a str or a tuple of str.

    Sequence values are normalized to tuples on construction, so the mapping
    (and thus its hash) cannot be mutated through a contained list.
    """

    _dict: dict[str, Union[str, Sequence[str]]]

    @staticmethod
    def ensure_tuple_of_str(seq: Sequence[str]) -> Tuple[str, ...]:
        """Check that every element is a str and return the sequence as a tuple."""
        for idx, item in enumerate(seq):
            assert isinstance(item, str), f"Item {idx=} is not a string"
        return tuple(seq)

    def __init__(self, mapping: Mapping[str, Union[str, Sequence[str]]]) -> None:
        normalized: dict[str, Union[str, Sequence[str]]] = {}
        for key, raw_value in mapping.items():
            normalized[key] = raw_value if isinstance(raw_value, str) else self.ensure_tuple_of_str(raw_value)
        self._dict = normalized

    def __getitem__(self, k: str) -> Union[str, Sequence[str]]:
        return self._dict[k]

    def __len__(self) -> int:
        return len(self._dict)

    def __iter__(self) -> Iterator[str]:
        return iter(self._dict)

    def __hash__(self) -> int:
        # Sorted items make the hash independent of insertion order.
        return hash(tuple(sorted(self.items())))

    def __repr__(self) -> str:
        return f"FrozenMappingStrToStrOrStrSeq({repr(self._dict)})"
f"FrozenStrMapping({repr(self._dict)})" diff --git a/lib/dl_attrs_model_mapper/dl_attrs_model_mapper/structs/singleormultistring.py b/lib/dl_attrs_model_mapper/dl_attrs_model_mapper/structs/singleormultistring.py new file mode 100644 index 000000000..cf298cb3c --- /dev/null +++ b/lib/dl_attrs_model_mapper/dl_attrs_model_mapper/structs/singleormultistring.py @@ -0,0 +1,53 @@ +from __future__ import annotations + +from typing import ( + Any, + Sequence, +) + + +class SingleOrMultiString: + _value: tuple[str, ...] + _is_single: bool + + def __init__(self, value: tuple[str, ...], is_single: bool) -> None: + if is_single: + assert len(value) == 1 + + self._value = value + self._is_single = is_single + + def __repr__(self) -> str: + return f"SingleOrMultiString({repr(self._value)}, is_single={repr(self._is_single)})" + + @property + def is_single(self) -> bool: + return self._is_single + + @property + def value(self) -> tuple[str, ...]: + return self._value + + def as_single(self) -> str: + assert self.is_single + return self.value[0] + + def as_sequence(self) -> Sequence[str]: + assert not self.is_single + return self.value + + @classmethod + def from_string(cls, s: str) -> SingleOrMultiString: + return SingleOrMultiString((s,), is_single=True) + + @classmethod + def from_sequence(cls, s_seq: Sequence[str]) -> SingleOrMultiString: + return SingleOrMultiString(tuple(s_seq), is_single=False) + + def __eq__(self, other: Any) -> bool: + if isinstance(other, SingleOrMultiString): + return self._is_single == other._is_single and self._value == self._value + return False + + def __hash__(self) -> int: + return hash((self._value, self._is_single)) diff --git a/lib/dl_attrs_model_mapper/dl_attrs_model_mapper/utils.py b/lib/dl_attrs_model_mapper/dl_attrs_model_mapper/utils.py new file mode 100644 index 000000000..7c7863da4 --- /dev/null +++ b/lib/dl_attrs_model_mapper/dl_attrs_model_mapper/utils.py @@ -0,0 +1,130 @@ +import collections +import typing +from typing import ( + Any, 
+ List, + Literal, + Optional, + Sequence, + Type, + Union, +) + +import attr + +from dl_attrs_model_mapper.structs.mappings import FrozenStrMapping + + +Locale = Union[Literal["ru"], Literal["en"]] + + +@attr.s(frozen=True) +class MText: + ru: str = attr.ib() + en: Optional[str] = attr.ib(default=None) + + def at_locale(self, locale: Locale) -> Optional[str]: + if locale == "ru": + return self.ru + if locale == "en": + return self.en + raise ValueError(f"Unknown locale {locale!r}") + + +class CommonMAFieldKWArgs(typing.TypedDict): + required: bool + allow_none: bool + attribute: Optional[str] + load_only: bool + + +@attr.s(frozen=True, auto_attribs=True) +class CommonAttributeProps: + required: bool + allow_none: bool + attribute_name: Optional[str] + load_only: bool + description: Optional[MText] + + def to_common_ma_field_kwargs(self) -> CommonMAFieldKWArgs: + return dict( + required=self.required, + allow_none=self.allow_none, + attribute=self.attribute_name, + load_only=self.load_only, + ) + + +def is_sequence(container_type: Any) -> bool: + return container_type in (List, Sequence, collections.abc.Sequence, list) + + +def is_str_mapping(container_type: Any) -> bool: + return container_type is FrozenStrMapping + + +def unwrap_typing_container_with_single_type(the_type: Any) -> tuple[Any, Type]: + assert the_type is not None + + origin = typing.get_origin(the_type) + + effective_origin: Any + nested_types: set[Type] + + if origin == Union: + nested_types = set(typing.get_args(the_type)) + + if type(None) in nested_types: + nested_types.remove(type(None)) + effective_origin = Optional + else: + raise ValueError("Unions are not supported") + + elif is_sequence(origin) or is_str_mapping(origin): + nested_types = set(typing.get_args(the_type)) + effective_origin = origin + + else: + nested_types = {the_type} + effective_origin = None + + if len(nested_types) != 1: + raise ValueError("Multiple value in container types is not supported") + + return effective_origin, 
next(iter(nested_types)) + + +def unwrap_container_stack_with_single_type(the_type: Any) -> tuple[Sequence[Any], Type]: + container_stack: list[Any] = [] + + next_type: Any = the_type + + while True: + container_type, effective_type = unwrap_typing_container_with_single_type(next_type) + if container_type is None: + return tuple(container_stack), effective_type + + container_stack.append(container_type) + next_type = effective_type + + +def ensure_tuple(col: Optional[Sequence]) -> Optional[tuple]: + if col is None: + return None + if isinstance(col, tuple): + return col + if isinstance(col, list): + return tuple(col) + else: + raise TypeError() + + +def ensure_tuple_of_tuples(col: Optional[Sequence[Sequence]]) -> Optional[tuple[Optional[tuple], ...]]: + if col is None: + return None + if isinstance(col, tuple) and all(isinstance(sub_col, tuple) for sub_col in col): + return col + if isinstance(col, (list, tuple)): + return tuple(sub_col if isinstance(sub_col, tuple) else tuple(sub_col) for sub_col in col) + else: + raise TypeError() diff --git a/lib/dl_attrs_model_mapper/dl_attrs_model_mapper_tests/__init__.py b/lib/dl_attrs_model_mapper/dl_attrs_model_mapper_tests/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/lib/dl_attrs_model_mapper/dl_attrs_model_mapper_tests/unit/__init__.py b/lib/dl_attrs_model_mapper/dl_attrs_model_mapper_tests/unit/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/lib/dl_attrs_model_mapper/dl_attrs_model_mapper_tests/unit/conftest.py b/lib/dl_attrs_model_mapper/dl_attrs_model_mapper_tests/unit/conftest.py new file mode 100644 index 000000000..e69de29bb diff --git a/lib/dl_attrs_model_mapper/dl_attrs_model_mapper_tests/unit/test_attrs_field_processor.py b/lib/dl_attrs_model_mapper/dl_attrs_model_mapper_tests/unit/test_attrs_field_processor.py new file mode 100644 index 000000000..4ffd14205 --- /dev/null +++ 
b/lib/dl_attrs_model_mapper/dl_attrs_model_mapper_tests/unit/test_attrs_field_processor.py @@ -0,0 +1,163 @@ +import enum +from typing import ( + Any, + Optional, + Sequence, +) + +import attr + +from dl_attrs_model_mapper.base import AttribDescriptor +from dl_attrs_model_mapper.field_processor import ( + FieldMeta, + Processor, +) +from dl_attrs_model_mapper.utils import ( + ensure_tuple, + ensure_tuple_of_tuples, + unwrap_container_stack_with_single_type, +) + + +class ModelTags(enum.Enum): + the_x = enum.auto() + + +@attr.s(frozen=True, kw_only=True) +class Point: + name: Optional[str] = attr.ib(default=None) + x: int = attr.ib(metadata=AttribDescriptor(tags=frozenset({ModelTags.the_x})).to_meta()) + y: int = attr.ib() + + +@attr.s(frozen=True, kw_only=True) +class Polygon: + name: Optional[str] = attr.ib(default=None) + points: Sequence[Point] = attr.ib(converter=ensure_tuple) + + +@attr.s(frozen=True) +class BigModel: + name: str = attr.ib() + + main_polygon: Polygon = attr.ib() + optional_point: Optional[Point] = attr.ib() + + list_of_lists_of_point: Sequence[Sequence[Point]] = attr.ib(converter=ensure_tuple_of_tuples) + list_of_polygons: Sequence[Polygon] = attr.ib(converter=ensure_tuple) + + +INITIAL_INSTANCE = BigModel( + name="Ololo", + main_polygon=Polygon(name="always has been", points=[Point(x=2, y=2, name="Edge of the Earth"), Point(x=3, y=3)]), + optional_point=None, + list_of_lists_of_point=[ + [Point(x=1, y=1)], + [Point(x=1, y=1), Point(x=2, y=2), Point(x=3, y=3)], + ], + list_of_polygons=[], +) + + +def test_unwrap_container_stack_with_single_type(): + assert unwrap_container_stack_with_single_type(list[Optional[str]]) == ((list, Optional), str) + assert unwrap_container_stack_with_single_type(Polygon) == ((), Polygon) + assert unwrap_container_stack_with_single_type(list[list[Point]]) == ((list, list), Point) + + +def test_field_meta_pop_container(): + container_stack, _ = unwrap_container_stack_with_single_type(list[Point]) + fm = 
FieldMeta(Point, "foobar", container_stack, None) + + container, sub_fm = fm.pop_container() + assert container == list + assert sub_fm == FieldMeta(Point, "foobar", (), None) + + sub_container, sub_sub_fm = sub_fm.pop_container() + assert sub_container is None + assert sub_sub_fm == sub_fm + + +def test_no_changes(): + class EqualityProcessor(Processor[Any]): + def _should_process(self, meta: FieldMeta) -> bool: + return True + + def _process_single_object(self, obj: Any, meta: FieldMeta) -> Optional[Any]: + return obj + + processor = EqualityProcessor() + result = processor.process(INITIAL_INSTANCE) + + assert result is INITIAL_INSTANCE + + +def test_change_all_names(): + def process_string(i: Optional[str]) -> str: + if i is None: + return "N/A" + return f"The great {i}" + + class NamePrependProcessor(Processor[str]): + def _should_process(self, meta: FieldMeta) -> bool: + return meta.clz == str and meta.attrib_name == "name" + + def _process_single_object(self, obj: Optional[str], meta: FieldMeta) -> Optional[str]: + return process_string(obj) + + processor = NamePrependProcessor() + result = processor.process(INITIAL_INSTANCE) + + def rename_point(p: Optional[Point]) -> Optional[Point]: + if p is None: + return None + return attr.evolve(p, name=process_string(p.name)) + + def rename_polygon(poly: Optional[Polygon]) -> Optional[Polygon]: + if poly is None: + return None + return attr.evolve(poly, name=process_string(poly.name), points=[rename_point(p) for p in poly.points]) + + assert result == attr.evolve( + INITIAL_INSTANCE, + name=process_string(INITIAL_INSTANCE.name), + main_polygon=rename_polygon(INITIAL_INSTANCE.main_polygon), + optional_point=rename_point(INITIAL_INSTANCE.optional_point), + list_of_lists_of_point=[[rename_point(p) for p in lp] for lp in INITIAL_INSTANCE.list_of_lists_of_point], + list_of_polygons=[rename_polygon(poly) for poly in INITIAL_INSTANCE.list_of_polygons], + ) + + +def test_change_by_tag(): + new_x_value = 100500 + + class 
TagProcessor(Processor[str]): + def _should_process(self, meta: FieldMeta) -> bool: + if meta.attrib_descriptor is not None: + return ModelTags.the_x in meta.attrib_descriptor.tags + + return False + + def _process_single_object(self, obj: Optional[int], meta: FieldMeta) -> Optional[int]: + return new_x_value + + processor = TagProcessor() + result = processor.process(INITIAL_INSTANCE) + + def process_point(p: Optional[Point]) -> Optional[Point]: + if p is None: + return None + return attr.evolve(p, x=100500) + + def process_polygon(poly: Optional[Polygon]) -> Optional[Polygon]: + if poly is None: + return None + return attr.evolve(poly, points=[process_point(p) for p in poly.points]) + + assert result == attr.evolve( + INITIAL_INSTANCE, + main_polygon=process_polygon(INITIAL_INSTANCE.main_polygon), + optional_point=process_point(INITIAL_INSTANCE.optional_point), + list_of_lists_of_point=[[process_point(p) for p in lp] for lp in INITIAL_INSTANCE.list_of_lists_of_point], + list_of_polygons=[process_polygon(poly) for poly in INITIAL_INSTANCE.list_of_polygons], + ) diff --git a/lib/dl_attrs_model_mapper/dl_attrs_model_mapper_tests/unit/test_attrs_model_mapper.py b/lib/dl_attrs_model_mapper/dl_attrs_model_mapper_tests/unit/test_attrs_model_mapper.py new file mode 100644 index 000000000..7c2b69534 --- /dev/null +++ b/lib/dl_attrs_model_mapper/dl_attrs_model_mapper_tests/unit/test_attrs_model_mapper.py @@ -0,0 +1,515 @@ +import abc +from copy import deepcopy +import enum +from typing import ( + ClassVar, + List, + Optional, + Type, +) + +import attr +import marshmallow +from marshmallow import fields +import pytest + +from dl_attrs_model_mapper.base import ( + AttribDescriptor, + ModelDescriptor, +) +from dl_attrs_model_mapper.marshmallow import ModelMapperMarshmallow +from dl_attrs_model_mapper.marshmallow_fields import FrozenStrMappingField +from dl_attrs_model_mapper.structs.mappings import ( + FrozenMappingStrToStrOrStrSeq, + FrozenStrMapping, +) + + +class 
BioKind(enum.Enum): + cat = "cat" + dog = "dog" + salmon = "salmon" + + +class MeowKind(enum.Enum): + cute = enum.auto() + aggressive = enum.auto() + + +@ModelDescriptor( + is_abstract=True, + children_type_discriminator_attr_name="kind", + children_type_discriminator_aliases_attr_name="kind_aliases", +) +@attr.s +class Bio(metaclass=abc.ABCMeta): + kind: ClassVar[BioKind] + + max_weight: int = attr.ib() + + +@ModelDescriptor(is_abstract=True) +@attr.s +class Animal(Bio, metaclass=abc.ABCMeta): + pass + # lungs_volume: int = attr.ib() + + +@ModelDescriptor(is_abstract=True) +@attr.s +class Fish(Bio, metaclass=abc.ABCMeta): + max_depth: int = attr.ib() + + +@ModelDescriptor() +@attr.s +class Cat(Animal): + kind = BioKind.cat + + meow_kind: MeowKind = attr.ib() + + +@ModelDescriptor() +@attr.s +class Dog(Animal): + kind = BioKind.dog + + bark_level: int = attr.ib() + + +@ModelDescriptor() +@attr.s +class Salmon(Fish): + kind = BioKind.salmon + kind_aliases = ("Losos obecný",) + + max_caviar_volume: int = attr.ib() + + +@ModelDescriptor() +@attr.s +class Flat: + aquarium: List[Fish] = attr.ib() + guard_animal: Animal = attr.ib() + owner: Bio = attr.ib() + + +def test_round_trip(): + mapper = ModelMapperMarshmallow() + mapper.register_models([Flat, Dog, Cat, Salmon]) + + schema_cls = mapper.get_or_create_schema_for_attrs_class(Flat) + flat = Flat( + aquarium=[ + Salmon(max_depth=-1, max_weight=100, max_caviar_volume=0), + ], + guard_animal=Cat( + meow_kind=MeowKind.aggressive, + max_weight=10, + ), + owner=Dog( + max_weight=90, + bark_level=100500, + ), + ) + + schema = schema_cls() + + serialized = schema.dump(flat) + restored_flat = schema.load(serialized) + assert restored_flat == flat + + +@ModelDescriptor() +@attr.s +class UnsafeFlat: + guard_animal: Optional[Animal] = attr.ib(metadata=AttribDescriptor(skip_none_on_dump=True).to_meta()) + owner: Bio = attr.ib() + + +def test_dump_with_null_fields(): + mapper = ModelMapperMarshmallow() + mapper.register_models( + [ 
+ UnsafeFlat, + Dog, + ] + ) + schema_cls = mapper.get_or_create_schema_for_attrs_class(UnsafeFlat) + flat = UnsafeFlat( + guard_animal=None, + owner=Dog( + max_weight=90, + bark_level=100500, + ), + ) + + schema = schema_cls() + serialized = schema.dump(flat) + assert serialized == {"owner": {"bark_level": 100500, "kind": "dog", "max_weight": 90}} + + +def test_kind_alias(): + mapper = ModelMapperMarshmallow() + mapper.register_models( + [ + Flat, + Dog, + Cat, + Salmon, + ] + ) + schema_cls = mapper.get_or_create_schema_for_attrs_class(Flat) + flat = Flat( + aquarium=[ + Salmon(max_depth=-1, max_weight=100, max_caviar_volume=0), + ], + guard_animal=Cat( + meow_kind=MeowKind.aggressive, + max_weight=10, + ), + owner=Dog( + max_weight=90, + bark_level=100500, + ), + ) + schema = schema_cls() + + serialized = schema.dump(flat) + + adjusted = deepcopy(serialized) + adjusted["aquarium"][0]["type"] = "Losos obecný" + + restored_flat = schema.load(adjusted) + assert restored_flat == flat + + +def test_enum_by_value(): + class EnumByValue(enum.Enum): + x = "the-x" + y = "the-y" + + class EnumByName(enum.Enum): + a = "the-a" + b = "the-b" + + @ModelDescriptor() + @attr.s + class Target: + axis: EnumByValue = attr.ib(metadata=AttribDescriptor(enum_by_value=True).to_meta()) + ab: EnumByName = attr.ib() + + mapper = ModelMapperMarshmallow() + mapper.register_models( + [ + Target, + ] + ) + + schema_cls = mapper.get_or_create_schema_for_attrs_class(Target) + + target = Target(axis=EnumByValue.x, ab=EnumByName.b) + + serialized = schema_cls().dump(target) + + assert serialized == dict(axis=target.axis.value, ab=target.ab.name) + + restored_target = schema_cls().load(serialized) + assert restored_target == target + + +def test_nested_containers(): + @ModelDescriptor() + @attr.s(auto_attribs=True) + class Point: + x: int + y: int + + @ModelDescriptor() + @attr.s + class Target: + list_of_lists_of_ints: list[list[int]] = attr.ib() + list_of_lists_of_lists_of_points: 
list[list[list[Point]]] = attr.ib() + + mapper = ModelMapperMarshmallow() + mapper.register_models( + [ + Point, + Target, + ] + ) + + schema_cls = mapper.get_or_create_schema_for_attrs_class(Target) + + target = Target( + list_of_lists_of_ints=[[1, 2, 3], [6, 7, 8]], + list_of_lists_of_lists_of_points=[[[Point(1, 1)], [Point(2, 2)]], [[Point(3, 3), Point(4, 4)]]], + ) + + serialized = schema_cls().dump(target) + + assert serialized == dict( + list_of_lists_of_ints=target.list_of_lists_of_ints, + list_of_lists_of_lists_of_points=[ + [[attr.asdict(point) for point in l2] for l2 in l1] for l1 in target.list_of_lists_of_lists_of_points + ], + ) + + restored_target = schema_cls().load(serialized) + assert restored_target == target + + +def test_FrozenMappingStrToStrOrStrSeqField(): + @ModelDescriptor() + @attr.s(auto_attribs=True) + class Container: + m: FrozenMappingStrToStrOrStrSeq + + mapper = ModelMapperMarshmallow() + mapper.register_models([Container]) + + mapping_data = { + "de_havilland": ["Trident", "Comet"], + "boeing": "Clipper", + "antonov": "AN-124", + } + + target = Container(m=FrozenMappingStrToStrOrStrSeq(mapping_data)) + + schema_cls = mapper.get_or_create_schema_for_attrs_class(Container) + + # Serialization + serialized = schema_cls().dump(target) + assert serialized == dict( + m=mapping_data, + ) + + # Deserialization + deserialized = schema_cls().load(serialized) + assert deserialized == target + + +def test_frozen_str_mapping(): + @ModelDescriptor() + @attr.s(auto_attribs=True) + class ContainerStr: + m: FrozenStrMapping[str] + + mapper = ModelMapperMarshmallow() + mapper.register_models([ContainerStr]) + + mapping_data = { + "de_havilland": "Comet", + "boeing": "Clipper", + "antonov": "AN-124", + } + + target = ContainerStr(m=FrozenStrMapping(mapping_data)) + + schema_cls = mapper.get_or_create_schema_for_attrs_class(ContainerStr) + + # Serialization + serialized = schema_cls().dump(target) + assert serialized == dict( + m=mapping_data, + ) + + # 
Deserialization + deserialized = schema_cls().load(serialized) + assert deserialized == target + + # Deserialization errors + with pytest.raises(marshmallow.ValidationError) as exc_pack: + schema_cls().load(dict(m={"de_havilland": None})) + + assert exc_pack.value.messages == { + "m": {"de_havilland": {"value": ["Field may not be null."]}}, + } + + +def test_serialization_key(): + @ModelDescriptor() + @attr.s(auto_attribs=True) + class Point: + x: int = attr.ib(metadata=AttribDescriptor(serialization_key="the_x").to_meta()) + y: int + + @ModelDescriptor() + @attr.s + class Target: + list_of_points: list[Point] = attr.ib(metadata=AttribDescriptor(serialization_key="lop").to_meta()) + list_of_ints: list[int] = attr.ib(metadata=AttribDescriptor(serialization_key="loint").to_meta()) + some_long_attr_name: int = attr.ib(metadata=AttribDescriptor(serialization_key="slan").to_meta()) + no_serialization_key: str = attr.ib() + point: Point = attr.ib(metadata=AttribDescriptor(serialization_key="p").to_meta()) + + mapper = ModelMapperMarshmallow() + mapper.register_models([Target]) + schema_cls = mapper.get_or_create_schema_for_attrs_class(Target) + + target = Target( + list_of_points=[Point(x=0, y=0)], + list_of_ints=[1, 2, 3], + some_long_attr_name=1984, + no_serialization_key="no_serialization_key_value", + point=Point(x=-1, y=-1), + ) + + serialized = schema_cls().dump(target) + + assert serialized == dict( + lop=[dict(the_x=0, y=0)], + loint=[1, 2, 3], + slan=1984, + no_serialization_key="no_serialization_key_value", + p=dict(the_x=-1, y=-1), + ) + + restored = schema_cls().load(serialized) + + assert restored == target + + +@attr.s(frozen=True) +class MAFieldProjection: + MA_TYPE: ClassVar[Type[fields.Field]] + MAP_MA_FIELD_CLS_PROJECTION_CLS: ClassVar[dict[Type[fields.Field], Type["MAFieldProjection"]]] = {} + + allow_none: bool = attr.ib() + attribute: str = attr.ib() + required: bool = attr.ib() + + def __init_subclass__(cls, **kwargs): + 
cls.MAP_MA_FIELD_CLS_PROJECTION_CLS[cls.MA_TYPE] = cls + + @classmethod + def get_default_kwargs(cls, ma_field: fields.Field) -> dict: + return dict( + allow_none=ma_field.allow_none, + attribute=ma_field.attribute, + required=ma_field.required, + ) + + @classmethod + def project(cls, ma_field: fields.Field) -> "MAFieldProjection": + return cls(**cls.get_default_kwargs(ma_field)) + + @classmethod + def project_generic(cls, ma_field: fields.Field) -> "MAFieldProjection": + return cls.MAP_MA_FIELD_CLS_PROJECTION_CLS[type(ma_field)].project(ma_field) + + +@attr.s(frozen=True) +class MAIntProjection(MAFieldProjection): + MA_TYPE = fields.Integer + + +@attr.s() +class MAStringProjection(MAFieldProjection): + MA_TYPE = fields.String + + +@attr.s() +class MABoolProjection(MAFieldProjection): + MA_TYPE = fields.Boolean + + +@attr.s() +class MAListFieldProjection(MAFieldProjection): + MA_TYPE = fields.List + + item: MAFieldProjection = attr.ib() + + @classmethod + def project(cls, ma_field: fields.List) -> "MAFieldProjection": + return cls(item=cls.project_generic(ma_field.inner), **cls.get_default_kwargs(ma_field)) + + +@attr.s() +class MAFrozenMappingProjection(MAFieldProjection): + MA_TYPE = FrozenStrMappingField + + key_field: MAFieldProjection = attr.ib() + value_field: MAFieldProjection = attr.ib() + + @classmethod + def project(cls, ma_field: FrozenStrMappingField) -> "MAFieldProjection": + return cls( + key_field=cls.project_generic(ma_field.key_field), + value_field=cls.project_generic(ma_field.value_field), + **cls.get_default_kwargs(ma_field), + ) + + +def project_schema(schema: marshmallow.Schema) -> dict[str, MAFieldProjection]: + return {name: MAFieldProjection.project_generic(field) for name, field in schema.fields.items()} + + +@ModelDescriptor() +@attr.s(auto_attribs=True, kw_only=True) +class Target1: + class ExpectedSchema(marshmallow.Schema): + a = fields.List(fields.Integer(), attribute="a", required=True) + optional_str = 
fields.String(attribute="optional_str", allow_none=True, required=True) + defaulted_optional_str = fields.String(attribute="defaulted_optional_str", allow_none=True, required=False) + strict_bool = fields.Boolean(attribute="strict_bool", required=True) + list_of_lists_of_str = fields.List( + fields.List(fields.String()), attribute="list_of_lists_of_str", required=True + ) + optional_list_of_str = fields.List( + fields.String(), attribute="optional_list_of_str", allow_none=True, required=True + ) + list_of_optional_str = fields.List( + fields.String(allow_none=True), attribute="list_of_optional_str", required=True + ) + defaulted_list = fields.List(fields.String(allow_none=True), attribute="defaulted_list", required=False) + + a: List[int] + optional_str: Optional[str] + defaulted_optional_str: Optional[str] = attr.ib(default=None) + strict_bool: bool + list_of_lists_of_str: List[List[str]] + optional_list_of_str: Optional[List[str]] + list_of_optional_str: List[Optional[str]] + defaulted_list: List[Optional[str]] = attr.ib(factory=lambda: [None]) + + +@ModelDescriptor() +@attr.s(auto_attribs=True, kw_only=True) +class TargetVariousMappings: + class ExpectedSchema(marshmallow.Schema): + map_str_str = FrozenStrMappingField( + keys=fields.String(), + values=fields.String(allow_none=False), + attribute="map_str_str", + required=True, + ) + map_str_optional_str = FrozenStrMappingField( + keys=fields.String(), + values=fields.String(allow_none=True), + attribute="map_str_optional_str", + required=True, + ) + + map_str_str: FrozenStrMapping[str] + map_str_optional_str: FrozenStrMapping[Optional[str]] + + +@pytest.mark.parametrize( + "main_cls,extra_cls_list", + [ + [Target1, []], + [TargetVariousMappings, []], + ], +) +def test_schema_generation(main_cls: Type, extra_cls_list: list[Type]): + mapper = ModelMapperMarshmallow() + mapper.register_models(extra_cls_list) + mapper.register_models([main_cls]) + + generated_schema_cls = 
mapper.get_or_create_schema_for_attrs_class(main_cls) + + generated_schema = generated_schema_cls() + expected_schema = main_cls.ExpectedSchema() + + assert project_schema(generated_schema) == project_schema(expected_schema) diff --git a/lib/dl_attrs_model_mapper/dl_attrs_model_mapper_tests/unit/test_struct.py b/lib/dl_attrs_model_mapper/dl_attrs_model_mapper_tests/unit/test_struct.py new file mode 100644 index 000000000..b2b7b986c --- /dev/null +++ b/lib/dl_attrs_model_mapper/dl_attrs_model_mapper_tests/unit/test_struct.py @@ -0,0 +1,36 @@ +from dl_attrs_model_mapper.structs.mappings import FrozenMappingStrToStrOrStrSeq + + +def test_FrozenMappingStrToStrOrStrSeq(): + mapping_data = { + "de_havilland": ( + "comet", + "trident", + ), + "boeing": "Clipper", + "antonov": ("AN-124",), + } + + container = FrozenMappingStrToStrOrStrSeq(mapping_data) + + # Keys + assert container.keys() == mapping_data.keys() + # Dict conversion + assert dict(container) == mapping_data + # Items + assert container.items() == mapping_data.items() + # [] + assert {k: container[k] for k in mapping_data.keys()} == mapping_data + # Get + assert {k: container.get(k) for k in mapping_data.keys()} == mapping_data + + # Len + assert len(container) == len(mapping_data) + + # Equality + assert FrozenMappingStrToStrOrStrSeq(mapping_data) == FrozenMappingStrToStrOrStrSeq(mapping_data) + assert FrozenMappingStrToStrOrStrSeq(mapping_data) != FrozenMappingStrToStrOrStrSeq({"a": "1"}) + + # Hash + assert hash(FrozenMappingStrToStrOrStrSeq(mapping_data)) == hash(FrozenMappingStrToStrOrStrSeq(mapping_data)) + assert hash(FrozenMappingStrToStrOrStrSeq(mapping_data)) != hash(FrozenMappingStrToStrOrStrSeq({"a": "1"})) diff --git a/lib/dl_attrs_model_mapper/pyproject.toml b/lib/dl_attrs_model_mapper/pyproject.toml new file mode 100644 index 000000000..4d8a2c7ae --- /dev/null +++ b/lib/dl_attrs_model_mapper/pyproject.toml @@ -0,0 +1,38 @@ + +[tool.poetry] +name = "datalens-attrs-model-mapper" +version = "0.0.1" 
+description = "" +authors = ["DataLens Team "] +packages = [{include = "dl_attrs_model_mapper"}] +license = "Apache 2.0" +readme = "README.md" + + +[tool.poetry.dependencies] +attrs = ">=22.2.0" +marshmallow = ">=3.19.0" +python = ">=3.10, <3.12" +dynamic-enum = {path = "../dynamic_enum"} + +[tool.poetry.group.tests.dependencies] +pytest = ">=7.2.2" +[build-system] +build-backend = "poetry.core.masonry.api" +requires = [ + "poetry-core", +] + +[tool.pytest.ini_options] +minversion = "6.0" +addopts = "-ra" +testpaths = [] + +[datalens_ci] +skip_test = true + +[tool.mypy] +warn_unused_configs = true +disallow_untyped_defs = true +check_untyped_defs = true +strict_optional = true diff --git a/lib/dl_attrs_model_mapper_doc_tools/LICENSE b/lib/dl_attrs_model_mapper_doc_tools/LICENSE new file mode 100644 index 000000000..74ba5f6c7 --- /dev/null +++ b/lib/dl_attrs_model_mapper_doc_tools/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2023 YANDEX LLC + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/lib/dl_attrs_model_mapper_doc_tools/README.md b/lib/dl_attrs_model_mapper_doc_tools/README.md new file mode 100644 index 000000000..92f86c5ca --- /dev/null +++ b/lib/dl_attrs_model_mapper_doc_tools/README.md @@ -0,0 +1 @@ +# dl_attrs_model_mapper_doc_tools diff --git a/lib/dl_attrs_model_mapper_doc_tools/dl_attrs_model_mapper_doc_tools/__init__.py b/lib/dl_attrs_model_mapper_doc_tools/dl_attrs_model_mapper_doc_tools/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/lib/dl_attrs_model_mapper_doc_tools/dl_attrs_model_mapper_doc_tools/domain.py b/lib/dl_attrs_model_mapper_doc_tools/dl_attrs_model_mapper_doc_tools/domain.py new file mode 100644 index 000000000..833e30e70 --- /dev/null +++ b/lib/dl_attrs_model_mapper_doc_tools/dl_attrs_model_mapper_doc_tools/domain.py @@ -0,0 +1,31 @@ +from typing import ( + Any, + Optional, + Sequence, +) + +import attr + +from dl_attrs_model_mapper.domain import AmmRegularSchema +from dl_attrs_model_mapper.utils import MText + + +@attr.s(kw_only=True, auto_attribs=True) +class AmmOperationExample: + title: Optional[MText] = None + description: Optional[MText] = None + rq: dict[str, Any] + rs: dict[str, Any] + + +@attr.s(kw_only=True) +class AmmOperation: + description: MText = attr.ib() + + code: str = attr.ib() + discriminator_attr_name: str = attr.ib() + + amm_schema_rq: AmmRegularSchema = attr.ib() + amm_schema_rs: AmmRegularSchema = 
attr.ib() + + examples: Sequence[AmmOperationExample] = attr.ib() diff --git a/lib/dl_attrs_model_mapper_doc_tools/dl_attrs_model_mapper_doc_tools/main.py b/lib/dl_attrs_model_mapper_doc_tools/dl_attrs_model_mapper_doc_tools/main.py new file mode 100644 index 000000000..9d292df49 --- /dev/null +++ b/lib/dl_attrs_model_mapper_doc_tools/dl_attrs_model_mapper_doc_tools/main.py @@ -0,0 +1,294 @@ +import os +from typing import ( + Optional, + Sequence, + Type, +) + +import attr +import yaml + +from dl_attrs_model_mapper.domain import ( + AmmEnumField, + AmmField, + AmmGenericSchema, + AmmListField, + AmmNestedField, + AmmRegularSchema, + AmmScalarField, +) +from dl_attrs_model_mapper.utils import ( + Locale, + MText, +) +from dl_attrs_model_mapper_doc_tools.domain import ( + AmmOperation, + AmmOperationExample, +) +from dl_attrs_model_mapper_doc_tools.render_units import ( + ClassDoc, + DocHeader, + DocSection, + DocText, + DocUnit, + FieldLine, + GenericClassDoc, + OperationDoc, + OperationExampleDoc, + RegularClassDoc, + RenderContext, +) +from dl_attrs_model_mapper_doc_tools.writer_utils import DocWriter + + +@attr.s() +class Docs: + _operation_per_file: bool = attr.ib() + _elements_dir_name: Optional[str] = attr.ib() + _generate_toc: bool = attr.ib(default=False) + + _map_type_file_name: dict[Type, str] = attr.ib(factory=dict) + _dedicated_class_docs: list[ClassDoc] = attr.ib(factory=list) + _operations: list[tuple[AmmOperation, OperationDoc]] = attr.ib(factory=list) + + def field_to_doc_lines(self, field: AmmField, path: Sequence[str]) -> Sequence[FieldLine]: + cp = field.common_props + + if isinstance(field, AmmScalarField): + scalar_type = field.scalar_type + type_text: str + + if isinstance(field, AmmEnumField): + options = " / ".join([f"`{val}`" for val in field.values]) + type_text = f"enum/{scalar_type.__name__}[{options}]" + else: + type_text = scalar_type.__name__ + + return [ + ( + FieldLine( + path, + type_text=type_text, + nullable=cp.allow_none, + 
required=cp.required, + description=field.common_props.description, + ) + ) + ] + + elif isinstance(field, AmmNestedField): + nested_schema = field.item + nested_schema_doc_file_path = self.get_file_path_for_type(nested_schema.clz) + + main_line = FieldLine( + path, + type_text=nested_schema.clz.__name__, + type_ref=nested_schema_doc_file_path, + nullable=cp.allow_none, + required=cp.required, + description=field.common_props.description, + ) + if nested_schema_doc_file_path is not None: + return [main_line] + + assert isinstance( + nested_schema, AmmRegularSchema + ), f"Attempt to generate inline field docs for non-regular schema: {nested_schema}" + return [ + main_line, + *self.field_dict_to_doc_lines(nested_schema.fields, path), + ] + + elif isinstance(field, AmmListField): + next_path = [*path[:-1], path[-1] + "[]"] + return [ + FieldLine( + path, + type_text="list", + nullable=cp.allow_none, + required=cp.required, + description=field.common_props.description, + ), + *self.field_to_doc_lines(field.item, next_path), + ] + + raise AssertionError(f"Unexpected type of field: {type(field)}") + + def field_dict_to_doc_lines( + self, + field_dict: dict[str, AmmField], + path: Sequence[str], + ) -> Sequence[FieldLine]: + ret: list[FieldLine] = [] + + for field_name, field in field_dict.items(): + ret.extend(self.field_to_doc_lines(field, [*path, field_name])) + + return ret + + def regular_schema_to_object_doc( + self, + schema: AmmRegularSchema, + generate_header: bool = True, + discriminator_f_name: Optional[str] = None, + discriminator_f_val: Optional[str] = None, + ) -> RegularClassDoc: + return RegularClassDoc( + header=DocHeader(schema.clz.__name__) if generate_header else None, + type=schema.clz, + fields=self.field_dict_to_doc_lines(schema.fields, path=[]), + description=schema.description, + discriminator_field_name=discriminator_f_name, + discriminator_field_value=discriminator_f_val, + ) + + def generic_schema_to_object_doc( + self, + schema: 
AmmGenericSchema, + # TODO FIX: Make optional when description will be ready in AmmSchema + description_override: Optional[MText], + ) -> GenericClassDoc: + return GenericClassDoc( + header=DocHeader(schema.clz.__name__), + description=description_override, + discriminator_field_name=schema.discriminator_property_name, + map_discriminator_object_doc={ + d: self.regular_schema_to_object_doc( + reg_schema, + discriminator_f_name=schema.discriminator_property_name, + discriminator_f_val=d, + ) + for d, reg_schema in schema.mapping.items() + }, + type=schema.clz, + ) + + def example_to_doc(self, example: AmmOperationExample, idx: int) -> OperationExampleDoc: + return OperationExampleDoc( + title=example.title or str(idx), + description=example.description, + rq=example.rq, + rs=example.rs, + ) + + def operation_to_doc(self, op: AmmOperation) -> OperationDoc: + return OperationDoc( + header=DocHeader(op.code), + description=op.description, + request=self.regular_schema_to_object_doc(op.amm_schema_rq, generate_header=False), + response=self.regular_schema_to_object_doc(op.amm_schema_rs, generate_header=False), + examples=[self.example_to_doc(s, idx + 1) for idx, s in enumerate(op.examples)], + ) + + def get_file_path_for_type(self, clz: Type) -> Optional[str]: + return self._map_type_file_name.get(clz) + + def get_file_path_for_type_strict(self, clz: Type) -> str: + may_be_path = self.get_file_path_for_type(clz) + assert may_be_path is not None, f"File path fot type {clz} is not registered." 
+ return may_be_path + + def register_file_path_for_type(self, clz: Type, file_path: str) -> None: + self._map_type_file_name[clz] = file_path + + def default_file_name_for_type(self, clz: Type) -> str: + file_name = f"{clz.__name__}.md" + if self._elements_dir_name: + return os.path.join(self._elements_dir_name, file_name) + return file_name + + def register_generic_schema( + self, + generic_schema: AmmGenericSchema, + description_override: Optional[MText], + file_path: Optional[str] = None, + ) -> None: + self.register_file_path_for_type( + generic_schema.clz, + file_path or self.default_file_name_for_type(generic_schema.clz), + ) + self._dedicated_class_docs.append( + self.generic_schema_to_object_doc(generic_schema, description_override), + ) + + def register_regular_schema_as_ref( + self, + regular_schema: AmmRegularSchema, + file_path: Optional[str] = None, + ) -> None: + self.register_file_path_for_type( + regular_schema.clz, + file_path or self.default_file_name_for_type(regular_schema.clz), + ) + doc = self.regular_schema_to_object_doc(regular_schema) + self._dedicated_class_docs.append(doc) + + def register_operations(self, operations: Sequence[AmmOperation]) -> None: + self._operations.extend([(op, self.operation_to_doc(op)) for op in operations]) + + def render(self, dir_path: str, locale: Locale) -> None: + initial_ctx = RenderContext( + headers_level=0, + current_file="", + locale=locale, + ) + doc_writer = DocWriter(initial_ctx, base_dir=dir_path) + + for class_doc in self._dedicated_class_docs: + doc_writer.write( + class_doc, + self.get_file_path_for_type_strict(class_doc.type), + append_nl=True, + ) + + if self._operations: + if self._operation_per_file: + root_doc_content: list[DocUnit] = [] + + for operation, op_doc in self._operations: + rel_file_path = os.path.join("operations", f"{operation.code}.md") + + root_doc_content.append( + DocSection( + header=DocHeader(f"[{operation.code}]({rel_file_path})"), + 
content=[DocText(operation.description), DocText([])], + ) + ) + doc_writer.write(op_doc, rel_file_path) + + doc_writer.write( + DocSection( + header=DocHeader(MText(ru="Список операций", en="Operations list")), + content=root_doc_content, + ), + "all.md", + ) + + else: + doc_writer.write( + DocSection( + header=DocHeader(MText(ru="Описание операций", en="Operations description")), + content=[op[1] for op in self._operations], + ), + "README.md", + ) + else: + if self._generate_toc: + doc_writer.write_text( + yaml.safe_dump( + dict( + title="Configs", + items=[ + dict( + # TODO FIX: Extract TOC title somehow more accurate + name=initial_ctx.localize(class_doc.header.value), # type: ignore + href=self.get_file_path_for_type_strict(class_doc.type), + ) + for class_doc in self._dedicated_class_docs + ], + ), + default_flow_style=False, + sort_keys=False, + ), + "toc-i.yaml", + ) diff --git a/lib/dl_attrs_model_mapper_doc_tools/dl_attrs_model_mapper_doc_tools/md_link_extractor.py b/lib/dl_attrs_model_mapper_doc_tools/dl_attrs_model_mapper_doc_tools/md_link_extractor.py new file mode 100644 index 000000000..f361f70e5 --- /dev/null +++ b/lib/dl_attrs_model_mapper_doc_tools/dl_attrs_model_mapper_doc_tools/md_link_extractor.py @@ -0,0 +1,47 @@ +import re +from typing import ( + Callable, + Optional, + Sequence, +) + +from dl_attrs_model_mapper_doc_tools.render_units import DocLink + + +INLINE_LINK_RE = re.compile(r"\[([^]]+)]\(([^)]+)\)") + + +def process_links( + txt: str, + link_processor: Optional[Callable[[DocLink], Optional[DocLink]]] = None, +) -> Sequence[str | DocLink]: + """ + Tokenize `txt` into 2 types of tokens: plain text & links. + Links can be processed by `link_processor`. + If `link_processor` returns `None` + - link will be treated as plain text and will not be extracted as dedicated token. + If `link_processor` is `None` all links will be extracted as-is. 
+ """ + ret: list[str | DocLink] = [] + effective_link_processor: Callable[[DocLink], Optional[DocLink]] = ( + link_processor if link_processor is not None else lambda x: x + ) + pos = 0 + + for matcher in INLINE_LINK_RE.finditer(txt): + span_start = matcher.span()[0] + span_end = matcher.span()[1] + + doc_link = DocLink(text=matcher.group(1), href=matcher.group(2)) + processed_link = effective_link_processor(doc_link) + + if processed_link is not None: + if pos != span_start: + ret.append(txt[pos:span_start]) + ret.append(processed_link) + pos = span_end + + if pos < len(txt): + ret.append(txt[pos : len(txt)]) + + return ret diff --git a/lib/dl_attrs_model_mapper_doc_tools/dl_attrs_model_mapper_doc_tools/operations_builder.py b/lib/dl_attrs_model_mapper_doc_tools/dl_attrs_model_mapper_doc_tools/operations_builder.py new file mode 100644 index 000000000..d80739d98 --- /dev/null +++ b/lib/dl_attrs_model_mapper_doc_tools/dl_attrs_model_mapper_doc_tools/operations_builder.py @@ -0,0 +1,111 @@ +import enum +from typing import ( + Any, + Generic, + Optional, + Sequence, + Type, + TypeVar, +) + +import attr + +from dl_attrs_model_mapper.base import ModelDescriptor +from dl_attrs_model_mapper.domain import AmmRegularSchema +from dl_attrs_model_mapper.marshmallow import ModelMapperMarshmallow +from dl_attrs_model_mapper.marshmallow_base_schemas import BaseOneOfSchema +from dl_attrs_model_mapper.utils import MText +from dl_attrs_model_mapper_doc_tools.domain import ( + AmmOperation, + AmmOperationExample, +) + + +_OPERATION_KIND_ENUM_TV = TypeVar("_OPERATION_KIND_ENUM_TV", bound=enum.Enum) + + +@attr.s(auto_attribs=True, kw_only=True) +class OperationExample: + title: Optional[MText] = None + description: Optional[MText] = None + rq: Any + rs: Any + + +@attr.s() +class UserOperationInfo(Generic[_OPERATION_KIND_ENUM_TV]): + kind: _OPERATION_KIND_ENUM_TV = attr.ib() + description: MText = attr.ib() + example_list: list[OperationExample] = attr.ib(factory=list) + + 
@classmethod + def validate_example_kind(cls, data_name: str, expected_kind: Any, data: Any) -> None: + kind = getattr(data, "kind", None) + + if kind != expected_kind: + raise AssertionError(f"Got unexpected kind for example {data_name}: {kind=}") + + def __attrs_post_init__(self) -> None: + for idx, example in enumerate(self.example_list): + self.validate_example_kind(f"{self.kind.name}/{idx=}/RQ", expected_kind=self.kind, data=example.rq) + self.validate_example_kind(f"{self.kind.name}/{idx=}/RS", expected_kind=self.kind, data=example.rs) + + +@attr.s() +class AmmOperationsBuilder(Generic[_OPERATION_KIND_ENUM_TV]): + operation_kind_enum: Type[_OPERATION_KIND_ENUM_TV] = attr.ib() + user_op_info_list: Sequence[UserOperationInfo[_OPERATION_KIND_ENUM_TV]] = attr.ib() + rq_base_type: Type = attr.ib() + rs_base_type: Type = attr.ib() + model_mapper: ModelMapperMarshmallow = attr.ib() + + def _resolve_regular_schema(self, generic_model_type: Type, discriminator_value: str) -> AmmRegularSchema: + schema_registry = self.model_mapper.get_amm_schema_registry() + return schema_registry.get_generic_type_schema(generic_model_type).mapping[discriminator_value] + + def _get_generic_schema_for_type(self, t: Type) -> BaseOneOfSchema: + schema_cls = self.model_mapper.get_schema_for_attrs_class(t) + assert issubclass(schema_cls, BaseOneOfSchema) + return schema_cls() + + def _create_amm_operation_for_kind( + self, + user_op_info: UserOperationInfo[_OPERATION_KIND_ENUM_TV], + rq_base_model_descriptor: ModelDescriptor, + ) -> AmmOperation: + discriminator_name = rq_base_model_descriptor.children_type_discriminator_attr_name + assert discriminator_name is not None + discriminator_value = user_op_info.kind.name + + amm_examples = [ + AmmOperationExample( + title=user_example.title, + description=user_example.description, + rq=self._get_generic_schema_for_type(self.rq_base_type).dump(user_example.rq), + rs=self._get_generic_schema_for_type(self.rs_base_type).dump(user_example.rs), + ) + 
for user_example in user_op_info.example_list + ] + + return AmmOperation( + amm_schema_rq=self._resolve_regular_schema(self.rq_base_type, discriminator_value), + amm_schema_rs=self._resolve_regular_schema(self.rs_base_type, discriminator_value), + code=discriminator_value, + discriminator_attr_name=discriminator_name, + description=user_op_info.description, + examples=amm_examples, + ) + + def build(self) -> Sequence[AmmOperation]: + rq_base_model_descriptor = ModelDescriptor.get_for_type(self.rq_base_type) + assert ( + rq_base_model_descriptor is not None + ), f"No model descriptor defined for base request class: {self.rq_base_type}" + + return [ + self._create_amm_operation_for_kind( + user_op_info, + rq_base_model_descriptor, + ) + for user_op_info in self.user_op_info_list + ] diff --git a/lib/dl_attrs_model_mapper_doc_tools/dl_attrs_model_mapper_doc_tools/py.typed b/lib/dl_attrs_model_mapper_doc_tools/dl_attrs_model_mapper_doc_tools/py.typed new file mode 100644 index 000000000..e69de29bb diff --git a/lib/dl_attrs_model_mapper_doc_tools/dl_attrs_model_mapper_doc_tools/render_units.py b/lib/dl_attrs_model_mapper_doc_tools/dl_attrs_model_mapper_doc_tools/render_units.py new file mode 100644 index 000000000..e3b9ddf71 --- /dev/null +++ b/lib/dl_attrs_model_mapper_doc_tools/dl_attrs_model_mapper_doc_tools/render_units.py @@ -0,0 +1,388 @@ +import abc +import os.path +from typing import ( + Any, + ClassVar, + Optional, + Sequence, + Type, + Union, +) +from urllib.parse import urlparse + +import attr +import yaml + +from dl_attrs_model_mapper.utils import ( + Locale, + MText, +) + + +@attr.s() +class RenderContext: + headers_level: int = attr.ib() + current_file: str = attr.ib() + locale: Locale = attr.ib(kw_only=True) + + def with_current_file(self, current_file: str) -> "RenderContext": + return attr.evolve(self, current_file=current_file) + + def resolve_rel_ref(self, ref: str) -> str: + ref_url = urlparse(ref) + if ref_url.scheme == "": + assert ref_url.netloc 
== "" and ref_url.params == "" and ref_url.query == "", f"Unsupported ref: {ref_url}" + + rel_to_root_current_dir = os.path.dirname(self.current_file) + rel_to_root_target_file_path = ref_url.path + normalized_path = os.path.relpath(rel_to_root_target_file_path, rel_to_root_current_dir) + + return ref_url._replace(path=normalized_path).geturl() + + # Not a local relative url + return ref + + def localized_m_text(self, m_text: Optional[MText]) -> Optional[str]: + if m_text is None: + return None + return m_text.at_locale(self.locale) + + def localized_m_text_strict(self, m_text: Optional[MText]) -> str: + if m_text is None: + raise ValueError("No m_text provided for strict localized text getter") + ret = m_text.at_locale(self.locale) + if ret is None: + raise ValueError(f"m_text has no locale {self.locale!r}: {m_text}") + return ret + + def localize(self, src: Union[str, MText, Sequence[Union[str, MText]]]) -> str: + if isinstance(src, str): + return src + elif isinstance(src, MText): + return self.localized_m_text_strict(src) + else: + return "".join([self.localize(sub_src) for sub_src in src]) + + +class DocUnit(metaclass=abc.ABCMeta): + @abc.abstractmethod + def render_md(self, context: RenderContext) -> Sequence[str]: + raise NotImplementedError() + + +@attr.s() +class EmptyLine(DocUnit): + def render_md(self, context: RenderContext) -> Sequence[str]: + return [""] + + +@attr.s() +class DocHeader(DocUnit): + value: Union[str, MText, Sequence[Union[str, MText]]] = attr.ib() + + def render_md(self, render_ctx: RenderContext) -> Sequence[str]: + ret = [] + if render_ctx.headers_level > 1: + ret.append("") + ret.append(f"{'#' * render_ctx.headers_level} {render_ctx.localize(self.value)}\n") + + return ret + + +@attr.s() +class DocLink(DocUnit): + text: Union[str, MText, Sequence[Union[str, MText]]] = attr.ib() + href: str = attr.ib() + + def render_as_single_str(self, render_ctx: RenderContext) -> str: + rendered_link_text = render_ctx.localize(self.text) + assert 
"\n" not in rendered_link_text + + return f"[{rendered_link_text}]({render_ctx.resolve_rel_ref(self.href)})" + + def render_md(self, render_ctx: RenderContext) -> Sequence[str]: + return [self.render_as_single_str(render_ctx)] + + +@attr.s() +class DocText(DocUnit): + text: Union[str, MText, DocLink, Sequence[Union[str, DocLink, MText]]] = attr.ib() + + def normalize_text_to_sequence(self) -> Sequence[Union[str, DocLink, MText]]: + txt = self.text + if isinstance( + txt, + ( + str, + MText, + DocLink, + ), + ): + return [txt] + return txt + + def render_md(self, render_ctx: RenderContext) -> Sequence[str]: + text_part_list = self.normalize_text_to_sequence() + + # Adopting links to plain text + plain_text_part_list: list[Union[str, MText]] = [ + text_part.render_as_single_str(render_ctx) if isinstance(text_part, DocLink) else text_part + for text_part in text_part_list + ] + return [render_ctx.localize(plain_text_part_list)] + + +@attr.s() +class DocTableHeader(DocUnit): + title_list: Sequence[MText] = attr.ib() + + def render_md(self, context: RenderContext) -> Sequence[str]: + localized_title_list = [context.localized_m_text_strict(m_text) for m_text in self.title_list] + + return [ + "|".join([f" {title} " for title in localized_title_list]).rstrip(" "), + "|".join(["-" * (len(title) + 2) for title in localized_title_list]), + ] + + +class CompositeDocUnit(DocUnit, metaclass=abc.ABCMeta): + add_headers_level: ClassVar[bool] = True + + @abc.abstractmethod + def get_children(self) -> Sequence[Optional[DocUnit]]: + raise NotImplementedError() + + def render_md(self, context: RenderContext) -> Sequence[str]: + child_context = ( + attr.evolve(context, headers_level=context.headers_level + 1) if self.add_headers_level else context + ) + + ret: list[str] = [] + for child in self.get_children(): + if child is not None: + ret.extend(child.render_md(child_context)) + + return ret + + +@attr.s() +class DocUnitGroup(CompositeDocUnit): + add_headers_level = False + + 
content: Sequence[Optional[DocUnit]] = attr.ib() + + def get_children(self) -> Sequence[Optional[DocUnit]]: + return self.content + + +@attr.s() +class DocSection(CompositeDocUnit): + header: DocHeader = attr.ib() + content: Sequence[Optional[DocUnit]] = attr.ib() + + def get_children(self) -> Sequence[Optional[DocUnit]]: + return [ + self.header, + *self.content, + ] + + +@attr.s() +class MultiLineTableRow(DocUnit): + items: Sequence[Union[str, DocUnit]] = attr.ib() + + @staticmethod + def convert_to_normalized_single_line_cells(cell_lines_list: Sequence[Sequence[str]]) -> Optional[Sequence[str]]: + ret: list[str] = [] + for cell_lines in cell_lines_list: + if len(cell_lines) > 1: + return None + elif len(cell_lines) == 1: + cell_single_line = cell_lines[0] + if "\n" in cell_single_line: + return None + ret.append(cell_single_line) + else: + ret.append("") + + return ret + + def render_md(self, context: RenderContext) -> Sequence[str]: + cells_line_list: list[Sequence[str]] = [] + + for item in self.items: + cell_content: Sequence[str] + + if isinstance(item, DocUnit): + cell_content = item.render_md(context) + else: + cell_content = [item] + cells_line_list.append(cell_content) + + may_be_single_line_cell_list = self.convert_to_normalized_single_line_cells(cells_line_list) + + # If all cells are single line - add + if may_be_single_line_cell_list is not None: + return ["|| " + " | ".join(may_be_single_line_cell_list) + " ||"] + + ret: list[str] = ["||"] + + for idx, cell_lines in enumerate(cells_line_list): + if idx != 0: + ret.append("|") + ret.extend(cell_lines) + + ret.append("||") + + return ret + + +@attr.s() +class MultiLineTable(CompositeDocUnit): + add_headers_level = False + + rows: Sequence[MultiLineTableRow] = attr.ib() + + def get_children(self) -> Sequence[Optional[DocUnit]]: + return [ + EmptyLine(), + DocText("#|"), + *self.rows, + DocText("|#"), + ] + + +@attr.s(auto_attribs=True) +class FieldLine(DocUnit): + path: Sequence[str] + + type_text: str 
+ + nullable: bool + required: bool + + description: Optional[MText] = None + type_ref: Optional[str] = None + + def _get_type_md(self, render_ctx: RenderContext) -> str: + type_text: str + if self.type_ref is not None: + type_text = f"[{self.type_text}]({render_ctx.resolve_rel_ref(self.type_ref)})" + else: + type_text = f"**{self.type_text}**" + + return f"{type_text} {'*' if self.required else ''}" + + @classmethod + def get_table_header(self) -> DocTableHeader: + return DocTableHeader( + [ + MText(ru="Поле", en="Field"), + MText(ru="Тип", en="Type"), + MText(ru="Описание", en="Description"), + ] + ) + + def render_md(self, render_ctx: RenderContext) -> Sequence[str]: + path = ".".join(f"`{part}`" for part in self.path) + + return [ + f"{path}" + f" | {self._get_type_md(render_ctx)}" + f" | {render_ctx.localized_m_text(self.description) or ''}".rstrip(" ").replace("\n", "
") + ] + + +@attr.s() +class OperationExampleDoc(CompositeDocUnit): + title: Union[MText, str] = attr.ib() + description: Optional[MText] = attr.ib() + rq: dict[str, Any] = attr.ib() + rs: Optional[dict[str, Any]] = attr.ib() + + def _dump_dict(self, d: dict[str, Any]) -> str: + yaml_text = yaml.safe_dump(d, default_flow_style=False, sort_keys=False) + return f"```yaml\n{yaml_text}```" + + def get_children(self) -> Sequence[Optional[DocUnit]]: + rs_dict = self.rs + + return [ + DocHeader(["Example: ", self.title]), + DocText(self.description) if self.description else None, + EmptyLine(), + DocText("**Request**"), + EmptyLine(), + DocText(self._dump_dict(self.rq)), + EmptyLine(), + DocText("**Response**"), + EmptyLine(), + DocText(self._dump_dict(rs_dict)) if rs_dict is not None else None, + ] + + +@attr.s() +class ClassDoc(CompositeDocUnit, metaclass=abc.ABCMeta): + type: Type = attr.ib() + header: Optional[DocHeader] = attr.ib() + description: Optional[MText] = attr.ib() + + +@attr.s() +class RegularClassDoc(ClassDoc): + fields: Sequence[FieldLine] = attr.ib() + discriminator_field_name: Optional[str] = attr.ib(default=None) + discriminator_field_value: Optional[str] = attr.ib(default=None) + + def get_children(self) -> Sequence[Optional[DocUnit]]: + discriminator_text: Optional[DocText] = None + if self.discriminator_field_value is not None: + discriminator_text = DocText(f"`{self.discriminator_field_name}`:`{self.discriminator_field_value}`") + + description = self.description + + return [ + self.header, + *([EmptyLine(), discriminator_text, EmptyLine()] if discriminator_text else []), + *([EmptyLine(), DocText(description), EmptyLine()] if description else []), + *([FieldLine.get_table_header(), *self.fields] if self.fields else [DocText("**No parameters**")]), + ] + + +@attr.s() +class GenericClassDoc(ClassDoc): + discriminator_field_name: str = attr.ib() + map_discriminator_object_doc: dict[str, RegularClassDoc] = attr.ib() + + def get_children(self) -> 
Sequence[Optional[DocUnit]]: + return [ + self.header, + DocText([MText("Поле-дискриминатор: ", en="Discriminator field: "), f"`{self.discriminator_field_name}`"]), + DocText(self.description) if self.description else None, + *[obj_doc for discriminator, obj_doc in self.map_discriminator_object_doc.items()], + ] + + +@attr.s() +class OperationDoc(CompositeDocUnit): + header: DocHeader = attr.ib() + + description: MText = attr.ib() + + request: RegularClassDoc = attr.ib() + response: RegularClassDoc = attr.ib() + examples: list[OperationExampleDoc] = attr.ib() + + def get_children(self) -> Sequence[Optional[DocUnit]]: + return [ + self.header, + DocText(self.description), + DocSection(DocHeader("Request"), [self.request]), + DocSection( + DocHeader("Response"), + [ + self.response, + ], + ), + DocSection(DocHeader("Examples"), self.examples) if self.examples else None, + ] diff --git a/lib/dl_attrs_model_mapper_doc_tools/dl_attrs_model_mapper_doc_tools/writer_utils.py b/lib/dl_attrs_model_mapper_doc_tools/dl_attrs_model_mapper_doc_tools/writer_utils.py new file mode 100644 index 000000000..f45663b81 --- /dev/null +++ b/lib/dl_attrs_model_mapper_doc_tools/dl_attrs_model_mapper_doc_tools/writer_utils.py @@ -0,0 +1,51 @@ +import contextlib +import os +from typing import ( + Iterator, + TextIO, +) + +import attr + +from dl_attrs_model_mapper_doc_tools.render_units import ( + DocUnit, + RenderContext, +) + + +@attr.s(auto_attribs=True) +class DocWriter: + render_ctx: RenderContext + base_dir: str + + _writen_paths: set[str] = attr.ib(init=False, factory=set) + + def was_path_written(self, rel_path: str) -> bool: + return rel_path in self._writen_paths + + @contextlib.contextmanager + def _file_cm(self, rel_path: str) -> Iterator[TextIO]: + if not os.path.isdir(self.base_dir): + raise ValueError(f"Base directory is not exists: {self.base_dir}") + if not os.path.isdir(self.base_dir): + raise ValueError(f"Base directory is not a directory: {self.base_dir}") + if 
self.was_path_written(rel_path): + raise ValueError(f"Attempt to overwrite file: {rel_path}") + + self._writen_paths.add(rel_path) + + full_path = os.path.join(self.base_dir, rel_path) + os.makedirs(os.path.dirname(full_path), exist_ok=True) + + with open(full_path, "w") as file: + yield file + + def write(self, doc_unit: DocUnit, rel_path: str, append_nl: bool = False) -> None: + with self._file_cm(rel_path) as file: + file.write("\n".join(doc_unit.render_md(self.render_ctx.with_current_file(rel_path)))) + if append_nl: + file.write("\n") + + def write_text(self, txt: str, rel_path: str) -> None: + with self._file_cm(rel_path) as file: + file.write(txt) diff --git a/lib/dl_attrs_model_mapper_doc_tools/dl_attrs_model_mapper_doc_tools_tests/__init__.py b/lib/dl_attrs_model_mapper_doc_tools/dl_attrs_model_mapper_doc_tools_tests/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/lib/dl_attrs_model_mapper_doc_tools/dl_attrs_model_mapper_doc_tools_tests/unit/__init__.py b/lib/dl_attrs_model_mapper_doc_tools/dl_attrs_model_mapper_doc_tools_tests/unit/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/lib/dl_attrs_model_mapper_doc_tools/dl_attrs_model_mapper_doc_tools_tests/unit/conftest.py b/lib/dl_attrs_model_mapper_doc_tools/dl_attrs_model_mapper_doc_tools_tests/unit/conftest.py new file mode 100644 index 000000000..e69de29bb diff --git a/lib/dl_attrs_model_mapper_doc_tools/pyproject.toml b/lib/dl_attrs_model_mapper_doc_tools/pyproject.toml new file mode 100644 index 000000000..78f7e706a --- /dev/null +++ b/lib/dl_attrs_model_mapper_doc_tools/pyproject.toml @@ -0,0 +1,38 @@ + +[tool.poetry] +name = "datalens-attrs-model-mapper-doc-tools" +version = "0.0.1" +description = "" +authors = ["DataLens Team "] +packages = [{include = "dl_attrs_model_mapper_doc_tools"}] +license = "Apache 2.0" +readme = "README.md" + + +[tool.poetry.dependencies] +attrs = ">=22.2.0" +pyyaml = ">=5.3.1" +python = ">=3.10, <3.12" 
+datalens-attrs-model-mapper = {path = "../dl_attrs_model_mapper"} + +[tool.poetry.group.tests.dependencies] +pytest = ">=7.2.2" +[build-system] +build-backend = "poetry.core.masonry.api" +requires = [ + "poetry-core", +] + +[tool.pytest.ini_options] +minversion = "6.0" +addopts = "-ra" +testpaths = [] + +[datalens_ci] +skip_test = true + +[tool.mypy] +warn_unused_configs = true +disallow_untyped_defs = true +check_untyped_defs = true +strict_optional = true diff --git a/metapkg/poetry.lock b/metapkg/poetry.lock index 0c3ea5375..bb874cfb1 100644 --- a/metapkg/poetry.lock +++ b/metapkg/poetry.lock @@ -1168,6 +1168,42 @@ sqlalchemy = ">=1.4.46, <2.0" type = "directory" url = "../lib/dl_app_tools" +[[package]] +name = "datalens-attrs-model-mapper" +version = "0.0.1" +description = "" +optional = false +python-versions = ">=3.10, <3.12" +files = [] +develop = false + +[package.dependencies] +attrs = ">=22.2.0" +dynamic-enum = {path = "../dynamic_enum"} +marshmallow = ">=3.19.0" + +[package.source] +type = "directory" +url = "../lib/dl_attrs_model_mapper" + +[[package]] +name = "datalens-attrs-model-mapper-doc-tools" +version = "0.0.1" +description = "" +optional = false +python-versions = ">=3.10, <3.12" +files = [] +develop = false + +[package.dependencies] +attrs = ">=22.2.0" +datalens-attrs-model-mapper = {path = "../dl_attrs_model_mapper"} +pyyaml = ">=5.3.1" + +[package.source] +type = "directory" +url = "../lib/dl_attrs_model_mapper_doc_tools" + [[package]] name = "datalens-compeng-pg" version = "0.0.1" @@ -4870,6 +4906,7 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -4877,8 +4914,15 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -4895,6 +4939,7 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -4902,6 +4947,7 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -6318,4 +6364,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = ">=3.10, <3.12" -content-hash = "635660602d7db2e63190e4eea32a7ef2cab1cd4d1c0b96651dd9d295a180599d" +content-hash = "08be0db497c668327c51002dc3d5bdbe0c4405cab9d833018c70f16c6c59b6fd" diff --git a/metapkg/pyproject.toml b/metapkg/pyproject.toml index 01006dbe3..45849c60c 100644 --- a/metapkg/pyproject.toml +++ b/metapkg/pyproject.toml @@ -147,6 +147,8 @@ datalens-connector-mysql = {path = "../lib/dl_connector_mysql"} datalens-sqlalchemy-mysql = {path = "../lib/dl_sqlalchemy_mysql"} datalens-maintenance = {path = "../lib/dl_maintenance"} datalens-connector-mssql = {path = "../lib/dl_connector_mssql"} +datalens-attrs-model-mapper = {path = "../lib/dl_attrs_model_mapper"} +datalens-attrs-model-mapper-doc-tools = {path = 
"../lib/dl_attrs_model_mapper_doc_tools"} [tool.poetry.group.dev.dependencies] black = "==23.3.0"