diff --git a/.coveragerc b/.coveragerc
index c91eece7..696f2b03 100644
--- a/.coveragerc
+++ b/.coveragerc
@@ -12,6 +12,10 @@ exclude_lines =
     # Conditional code which is dependent on the OS, or `os.name`
     if name == 'nt':
 
+    # This will exclude all lines starting with something like
+    # if PY311_OR_ABOVE: or if PY310_BETA:.
+    if PY\d+_\w+:
+
     # Don't complain if tests don't hit defensive assertion code:
     raise AssertionError
     raise NotImplementedError
diff --git a/HISTORY.rst b/HISTORY.rst
index 457019e1..f23bdd69 100644
--- a/HISTORY.rst
+++ b/HISTORY.rst
@@ -2,6 +2,36 @@
 History
 =======
 
+0.34.0 (2024-12-30)
+-------------------
+
+**Features and Improvements**
+
+- **V1 Opt-in**
+    - Support for recursive types OOTB for the following Python types:
+        - ``NamedTuple``
+        - ``TypedDict``
+        - ``Union``
+        - ``Literal``
+        - Nested ``dataclasses``
+        - `Type aliases`_ (introduced in Python 3.12+)
+    - Full support for ``bytes`` and ``bytearray`` in the de/serialization process (fixes :issue:`140`).
+    - Performance improvements: optimized load functions for ``bool``, ``NamedTuple``, ``datetime``, ``date``, and ``time``.
+    - Added support for `Type aliases`_ (via the ``type`` statement in Python 3.12+).
+    - Improved logic in ``load_to_str`` to better check whether the annotation is within an ``Optional[...]`` type.
+    - Enhanced handling of sub-types in de/serialization (**TODO**: add test cases).
+    - Show a deprecation warning for the Meta setting ``debug_enabled`` (replaced by ``v1_debug``).
+
+- Updated benchmarks for improved accuracy.
+
+**Bugfixes**
+
+- Fixed an issue where code generation failed to correctly account for indexes, especially when nested collection types like ``dict`` were used within a ``NamedTuple``.
+- ``make check`` now works out-of-the-box for validating ``README.rst`` and other RST files for PyPI deployment.
+- :pr:`169`: Explicitly added ``utf-8`` encoding for ``setup.py`` to enable installation from source on Windows (shoutout to :user:`birkholz-cubert`!).
+
+.. 
_Type aliases: https://docs.python.org/3/library/typing.html#type-aliases + 0.33.0 (2024-12-17) ------------------- diff --git a/Makefile b/Makefile index cd190805..0cd3a762 100644 --- a/Makefile +++ b/Makefile @@ -83,7 +83,7 @@ servedocs: docs ## compile the docs watching for changes release: dist ## package and upload a release twine upload dist/* -check: dist ## verify release before upload to PyPI +check: dist-local ## verify release before upload to PyPI twine check dist/* dist: clean ## builds source and wheel package @@ -91,6 +91,25 @@ dist: clean ## builds source and wheel package python setup.py bdist_wheel ls -l dist +dist-local: clean replace_version ## builds source and wheel package (for local testing) + python setup.py sdist + python setup.py bdist_wheel + ls -l dist + $(MAKE) revert_readme + +replace_version: ## replace |version| in README.rst with the current version + cp README.rst README.rst.bak + python -c "import re; \ +from pathlib import Path; \ +version = re.search(r\"__version__\\s*=\\s*'(.+?)'\", Path('dataclass_wizard/__version__.py').read_text()).group(1); \ +readme_path = Path('README.rst'); \ +readme_content = readme_path.read_text(); \ +readme_path.write_text(readme_content.replace('|version|', version)); \ +print(f'Replaced version in {readme_path}: {version}')" + +revert_readme: ## revert README.rst to its original state + mv README.rst.bak README.rst + install: clean ## install the package to the active Python's site-packages python setup.py install diff --git a/README.rst b/README.rst index d3dd33a5..d7f9f2a8 100644 --- a/README.rst +++ b/README.rst @@ -143,7 +143,7 @@ This library supports **Python 3.9+**. Support for Python 3.6 – 3.8 was available in earlier releases but is no longer maintained, as those versions no longer receive security updates. -For convenience, the table below outlines the last compatible version +For convenience, the table below outlines the last compatible release of *Dataclass Wizard* for unsupported Python versions (3.6 – 3.8): .. list-table:: @@ -153,15 +153,15 @@ of *Dataclass Wizard* for unsupported Python versions (3.6 – 3.8): * - Python Version - Last Version of ``dataclass-wizard`` - Python EOL - * - 3.6 + * - 3.8 - 0.26.1_ - - 2021-12-23 + - 2024-10-07 * - 3.7 - 0.26.1_ - 2023-06-27 - * - 3.8 + * - 3.6 - 0.26.1_ - - 2024-10-07 + - 2021-12-23 .. _0.26.1: https://pypi.org/project/dataclass-wizard/0.26.1/ .. _PyPI: https://pypi.org/project/dataclass-wizard/ @@ -834,10 +834,10 @@ A brief example of the intended usage is shown below: # serialization. In fact, it'll be faster than parsing the custom patterns! assert class_obj == fromdict(MyClass, asdict(class_obj)) -"Recursive" Dataclasses with Cyclic References ----------------------------------------------- +Recursive Types and Dataclasses with Cyclic References +------------------------------------------------------ -Prior to version `v0.27.0`, dataclasses with cyclic references +Prior to version **0.27.0**, dataclasses with cyclic references or self-referential structures were not supported. This limitation is shown in the following toy example: @@ -851,28 +851,24 @@ limitation is shown in the following toy example: a = A(a=A(a=A(a=A()))) -This was a `longstanding issue`_. - -New in ``v0.27.0``: The Dataclass Wizard now extends its support -to cyclic and self-referential dataclass models. +This was a `longstanding issue`_, but starting with ``v0.27.0``, Dataclass Wizard now supports +recursive dataclasses, including cyclic references. 
-The example below demonstrates recursive dataclasses with cyclic -dependencies, following the pattern ``A -> B -> A -> B``. For more details, see -the `Cyclic or "Recursive" Dataclasses`_ section in the documentation. +The example below demonstrates recursive +dataclasses with cyclic dependencies, following the pattern ``A -> B -> A -> B``. +For more details, see the `Cyclic or "Recursive" Dataclasses`_ section in the documentation. .. code:: python3 from __future__ import annotations # This can be removed in Python 3.10+ from dataclasses import dataclass - from dataclass_wizard import JSONWizard - @dataclass class A(JSONWizard): class _(JSONWizard.Meta): - # enable support for self-referential / recursive dataclasses + # Enable support for self-referential / recursive dataclasses recursive_classes = True b: 'B | None' = None @@ -882,13 +878,132 @@ the `Cyclic or "Recursive" Dataclasses`_ section in the documentation. class B: a: A | None = None - - # confirm that `from_dict` with a recursive, self-referential + # Confirm that `from_dict` with a recursive, self-referential # input `dict` works as expected. a = A.from_dict({'b': {'a': {'b': {'a': None}}}}) assert a == A(b=B(a=A(b=B()))) +Starting with version **0.34.0**, recursive types are supported *out of the box* (OOTB) with ``v1`` opt-in, +removing the need for any ``Meta`` settings like ``recursive_classes = True``. + +This makes working with recursive dataclasses even easier and more streamlined. In addition, recursive types +are now supported for the following Python type constructs: + +- NamedTuple_ +- TypedDict_ +- Union_ +- Literal_ +- Nested dataclasses_ +- `Type aliases`_ (introduced in Python 3.12+) + +.. _NamedTuple: https://docs.python.org/3/library/typing.html#typing.NamedTuple +.. _TypedDict: https://docs.python.org/3/library/typing.html#typing.TypedDict +.. _Union: https://docs.python.org/3/library/typing.html#typing.Union +.. _Literal: https://docs.python.org/3/library/typing.html#typing.Literal +.. _Type aliases: https://docs.python.org/3/library/typing.html#type-aliases + +Example Usage +~~~~~~~~~~~~~ + +Recursive types allow handling complex nested data structures, such as deeply nested JSON objects or lists. +With ``v0.34.0`` of Dataclass Wizard, de/serializing these structures becomes seamless +and more intuitive. + +Recursive ``Union`` +################### + +.. code-block:: python3 + + from dataclasses import dataclass + from dataclass_wizard import JSONWizard + + # For Python 3.9, use this `Union` approach: + from typing_extensions import TypeAlias + JSON: TypeAlias = 'str | int | float | bool | dict[str, JSON] | list[JSON] | None' + + # For Python 3.10 and above, use this simpler approach: + # JSON = str | int | float | bool | dict[str, 'JSON'] | list['JSON'] | None + + # For Python 3.12+, you can use the `type` statement: + # type JSON = str | int | float | bool | dict[str, JSON] | list[JSON] | None + + @dataclass + class MyTestClass(JSONWizard): + + class _(JSONWizard.Meta): + v1 = True + + name: str + meta: str + msg: JSON + + x = MyTestClass.from_dict( + { + "name": "name", + "meta": "meta", + "msg": [{"x": {"x": [{"x": ["x", 1, 1.0, True, None]}]}}], + } + ) + assert x == MyTestClass( + name="name", + meta="meta", + msg=[{"x": {"x": [{"x": ["x", 1, 1.0, True, None]}]}}], + ) + +.. note:: + The ``type`` statement in Python 3.12+ simplifies type alias definitions by avoiding string annotations for recursive references. 
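+
+For instance, on Python 3.12+ the same recursive alias can be declared with the
+``type`` statement, with no quoted forward references needed for the
+self-reference (this is the variant shown commented out in the example above):
+
+.. code-block:: python3
+
+    type JSON = str | int | float | bool | dict[str, JSON] | list[JSON] | None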
+
+Recursive ``Union`` with Nested ``dataclasses``
+###############################################
+
+.. code-block:: python3
+
+    from dataclasses import dataclass, field
+    from dataclass_wizard import JSONWizard
+
+    @dataclass
+    class A(JSONWizard):
+
+        class _(JSONWizard.Meta):
+            v1 = True
+
+        value: int
+        nested: 'B'
+        next: 'A | None' = None
+
+
+    @dataclass
+    class B:
+        items: list[A] = field(default_factory=list)
+
+
+    x = A.from_dict(
+        {
+            "value": 1,
+            "next": {"value": 2, "next": None, "nested": {}},
+            "nested": {"items": [{"value": 3, "nested": {}}]},
+        }
+    )
+    assert x == A(
+        value=1,
+        next=A(value=2, next=None, nested=B(items=[])),
+        nested=B(items=[A(value=3, nested=B())]),
+    )
+
+.. note::
+    Nested ``dataclasses`` are particularly useful for representing hierarchical structures, such as trees or graphs, in a readable and maintainable way.
+
+Official References
+~~~~~~~~~~~~~~~~~~~
+
+For more information, see:
+
+- `Typing in Python <https://docs.python.org/3/library/typing.html>`_
+- `PEP 695: Type Syntax <https://peps.python.org/pep-0695/>`_
+
+These examples illustrate the power of recursive types in simplifying complex data structures while leveraging the functionality of ``dataclass-wizard``.
+
 Dataclasses in ``Union`` Types
 ------------------------------
diff --git a/benchmarks/catch_all.png b/benchmarks/catch_all.png
index 7f438796..baf46d56 100644
Binary files a/benchmarks/catch_all.png and b/benchmarks/catch_all.png differ
diff --git a/benchmarks/complex.py b/benchmarks/complex.py
index 1295dbef..205330d9 100644
--- a/benchmarks/complex.py
+++ b/benchmarks/complex.py
@@ -88,6 +88,7 @@ class Name(NamedTuple):
     last: str
     salutation: Optional[str] = 'Mr.'
 
+
 @dataclass
 class NameDataclass:
     first: str
@@ -218,14 +219,14 @@ def test_load(request, data, data_2, data_dacite, n):
     """
     [ RESULTS ON MAC OS X ]
-    benchmarks.complex.complex - [INFO] dataclass-wizard   0.373847
-    benchmarks.complex.complex - [INFO] dataclass-factory  0.777164
-    benchmarks.complex.complex - [INFO] dataclasses-json   28.177022
-    benchmarks.complex.complex - [INFO] dacite             6.619898
-    benchmarks.complex.complex - [INFO] mashumaro          0.351623
-    benchmarks.complex.complex - [INFO] pydantic           0.563395
-    benchmarks.complex.complex - [INFO] jsons              30.564242
-    benchmarks.complex.complex - [INFO] jsons (strict)     35.122489
+    benchmarks.complex.complex - [INFO] dataclass-wizard   0.325364
+    benchmarks.complex.complex - [INFO] dataclass-factory  0.773195
+    benchmarks.complex.complex - [INFO] dataclasses-json   28.435088
+    benchmarks.complex.complex - [INFO] dacite             6.287875
+    benchmarks.complex.complex - [INFO] mashumaro          0.344701
+    benchmarks.complex.complex - [INFO] pydantic           0.547749
+    benchmarks.complex.complex - [INFO] jsons              29.978993
+    benchmarks.complex.complex - [INFO] jsons (strict)     34.052532
     """
     g = globals().copy()
     g.update(locals())
@@ -236,9 +237,6 @@ def test_load(request, data, data_2, data_dacite, n):
     log.info('dataclass-factory  %f',
              timeit('factory.load(data_2, MyClass)', globals=g, number=n))
 
-    log.info('dataclasses-json  %f',
-             timeit('MyClassDJ.from_dict(data_2)', globals=g, number=n))
-
     log.info('dacite  %f',
              timeit('dacite_from_dict(MyClassDacite, data_dacite, config=dacite_cfg)',
                     globals=g, number=n))
@@ -264,6 +262,9 @@ def test_load(request, data, data_2, data_dacite, n):
     if not request.config.getoption("--all"):
         pytest.skip("Skipping benchmarks for the rest by default, unless --all is specified.")
 
+    log.info('dataclasses-json  %f',
+             timeit('MyClassDJ.from_dict(data_2)', globals=g, number=n))
+
     log.info('jsons  %f',
              timeit('MyClassJsons.load(data)', globals=g, number=n))
diff --git 
a/benchmarks/nested.py b/benchmarks/nested.py index 6041e9ea..0487906a 100644 --- a/benchmarks/nested.py +++ b/benchmarks/nested.py @@ -214,13 +214,13 @@ def test_load(request, data, n): """ [ RESULTS ON MAC OS X ] - benchmarks.nested.nested - [INFO] dataclass-wizard 0.135700 - benchmarks.nested.nested - [INFO] dataclass-factory 0.412265 - benchmarks.nested.nested - [INFO] dataclasses-json 11.448704 - benchmarks.nested.nested - [INFO] mashumaro 0.150680 - benchmarks.nested.nested - [INFO] pydantic 0.328947 - benchmarks.nested.nested - [INFO] jsons 25.052287 - benchmarks.nested.nested - [INFO] jsons (strict) 43.233567 + benchmarks.nested.nested - [INFO] dataclass-wizard 0.130734 + benchmarks.nested.nested - [INFO] dataclass-factory 0.404371 + benchmarks.nested.nested - [INFO] dataclasses-json 11.315233 + benchmarks.nested.nested - [INFO] mashumaro 0.158986 + benchmarks.nested.nested - [INFO] pydantic 0.330295 + benchmarks.nested.nested - [INFO] jsons 25.084872 + benchmarks.nested.nested - [INFO] jsons (strict) 28.306646 """ g = globals().copy() diff --git a/dataclass_wizard/__init__.py b/dataclass_wizard/__init__.py index cddd4a1c..6fb8967b 100644 --- a/dataclass_wizard/__init__.py +++ b/dataclass_wizard/__init__.py @@ -3,7 +3,7 @@ ~~~~~~~~~~~~~~~~ Lightning-fast JSON wizardry for Python dataclasses — effortless -serialization with no external tools required! +serialization right out of the box! Sample Usage: @@ -119,6 +119,7 @@ import logging from .bases_meta import LoadMeta, DumpMeta, EnvMeta +from .constants import PACKAGE_NAME from .dumpers import DumpMixin, setup_default_dumper, asdict from .loaders import LoadMixin, setup_default_loader from .loader_selection import fromlist, fromdict @@ -135,7 +136,7 @@ # Set up logging to ``/dev/null`` like a library is supposed to. # http://docs.python.org/3.3/howto/logging.html#configuring-logging-for-a-library -logging.getLogger('dataclass_wizard').addHandler(logging.NullHandler()) +logging.getLogger(PACKAGE_NAME).addHandler(logging.NullHandler()) # Setup the default type hooks to use when converting `str` (json) or a Python # `dict` object to a `dataclass` instance. diff --git a/dataclass_wizard/__version__.py b/dataclass_wizard/__version__.py index b5a5571b..0bfe44ac 100644 --- a/dataclass_wizard/__version__.py +++ b/dataclass_wizard/__version__.py @@ -5,7 +5,7 @@ __title__ = 'dataclass-wizard' __description__ = ('Lightning-fast JSON wizardry for Python dataclasses — ' - 'effortless serialization with no external tools required!') + 'effortless serialization right out of the box!') __url__ = 'https://github.com/rnag/dataclass-wizard' __version__ = '0.33.0' __author__ = 'Ritvik Nag' diff --git a/dataclass_wizard/abstractions.py b/dataclass_wizard/abstractions.py index 4ac7940b..75e1f6c7 100644 --- a/dataclass_wizard/abstractions.py +++ b/dataclass_wizard/abstractions.py @@ -9,7 +9,7 @@ from .bases import META from .models import Extras -from .v1.models import TypeInfo +from .v1.models import Extras as V1Extras, TypeInfo from .type_def import T, TT @@ -274,7 +274,7 @@ def transform_json_field(string: str) -> str: @staticmethod @abstractmethod - def default_load_to(tp: TypeInfo, extras: Extras) -> str: + def default_load_to(tp: TypeInfo, extras: V1Extras) -> str: """ Generate code for the default load function if no other types match. Generally, this will be a stub load method. 
@@ -282,35 +282,28 @@ def default_load_to(tp: TypeInfo, extras: Extras) -> str: @staticmethod @abstractmethod - def load_after_type_check(tp: TypeInfo, extras: Extras) -> str: - """ - Generate code to load an object after confirming its type. - """ - - @staticmethod - @abstractmethod - def load_to_str(tp: TypeInfo, extras: Extras) -> str: + def load_to_str(tp: TypeInfo, extras: V1Extras) -> str: """ Generate code to load a value into a string field. """ @staticmethod @abstractmethod - def load_to_int(tp: TypeInfo, extras: Extras) -> str: + def load_to_int(tp: TypeInfo, extras: V1Extras) -> str: """ Generate code to load a value into an integer field. """ @staticmethod @abstractmethod - def load_to_float(tp: TypeInfo, extras: Extras) -> str: + def load_to_float(tp: TypeInfo, extras: V1Extras) -> 'str | TypeInfo': """ Generate code to load a value into a float field. """ @staticmethod @abstractmethod - def load_to_bool(_: str, extras: Extras) -> str: + def load_to_bool(_: str, extras: V1Extras) -> str: """ Generate code to load a value into a boolean field. Adds a helper function `as_bool` to the local context. @@ -318,28 +311,28 @@ def load_to_bool(_: str, extras: Extras) -> str: @staticmethod @abstractmethod - def load_to_bytes(tp: TypeInfo, extras: Extras) -> str: + def load_to_bytes(tp: TypeInfo, extras: V1Extras) -> 'str | TypeInfo': """ Generate code to load a value into a bytes field. """ @staticmethod @abstractmethod - def load_to_bytearray(tp: TypeInfo, extras: Extras) -> str: + def load_to_bytearray(tp: TypeInfo, extras: V1Extras) -> 'str | TypeInfo': """ Generate code to load a value into a bytearray field. """ @staticmethod @abstractmethod - def load_to_none(tp: TypeInfo, extras: Extras) -> str: + def load_to_none(tp: TypeInfo, extras: V1Extras) -> str: """ Generate code to load a value into a None. """ @staticmethod @abstractmethod - def load_to_literal(tp: TypeInfo, extras: Extras) -> 'str | TypeInfo': + def load_to_literal(tp: TypeInfo, extras: V1Extras) -> 'str | TypeInfo': """ Generate code to confirm a value is equivalent to one of the provided literals. @@ -347,111 +340,118 @@ def load_to_literal(tp: TypeInfo, extras: Extras) -> 'str | TypeInfo': @classmethod @abstractmethod - def load_to_union(cls, tp: TypeInfo, extras: Extras) -> 'str | TypeInfo': + def load_to_union(cls, tp: TypeInfo, extras: V1Extras) -> 'str | TypeInfo': """ Generate code to load a value into a `Union[X, Y, ...]` (one of [X, Y, ...] possible types) """ @staticmethod @abstractmethod - def load_to_enum(tp: TypeInfo, extras: Extras) -> str: + def load_to_enum(tp: TypeInfo, extras: V1Extras) -> 'str | TypeInfo': """ Generate code to load a value into an Enum field. """ @staticmethod @abstractmethod - def load_to_uuid(tp: TypeInfo, extras: Extras) -> 'str | TypeInfo': + def load_to_uuid(tp: TypeInfo, extras: V1Extras) -> 'str | TypeInfo': """ Generate code to load a value into a UUID field. """ @staticmethod @abstractmethod - def load_to_iterable(tp: TypeInfo, extras: Extras) -> 'str | TypeInfo': + def load_to_iterable(tp: TypeInfo, extras: V1Extras) -> 'str | TypeInfo': """ Generate code to load a value into an iterable field (list, set, etc.). """ @staticmethod @abstractmethod - def load_to_tuple(tp: TypeInfo, extras: Extras) -> 'str | TypeInfo': + def load_to_tuple(tp: TypeInfo, extras: V1Extras) -> 'str | TypeInfo': """ Generate code to load a value into a tuple field. 
""" @staticmethod @abstractmethod - def load_to_named_tuple(tp: TypeInfo, extras: Extras) -> 'str | TypeInfo': + def load_to_named_tuple(tp: TypeInfo, extras: V1Extras) -> 'str | TypeInfo': """ Generate code to load a value into a named tuple field. """ @classmethod @abstractmethod - def load_to_named_tuple_untyped(cls, tp: TypeInfo, extras: Extras) -> 'str | TypeInfo': + def load_to_named_tuple_untyped(cls, tp: TypeInfo, extras: V1Extras) -> 'str | TypeInfo': """ Generate code to load a value into an untyped named tuple. """ @staticmethod @abstractmethod - def load_to_dict(tp: TypeInfo, extras: Extras) -> 'str | TypeInfo': + def load_to_dict(tp: TypeInfo, extras: V1Extras) -> 'str | TypeInfo': """ Generate code to load a value into a dictionary field. """ @staticmethod @abstractmethod - def load_to_defaultdict(tp: TypeInfo, extras: Extras) -> 'str | TypeInfo': + def load_to_defaultdict(tp: TypeInfo, extras: V1Extras) -> 'str | TypeInfo': """ Generate code to load a value into a defaultdict field. """ @staticmethod @abstractmethod - def load_to_typed_dict(tp: TypeInfo, extras: Extras) -> 'str | TypeInfo': + def load_to_typed_dict(tp: TypeInfo, extras: V1Extras) -> 'str | TypeInfo': """ Generate code to load a value into a typed dictionary field. """ @staticmethod @abstractmethod - def load_to_decimal(tp: TypeInfo, extras: Extras) -> str: + def load_to_decimal(tp: TypeInfo, extras: V1Extras) -> 'str | TypeInfo': + """ + Generate code to load a value into a Decimal field. + """ + + @staticmethod + @abstractmethod + def load_to_path(tp: TypeInfo, extras: V1Extras) -> 'str | TypeInfo': """ Generate code to load a value into a Decimal field. """ @staticmethod @abstractmethod - def load_to_datetime(tp: TypeInfo, extras: Extras) -> str: + def load_to_datetime(tp: TypeInfo, extras: V1Extras) -> str: """ Generate code to load a value into a datetime field. """ @staticmethod @abstractmethod - def load_to_time(tp: TypeInfo, extras: Extras) -> str: + def load_to_time(tp: TypeInfo, extras: V1Extras) -> str: """ Generate code to load a value into a time field. """ @staticmethod @abstractmethod - def load_to_date(tp: TypeInfo, extras: Extras) -> 'str | TypeInfo': + def load_to_date(tp: TypeInfo, extras: V1Extras) -> 'str | TypeInfo': """ Generate code to load a value into a date field. """ @staticmethod @abstractmethod - def load_to_timedelta(tp: TypeInfo, extras: Extras) -> 'str | TypeInfo': + def load_to_timedelta(tp: TypeInfo, extras: V1Extras) -> 'str | TypeInfo': """ Generate code to load a value into a timedelta field. """ @staticmethod - def load_to_dataclass(tp: TypeInfo, extras: Extras) -> 'str | TypeInfo': + def load_to_dataclass(tp: TypeInfo, extras: V1Extras) -> 'str | TypeInfo': """ Generate code to load a value into a `dataclass` type field. """ @@ -460,7 +460,7 @@ def load_to_dataclass(tp: TypeInfo, extras: Extras) -> 'str | TypeInfo': @abstractmethod def get_string_for_annotation(cls, tp: TypeInfo, - extras: Extras) -> 'str | TypeInfo': + extras: V1Extras) -> 'str | TypeInfo': """ Generate code to get the parser (dispatcher) for a given annotation type. 
diff --git a/dataclass_wizard/abstractions.pyi b/dataclass_wizard/abstractions.pyi index 4f52743c..091040d9 100644 --- a/dataclass_wizard/abstractions.pyi +++ b/dataclass_wizard/abstractions.pyi @@ -11,7 +11,8 @@ from typing import ( Text, Sequence, Iterable, Generic ) -from .models import Extras, TypeInfo +from .models import Extras +from .v1.models import Extras as V1Extras, TypeInfo from .type_def import ( DefFactory, FrozenKeys, ListOfJSONObject, JSONObject, Encoder, M, N, T, TT, NT, E, U, DD, LSQ @@ -443,7 +444,7 @@ class AbstractLoaderGenerator(ABC): @staticmethod @abstractmethod - def default_load_to(tp: TypeInfo, extras: Extras) -> str: + def default_load_to(tp: TypeInfo, extras: V1Extras) -> str: """ Generate code for the default load function if no other types match. Generally, this will be a stub load method. @@ -451,39 +452,28 @@ class AbstractLoaderGenerator(ABC): @staticmethod @abstractmethod - def load_after_type_check(tp: TypeInfo, extras: Extras) -> str: - """ - Generate code to load an object after confirming its type. - - :param tp: The type information (including annotation) of the field as a string. - :param extras: Additional context or dependencies for code generation. - :raises ParseError: If the object type is not as expected. - """ - - @staticmethod - @abstractmethod - def load_to_str(tp: TypeInfo, extras: Extras) -> str: + def load_to_str(tp: TypeInfo, extras: V1Extras) -> str: """ Generate code to load a value into a string field. """ @staticmethod @abstractmethod - def load_to_int(tp: TypeInfo, extras: Extras) -> str: + def load_to_int(tp: TypeInfo, extras: V1Extras) -> str: """ Generate code to load a value into an integer field. """ @staticmethod @abstractmethod - def load_to_float(tp: TypeInfo, extras: Extras) -> str: + def load_to_float(tp: TypeInfo, extras: V1Extras) -> str | TypeInfo: """ Generate code to load a value into a float field. """ @staticmethod @abstractmethod - def load_to_bool(_: str, extras: Extras) -> str: + def load_to_bool(_: str, extras: V1Extras) -> str: """ Generate code to load a value into a boolean field. Adds a helper function `as_bool` to the local context. @@ -491,28 +481,28 @@ class AbstractLoaderGenerator(ABC): @staticmethod @abstractmethod - def load_to_bytes(tp: TypeInfo, extras: Extras) -> str: + def load_to_bytes(tp: TypeInfo, extras: V1Extras) -> str | TypeInfo: """ Generate code to load a value into a bytes field. """ @staticmethod @abstractmethod - def load_to_bytearray(tp: TypeInfo, extras: Extras) -> str: + def load_to_bytearray(tp: TypeInfo, extras: V1Extras) -> str | TypeInfo: """ Generate code to load a value into a bytearray field. """ @staticmethod @abstractmethod - def load_to_none(tp: TypeInfo, extras: Extras) -> str: + def load_to_none(tp: TypeInfo, extras: V1Extras) -> str: """ Generate code to load a value into a None. """ @staticmethod @abstractmethod - def load_to_literal(tp: TypeInfo, extras: Extras) -> str | TypeInfo: + def load_to_literal(tp: TypeInfo, extras: V1Extras) -> str | TypeInfo: """ Generate code to confirm a value is equivalent to one of the provided literals. @@ -520,111 +510,118 @@ class AbstractLoaderGenerator(ABC): @classmethod @abstractmethod - def load_to_union(cls, tp: TypeInfo, extras: Extras) -> str | TypeInfo: + def load_to_union(cls, tp: TypeInfo, extras: V1Extras) -> str | TypeInfo: """ Generate code to load a value into a `Union[X, Y, ...]` (one of [X, Y, ...] 
possible types) """ @staticmethod @abstractmethod - def load_to_enum(tp: TypeInfo, extras: Extras) -> str: + def load_to_enum(tp: TypeInfo, extras: V1Extras) -> str | TypeInfo: """ Generate code to load a value into an Enum field. """ @staticmethod @abstractmethod - def load_to_uuid(tp: TypeInfo, extras: Extras) -> str | TypeInfo: + def load_to_uuid(tp: TypeInfo, extras: V1Extras) -> str | TypeInfo: """ Generate code to load a value into a UUID field. """ @staticmethod @abstractmethod - def load_to_iterable(tp: TypeInfo, extras: Extras) -> str | TypeInfo: + def load_to_iterable(tp: TypeInfo, extras: V1Extras) -> str | TypeInfo: """ Generate code to load a value into an iterable field (list, set, etc.). """ @staticmethod @abstractmethod - def load_to_tuple(tp: TypeInfo, extras: Extras) -> str | TypeInfo: + def load_to_tuple(tp: TypeInfo, extras: V1Extras) -> str | TypeInfo: """ Generate code to load a value into a tuple field. """ @classmethod @abstractmethod - def load_to_named_tuple(cls, tp: TypeInfo, extras: Extras) -> str | TypeInfo: + def load_to_named_tuple(cls, tp: TypeInfo, extras: V1Extras) -> str | TypeInfo: """ Generate code to load a value into a named tuple field. """ @classmethod @abstractmethod - def load_to_named_tuple_untyped(cls, tp: TypeInfo, extras: Extras) -> str | TypeInfo: + def load_to_named_tuple_untyped(cls, tp: TypeInfo, extras: V1Extras) -> str | TypeInfo: """ Generate code to load a value into an untyped named tuple. """ @staticmethod @abstractmethod - def load_to_dict(tp: TypeInfo, extras: Extras) -> str | TypeInfo: + def load_to_dict(tp: TypeInfo, extras: V1Extras) -> str | TypeInfo: """ Generate code to load a value into a dictionary field. """ @staticmethod @abstractmethod - def load_to_defaultdict(tp: TypeInfo, extras: Extras) -> str | TypeInfo: + def load_to_defaultdict(tp: TypeInfo, extras: V1Extras) -> str | TypeInfo: """ Generate code to load a value into a defaultdict field. """ @staticmethod @abstractmethod - def load_to_typed_dict(tp: TypeInfo, extras: Extras) -> str | TypeInfo: + def load_to_typed_dict(tp: TypeInfo, extras: V1Extras) -> str | TypeInfo: """ Generate code to load a value into a typed dictionary field. """ @staticmethod @abstractmethod - def load_to_decimal(tp: TypeInfo, extras: Extras) -> str | TypeInfo: + def load_to_decimal(tp: TypeInfo, extras: V1Extras) -> str | TypeInfo: """ Generate code to load a value into a Decimal field. """ @staticmethod @abstractmethod - def load_to_datetime(tp: TypeInfo, extras: Extras) -> str | TypeInfo: + def load_to_path(tp: TypeInfo, extras: V1Extras) -> str | TypeInfo: """ - Generate code to load a value into a datetime field. + Generate code to load a value into a Path field. """ @staticmethod @abstractmethod - def load_to_time(tp: TypeInfo, extras: Extras) -> str: + def load_to_date(tp: TypeInfo, extras: V1Extras) -> str | TypeInfo: """ - Generate code to load a value into a time field. + Generate code to load a value into a date field. """ @staticmethod @abstractmethod - def load_to_date(tp: TypeInfo, extras: Extras) -> str | TypeInfo: + def load_to_datetime(tp: TypeInfo, extras: V1Extras) -> str | TypeInfo: """ - Generate code to load a value into a date field. + Generate code to load a value into a datetime field. + """ + + @staticmethod + @abstractmethod + def load_to_time(tp: TypeInfo, extras: V1Extras) -> str: + """ + Generate code to load a value into a time field. 
""" @staticmethod @abstractmethod - def load_to_timedelta(tp: TypeInfo, extras: Extras) -> str | TypeInfo: + def load_to_timedelta(tp: TypeInfo, extras: V1Extras) -> str | TypeInfo: """ Generate code to load a value into a timedelta field. """ @staticmethod - def load_to_dataclass(tp: TypeInfo, extras: Extras) -> str | TypeInfo: + def load_to_dataclass(tp: TypeInfo, extras: V1Extras) -> str | TypeInfo: """ Generate code to load a value into a `dataclass` type field. """ @@ -633,7 +630,7 @@ class AbstractLoaderGenerator(ABC): @abstractmethod def get_string_for_annotation(cls, tp: TypeInfo, - extras: Extras) -> str | TypeInfo: + extras: V1Extras) -> str | TypeInfo: """ Generate code to get the parser (dispatcher) for a given annotation type. diff --git a/dataclass_wizard/bases.py b/dataclass_wizard/bases.py index e7c38381..d556a1cc 100644 --- a/dataclass_wizard/bases.py +++ b/dataclass_wizard/bases.py @@ -154,7 +154,7 @@ class AbstractMeta(metaclass=ABCOrAndMeta): # one that does not have a known mapping to a dataclass field. # # The default is to only log a "warning" for such cases, which is visible - # when `debug_enabled` is true and logging is properly configured. + # when `v1_debug` is true and logging is properly configured. raise_on_unknown_json_key: ClassVar[bool] = False # A customized mapping of JSON keys to dataclass fields, that is used @@ -275,7 +275,7 @@ class AbstractMeta(metaclass=ABCOrAndMeta): # # Valid options are: # - `"ignore"` (default): Silently ignore unknown keys. - # - `"warn"`: Log a warning for each unknown key. Requires `debug_enabled` + # - `"warn"`: Log a warning for each unknown key. Requires `v1_debug` # to be `True` and properly configured logging. # - `"raise"`: Raise an `UnknownKeyError` for the first unknown key encountered. 
v1_on_unknown_key: ClassVar[KeyAction] = None diff --git a/dataclass_wizard/bases_meta.py b/dataclass_wizard/bases_meta.py index 3a63e801..5d79408e 100644 --- a/dataclass_wizard/bases_meta.py +++ b/dataclass_wizard/bases_meta.py @@ -20,7 +20,7 @@ from .enums import DateTimeTo, LetterCase, LetterCasePriority from .v1.enums import KeyAction, KeyCase from .environ.loaders import EnvLoader -from .errors import ParseError +from .errors import ParseError, show_deprecation_warning from .loader_selection import get_loader from .log import LOG from .type_def import E @@ -132,6 +132,11 @@ def bind_to(cls, dataclass: type, create=True, is_default=True, _enable_debug_mode_if_needed(cls_loader, cls.v1_debug) elif cls.debug_enabled: + show_deprecation_warning( + 'debug_enabled', + fmt="Deprecated Meta setting {name} ({reason}).", + reason='Use `v1_debug` instead', + ) _enable_debug_mode_if_needed(cls_loader, cls.debug_enabled) if cls.json_key_to_field is not None: diff --git a/dataclass_wizard/bases_meta.pyi b/dataclass_wizard/bases_meta.pyi index 968965ab..8b170ada 100644 --- a/dataclass_wizard/bases_meta.pyi +++ b/dataclass_wizard/bases_meta.pyi @@ -64,6 +64,7 @@ def LoadMeta(*, debug_enabled: 'bool | int | str' = MISSING, tag_key: str = TAG, auto_assign_tags: bool = MISSING, v1: bool = MISSING, + v1_debug: bool | int | str = False, v1_key_case: KeyCase | str | None = MISSING, v1_field_to_alias: dict[str, str] = MISSING, v1_on_unknown_key: KeyAction | str | None = KeyAction.IGNORE, diff --git a/dataclass_wizard/class_helper.py b/dataclass_wizard/class_helper.py index b751045f..acc1667f 100644 --- a/dataclass_wizard/class_helper.py +++ b/dataclass_wizard/class_helper.py @@ -2,7 +2,7 @@ from dataclasses import MISSING, fields from .bases import AbstractMeta -from .constants import CATCH_ALL +from .constants import CATCH_ALL, PACKAGE_NAME from .errors import InvalidConditionError from .models import JSONField, JSON, Extras, PatternedDT, CatchAll, Condition from .type_def import ExplicitNull @@ -421,7 +421,7 @@ def _setup_v1_load_config_for_cls( return load_dataclass_field_to_alias -def call_meta_initializer_if_needed(cls, package_name='dataclass_wizard'): +def call_meta_initializer_if_needed(cls, package_name=PACKAGE_NAME): """ Calls the Meta initializer when the inner :class:`Meta` is sub-classed. """ diff --git a/dataclass_wizard/class_helper.pyi b/dataclass_wizard/class_helper.pyi index 6c117418..0f5005a7 100644 --- a/dataclass_wizard/class_helper.pyi +++ b/dataclass_wizard/class_helper.pyi @@ -4,6 +4,7 @@ from typing import Any, Callable, Literal, overload from .abstractions import W, AbstractLoader, AbstractDumper, AbstractParser, E, AbstractLoaderGenerator from .bases import META, AbstractMeta +from .constants import PACKAGE_NAME from .models import Condition from .type_def import ExplicitNullType, T from .utils.dict_helper import DictWithLowerStore @@ -215,7 +216,7 @@ def _setup_v1_load_config_for_cls(cls: type): def call_meta_initializer_if_needed(cls: type[W | E], - package_name='dataclass_wizard') -> None: + package_name=PACKAGE_NAME) -> None: """ Calls the Meta initializer when the inner :class:`Meta` is sub-classed. 
""" diff --git a/dataclass_wizard/constants.py b/dataclass_wizard/constants.py index 682f181a..b681b163 100644 --- a/dataclass_wizard/constants.py +++ b/dataclass_wizard/constants.py @@ -2,6 +2,9 @@ import sys +# Package name +PACKAGE_NAME = 'dataclass_wizard' + # Library Log Level LOG_LEVEL = os.getenv('WIZARD_LOG_LEVEL', 'ERROR').upper() @@ -14,6 +17,9 @@ # Check if currently running Python 3.11 or higher PY311_OR_ABOVE = _PY_VERSION >= (3, 11) +# Check if currently running Python 3.12 or higher +PY312_OR_ABOVE = _PY_VERSION >= (3, 12) + # Check if currently running Python 3.13 or higher PY313_OR_ABOVE = _PY_VERSION >= (3, 13) diff --git a/dataclass_wizard/dumpers.py b/dataclass_wizard/dumpers.py index 0919ec21..c42cf183 100644 --- a/dataclass_wizard/dumpers.py +++ b/dataclass_wizard/dumpers.py @@ -8,6 +8,7 @@ See the end of this file for the original Apache license from this library. """ +from base64 import b64encode from collections import defaultdict, deque # noinspection PyProtectedMember,PyUnresolvedReferences from dataclasses import _is_dataclass_instance @@ -36,7 +37,7 @@ from .log import LOG from .models import get_skip_if_condition, finalize_skip_if from .type_def import ( - ExplicitNull, NoneType, JSONObject, + Buffer, ExplicitNull, NoneType, JSONObject, DD, LSQ, E, U, LT, NT, T ) from .utils.dict_helper import NestedDict @@ -77,6 +78,10 @@ def dump_with_null(o: None, *_): def dump_with_str(o: str, *_): return o + @staticmethod + def dump_with_bytes(o: Buffer, *_) -> str: + return b64encode(o).decode() + @staticmethod def dump_with_int(o: int, *_): return o @@ -159,8 +164,8 @@ def setup_default_dumper(cls=DumpMixin): cls.register_dump_hook(int, cls.dump_with_int) cls.register_dump_hook(float, cls.dump_with_float) cls.register_dump_hook(bool, cls.dump_with_bool) - cls.register_dump_hook(bytes, cls.default_dump_with) - cls.register_dump_hook(bytearray, cls.default_dump_with) + cls.register_dump_hook(bytes, cls.dump_with_bytes) + cls.register_dump_hook(bytearray, cls.dump_with_bytes) cls.register_dump_hook(NoneType, cls.dump_with_null) # Complex types cls.register_dump_hook(Enum, cls.dump_with_enum) diff --git a/dataclass_wizard/errors.py b/dataclass_wizard/errors.py index ecda39dc..809d811b 100644 --- a/dataclass_wizard/errors.py +++ b/dataclass_wizard/errors.py @@ -3,6 +3,7 @@ from typing import (Any, Type, Dict, Tuple, ClassVar, Optional, Union, Iterable, Callable, Collection, Sequence) +from .constants import PACKAGE_NAME from .utils.string_conv import normalize @@ -24,7 +25,7 @@ def type_name(obj: type) -> str: def show_deprecation_warning( - fn: Callable, + fn: 'Callable | str', reason: str, fmt: str = "Deprecated function {name} ({reason})." 
) -> None: @@ -38,7 +39,7 @@ def show_deprecation_warning( import warnings warnings.simplefilter('always', DeprecationWarning) warnings.warn( - fmt.format(name=fn.__name__, reason=reason), + fmt.format(name=getattr(fn, '__name__', fn), reason=reason), category=DeprecationWarning, stacklevel=2, ) @@ -220,7 +221,7 @@ class MissingFields(JSONWizardError): ' Input JSON: {json_string}' '{e}') - def __init__(self, base_err: Exception, + def __init__(self, base_err: 'Exception | None', obj: JSONObject, cls: Type, cls_fields: Tuple[Field, ...], @@ -251,6 +252,7 @@ def __init__(self, base_err: Exception, self.kwargs = kwargs self.class_name: str = self.name(cls) self.parent_cls = cls + self.all_fields = cls_fields @property def message(self) -> str: @@ -262,10 +264,16 @@ def message(self) -> str: meta = get_meta(self.parent_cls) v1 = meta.v1 + if isinstance(self.obj, list): + keys = [f.name for f in self.all_fields] + obj = dict(zip(keys, self.obj)) + else: + obj = self.obj + # check if any field names match, and where the key transform could be the cause # see https://github.com/rnag/dataclass-wizard/issues/54 for more info - normalized_json_keys = [normalize(key) for key in self.obj] + normalized_json_keys = [normalize(key) for key in obj] if next((f for f in self.missing_fields if normalize(f) in normalized_json_keys), None): from .enums import LetterCase from .v1.enums import KeyCase @@ -424,7 +432,7 @@ class RecursiveClassError(JSONWizardError): _TEMPLATE = ('Failure parsing class `{cls}`. ' 'Consider updating the Meta config to enable ' 'the `recursive_classes` flag.\n\n' - 'Example with `dataclass_wizard.LoadMeta`:\n' + f'Example with `{PACKAGE_NAME}.LoadMeta`:\n' ' >>> LoadMeta(recursive_classes=True).bind_to({cls})\n\n' 'For more info, please see:\n' ' https://github.com/rnag/dataclass-wizard/issues/62') diff --git a/dataclass_wizard/errors.pyi b/dataclass_wizard/errors.pyi index db3e5d28..88f587e6 100644 --- a/dataclass_wizard/errors.pyi +++ b/dataclass_wizard/errors.pyi @@ -13,7 +13,7 @@ def type_name(obj: type) -> str: def show_deprecation_warning( - fn: Callable, + fn: Callable | str, reason: str, fmt: str = "Deprecated function {name} ({reason})." ) -> None: @@ -132,14 +132,15 @@ class MissingFields(JSONWizardError): obj: JSONObject fields: list[str] + all_fields: tuple[Field, ...] 
missing_fields: Collection[str] - base_error: Exception + base_error: Exception | None missing_keys: Collection[str] | None kwargs: dict[str, Any] class_name: str parent_cls: type - def __init__(self, base_err: Exception, + def __init__(self, base_err: Exception | None, obj: JSONObject, cls: type, cls_fields: tuple[Field, ...], diff --git a/dataclass_wizard/loader_selection.py b/dataclass_wizard/loader_selection.py index 296aaff2..eb0e20b3 100644 --- a/dataclass_wizard/loader_selection.py +++ b/dataclass_wizard/loader_selection.py @@ -48,13 +48,14 @@ def fromlist(cls: type[T], list_of_dict: list[JSONObject]) -> list[T]: def _get_load_fn_for_dataclass(cls: type[T], v1=None) -> Callable[[JSONObject], T]: + meta = get_meta(cls) if v1 is None: - v1 = getattr(get_meta(cls), 'v1', False) + v1 = getattr(meta, 'v1', False) if v1: from .v1.loaders import load_func_for_dataclass as V1_load_func_for_dataclass # noinspection PyTypeChecker - load = V1_load_func_for_dataclass(cls, {}) + load = V1_load_func_for_dataclass(cls) else: from .loaders import load_func_for_dataclass load = load_func_for_dataclass(cls) diff --git a/dataclass_wizard/log.py b/dataclass_wizard/log.py index e54f43f2..7a6b6ab7 100644 --- a/dataclass_wizard/log.py +++ b/dataclass_wizard/log.py @@ -1,7 +1,7 @@ from logging import getLogger -from .constants import LOG_LEVEL +from .constants import LOG_LEVEL, PACKAGE_NAME -LOG = getLogger('dataclass_wizard') +LOG = getLogger(PACKAGE_NAME) LOG.setLevel(LOG_LEVEL) diff --git a/dataclass_wizard/serial_json.py b/dataclass_wizard/serial_json.py index 53e6e9ab..e1834ead 100644 --- a/dataclass_wizard/serial_json.py +++ b/dataclass_wizard/serial_json.py @@ -83,8 +83,8 @@ def __init_subclass__(cls, str=True, debug=False, logging.basicConfig(level=default_lvl) # minimum logging level for logs by this library min_level = default_lvl if isinstance(debug, bool) else debug - # set `debug_enabled` flag for the class's Meta - load_meta_kwargs['debug_enabled'] = min_level + # set `v1_debug` flag for the class's Meta + load_meta_kwargs['v1_debug'] = min_level # Calls the Meta initializer when inner :class:`Meta` is sub-classed. call_meta_initializer_if_needed(cls) diff --git a/dataclass_wizard/type_def.py b/dataclass_wizard/type_def.py index dbbb45aa..815981ff 100644 --- a/dataclass_wizard/type_def.py +++ b/dataclass_wizard/type_def.py @@ -1,4 +1,5 @@ __all__ = [ + 'Buffer', 'PyForwardRef', 'PyProtocol', 'PyDeque', @@ -55,8 +56,7 @@ ) from uuid import UUID -from .constants import PY310_OR_ABOVE, PY311_OR_ABOVE, PY313_OR_ABOVE - +from .constants import PY310_OR_ABOVE, PY311_OR_ABOVE, PY313_OR_ABOVE, PY312_OR_ABOVE # The class of the `None` singleton, cached for re-usability if PY310_OR_ABOVE: @@ -153,22 +153,29 @@ # wrappers from `typing_extensions`. 
if PY313_OR_ABOVE: # pragma: no cover + from collections.abc import Buffer + from typing import (Required as PyRequired, NotRequired as PyNotRequired, ReadOnly as PyReadOnly, LiteralString as PyLiteralString, dataclass_transform) - elif PY311_OR_ABOVE: # pragma: no cover + if PY312_OR_ABOVE: + from collections.abc import Buffer + else: + from typing_extensions import Buffer + from typing import (Required as PyRequired, NotRequired as PyNotRequired, LiteralString as PyLiteralString, dataclass_transform) from typing_extensions import ReadOnly as PyReadOnly else: - from typing_extensions import (Required as PyRequired, + from typing_extensions import (Buffer, + Required as PyRequired, NotRequired as PyNotRequired, - ReadOnly as PyReadOnly, + ReadOnly as PyReadOnly, LiteralString as PyLiteralString, dataclass_transform) diff --git a/dataclass_wizard/utils/function_builder.py b/dataclass_wizard/utils/function_builder.py index 4de55a98..5477db0d 100644 --- a/dataclass_wizard/utils/function_builder.py +++ b/dataclass_wizard/utils/function_builder.py @@ -61,14 +61,18 @@ def function(self, name: str, args: list, return_type=MISSING, def _with_new_block(self, name: str, - condition: 'str | None' = None) -> 'FunctionBuilder': + condition: 'str | None' = None, + comment: str = '') -> 'FunctionBuilder': """Creates a new block. Used with a context manager (with).""" indent = ' ' * self.indent_level + if comment: + comment = f' # {comment}' + if condition is not None: - self.current_function["body"].append(f"{indent}{name} {condition}:") + self.current_function["body"].append(f"{indent}{name} {condition}:{comment}") else: - self.current_function["body"].append(f"{indent}{name}:") + self.current_function["body"].append(f"{indent}{name}:{comment}") return self @@ -88,7 +92,7 @@ def for_(self, condition: str) -> 'FunctionBuilder': """ return self._with_new_block('for', condition) - def if_(self, condition: str) -> 'FunctionBuilder': + def if_(self, condition: str, comment: str = '') -> 'FunctionBuilder': """Equivalent to the `if` statement in Python. Sample Usage: @@ -102,7 +106,7 @@ def if_(self, condition: str) -> 'FunctionBuilder': >>> ... """ - return self._with_new_block('if', condition) + return self._with_new_block('if', condition, comment) def elif_(self, condition: str) -> 'FunctionBuilder': """Equivalent to the `elif` statement in Python. diff --git a/dataclass_wizard/utils/type_conv.py b/dataclass_wizard/utils/type_conv.py index 561cbdc9..71e5edd0 100644 --- a/dataclass_wizard/utils/type_conv.py +++ b/dataclass_wizard/utils/type_conv.py @@ -4,27 +4,33 @@ 'as_list', 'as_dict', 'as_enum', + 'as_datetime_v1', + 'as_date_v1', + 'as_time_v1', 'as_datetime', 'as_date', 'as_time', 'as_timedelta', - 'date_to_timestamp'] + 'date_to_timestamp', + 'TRUTHY_VALUES', + ] import json -from datetime import datetime, time, date, timedelta, timezone +from collections.abc import Callable +from datetime import datetime, time, date, timedelta, timezone, tzinfo from numbers import Number -from typing import Union, Type, AnyStr, Optional, Iterable +from typing import Union, Type, AnyStr, Optional, Iterable, Any from ..errors import ParseError from ..lazy_imports import pytimeparse from ..type_def import E, N, NUMBERS - # What values are considered "truthy" when converting to a boolean type. 
# noinspection SpellCheckingInspection -_TRUTHY_VALUES = frozenset({'true', 't', 'yes', 'y', 'on', '1'}) +TRUTHY_VALUES = frozenset({'true', 't', 'yes', 'y', 'on', '1'}) +# TODO Remove: Unused in V1 def as_bool(o: Union[str, bool, N]): """ Return `o` if already a boolean, otherwise return the boolean value @@ -34,7 +40,7 @@ def as_bool(o: Union[str, bool, N]): return o if t is str: - return o.lower() in _TRUTHY_VALUES + return o.lower() in TRUTHY_VALUES return o == 1 @@ -97,6 +103,7 @@ def as_int(o: Union[str, int, float, bool, None], base_type=int, return default +# TODO Remove: Unused in V1 def as_str(o: Union[str, None], base_type=str): """ Return `o` if already a str, otherwise return the string value for `o`. @@ -199,6 +206,106 @@ def as_enum(o: Union[AnyStr, N], return None +def as_datetime_v1(o: Union[int, float, datetime], + __from_timestamp: Callable[[float, tzinfo], datetime], + __utc=timezone.utc): + """ + V1: Attempt to convert an object `o` to a :class:`datetime` object using the + below logic. + + * ``Number`` (int or float): Convert a numeric timestamp via the + built-in ``fromtimestamp`` method, and return a UTC datetime. + * ``base_type``: Return object `o` if it's already of this type. + + Note: It is assumed that `o` is not a ``str`` (in ISO format), as + de-serialization in ``v1`` already checks for this. + + Otherwise, if we're unable to convert the value of `o` to a + :class:`datetime` as expected, raise an error. + + """ + try: + # We can assume that `o` is a number, as generally this will be the + # case. + return __from_timestamp(o, __utc) + + except Exception: + # Note: the `__self__` attribute refers to the class bound + # to the class method `fromtimestamp`. + # + # See: https://stackoverflow.com/a/41258933/10237506 + # + # noinspection PyUnresolvedReferences + if o.__class__ is __from_timestamp.__self__: + return o + + # Check `type` explicitly, because `bool` is a sub-class of `int` + if o.__class__ not in NUMBERS: + raise TypeError(f'Unsupported type, value={o!r}, type={type(o)}') + + raise + + +def as_date_v1(o: Union[int, float, date], + __from_timestamp: Callable[[float], date]): + """ + V1: Attempt to convert an object `o` to a :class:`date` object using the + below logic. + + * ``Number`` (int or float): Convert a numeric timestamp via the + built-in ``fromtimestamp`` method, and return a date. + * ``base_type``: Return object `o` if it's already of this type. + + Note: It is assumed that `o` is not a ``str`` (in ISO format), as + de-serialization in ``v1`` already checks for this. + + Otherwise, if we're unable to convert the value of `o` to a + :class:`date` as expected, raise an error. + + """ + try: + # We can assume that `o` is a number, as generally this will be the + # case. + return __from_timestamp(o) + + except Exception: + # Note: the `__self__` attribute refers to the class bound + # to the class method `fromtimestamp`. + # + # See: https://stackoverflow.com/a/41258933/10237506 + # + # noinspection PyUnresolvedReferences + if o.__class__ is __from_timestamp.__self__: + return o + + # Check `type` explicitly, because `bool` is a sub-class of `int` + if o.__class__ not in NUMBERS: + raise TypeError(f'Unsupported type, value={o!r}, type={type(o)}') + + raise + + +def as_time_v1(o: Union[time, Any], base_type: type[time]): + """ + V1: Attempt to convert an object `o` to a :class:`time` object using the + below logic. + + * ``base_type``: Return object `o` if it's already of this type. 
+ + Note: It is assumed that `o` is not a ``str`` (in ISO format), as + de-serialization in ``v1`` already checks for this. + + Otherwise, if we're unable to convert the value of `o` to a + :class:`time` as expected, raise an error. + + """ + if o.__class__ is base_type: + return o + + raise TypeError(f'Unsupported type, value={o!r}, type={type(o)}') + + +# TODO Remove: Unused in V1 def as_datetime(o: Union[str, Number, datetime], base_type=datetime, default=None, raise_=True): """ @@ -247,6 +354,7 @@ def as_datetime(o: Union[str, Number, datetime], return default +# TODO Remove: Unused in V1 def as_date(o: Union[str, Number, date], base_type=date, default=None, raise_=True): """ @@ -295,6 +403,7 @@ def as_date(o: Union[str, Number, date], return default +# TODO Remove: Unused in V1 def as_time(o: Union[str, time], base_type=time, default=None, raise_=True): """ Attempt to convert an object `o` to a :class:`time` object using the diff --git a/dataclass_wizard/utils/typing_compat.py b/dataclass_wizard/utils/typing_compat.py index 7a42e11a..26c3a976 100644 --- a/dataclass_wizard/utils/typing_compat.py +++ b/dataclass_wizard/utils/typing_compat.py @@ -4,6 +4,7 @@ __all__ = [ 'is_literal', + 'is_union', 'get_origin', 'get_origin_v2', 'is_typed_dict_type_qualifier', diff --git a/dataclass_wizard/v1/decorators.py b/dataclass_wizard/v1/decorators.py new file mode 100644 index 00000000..0db01037 --- /dev/null +++ b/dataclass_wizard/v1/decorators.py @@ -0,0 +1,133 @@ +from dataclasses import MISSING +from functools import wraps +from typing import Callable, Union + +from .models import Extras, TypeInfo +from ..utils.function_builder import FunctionBuilder + + +def setup_recursive_safe_function( + func: Callable = None, + *, + fn_name: Union[str, None] = None, + is_generic: bool = False, +) -> Callable: + """ + A decorator to ensure recursion safety and facilitate dynamic function generation + with `FunctionBuilder`, supporting both generic and non-generic types. + + The decorated function can define the logic for dynamically generated functions. + If `fn_name` is provided, the decorator assumes that the function generation + context (e.g., `with fn_gen.function(...)`) has already been handled externally + and will not apply it again. + + :param func: The function to decorate. If None, the decorator is applied with arguments. + :type func: Callable, optional + :param fn_name: A format string for dynamically generating function names, or None. + :type fn_name: str, optional + :param is_generic: Whether the function deals with generic types. + :type is_generic: bool, optional + :return: The decorated function with recursion safety and dynamic function generation. + :rtype: Callable + """ + + if func is None: + return lambda f: setup_recursive_safe_function( + f, fn_name=fn_name, is_generic=is_generic + ) + + def _wrapper_logic(tp: TypeInfo, extras: Extras, _cls=None) -> str: + """ + Shared logic for both class and regular methods. Ensures recursion safety + and integrates `FunctionBuilder` to dynamically create functions. + + :param tp: The type or generic type being processed. + :param extras: A context dictionary containing auxiliary information like + recursion guards and function builders. + :type extras: dict + :param _cls: The class context for class methods. Defaults to None. + :return: The generated function call expression as a string. 
+ :rtype: str + """ + cls = tp.args if is_generic else tp.origin + recursion_guard = extras['recursion_guard'] + + if (_fn_name := recursion_guard.get(cls)) is None: + cls_name = extras['cls_name'] + tp_name = func.__name__.split('_', 2)[-1] + + # Generate the function name + if fn_name: + _fn_name = fn_name.format(cls_name=tp.name) + else: + _fn_name = ( + f'_load_{cls_name}_{tp_name}_{tp.field_i}' if is_generic + else f'_load_{cls_name}_{tp_name}_{tp.name}' + ) + + recursion_guard[cls] = _fn_name + + # Retrieve the main FunctionBuilder + main_fn_gen = extras['fn_gen'] + + # Prepare a new FunctionBuilder for this function + updated_extras = extras.copy() + updated_extras['locals'] = _locals = {'cls': cls} + updated_extras['fn_gen'] = new_fn_gen = FunctionBuilder() + + # Apply the decorated function logic + if fn_name: + # Assume `with fn_gen.function(...)` is already handled + func(_cls, tp, updated_extras) if _cls else func(tp, updated_extras) + else: + # Apply `with fn_gen.function(...)` explicitly + with new_fn_gen.function(_fn_name, ['v1'], MISSING, _locals): + func(_cls, tp, updated_extras) if _cls else func(tp, updated_extras) + + # Merge the new FunctionBuilder into the main one + main_fn_gen |= new_fn_gen + + return f'{_fn_name}({tp.v()})' + + # Determine if the function is a class method + # noinspection PyUnresolvedReferences + is_class_method = func.__code__.co_argcount == 3 + + if is_class_method: + def wrapper_class_method(_cls, tp, extras) -> str: + """ + Wrapper logic for class methods. Passes the class context to `_wrapper_logic`. + + :param _cls: The class instance. + :param tp: The type or generic type being processed. + :param extras: A context dictionary with auxiliary information. + :type extras: dict + :return: The generated function call expression as a string. + :rtype: str + """ + return _wrapper_logic(tp, extras, _cls) + + wrapper = wraps(func)(wrapper_class_method) + else: + wrapper = wraps(func)(_wrapper_logic) + + return wrapper + + +def setup_recursive_safe_function_for_generic(func: Callable) -> Callable: + """ + A helper decorator to handle generic types using + `setup_recursive_safe_function`. + + Parameters + ---------- + func : Callable + The function to be decorated, responsible for returning the + generated function name. + + Returns + ------- + Callable + A wrapped function ensuring recursion safety for generic types. + """ + return setup_recursive_safe_function(func, is_generic=True) diff --git a/dataclass_wizard/v1/enums.py b/dataclass_wizard/v1/enums.py index fd9e406a..ea4c4fc8 100644 --- a/dataclass_wizard/v1/enums.py +++ b/dataclass_wizard/v1/enums.py @@ -6,39 +6,48 @@ class KeyAction(Enum): """ - Defines the action to take when an unknown key is encountered during deserialization. + Specifies how to handle unknown keys encountered during deserialization. + + Actions: + - `IGNORE`: Skip unknown keys silently. + - `RAISE`: Raise an exception upon encountering the first unknown key. + - `WARN`: Log a warning for each unknown key. + + For capturing unknown keys (e.g., including them in a dataclass), use the `CatchAll` field. + More details: https://dataclass-wizard.readthedocs.io/en/latest/common_use_cases/handling_unknown_json_keys.html#capturing-unknown-keys-with-catchall """ IGNORE = 0 # Silently skip unknown keys. RAISE = 1 # Raise an exception for the first unknown key. WARN = 2 # Log a warning for each unknown key. - # INCLUDE = 3 class KeyCase(Enum): """ - By default, performs no conversion on strings. 
- ex: `MY_FIELD_NAME` -> `MY_FIELD_NAME` - + Defines transformations for string keys, commonly used for mapping JSON keys to dataclass fields. + + Key transformations: + - `CAMEL`: Converts snake_case to camelCase. + Example: `my_field_name` -> `myFieldName` + - `PASCAL`: Converts snake_case to PascalCase (UpperCamelCase). + Example: `my_field_name` -> `MyFieldName` + - `KEBAB`: Converts camelCase or snake_case to kebab-case. + Example: `myFieldName` -> `my-field-name` + - `SNAKE`: Converts camelCase to snake_case. + Example: `myFieldName` -> `my_field_name` + - `AUTO`: Automatically maps JSON keys to dataclass fields by + attempting all valid key casing transforms at runtime. + Example: `My-Field-Name` -> `my_field_name` (cached for future lookups) + + By default, no transformation is applied: + Example: `MY_FIELD_NAME` -> `MY_FIELD_NAME` """ - # Converts strings (generally in snake case) to camel case. - # ex: `my_field_name` -> `myFieldName` - CAMEL = C = FuncWrapper(to_camel_case) - - # Converts strings to "upper" camel case. - # ex: `my_field_name` -> `MyFieldName` - PASCAL = P = FuncWrapper(to_pascal_case) - # Converts strings (generally in camel or snake case) to lisp case. - # ex: `myFieldName` -> `my-field-name` - KEBAB = K = FuncWrapper(to_lisp_case) - # Converts strings (generally in camel case) to snake case. - # ex: `myFieldName` -> `my_field_name` - SNAKE = S = FuncWrapper(to_snake_case) - # Auto-maps JSON keys to dataclass fields. - # - # All valid key casing transforms are attempted at runtime, - # and the result is cached for subsequent lookups. - # ex: `My-Field-Name` -> `my_field_name` - AUTO = A = None + # Key casing options + CAMEL = C = FuncWrapper(to_camel_case) # Convert to `camelCase` + PASCAL = P = FuncWrapper(to_pascal_case) # Convert to `PascalCase` + KEBAB = K = FuncWrapper(to_lisp_case) # Convert to `kebab-case` + SNAKE = S = FuncWrapper(to_snake_case) # Convert to `snake_case` + AUTO = A = None # Attempt all valid casing transforms at runtime. 
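+
+    # Note: the key-casing transforms are wrapped in `FuncWrapper` because a
+    # plain function assigned in an Enum body would become a method rather
+    # than a member value; `__call__` below unwraps it via `self.value.f`.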
def __call__(self, *args): + """Apply the key transformation.""" return self.value.f(*args) diff --git a/dataclass_wizard/v1/loaders.py b/dataclass_wizard/v1/loaders.py index 167bef67..560d330d 100644 --- a/dataclass_wizard/v1/loaders.py +++ b/dataclass_wizard/v1/loaders.py @@ -1,7 +1,8 @@ # TODO cleanup imports import collections.abc as abc -from base64 import decodebytes +import dataclasses +from base64 import b64decode from collections import defaultdict, deque from dataclasses import is_dataclass, MISSING, Field from datetime import datetime, time, date, timedelta @@ -12,26 +13,26 @@ from typing import ( Any, Type, Dict, List, Tuple, Iterable, Sequence, Union, NamedTupleMeta, - SupportsFloat, AnyStr, Text, Callable, Optional, cast, Literal, Annotated + SupportsFloat, AnyStr, Text, Callable, Optional, cast, Literal, Annotated, NamedTuple ) from uuid import UUID -from .models import TypeInfo +from .decorators import (setup_recursive_safe_function, + setup_recursive_safe_function_for_generic) +from .enums import KeyAction, KeyCase +from .models import Extras, TypeInfo from ..abstractions import AbstractLoaderGenerator -from ..bases import BaseLoadHook, AbstractMeta +from ..bases import BaseLoadHook, AbstractMeta, META from ..class_helper import ( - v1_dataclass_field_to_alias, json_field_to_dataclass_field, - CLASS_TO_LOAD_FUNC, dataclass_fields, get_meta, is_subclass_safe, DATACLASS_FIELD_TO_ALIAS_PATH_FOR_LOAD, + v1_dataclass_field_to_alias, CLASS_TO_LOAD_FUNC, dataclass_fields, get_meta, is_subclass_safe, + DATACLASS_FIELD_TO_ALIAS_PATH_FOR_LOAD, dataclass_init_fields, dataclass_field_to_default, create_meta, dataclass_init_field_names, ) -from ..constants import CATCH_ALL, TAG -from ..decorators import _identity -from .enums import KeyAction, KeyCase +from ..constants import CATCH_ALL, TAG, PY311_OR_ABOVE, PACKAGE_NAME from ..errors import (ParseError, MissingFields, UnknownKeysError, MissingData, JSONWizardError) from ..loader_selection import get_loader, fromdict from ..log import LOG -from ..models import Extras from ..type_def import ( DefFactory, NoneType, JSONObject, PyLiteralString, @@ -43,7 +44,8 @@ from ..utils.object_path import safe_get from ..utils.string_conv import to_json_key from ..utils.type_conv import ( - as_bool, as_datetime, as_date, as_time, as_int, as_timedelta, + as_datetime_v1, as_date_v1, as_time_v1, + as_int, as_timedelta, TRUTHY_VALUES, ) from ..utils.typing_compat import ( is_typed_dict, get_args, is_annotated, @@ -98,86 +100,81 @@ def __init_subclass__(cls, **kwargs): transform_json_field = None @staticmethod - @_identity def default_load_to(tp: TypeInfo, extras: Extras) -> str: # identity: o return tp.v() @staticmethod - def load_after_type_check(tp: TypeInfo, extras: Extras) -> str: - ... 
- # return f'{tp.v()} if instance({tp.v()}, {tp.t()}' + def load_to_str(tp: TypeInfo, extras: Extras) -> str: + tn = tp.type_name(extras) + o = tp.v() - # if isinstance(o, base_type): - # return o - # - # e = ValueError(f'data type is not a {base_type!s}') - # raise ParseError(e, o, base_type) + if tp.in_optional: # str(v) + return f'{tn}({o})' - @staticmethod - def load_to_str(tp: TypeInfo, extras: Extras) -> str: - # TODO skip None check if in Optional - # return f'{tp.name}({tp.v()})' - return f"'' if {(v := tp.v())} is None else {tp.name}({v})" + # '' if v is None else str(v) + default = "''" if tp.origin is str else f'{tn}()' + return f'{default} if {o} is None else {tn}({o})' @staticmethod def load_to_int(tp: TypeInfo, extras: Extras) -> str: - # TODO - extras['locals'].setdefault('as_int', as_int) + # alias: as_int + tn = tp.type_name(extras) + tp.ensure_in_locals(extras, as_int) - # TODO - return f"as_int({tp.v()}, {tp.name})" + return f"as_int({tp.v()}, {tn})" @staticmethod - def load_to_float(tp: TypeInfo, extras: Extras) -> str: - # alias: base_type(o) - return f'{tp.name}({tp.v()})' + def load_to_float(tp: TypeInfo, extras: Extras): + # alias: float(o) + return tp.wrap_builtin(float, tp.v(), extras) @staticmethod def load_to_bool(tp: TypeInfo, extras: Extras) -> str: - extras['locals'].setdefault('as_bool', as_bool) - return f"as_bool({tp.v()})" - # Uncomment for efficiency! - # extras['locals']['_T'] = _TRUTHY_VALUES - # return f'{tp.v()} if (t := type({tp.v()})) is bool else ({tp.v()}.lower() in _T if t is str else {tp.v()} == 1)' + o = tp.v() + tp.ensure_in_locals(extras, __TRUTHY=TRUTHY_VALUES) - @staticmethod - def load_to_bytes(tp: TypeInfo, extras: Extras) -> str: - extras['locals'].setdefault('decodebytes', decodebytes) - return f'decodebytes({tp.v()}.encode())' + return (f'{o}.lower() in __TRUTHY ' + f'if {o}.__class__ is str ' + f'else {o} == 1') @staticmethod - def load_to_bytearray(tp: TypeInfo, extras: Extras) -> str: - extras['locals'].setdefault('decodebytes', decodebytes) - return f'{tp.name}(decodebytes({tp.v()}.encode()))' + def load_to_bytes(tp: TypeInfo, extras: Extras): + tp.ensure_in_locals(extras, b64decode) + return f'b64decode({tp.v()})' + + @classmethod + def load_to_bytearray(cls, tp: TypeInfo, extras: Extras): + as_bytes = cls.load_to_bytes(tp, extras) + return tp.wrap_builtin(bytearray, as_bytes, extras) @staticmethod def load_to_none(tp: TypeInfo, extras: Extras) -> str: return 'None' @staticmethod - def load_to_enum(tp: TypeInfo, extras: Extras) -> str: - # alias: base_type(o) - return tp.v() + def load_to_enum(tp: TypeInfo, extras: Extras): + # alias: enum_cls(o) + return tp.wrap(tp.v(), extras) - # load_to_uuid = load_to_enum @staticmethod def load_to_uuid(tp: TypeInfo, extras: Extras): - # alias: base_type(o) - return tp.wrap_builtin(tp.v(), extras) + # alias: UUID(o) + return tp.wrap_builtin(UUID, tp.v(), extras) @classmethod def load_to_iterable(cls, tp: TypeInfo, extras: Extras): v, v_next, i_next = tp.v_and_next() gorg = tp.origin + # noinspection PyBroadException try: elem_type = tp.args[0] except: elem_type = Any string = cls.get_string_for_annotation( - tp.replace(origin=elem_type, i=i_next), extras) + tp.replace(origin=elem_type, i=i_next, index=None), extras) # TODO if issubclass(gorg, (set, frozenset)): @@ -202,37 +199,30 @@ def load_to_tuple(cls, tp: TypeInfo, extras: Extras): if args: is_variadic = args[-1] is ... else: + # Annotated without args, as simply `tuple` args = (Any, ...) 
is_variadic = True if is_variadic: - # Parser that handles the variadic form of :class:`Tuple`'s, - # i.e. ``Tuple[str, ...]`` + # Logic that handles the variadic form of :class:`Tuple`'s, + # i.e. ``Tuple[str, ...]`` # - # Per `PEP 484`_, only **one** required type is allowed before the - # ``Ellipsis``. That is, ``Tuple[int, ...]`` is valid whereas - # ``Tuple[int, str, ...]`` would be invalid. `See here`_ for more info. + # Per `PEP 484`_, only **one** required type is allowed before the + # ``Ellipsis``. That is, ``Tuple[int, ...]`` is valid whereas + # ``Tuple[int, str, ...]`` would be invalid. `See here`_ for more info. # - # .. _PEP 484: https://www.python.org/dev/peps/pep-0484/ - # .. _See here: https://github.com/python/typing/issues/180 + # .. _PEP 484: https://www.python.org/dev/peps/pep-0484/ + # .. _See here: https://github.com/python/typing/issues/180 v, v_next, i_next = tp.v_and_next() + # Given `Tuple[T, ...]`, we only need the generated string for `T` string = cls.get_string_for_annotation( - tp.replace(origin=args[0], i=i_next), extras) - - # A one-element tuple containing the parser for the first type - # argument. - # Given `Tuple[T, ...]`, we only need a parser for `T` - # self.first_elem_parser = get_parser(elem_types[0], cls, extras), - # Total count should be `Infinity` here, since the variadic form - # accepts any number of possible arguments. - # self.total_count: N = float('inf') - # self.required_count = 0 + tp.replace(origin=args[0], i=i_next, index=None), extras) result = f'[{string} for {v_next} in {v}]' + # Wrap because we need to create a tuple from list comprehension force_wrap = True - else: string = ', '.join([ cls.get_string_for_annotation( @@ -241,48 +231,72 @@ def load_to_tuple(cls, tp: TypeInfo, extras: Extras): for k, arg in enumerate(args)]) result = f'({string}, )' + force_wrap = False return tp.wrap(result, extras, force=force_wrap) @classmethod + @setup_recursive_safe_function def load_to_named_tuple(cls, tp: TypeInfo, extras: Extras): + fn_gen = extras['fn_gen'] + nt_tp = cast(NamedTuple, tp.origin) + + _locals = extras['locals'] + _locals['cls'] = nt_tp + _locals['msg'] = "`dict` input is not supported for NamedTuple, use a dataclass instead." + + req_field_to_assign = {} + field_assigns = [] + optional_fields = set(nt_tp._field_defaults) + has_optionals = True if optional_fields else False + only_optionals = has_optionals and len(optional_fields) == len(nt_tp.__annotations__) + num_fields = 0 - fn_gen = FunctionBuilder() - - extras_cp: Extras = extras.copy() - extras_cp['locals'] = _locals = { - 'msg': "`dict` input is not supported for NamedTuple, use a dataclass instead." - } + for field, field_tp in nt_tp.__annotations__.items(): + string = cls.get_string_for_annotation( + tp.replace(origin=field_tp, index=num_fields), extras) - fn_name = f'_load_{extras["cls_name"]}_nt_typed_{tp.name}' + if has_optionals and field in optional_fields: + field_assigns.append(string) + else: + req_field_to_assign[f'__{field}'] = string - field_names = [] - result_list = [] - num_fields = 0 - # TODO set __annotations__? 
- for x, y in tp.origin.__annotations__.items(): - result_list.append(cls.get_string_for_annotation( - tp.replace(origin=y, index=num_fields), extras_cp)) - field_names.append(x) num_fields += 1 - with fn_gen.function(fn_name, ['v1'], None, _locals): - fn_gen.add_line('fields = []') - with fn_gen.try_(): - for i, string in enumerate(result_list): - fn_gen.add_line(f'fields.append({string})') - with fn_gen.except_(IndexError): - fn_gen.add_line('pass') - with fn_gen.except_(KeyError): + params = ', '.join(req_field_to_assign) + + with fn_gen.try_(): + + for field, string in req_field_to_assign.items(): + fn_gen.add_line(f'{field} = {string}') + + if has_optionals: + opt_start = len(req_field_to_assign) + fn_gen.add_line(f'L = len(v1); has_opt = L > {opt_start}') + with fn_gen.if_(f'has_opt'): + fn_gen.add_line(f'fields = [{field_assigns.pop(0)}]') + for i, string in enumerate(field_assigns, start=opt_start + 1): + fn_gen.add_line(f'if L > {i}: fields.append({string})') + + if only_optionals: + fn_gen.add_line(f'return cls(*fields)') + else: + fn_gen.add_line(f'return cls({params}, *fields)') + + fn_gen.add_line(f'return cls({params})') + + with fn_gen.except_(Exception, 'e'): + with fn_gen.if_('(e_cls := e.__class__) is IndexError'): + # raise `MissingFields`, as required NamedTuple fields + # are not present in the input object `o`. + fn_gen.add_line("raise_missing_fields(locals(), v1, cls, None)") + with fn_gen.if_('e_cls is KeyError and type(v1) is dict'): # Input object is a `dict` # TODO should we support dict for namedtuple? fn_gen.add_line('raise TypeError(msg) from None') - fn_gen.add_line(f'return {tp.wrap("*fields", extras_cp, prefix="nt_")}') - - extras['fn_gen'] |= fn_gen - - return f'{fn_name}({tp.v()})' + # re-raise + fn_gen.add_line('raise e from None') @classmethod def load_to_named_tuple_untyped(cls, tp: TypeInfo, extras: Extras): @@ -297,10 +311,10 @@ def load_to_named_tuple_untyped(cls, tp: TypeInfo, extras: Extras): @classmethod def _build_dict_comp(cls, tp, v, i_next, k_next, v_next, kt, vt, extras): - tp_k_next = tp.replace(origin=kt, i=i_next, prefix='k') + tp_k_next = tp.replace(origin=kt, i=i_next, prefix='k', index=None) string_k = cls.get_string_for_annotation(tp_k_next, extras) - tp_v_next = tp.replace(origin=vt, i=i_next, prefix='v') + tp_v_next = tp.replace(origin=vt, i=i_next, prefix='v', index=None) string_v = cls.get_string_for_annotation(tp_v_next, extras) return f'{{{string_k}: {string_v} for {k_next}, {v_next} in {v}.items()}}' @@ -318,7 +332,6 @@ def load_to_dict(cls, tp: TypeInfo, extras: Extras): result = cls._build_dict_comp( tp, v, i_next, k_next, v_next, kt, vt, extras) - # TODO return tp.wrap(result, extras) @classmethod @@ -339,15 +352,12 @@ def load_to_defaultdict(cls, tp: TypeInfo, extras: Extras): return tp.wrap_dd(default_factory, result, extras) @classmethod + @setup_recursive_safe_function def load_to_typed_dict(cls, tp: TypeInfo, extras: Extras): - fn_gen = FunctionBuilder() + fn_gen = extras['fn_gen'] req_keys, opt_keys = get_keys_for_typed_dict(tp.origin) - - extras_cp: Extras = extras.copy() - extras_cp['locals'] = _locals = {} - - fn_name = f'_load_{extras["cls_name"]}_typeddict_{tp.name}' + # _locals = extras['locals'] result_list = [] # TODO set __annotations__? 
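# With `load_to_typed_dict` now wrapped in `setup_recursive_safe_function`,
# a self-referential `TypedDict` can be deserialized without recursing
# endlessly during code generation. A minimal sketch, mirroring the
# `test_typed_dict_recursive` case added later in this patch:
#
#     from dataclasses import dataclass
#     from typing import TypedDict, Union
#     from dataclass_wizard import JSONWizard
#
#     class TD(TypedDict):
#         key_one: str
#         key_two: Union['TD', None]
#
#     @dataclass
#     class Container(JSONWizard):
#         class _(JSONWizard.Meta):
#             v1 = True
#
#         test1: TD
#
#     c = Container.from_dict(
#         {'test1': {'key_one': 'S1',
#                    'key_two': {'key_one': 'S2', 'key_two': None}}})
#     assert c.test1['key_two']['key_one'] == 'S2'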
@@ -359,199 +369,185 @@ def load_to_typed_dict(cls, tp: TypeInfo, extras: Extras): field_name = repr(k) string = cls.get_string_for_annotation( tp.replace(origin=field_tp, - index=field_name), extras_cp) + index=field_name), extras) result_list.append(f'{field_name}: {string}') - with fn_gen.function(fn_name, ['v1'], None, _locals): - with fn_gen.try_(): - fn_gen.add_lines('result = {', - *(f' {r},' for r in result_list), - '}') - - # Set optional keys for the `TypedDict` (if they exist) - for k in opt_keys: - field_tp = annotations[k] - field_name = repr(k) - string = cls.get_string_for_annotation( - tp.replace(origin=field_tp, - i=2), extras_cp) - with fn_gen.if_(f'(v2 := v1.get({field_name}, MISSING)) is not MISSING'): - fn_gen.add_line(f'result[{field_name}] = {string}') - fn_gen.add_line('return result') - with fn_gen.except_(Exception, 'e'): - with fn_gen.if_('type(e) is KeyError'): - fn_gen.add_line('name = e.args[0]; e = KeyError(f"Missing required key: {name!r}")') - with fn_gen.elif_('not isinstance(v1, dict)'): - fn_gen.add_line('e = TypeError("Incorrect type for object")') - fn_gen.add_line('raise ParseError(e, v1, {}) from None') - - extras['fn_gen'] |= fn_gen - - return f'{fn_name}({tp.v()})' + with fn_gen.try_(): + fn_gen.add_lines('result = {', + *(f' {r},' for r in result_list), + '}') + + # Set optional keys for the `TypedDict` (if they exist) + for k in opt_keys: + field_tp = annotations[k] + field_name = repr(k) + string = cls.get_string_for_annotation( + tp.replace(origin=field_tp, i=2, index=None), extras) + with fn_gen.if_(f'(v2 := v1.get({field_name}, MISSING)) is not MISSING'): + fn_gen.add_line(f'result[{field_name}] = {string}') + fn_gen.add_line('return result') + + with fn_gen.except_(Exception, 'e'): + with fn_gen.if_('type(e) is KeyError'): + fn_gen.add_line('name = e.args[0]; e = KeyError(f"Missing required key: {name!r}")') + with fn_gen.elif_('not isinstance(v1, dict)'): + fn_gen.add_line('e = TypeError("Incorrect type for object")') + fn_gen.add_line('raise ParseError(e, v1, {}) from None') @classmethod + @setup_recursive_safe_function_for_generic def load_to_union(cls, tp: TypeInfo, extras: Extras): - fn_gen = FunctionBuilder() + fn_gen = extras['fn_gen'] config = extras['config'] + actual_cls = extras['cls'] tag_key = config.tag_key or TAG auto_assign_tags = config.auto_assign_tags - fields = f'fields_{tp.field_i}' - - extras_cp: Extras = extras.copy() - extras_cp['locals'] = _locals = { - fields: tp.args, - 'tag_key': tag_key, - } - - actual_cls = extras['cls'] - - fn_name = f'load_to_{extras["cls_name"]}_union_{tp.field_i}' - - # TODO handle dataclasses in union (tag) - - with fn_gen.function(fn_name, ['v1'], None, _locals): - - dataclass_tag_to_lines: dict[str, list] = {} - - type_checks = [] - try_parse_at_end = [] - - for possible_tp in tp.args: + i = tp.field_i + fields = f'fields_{i}' - possible_tp = eval_forward_ref_if_needed(possible_tp, actual_cls) + args = tp.args + in_optional = NoneType in args - tp_new = TypeInfo(possible_tp, field_i=tp.field_i) + _locals = extras['locals'] + _locals[fields] = args + _locals['tag_key'] = tag_key - if possible_tp is NoneType: - with fn_gen.if_('v1 is None'): - fn_gen.add_line('return None') - continue + dataclass_tag_to_lines: dict[str, list] = {} - if is_dataclass(possible_tp): - # we see a dataclass in `Union` declaration - meta = get_meta(possible_tp) - tag = meta.tag - assign_tags_to_cls = auto_assign_tags or meta.auto_assign_tags - cls_name = possible_tp.__name__ - - if assign_tags_to_cls and not tag: 
- tag = cls_name - # We don't want to mutate the base Meta class here - if meta is AbstractMeta: - create_meta(possible_tp, cls_name, tag=tag) - else: - meta.tag = cls_name + type_checks = [] + try_parse_at_end = [] - if tag: - string = cls.get_string_for_annotation(tp_new, extras_cp) + for possible_tp in args: - dataclass_tag_to_lines[tag] = [ - f'if tag == {tag!r}:', - f' return {string}' - ] - continue + possible_tp = eval_forward_ref_if_needed(possible_tp, actual_cls) - elif not config.v1_unsafe_parse_dataclass_in_union: - e = ValueError(f'Cannot parse dataclass types in a Union without one of the following `Meta` settings:\n\n' - ' * `auto_assign_tags = True`\n' - f' - Set on class `{extras["cls_name"]}`.\n\n' - f' * `tag = "{cls_name}"`\n' - f' - Set on class `{possible_tp.__qualname__}`.\n\n' - ' * `v1_unsafe_parse_dataclass_in_union = True`\n' - f' - Set on class `{extras["cls_name"]}`\n\n' - 'For more information, refer to:\n' - ' https://dataclass-wizard.readthedocs.io/en/latest/common_use_cases/dataclasses_in_union_types.html') - raise e from None + tp_new = TypeInfo(possible_tp, field_i=i) + tp_new.in_optional = in_optional - string = cls.get_string_for_annotation(tp_new, extras_cp) + if possible_tp is NoneType: + with fn_gen.if_('v1 is None'): + fn_gen.add_line('return None') + continue - try_parse_lines = [ - 'try:', - f' return {string}', - 'except Exception:', - ' pass', - ] + if is_dataclass(possible_tp): + # we see a dataclass in `Union` declaration + meta = get_meta(possible_tp) + tag = meta.tag + assign_tags_to_cls = auto_assign_tags or meta.auto_assign_tags + cls_name = possible_tp.__name__ - # TODO disable for dataclasses + if assign_tags_to_cls and not tag: + tag = cls_name + # We don't want to mutate the base Meta class here + if meta is AbstractMeta: + create_meta(possible_tp, cls_name, tag=tag) + else: + meta.tag = cls_name - if possible_tp in _SIMPLE_TYPES or is_subclass_safe(get_origin_v2(possible_tp), _SIMPLE_TYPES): - tn = tp_new.type_name(extras_cp) - type_checks.extend([ - f'if tp is {tn}:', - ' return v1' - ]) - list_to_add = try_parse_at_end - else: - list_to_add = type_checks + if tag: + string = cls.get_string_for_annotation(tp_new, extras) - list_to_add.extend(try_parse_lines) + dataclass_tag_to_lines[tag] = [ + f'if tag == {tag!r}:', + f' return {string}' + ] + continue - if dataclass_tag_to_lines: + elif not config.v1_unsafe_parse_dataclass_in_union: + e = ValueError(f'Cannot parse dataclass types in a Union without one of the following `Meta` settings:\n\n' + ' * `auto_assign_tags = True`\n' + f' - Set on class `{extras["cls_name"]}`.\n\n' + f' * `tag = "{cls_name}"`\n' + f' - Set on class `{possible_tp.__qualname__}`.\n\n' + ' * `v1_unsafe_parse_dataclass_in_union = True`\n' + f' - Set on class `{extras["cls_name"]}`\n\n' + 'For more information, refer to:\n' + ' https://dataclass-wizard.readthedocs.io/en/latest/common_use_cases/dataclasses_in_union_types.html') + raise e from None + + string = cls.get_string_for_annotation(tp_new, extras) + + try_parse_lines = [ + 'try:', + f' return {string}', + 'except Exception:', + ' pass', + ] + + # TODO disable for dataclasses + + if (possible_tp in _SIMPLE_TYPES + or is_subclass_safe( + get_origin_v2(possible_tp), _SIMPLE_TYPES)): + + tn = tp_new.type_name(extras) + type_checks.extend([ + f'if tp is {tn}:', + ' return v1' + ]) + list_to_add = try_parse_at_end + else: + list_to_add = type_checks - with fn_gen.try_(): - fn_gen.add_line(f'tag = v1[tag_key]') + list_to_add.extend(try_parse_lines) - with 
fn_gen.except_(Exception): - fn_gen.add_line('pass') + if dataclass_tag_to_lines: - with fn_gen.else_(): + with fn_gen.try_(): + fn_gen.add_line(f'tag = v1[tag_key]') - for lines in dataclass_tag_to_lines.values(): - fn_gen.add_lines(*lines) + with fn_gen.except_(Exception): + fn_gen.add_line('pass') - fn_gen.add_line( - "raise ParseError(" - "TypeError('Object with tag was not in any of Union types')," - f"v1,{fields}," - "input_tag=tag," - "tag_key=tag_key," - f"valid_tags={list(dataclass_tag_to_lines)})" - ) + with fn_gen.else_(): - fn_gen.add_line('tp = type(v1)') + for lines in dataclass_tag_to_lines.values(): + fn_gen.add_lines(*lines) - if type_checks: - fn_gen.add_lines(*type_checks) + fn_gen.add_line( + "raise ParseError(" + "TypeError('Object with tag was not in any of Union types')," + f"v1,{fields}," + "input_tag=tag," + "tag_key=tag_key," + f"valid_tags={list(dataclass_tag_to_lines)})" + ) - if try_parse_at_end: - fn_gen.add_lines(*try_parse_at_end) + fn_gen.add_line('tp = type(v1)') - # Invalid type for Union - fn_gen.add_line("raise ParseError(" - "TypeError('Object was not in any of Union types')," - f"v1,{fields}," - "tag_key=tag_key" - ")") + if type_checks: + fn_gen.add_lines(*type_checks) - extras['fn_gen'] |= fn_gen + if try_parse_at_end: + fn_gen.add_lines(*try_parse_at_end) - return f'{fn_name}({tp.v()})' + # Invalid type for Union + fn_gen.add_line("raise ParseError(" + "TypeError('Object was not in any of Union types')," + f"v1,{fields}," + "tag_key=tag_key" + ")") @staticmethod + @setup_recursive_safe_function_for_generic def load_to_literal(tp: TypeInfo, extras: Extras): - fn_gen = FunctionBuilder() + fn_gen = extras['fn_gen'] fields = f'fields_{tp.field_i}' - extras_cp: Extras = extras.copy() - extras_cp['locals'] = _locals = { - fields: frozenset(tp.args), - } + _locals = extras['locals'] + _locals[fields] = frozenset(tp.args) - fn_name = f'load_to_{extras["cls_name"]}_literal_{tp.field_i}' + with fn_gen.if_(f'{tp.v()} in {fields}', comment=repr(tp.args)): + fn_gen.add_line('return v1') - with fn_gen.function(fn_name, ['v1'], None, _locals): - - with fn_gen.if_(f'{tp.v()} in {fields}'): - fn_gen.add_line('return v1') - - # No such Literal with the value of `o` - fn_gen.add_line("e = ValueError('Value not in expected Literal values')") - fn_gen.add_line(f'raise ParseError(e, v1, {fields}, ' - f'allowed_values=list({fields}))') + # No such Literal with the value of `o` + fn_gen.add_line("e = ValueError('Value not in expected Literal values')") + fn_gen.add_line(f'raise ParseError(e, v1, {fields}, ' + f'allowed_values=list({fields}))') # TODO Checks for Literal equivalence, as mentioned here: # https://www.python.org/dev/peps/pep-0586/#equivalence-of-two-literals @@ -584,48 +580,93 @@ def load_to_literal(tp: TypeInfo, extras: Extras): # f'e, v1, {fields}, allowed_values=list({fields})' # f')') - extras['fn_gen'] |= fn_gen - - return f'{fn_name}({tp.v()})' - @staticmethod def load_to_decimal(tp: TypeInfo, extras: Extras): - s = f'str({tp.v()}) if isinstance({tp.v()}, float) else {tp.v()}' - return tp.wrap_builtin(s, extras) + o = tp.v() + s = f'str({o}) if {o}.__class__ is float else {o}' - # alias: base_type(o) - load_to_path = load_to_uuid + return tp.wrap_builtin(Decimal, s, extras) @staticmethod - def load_to_datetime(tp: TypeInfo, extras: Extras): - # alias: as_datetime - tp.ensure_in_locals(extras, as_datetime, datetime) - return f'as_datetime({tp.v()}, {tp.name})' + def load_to_path(tp: TypeInfo, extras: Extras): + # alias: Path(o) + return 
tp.wrap_builtin(Path, tp.v(), extras) + + @classmethod + def load_to_date(cls, tp: TypeInfo, extras: Extras): + return cls._load_to_date(tp, extras, date) + + @classmethod + def load_to_datetime(cls, tp: TypeInfo, extras: Extras): + return cls._load_to_date(tp, extras, datetime) @staticmethod def load_to_time(tp: TypeInfo, extras: Extras): - # alias: as_time - tp.ensure_in_locals(extras, as_time, time) - return f'as_time({tp.v()}, {tp.name})' + o = tp.v() + tn = tp.type_name(extras, bound=time) + tp_time = cast('type[time]', tp.origin) + + __fromisoformat = f'__{tn}_fromisoformat' + + tp.ensure_in_locals( + extras, + __as_time=as_time_v1, + **{__fromisoformat: tp_time.fromisoformat} + ) + + if PY311_OR_ABOVE: + _parse_iso_string = f'{__fromisoformat}({o})' + else: # pragma: no cover + _parse_iso_string = f"{__fromisoformat}({o}.replace('Z', '+00:00', 1))" + + return (f'{_parse_iso_string} if {o}.__class__ is str ' + f'else __as_time({o}, {tn})') @staticmethod - def load_to_date(tp: TypeInfo, extras: Extras): - # alias: as_date - tp.ensure_in_locals(extras, as_date, date) - return f'as_date({tp.v()}, {tp.name})' + def _load_to_date(tp: TypeInfo, extras: Extras, + cls: 'Union[type[date], type[datetime]]'): + o = tp.v() + tn = tp.type_name(extras, bound=cls) + tp_date_or_datetime = cast('type[date]', tp.origin) + + _fromisoformat = f'__{tn}_fromisoformat' + _fromtimestamp = f'__{tn}_fromtimestamp' + + name_to_func = { + _fromisoformat: tp_date_or_datetime.fromisoformat, + _fromtimestamp: tp_date_or_datetime.fromtimestamp, + } + + if cls is datetime: + _as_func = '__as_datetime' + name_to_func[_as_func] = as_datetime_v1 + else: + _as_func = '__as_date' + name_to_func[_as_func] = as_date_v1 + + tp.ensure_in_locals(extras, **name_to_func) + + if PY311_OR_ABOVE: + _parse_iso_string = f'{_fromisoformat}({o})' + else: # pragma: no cover + _parse_iso_string = f"{_fromisoformat}({o}.replace('Z', '+00:00', 1))" + + return (f'{_parse_iso_string} if {o}.__class__ is str ' + f'else {_as_func}({o}, {_fromtimestamp})') @staticmethod def load_to_timedelta(tp: TypeInfo, extras: Extras): # alias: as_timedelta - tp.ensure_in_locals(extras, as_timedelta, timedelta) - return f'as_timedelta({tp.v()}, {tp.name})' + tn = tp.type_name(extras, bound=timedelta) + tp.ensure_in_locals(extras, as_timedelta) + + return f'as_timedelta({tp.v()}, {tn})' @staticmethod + @setup_recursive_safe_function( + fn_name=f'__{PACKAGE_NAME}_from_dict_{{cls_name}}__') def load_to_dataclass(tp: TypeInfo, extras: Extras): - fn_name = load_func_for_dataclass( - tp.origin, extras, False) - - return f'{fn_name}({tp.v()})' + load_func_for_dataclass(tp.origin, extras) @classmethod def get_string_for_annotation(cls, @@ -641,7 +682,6 @@ def get_string_for_annotation(cls, name = getattr(origin, '__name__', origin) args = None - wrap = False if is_annotated(type_ann) or is_typed_dict_type_qualifier(origin): # Given `Required[T]` or `NotRequired[T]`, we only need `T` @@ -651,33 +691,23 @@ def get_string_for_annotation(cls, name = getattr(origin, '__name__', origin) # origin = type_ann.__args__[0] - # -> Union[x] - if is_union(origin): - args = get_args(type_ann) - - # Special case for Optional[x], which is actually Union[x, None] - if NoneType in args and len(args) == 2: - string = cls.get_string_for_annotation( - tp.replace(origin=args[0], args=None, name=None), extras) - return f'None if {tp.v()} is None else {string}' - - load_hook = cls.load_to_union - - # raise NotImplementedError('`Union` support is not yet fully implemented!') - - elif origin 
is Literal: - load_hook = cls.load_to_literal - args = get_args(type_ann) + # TypeAliasType: Type aliases are created through + # the `type` statement + if (value := getattr(origin, '__value__', None)) is not None: + type_ann = value + origin = get_origin_v2(type_ann) + name = getattr(origin, '__name__', origin) + # `LiteralString` enforces stricter rules at + # type-checking but behaves like `str` at runtime. # TODO maybe add `load_to_literal_string` - elif origin is PyLiteralString: + if origin is PyLiteralString: load_hook = cls.load_to_str origin = str name = 'str' # -> Atomic, immutable types which don't require # any iterative / recursive handling. - # TODO use subclass safe elif origin in _SIMPLE_TYPES or is_subclass_safe(origin, _SIMPLE_TYPES): load_hook = hooks.get(origin) @@ -688,21 +718,34 @@ def get_string_for_annotation(cls, except ValueError: args = Any, + # -> Union[x] + elif is_union(origin): + load_hook = cls.load_to_union + args = get_args(type_ann) + + # Special case for Optional[x], which is actually Union[x, None] + if len(args) == 2 and NoneType in args: + new_tp = tp.replace(origin=args[0], args=None, name=None) + new_tp.in_optional = True + + string = cls.get_string_for_annotation(new_tp, extras) + + return f'None if {tp.v()} is None else {string}' + + # -> Literal[X, Y, ...] + elif origin is Literal: + load_hook = cls.load_to_literal + args = get_args(type_ann) + # https://stackoverflow.com/questions/76520264/dataclasswizard-after-upgrading-to-python3-11-is-not-working-as-expected elif origin is Any: load_hook = cls.default_load_to - elif issubclass(origin, tuple) and hasattr(origin, '_fields'): + elif is_subclass_safe(origin, tuple) and hasattr(origin, '_fields'): if getattr(origin, '__annotations__', None): # Annotated as a `typing.NamedTuple` subtype load_hook = cls.load_to_named_tuple - - # load_hook = hooks.get(NamedTupleMeta) - # return NamedTupleParser( - # base_cls, extras, base_type, load_hook, - # cls.get_parser_for_annotation - # ) else: # Annotated as a `collections.namedtuple` subtype load_hook = cls.load_to_named_tuple_untyped @@ -716,6 +759,9 @@ def get_string_for_annotation(cls, # for the `cls` (base_type) load_hook = cls.load_to_dataclass + elif is_subclass_safe(origin, Enum): + load_hook = cls.load_to_enum + elif origin in (abc.Sequence, abc.MutableSequence, abc.Collection): if origin is abc.Sequence: load_hook = cls.load_to_tuple @@ -752,10 +798,7 @@ def get_string_for_annotation(cls, for t in hooks: if issubclass(origin, (t,)): load_hook = hooks[t] - wrap = True break - else: - wrap = False tp.origin = origin tp.args = args @@ -763,11 +806,6 @@ def get_string_for_annotation(cls, if load_hook is not None: result = load_hook(tp, extras) - # Only wrap result if not already wrapped - if wrap: - if (wrapped := getattr(result, '_wrapped', None)) is not None: - return wrapped - return tp.wrap(result, extras) return result # No matching hook is found for the type. @@ -791,10 +829,6 @@ def setup_default_loader(cls=LoadMixin): """ # TODO maybe `dict.update` might be better? 
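# `Enum` subclasses are now resolved in `get_string_for_annotation` via the
# `is_subclass_safe(origin, Enum)` branch above, rather than through a
# registered hook that would shadow the plain `str`/`int` hooks for
# `StrEnum`/`IntEnum`. A minimal sketch, mirroring the new `test_enum` case
# added later in this patch (lookup is by Enum *value*):
#
#     import enum
#     from dataclasses import dataclass
#     from dataclass_wizard import JSONWizard
#
#     class Color(enum.Enum):
#         RED = 'red'
#         BLUE = 'blue'
#
#     @dataclass
#     class Paint(JSONWizard):
#         class _(JSONWizard.Meta):
#             v1 = True
#
#         color: Color
#
#     assert Paint.from_dict({'color': 'blue'}).color is Color.BLUE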
- # Technically a complex type, however check this - # first, since `StrEnum` and `IntEnum` are subclasses - # of `str` and `int` - cls.register_load_hook(Enum, cls.load_to_enum) # Simple types cls.register_load_hook(str, cls.load_to_str) cls.register_load_hook(float, cls.load_to_float) @@ -810,9 +844,6 @@ def setup_default_loader(cls=LoadMixin): cls.register_load_hook(deque, cls.load_to_iterable) cls.register_load_hook(list, cls.load_to_iterable) cls.register_load_hook(tuple, cls.load_to_tuple) - # `typing` Generics - # cls.register_load_hook(Literal, cls.load_to_literal) - # noinspection PyTypeChecker cls.register_load_hook(defaultdict, cls.load_to_defaultdict) cls.register_load_hook(dict, cls.load_to_dict) cls.register_load_hook(Decimal, cls.load_to_decimal) @@ -824,38 +855,51 @@ def setup_default_loader(cls=LoadMixin): cls.register_load_hook(timedelta, cls.load_to_timedelta) -def add_to_missing_fields(missing_fields: 'list[str] | None', field: str): - if missing_fields is None: - missing_fields = [field] - else: - missing_fields.append(field) - return missing_fields +def check_and_raise_missing_fields( + _locals, o, cls, + fields: 'Union[tuple[Field, ...], None]'): + if fields is None: # named tuple + nt_tp = cast(NamedTuple, cls) + field_to_default = nt_tp._field_defaults -def check_and_raise_missing_fields( - _locals, o, cls, fields: tuple[Field, ...]): + fields = tuple([ + dataclasses.field( + default=field_to_default.get(field, MISSING), + ) + for field in cls.__annotations__]) - missing_fields = [f.name for f in fields - if f.init - and f'__{f.name}' not in _locals - and (f.default is MISSING - and f.default_factory is MISSING)] + for field, name in zip(fields, cls.__annotations__): + field.name = name - missing_keys = [v1_dataclass_field_to_alias(cls)[field] - for field in missing_fields] + missing_fields = [f for f in cls.__annotations__ + if f'__{f}' not in _locals + and f not in field_to_default] + + missing_keys = None + + else: + missing_fields = [f.name for f in fields + if f.init + and f'__{f.name}' not in _locals + and (f.default is MISSING + and f.default_factory is MISSING)] + + missing_keys = [v1_dataclass_field_to_alias(cls)[field] + for field in missing_fields] raise MissingFields( None, o, cls, fields, None, missing_fields, missing_keys ) from None + def load_func_for_dataclass( cls: type, - extras: Extras, - is_main_class: bool = True, + extras: 'Extras | None' = None, loader_cls=LoadMixin, base_meta_cls: type = AbstractMeta, -) -> Union[Callable[[JSONObject], T], str]: +) -> Optional[Callable[[JSONObject], T]]: # TODO dynamically generate for multiple nested classes at once @@ -872,52 +916,74 @@ def load_func_for_dataclass( # Get the loader for the class, or create a new one as needed. cls_loader = get_loader(cls, base_cls=loader_cls, v1=True) + cls_name = cls.__name__ + + fn_name = f'__{PACKAGE_NAME}_from_dict_{cls_name}__' + # Get the meta config for the class, or the default config otherwise. meta = get_meta(cls, base_meta_cls) - if is_main_class: # we are being run for the main dataclass + if extras is None: # we are being run for the main dataclass + is_main_class = True + # If the `recursive` flag is enabled and a Meta config is provided, # apply the Meta recursively to any nested classes. # # Else, just use the base `AbstractMeta`. 
- config = meta if meta.recursive else base_meta_cls + config: META = meta if meta.recursive else base_meta_cls + + # Initialize the FuncBuilder + fn_gen = FunctionBuilder() + + new_locals = { + 'cls': cls, + 'fields': fields, + } + + extras: Extras = { + 'config': config, + 'cls': cls, + 'cls_name': cls_name, + 'locals': new_locals, + 'recursion_guard': {cls: fn_name}, + 'fn_gen': fn_gen, + } _globals = { - 'add': add_to_missing_fields, - 're_raise': re_raise, + 'MISSING': MISSING, 'ParseError': ParseError, - # 'LOG': LOG, 'raise_missing_fields': check_and_raise_missing_fields, - 'MISSING': MISSING, + 're_raise': re_raise, } # we are being run for a nested dataclass else: + is_main_class = False + # config for nested dataclasses config = extras['config'] + # Initialize the FuncBuilder + fn_gen = extras['fn_gen'] + if config is not base_meta_cls: # we want to apply the meta config from the main dataclass # recursively. meta = meta | config meta.bind_to(cls, is_default=False) + new_locals = extras['locals'] + new_locals['fields'] = fields + + # TODO need a way to auto-magically do this + extras['cls'] = cls + extras['cls_name'] = cls_name + key_case: 'V1LetterCase | None' = cls_loader.transform_json_field field_to_alias = v1_dataclass_field_to_alias(cls) check_aliases = True if field_to_alias else False - # This contains a mapping of the original field name to the parser for its - # annotated type; the item lookup *can* be case-insensitive. - # try: - # field_to_parser = dataclass_field_to_load_parser(cls_loader, cls, config) - # except RecursionError: - # if meta.recursive_classes: - # # recursion-safe loader is already in use; something else must have gone wrong - # raise - # else: - # raise RecursiveClassError(cls) from None - field_to_path = DATACLASS_FIELD_TO_ALIAS_PATH_FOR_LOAD[cls] has_alias_paths = True if field_to_path else False @@ -933,14 +999,9 @@ def load_func_for_dataclass( else: expect_tag_as_unknown_key = False - _locals = { - 'cls': cls, - 'fields': fields, - } - if key_case is KeyCase.AUTO: - _locals['f2k'] = field_to_alias - _locals['to_key'] = to_json_key + new_locals['f2k'] = field_to_alias + new_locals['to_key'] = to_json_key on_unknown_key = meta.v1_on_unknown_key @@ -966,27 +1027,12 @@ def load_func_for_dataclass( should_raise = should_warn = None if has_alias_paths: - _locals['safe_get'] = safe_get + new_locals['safe_get'] = safe_get - # Initialize the FuncBuilder - fn_gen = FunctionBuilder() - - cls_name = cls.__name__ - # noinspection PyTypeChecker - new_extras: Extras = { - 'config': config, - 'locals': _locals, - 'cls': cls, - 'cls_name': cls_name, - 'fn_gen': fn_gen, - } - - fn_name = f'__dataclass_wizard_from_dict_{cls_name}__' - - with fn_gen.function(fn_name, ['o'], MISSING, _locals): + with fn_gen.function(fn_name, ['o'], MISSING, new_locals): if (_pre_from_dict := getattr(cls, '_pre_from_dict', None)) is not None: - _locals['__pre_from_dict__'] = _pre_from_dict + new_locals['__pre_from_dict__'] = _pre_from_dict fn_gen.add_line('o = __pre_from_dict__(o)') # Need to create a separate dictionary to copy over the constructor @@ -1036,7 +1082,7 @@ def load_func_for_dataclass( field_to_alias[name] = key = key_case(name) f_assign = f'field={name!r}; key={key!r}; {val}=o.get(key, MISSING)' - string = generate_field_code(cls_loader, new_extras, f, i) + string = generate_field_code(cls_loader, extras, f, i) if name in field_to_default: fn_gen.add_line(f_assign) @@ -1063,14 +1109,14 @@ def load_func_for_dataclass( # add an alias for the tag key, so we don't capture 
it field_to_alias['...'] = meta.tag_key - if 'f2k' in _locals: + if 'f2k' in new_locals: # If this is the case, then `AUTO` key transform mode is enabled # line = 'extra_keys = o.keys() - f2k.values()' aliases_var = 'f2k.values()' else: aliases_var = 'aliases' - _locals['aliases'] = set(field_to_alias.values()) + new_locals['aliases'] = set(field_to_alias.values()) catch_all_def = f'{{k: o[k] for k in o if k not in {aliases_var}}}' @@ -1087,22 +1133,22 @@ def load_func_for_dataclass( # add an alias for the tag key, so we don't raise an error when we see it field_to_alias['...'] = meta.tag_key - if 'f2k' in _locals: + if 'f2k' in new_locals: # If this is the case, then `AUTO` key transform mode is enabled line = 'extra_keys = o.keys() - f2k.values()' else: - _locals['aliases'] = set(field_to_alias.values()) + new_locals['aliases'] = set(field_to_alias.values()) line = 'extra_keys = set(o) - aliases' with fn_gen.if_('len(o) != i'): fn_gen.add_line(line) if should_raise: # Raise an error here (if needed) - _locals['UnknownKeysError'] = UnknownKeysError + new_locals['UnknownKeysError'] = UnknownKeysError fn_gen.add_line("raise UnknownKeysError(extra_keys, o, cls, fields) from None") elif should_warn: # Show a warning here - _locals['LOG'] = LOG + new_locals['LOG'] = LOG fn_gen.add_line(r"LOG.warning('Found %d unknown keys %r not mapped to the dataclass schema.\n" r" Class: %r\n Dataclass fields: %r', len(extra_keys), extra_keys, cls.__qualname__, [f.name for f in fields])") @@ -1137,20 +1183,16 @@ def load_func_for_dataclass( _set_new_attribute(cls, 'from_dict', cls_fromdict) _set_new_attribute( - cls, '__dataclass_wizard_from_dict__', cls_fromdict) + cls, f'__{PACKAGE_NAME}_from_dict__', cls_fromdict) LOG.debug( - "setattr(%s, '__dataclass_wizard_from_dict__', %s)", - cls_name, fn_name) + "setattr(%s, '__%s_from_dict__', %s)", + cls_name, PACKAGE_NAME, fn_name) # TODO in `v1`, we will use class attribute (set above) instead. CLASS_TO_LOAD_FUNC[cls] = cls_fromdict return cls_fromdict - # Update the FunctionBuilder - extras['fn_gen'] |= fn_gen - - return fn_name def generate_field_code(cls_loader: LoadMixin, extras: Extras, @@ -1165,8 +1207,11 @@ def generate_field_code(cls_loader: LoadMixin, TypeInfo(field_type, field_i=field_i), extras ) + # except Exception as e: + # re_raise(e, cls, None, dataclass_init_fields(cls), field, None) except ParseError as pe: pe.class_name = cls + # noinspection PyPropertyAccess pe.field_name = field.name raise pe from None diff --git a/dataclass_wizard/v1/models.py b/dataclass_wizard/v1/models.py index 3fd6378f..a5f18f03 100644 --- a/dataclass_wizard/v1/models.py +++ b/dataclass_wizard/v1/models.py @@ -1,12 +1,13 @@ +from collections import defaultdict from dataclasses import MISSING, Field as _Field from typing import Any, TypedDict from ..constants import PY310_OR_ABOVE from ..log import LOG -from ..type_def import DefFactory, ExplicitNull +from ..type_def import DefFactory, ExplicitNull, PyNotRequired # noinspection PyProtectedMember from ..utils.object_path import split_object_path -from ..utils.typing_compat import get_origin_v2, PyNotRequired +from ..utils.typing_compat import get_origin_v2 _BUILTIN_COLLECTION_TYPES = frozenset({ @@ -38,6 +39,9 @@ class TypeInfo: # optional attribute, that indicates if we should wrap the # assignment with `name` -- ex. `(1, 2)` -> `deque((1, 2))` '_wrapped', + # optional attribute, that indicates if we are currently in Optional, + # e.g. 
`typing.Optional[...]` *or* `typing.Union[T, ...*T2, None]` + '_in_opt', ) def __init__(self, origin, @@ -73,18 +77,33 @@ def replace(self, **changes): # noinspection PyArgumentList return TypeInfo(**current_values) + @property + def in_optional(self): + return getattr(self, '_in_opt', False) + + # noinspection PyUnresolvedReferences + @in_optional.setter + def in_optional(self, value): + # noinspection PyAttributeOutsideInit + self._in_opt = value + @staticmethod - def ensure_in_locals(extras, *types): + def ensure_in_locals(extras, *tps, **name_to_tp): locals = extras['locals'] - for tp in types: + + for tp in tps: locals.setdefault(tp.__name__, tp) - def type_name(self, extras): + for name, tp in name_to_tp.items(): + locals.setdefault(name, tp) + + def type_name(self, extras, bound=None): """Return type name as string (useful for `Union` type checks)""" if self.name is None: self.name = get_origin_v2(self.origin).__name__ - return self._wrap_inner(extras, force=True) + return self._wrap_inner( + extras, force=True, bound=bound) def v(self): return (f'{self.prefix}{self.i}' if (idx := self.index) is None @@ -99,8 +118,8 @@ def v_and_next_k_v(self): return self.v(), f'k{next_i}', f'v{next_i}', next_i def wrap_dd(self, default_factory: DefFactory, result: str, extras): - tn = self._wrap_inner(extras, is_builtin=True) - tn_df = self._wrap_inner(extras, default_factory, 'df_') + tn = self._wrap_inner(extras, is_builtin=True, bound=defaultdict) + tn_df = self._wrap_inner(extras, default_factory) result = f'{tn}({tn_df}, {result})' setattr(self, '_wrapped', result) return self @@ -112,15 +131,17 @@ def multi_wrap(self, extras, prefix='', *result, force=False): return result - def wrap(self, result: str, extras, force=False, prefix=''): - if (tn := self._wrap_inner(extras, prefix=prefix, force=force)) is not None: + def wrap(self, result: str, extras, force=False, prefix='', bound=None): + if (tn := self._wrap_inner( + extras, prefix=prefix, force=force, + bound=bound)) is not None: result = f'{tn}({result})' setattr(self, '_wrapped', result) return self - def wrap_builtin(self, result: str, extras): - tn = self._wrap_inner(extras, is_builtin=True) + def wrap_builtin(self, bound, result, extras): + tn = self._wrap_inner(extras, is_builtin=True, bound=bound) result = f'{tn}({result})' setattr(self, '_wrapped', result) @@ -130,27 +151,31 @@ def _wrap_inner(self, extras, tp=None, prefix='', is_builtin=False, - force=False) -> 'str | None': + force=False, + bound=None) -> 'str | None': if tp is None: tp = self.origin name = self.name - return_name = False + return_name = force else: name = tp.__name__ return_name = True - if force: - return_name = True + # This ensures we don't create a "unique" name + # if it's a non-subclass, e.g. ensures we end + # up with `date` instead of `date_123`. + if bound is not None: + is_builtin = tp is bound if tp not in _BUILTIN_COLLECTION_TYPES: - # TODO? 
- if is_builtin or (mod := tp.__module__) == 'collections': + if (mod := tp.__module__) == 'builtins': + tn = name + elif (is_builtin + or mod == 'collections'): tn = name LOG.debug(f'Ensuring %s=%s', tn, name) extras['locals'].setdefault(tn, tp) - elif mod == 'builtins': - tn = name else: tn = f'{prefix}{name}_{self.field_i}' LOG.debug(f'Adding %s=%s', tn, name) @@ -181,6 +206,7 @@ class Extras(TypedDict): fn_gen: 'FunctionBuilder' locals: dict[str, Any] pattern: PyNotRequired['PatternedDT'] + recursion_guard: dict[type, str] # Instances of Field are only ever created from within this module, diff --git a/dataclass_wizard/v1/models.pyi b/dataclass_wizard/v1/models.pyi index 1d57857e..df41453f 100644 --- a/dataclass_wizard/v1/models.pyi +++ b/dataclass_wizard/v1/models.pyi @@ -35,23 +35,32 @@ class TypeInfo: prefix: str = 'v' # index of assignment (ex. `2 -> v1[2]`, *or* a string `"key" -> v4["key"]`) index: int | None = None + # indicates if we are currently in Optional, + # e.g. `typing.Optional[...]` *or* `typing.Union[T, ...*T2, None]` + in_optional: bool = False def replace(self, **changes) -> TypeInfo: ... @staticmethod - def ensure_in_locals(extras: dict[str, Any], *types: Callable) -> None: ... - def type_name(self, extras: dict[str, Any]) -> str: ... + def ensure_in_locals(extras: Extras, *tps: Callable, **name_to_tp: Callable[..., Any]) -> None: ... + def type_name(self, extras: Extras, + *, bound: type | None = None) -> str: ... def v(self) -> str: ... def v_and_next(self) -> tuple[str, str, int]: ... def v_and_next_k_v(self) -> tuple[str, str, str, int]: ... def multi_wrap(self, extras, prefix='', *result, force=False) -> list[str]: ... - def wrap(self, result: str, extras: Extras, force=False, prefix='') -> Self: ... - def wrap_builtin(self, result: str, extras: Extras) -> Self: ... + def wrap(self, result: str, + extras: Extras, + force=False, + prefix='', + *, bound: type | None = None) -> Self: ... + def wrap_builtin(self, bound: type, result: str, extras: Extras) -> Self: ... def wrap_dd(self, default_factory: DefFactory, result: str, extras: Extras) -> Self: ... def _wrap_inner(self, extras: Extras, tp: type | DefFactory | None = None, prefix: str = '', is_builtin: bool = False, - force=False) -> str | None: ... + force=False, + bound: type | None = None) -> str | None: ... class Extras(TypedDict): """ @@ -63,6 +72,7 @@ class Extras(TypedDict): fn_gen: FunctionBuilder locals: dict[str, Any] pattern: NotRequired[PatternedDT] + recursion_guard: dict[type, str] # noinspection PyPep8Naming diff --git a/dataclass_wizard/wizard_cli/schema.py b/dataclass_wizard/wizard_cli/schema.py index be1dc50d..eb638ae9 100644 --- a/dataclass_wizard/wizard_cli/schema.py +++ b/dataclass_wizard/wizard_cli/schema.py @@ -68,11 +68,12 @@ ) from .. import property_wizard +from ..constants import PACKAGE_NAME from ..class_helper import get_class_name from ..type_def import PyDeque, JSONList, JSONObject, JSONValue, T from ..utils.string_conv import to_snake_case, to_pascal_case # noinspection PyProtectedMember -from ..utils.type_conv import _TRUTHY_VALUES +from ..utils.type_conv import TRUTHY_VALUES from ..utils.type_conv import as_datetime, as_date, as_time @@ -83,7 +84,7 @@ # Merge both the "truthy" and "falsy" values, so we can determine the criteria # under which a string can be considered as a boolean value. 
_FALSY_VALUES = {'false', 'f', 'no', 'n', 'off', '0'} -_BOOL_VALUES = _TRUTHY_VALUES | _FALSY_VALUES +_BOOL_VALUES = TRUTHY_VALUES | _FALSY_VALUES # Valid types for JSON contents; this can be either a list of any type, # or a dictionary with `string` keys and values of any type. @@ -830,7 +831,7 @@ def __or__(self, other): def get_lines(self) -> List[str]: if self.is_root: ModuleImporter.register_import_by_name( - 'dataclass_wizard', 'JSONWizard', level=2) + PACKAGE_NAME, 'JSONWizard', level=2) class_name = f'class {self.name}(JSONWizard):' else: class_name = f'class {self.name}:' diff --git a/docs/overview.rst b/docs/overview.rst index 480d6c56..6d271b1b 100644 --- a/docs/overview.rst +++ b/docs/overview.rst @@ -47,63 +47,103 @@ Supported Types ~~~~~~~~~~~~~~~ .. tip:: - See the below section on `Special Cases`_ for additional info - on the JSON load/dump process for special Python types. + See the section on `Special Cases`_ for additional information on how Dataclass Wizard handles JSON + load/dump for special Python types. -* Strings - - ``str`` - - ``bytes`` - - ``bytearray`` +Dataclass Wizard supports a wide range of Python types, making it easier to work with complex data structures. +This includes built-in types, collections, and more advanced type annotations. +The following types are supported: -* Numerics - - ``int`` - - ``float`` - - ``Decimal`` +- **Basic Types**: -* Booleans (``bool``) + - ``str`` + - ``int`` + - ``float`` + - ``bool`` + - ``None`` (`docs `_) -* Sequences (and their equivalents in the ``typing`` module) - - ``list`` - - ``deque`` - - ``tuple`` - - ``NamedTuple`` +- **Binary Types**: -* Sets (and their equivalents in the ``typing`` module) - - ``set`` - - ``frozenset`` + - ``bytes`` (`docs `_) + - ``bytearray`` (`docs `_) -* Mappings (and their equivalents in the ``typing`` module) - - ``dict`` - - ``defaultdict`` - - ``TypedDict`` - - ``OrderedDict`` +- **Decimal Type**: -* ``Enum`` subclasses + - ``Decimal`` (`docs `_) -* ``UUID`` +- **Pathlib**: -* *date* and *time* objects - - ``datetime`` - - ``date`` - - ``time`` - - ``timedelta`` + - ``Path`` (`docs `_) -* Special `typing primitives`_ from the ``typing`` module - - ``Any`` - - ``Union`` - Also supports `using dataclasses`_. - - ``Optional`` +- **Typed Collections**: + Typed collections are supported for structured data, including: -- `ABC Containers`_ in ``typing`` and ``collections.abc`` - - ``Collection`` -- instantiated as ``list`` - - ``MutableSequence`` -- mapped to ``list`` - - ``Sequence`` -- mapped to ``tuple`` + - ``TypedDict`` (`docs `_) + - ``NamedTuple`` (`docs `_) + - ``namedtuple`` (`docs `_) -* Recently introduced Generic types - - ``Annotated`` - - ``Literal`` +- **ABC Containers** (`docs `_): + - ``Sequence`` (`docs `_) -- instantiated as ``tuple`` + - ``MutableSequence`` (`docs `_) -- mapped to ``list`` + - ``Collection`` (`docs `_) -- instantiated as ``list`` -.. _typing primitives: https://docs.python.org/3/library/typing.html#special-typing-primitives +- **Type Annotations and Qualifiers**: + + - ``Required``, ``NotRequired``, ``ReadOnly`` (`docs `_) + - ``Annotated`` (`docs `_) + - ``Literal`` (`docs `_) + - ``LiteralString`` (`docs `_) + - ``Union`` (`docs `_) -- Also supports `using dataclasses`_. 
+ - ``Optional`` (`docs `_) + - ``Any`` (`docs `_) + +- **Enum Types**: + + - ``Enum`` (`docs `_) + - ``StrEnum`` (`docs `_) + - ``IntEnum`` (`docs `_) + +- **Sets**: + + - ``set`` (`docs `_) + - ``frozenset`` (`docs `_) + +- **Mappings**: + + - ``dict`` (`docs `_) + - ``defaultdict`` (`docs `_) + - ``OrderedDict`` (`docs `_) + +- **Sequences**: + + - ``list`` (`docs `_) + - ``deque`` (`docs `_) + - ``tuple`` (`docs `_) + +- **UUID**: + + - ``UUID`` (`docs `_) + +- **Date and Time**: + + - ``datetime`` (`docs `_) + - ``date`` (`docs `_) + - ``time`` (`docs `_) + - ``timedelta`` (`docs `_) + +- **Nested Dataclasses**: Nested dataclasses are supported, allowing you to serialize and deserialize + nested data structures. + +Starting with **v0.34.0**, recursive and self-referential dataclasses are supported out of the box +when the ``v1`` option is enabled in the ``Meta`` setting (i.e., ``v1 = True``). This removes the +need for custom settings like ``recursive_classes`` and expands type support beyond what is +available in ``v0.x``. + +For more advanced functionality and additional types, enabling ``v1`` is recommended. It forms +the basis for more complex cases and will evolve into the standard model for Dataclass Wizard. + +For more info, see the `Field Guide to V1 Opt-in `_. Special Cases ------------- @@ -183,4 +223,3 @@ Special Cases .. _using dataclasses: https://dataclass-wizard.readthedocs.io/en/latest/common_use_cases/dataclasses_in_union_types.html .. _pytimeparse: https://pypi.org/project/pytimeparse/ -.. _ABC Containers: https://docs.python.org/3/library/typing.html#aliases-to-container-abcs-in-collections-abc diff --git a/pytest.ini b/pytest.ini index 8ec89833..60b6f751 100644 --- a/pytest.ini +++ b/pytest.ini @@ -1,4 +1,5 @@ [pytest] +addopts = -s log_cli = 1 log_cli_format = %(name)s.%(module)s - [%(levelname)s] %(message)s log_cli_level = INFO diff --git a/recipe/meta.yaml b/recipe/meta.yaml index c1637054..9b4df2bf 100644 --- a/recipe/meta.yaml +++ b/recipe/meta.yaml @@ -65,7 +65,7 @@ about: # (even if the license doesn't require it) using the license_file entry. # See https://docs.conda.io/projects/conda-build/en/latest/resources/define-metadata.html#license-file license_file: LICENSE - summary: Lightning-fast JSON wizardry for Python dataclasses — effortless serialization with no external tools required! + summary: Lightning-fast JSON wizardry for Python dataclasses — effortless serialization right out of the box! # The remaining entries in this section are optional, but recommended. 
   description: |
     The dataclass-wizard library provides a set of simple, yet
diff --git a/tests/conftest.py b/tests/conftest.py
index d54c8b1a..8c15ecf9 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -4,10 +4,12 @@
     'data_file_path',
     'PY310_OR_ABOVE',
     'PY311_OR_ABOVE',
+    'PY312_OR_ABOVE',
     'TypedDict',
     # For compatibility with Python 3.9 and 3.10
     'Required',
-    'NotRequired'
+    'NotRequired',
+    'LiteralString',
 ]
 
 import sys
@@ -27,6 +29,9 @@
 # Check if we are running Python 3.11+
 PY311_OR_ABOVE = sys.version_info[:2] >= (3, 11)
 
+# Check if we are running Python 3.12+
+PY312_OR_ABOVE = sys.version_info[:2] >= (3, 12)
+
 # Check if we are running Python 3.9 or 3.10
 PY310_OR_EARLIER = not PY311_OR_ABOVE
 
@@ -41,10 +46,19 @@
 if PY311_OR_ABOVE:
     from typing import Required
     from typing import NotRequired
+    from typing import LiteralString
 else:
     from typing_extensions import Required
     from typing_extensions import NotRequired
+    from typing_extensions import LiteralString
+
+
+# Ignore test files if the Python version is below 3.12
+if not PY312_OR_ABOVE:
+    print("Python version is below 3.12. Ignoring test files.")
+    collect_ignore = [
+        Path('unit', 'v1', 'test_union_as_type_alias_recursive.py').as_posix(),
+    ]
 
 def data_file_path(name: str) -> str:
     """Returns the full path to a test file."""
diff --git a/tests/unit/test_dump.py b/tests/unit/test_dump.py
index 781ab7d2..ad9d4af0 100644
--- a/tests/unit/test_dump.py
+++ b/tests/unit/test_dump.py
@@ -1,5 +1,6 @@
 import logging
 from abc import ABC
+from base64 import b64decode
 from collections import deque, defaultdict
 from dataclasses import dataclass, field
 from datetime import datetime, timedelta
@@ -508,3 +509,23 @@ class Config:
 
     assert fromdict(Config, config) == Config(
         tests={'test_a': Test(field='a'), 'test_b': Test(field='b')})
+
+
+def test_bytes_and_bytes_array_are_supported():
+    """Confirm dump with `bytes` and `bytearray` is supported."""
+
+    @dataclass
+    class Foo(JSONWizard):
+        b: bytes = None
+        barray: bytearray = None
+        s: str = None
+
+    data = {'b': 'AAAA', 'barray': 'SGVsbG8sIFdvcmxkIQ==', 's': 'foobar'}
+
+    # noinspection PyTypeChecker
+    foo = Foo(b=b64decode('AAAA'),
+              barray=bytearray(b'Hello, World!'),
+              s='foobar')
+
+    # noinspection PyTypeChecker
+    assert foo.to_dict() == data
diff --git a/tests/unit/v1/test_loaders.py b/tests/unit/v1/test_loaders.py
index eeb974bf..2dc3d402 100644
--- a/tests/unit/v1/test_loaders.py
+++ b/tests/unit/v1/test_loaders.py
@@ -3,11 +3,15 @@
 
 Note: I might refactor this into a separate `test_parsers.py` as time permits.
""" +import enum import logging from abc import ABC +from base64 import b64decode from collections import namedtuple, defaultdict, deque from dataclasses import dataclass, field from datetime import datetime, date, time, timedelta +from decimal import Decimal +from pathlib import Path from typing import ( List, Optional, Union, Tuple, Dict, NamedTuple, DefaultDict, Set, FrozenSet, Annotated, Literal, Sequence, MutableSequence, Collection @@ -21,6 +25,7 @@ ParseError, MissingFields, UnknownKeysError, MissingData, InvalidConditionError ) from dataclass_wizard.models import _PatternBase +from dataclass_wizard.type_def import NoneType from dataclass_wizard.v1 import * from ..conftest import MyUUIDSubclass from ...conftest import * @@ -908,6 +913,94 @@ class _(JSONWizard.Meta): log.debug('Parsed object: %r', result) +def test_literal_recursive(): + """Test case for recursive or self-referential `typing.Literal` usage.""" + + L1 = Literal['A', 'B'] + L2 = Literal['C', 'D', L1] + L2_FINAL = Union[L1, L2] + L3 = Literal[Literal[Literal[1, 2, 3], "foo"], 5, None] # Literal[1, 2, 3, "foo", 5, None] + + @dataclass + class A(JSONWizard, debug=True): + class _(JSONWizard.Meta): + v1 = True + + test1: L1 + test2: L2_FINAL + test3: L3 + + a = A.from_dict({'test1': 'B', 'test2': 'D', 'test3': 'foo'}) + assert a == A(test1='B', test2='D', test3='foo') + + a = A.from_dict({'test1': 'A', 'test2': 'B', 'test3': None}) + assert a == A(test1='A', test2='B', test3=None) + + with pytest.raises(ParseError): + A.from_dict({'test1': 'C', 'test2': 'D', 'test3': 'foo'}) + + with pytest.raises(ParseError): + A.from_dict({'test1': 'A', 'test2': 'E', 'test3': 'foo'}) + + with pytest.raises(ParseError): + A.from_dict({'test1': 'A', 'test2': 'B', 'test3': 'None'}) + + +def test_union_recursive(): + """Recursive or self-referential `Union` types are supported.""" + JSON = Union[str, int, float, bool, dict[str, 'JSON'], list['JSON'], None] + + @dataclass + class MyClass(JSONWizard): + + class _(JSONWizard.Meta): + v1 = True + + x: str + y: JSON + + # Fix for local tests + globals().update(locals()) + + assert MyClass( + x="x", y={"x": [{"x": {"x": [{"x": ["x", 1, 1.0, True, None]}]}}]} + ).to_dict() == { + "x": "x", + "y": {"x": [{"x": {"x": [{"x": ["x", 1, 1.0, True, None]}]}}]}, + } + + assert MyClass.from_dict( + { + "x": "x", + "y": {"x": [{"x": {"x": [{"x": ["x", 1, 1.0, True, None]}]}}]}, + } + ) == MyClass( + x="x", y={"x": [{"x": {"x": [{"x": ["x", 1, 1.0, True, None]}]}}]} + ) + + +def test_multiple_union(): + """Test case for a dataclass with multiple `Union` fields.""" + + @dataclass + class A(JSONWizard): + + class _(JSONWizard.Meta): + v1 = True + + a: Union[int, float, list[str]] + b: Union[float, bool] + + a = A.from_dict({'a': '123', 'b': '456'}) + assert a == A(a=['1', '2', '3'], b=456.0) + + a = A.from_dict({'a': 123, 'b': 'True'}) + assert a == A(a=123, b=True) + + a = A.from_dict({'a': 3.21, 'b': '0'}) + assert a == A(a=3.21, b=0.0) + + @pytest.mark.parametrize( 'input,expected', [ @@ -1087,7 +1180,7 @@ class C: ('2020-01-02T01:02:03Z', does_not_raise()), ('2010-12-31 23:59:59-04:00', does_not_raise()), (123456789, does_not_raise()), - (True, pytest.raises(ParseError)), + (True, does_not_raise()), (datetime(2010, 12, 31, 23, 59, 59), does_not_raise()), ] ) @@ -1115,7 +1208,7 @@ class _(JSONWizard.Meta): ('2020-01-02', does_not_raise()), ('2010-12-31', does_not_raise()), (123456789, does_not_raise()), - (True, pytest.raises(ParseError)), + (True, does_not_raise()), (date(2010, 12, 31), does_not_raise()), ] 
         (datetime(2010, 12, 31, 23, 59, 59), does_not_raise()),
     ]
 )
@@ -1115,7 +1208,7 @@ class _(JSONWizard.Meta):
         ('2020-01-02', does_not_raise()),
         ('2010-12-31', does_not_raise()),
         (123456789, does_not_raise()),
-        (True, pytest.raises(ParseError)),
+        (True, does_not_raise()),
         (date(2010, 12, 31), does_not_raise()),
     ]
 )
@@ -1897,6 +1990,68 @@ class _(JSONWizard.Meta):
     assert result.my_typed_dict == expected


+def test_typed_dict_recursive():
+    """Test case for recursive or self-referential `TypedDict`s."""
+
+    class TD(TypedDict):
+        key_one: str
+        key_two: Union['TD', None]
+        key_three: NotRequired[dict[int, list['TD']]]
+        key_four: NotRequired[list['TD']]
+
+    @dataclass
+    class MyContainer(JSONWizard, debug=True):
+        class _(JSONWizard.Meta):
+            v1 = True
+
+        test1: TD
+
+    # Fix for local test cases so the forward reference works
+    globals().update(locals())
+
+    d = {
+        'test1': {
+            'key_one': 'S1',
+            'key_two': {'key_one': 'S2', 'key_two': None},
+            'key_three': {
+                '123': [
+                    {'key_one': 'S3',
+                     'key_two': {'key_one': 'S4', 'key_two': None},
+                     'key_three': {}}
+                ]
+            },
+            'key_four': [
+                {'key_one': 'test',
+                 'key_two': {'key_one': 'S5',
+                             'key_two': {'key_one': 'S6', 'key_two': None}}}
+            ]
+        }
+    }
+    a = MyContainer.from_dict(d)
+    log.debug('Parsed object: %r', a)
+
+    assert a == MyContainer(
+        test1={'key_one': 'S1',
+               'key_two': {'key_one': 'S2', 'key_two': None},
+               'key_three': {123: [{'key_one': 'S3',
+                                    'key_two': {'key_one': 'S4', 'key_two': None},
+                                    'key_three': {}}]},
+               'key_four': [
+                   {'key_one': 'test',
+                    'key_two': {'key_one': 'S5',
+                                'key_two': {'key_one': 'S6',
+                                            'key_two': None}}}
+               ]})
+
+
 @pytest.mark.parametrize(
     'input,expectation,expected',
     [
@@ -1995,6 +2150,50 @@ class _(JSONWizard.Meta):
     assert result.my_nt == expected


+def test_named_tuple_recursive():
+    """Test case for recursive or self-referential `NamedTuple`s."""
+
+    class NT(NamedTuple):
+        field_one: str
+        field_two: Union['NT', None]
+        field_three: dict[int, list['NT']] = {}
+        field_four: list['NT'] = []
+
+    @dataclass
+    class MyContainer(JSONWizard, debug=True):
+        class _(JSONWizard.Meta):
+            v1 = True
+
+        test1: NT
+
+    # Fix for local test cases so the forward reference works
+    globals().update(locals())
+
+    d = {
+        'test1': [
+            'S1',
+            ['S2', None],
+            {
+                '123': [
+                    ['S3', ['S4', None], {}]
+                ]
+            },
+            [['test', ['S5', ['S6', None]]]]
+        ]
+    }
+    a = MyContainer.from_dict(d)
+    log.debug('Parsed object: %r', a)
+
+    assert a == MyContainer(
+        test1=NT(field_one='S1',
+                 field_two=NT('S2', None),
+                 field_three={123: [NT('S3', NT('S4', None))]},
+                 field_four=[
+                     NT('test', NT('S5', NT('S6', None)))
+                 ])
+    )
+
+
 @pytest.mark.parametrize(
     'input,expectation,expected',
     [
@@ -2173,7 +2372,6 @@ class _(JSONSerializable.Meta):
     assert item.b is item.c is None


-@pytest.mark.skip(reason='TODO add support in v1')
 def test_with_self_referential_dataclasses_1():
     """
     Test loading JSON data, when a dataclass model has cyclic
@@ -2183,8 +2381,8 @@ def test_with_self_referential_dataclasses_1():
     class A:
         a: Optional['A'] = None

-    # enable support for self-referential / recursive dataclasses
-    LoadMeta(v1=True, recursive_classes=True).bind_to(A)
+    # enable `v1` opt-in
+    LoadMeta(v1=True).bind_to(A)

     # Fix for local test cases so the forward reference works
     globals().update(locals())
@@ -2195,7 +2393,6 @@ class A:
     assert a == A(a=A(a=A(a=None)))


-@pytest.mark.skip(reason='TODO add support in v1')
 def test_with_self_referential_dataclasses_2():
     """
     Test loading JSON data, when a dataclass model has cyclic
@@ -2205,8 +2402,6 @@ class A(JSONWizard):
         class _(JSONWizard.Meta):
             v1 = True
-            # enable support for self-referential / recursive dataclasses
-            recursive_classes = True

         b: Optional['B'] = None

@@ -3060,3 +3255,165 @@ class _(JSONWizard.Meta):
     with pytest.raises(TypeError, match=".*Test\.__init__\(\) missing 1 required positional argument: 'my_field'"):
         Test()
+
+
+def test_bytes_and_bytes_array_are_supported():
+    """Confirm `bytes` and `bytearray` are supported."""
+
+    @dataclass
+    class Foo(JSONWizard):
+        class _(JSONWizard.Meta):
+            v1 = True
+
+        b: bytes = None
+        barray: bytearray = None
+        s: str = None
+
+    data = {'b': 'AAAA', 'barray': 'SGVsbG8sIFdvcmxkIQ==', 's': 'foobar'}
+
+    foo = Foo.from_dict(data)
+
+    # noinspection PyTypeChecker
+    assert foo == Foo(b=b64decode('AAAA'),
+                      barray=bytearray(b'Hello, World!'),
+                      s='foobar')
+    assert foo.to_dict() == data
+
+    # Check data consistency
+    assert Foo.from_dict(foo.to_dict()).to_dict() == data
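+
+# A minimal sketch of the (assumed) encoding behind the round trip above:
+# raw bytes are dumped as base64 text and loaded back via `b64decode`.
+#
+#     from base64 import b64encode
+#     assert b64encode(b'Hello, World!').decode() == 'SGVsbG8sIFdvcmxkIQ=='
+#     assert b64decode('SGVsbG8sIFdvcmxkIQ==') == b'Hello, World!'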
+
+
+def test_literal_string():
+    """Confirm literal strings (`typing.LiteralString`) are supported."""
+
+    @dataclass
+    class Test(JSONWizard):
+        class _(JSONWizard.Meta):
+            v1 = True
+
+        s: LiteralString
+
+    t = Test.from_dict({'s': 'value'})
+    assert t.s == 'value'
+    assert Test.from_dict(t.to_dict()).s == 'value'
+
+
+def test_decimal():
+    """Confirm `Decimal` is supported."""
+
+    @dataclass
+    class Test(JSONWizard):
+        class _(JSONWizard.Meta):
+            v1 = True
+
+        d1: Decimal
+        d2: Decimal
+        d3: Decimal
+
+    d = {'d1': 123,
+         'd2': 3.14,
+         'd3': '42.7'}
+
+    t = Test.from_dict(d)
+
+    assert t.d1 == Decimal(123)
+    assert t.d2 == Decimal('3.14')
+    assert t.d3 == Decimal('42.7')
+
+    assert t.to_dict() == {
+        'd1': '123',
+        'd2': '3.14',
+        'd3': '42.7',
+    }
+
+
+def test_path():
+    """Confirm `Path` objects are supported."""
+
+    @dataclass
+    class Test(JSONWizard):
+        class _(JSONWizard.Meta):
+            v1 = True
+
+        p: Path
+
+    t = Test.from_dict({'p': 'a/b/c'})
+    assert t.p == Path('a/b/c')
+    assert Test.from_dict(t.to_dict()).p == Path('a/b/c')
+
+
+def test_none():
+    """Confirm `None` type annotation is supported."""
+
+    @dataclass
+    class Test(JSONWizard):
+        class _(JSONWizard.Meta):
+            v1 = True
+
+        x: NoneType
+
+    t = Test.from_dict({'x': None})
+    assert t.x is None
+
+    # Any input value is coerced to `None` for a `None` annotation.
+    t = Test.from_dict({'x': 'test'})
+    assert t.x is None
+
+
+def test_enum():
+    """Confirm `Enum` objects are supported."""
+
+    class MyEnum(enum.Enum):
+        A = 'the A'
+        B = 'the B'
+        C = 'the C'
+
+    @dataclass
+    class Test(JSONWizard):
+        class _(JSONWizard.Meta):
+            v1 = True
+
+        e: MyEnum
+
+    with pytest.raises(ParseError):
+        Test.from_dict({'e': 'the D'})
+
+    t = Test.from_dict({'e': 'the B'})
+    assert t.e is MyEnum.B
+    assert Test.from_dict(t.to_dict()).e is MyEnum.B
+
+
+@pytest.mark.skipif(not PY311_OR_ABOVE, reason='Requires Python 3.11 or higher')
+def test_str_and_int_enum():
+    """Confirm `StrEnum` and `IntEnum` objects are supported."""
+
+    class MyStrEnum(enum.StrEnum):
+        A = 'the A'
+        B = 'the B'
+        C = 'the C'
+
+    class MyIntEnum(enum.IntEnum):
+        X = enum.auto()
+        Y = enum.auto()
+        Z = enum.auto()
+
+    @dataclass
+    class Test(JSONPyWizard):
+        class _(JSONPyWizard.Meta):
+            v1 = True
+
+        str_e: MyStrEnum
+        int_e: MyIntEnum
+
+    with pytest.raises(ParseError):
+        Test.from_dict({'str_e': 'the D', 'int_e': 3})
+
+    with pytest.raises(ParseError):
+        Test.from_dict({'str_e': 'the C', 'int_e': 4})
+
+    t = Test.from_dict({'str_e': 'the B', 'int_e': 3})
+    assert t.str_e is MyStrEnum.B
+    assert t.int_e is MyIntEnum.Z
+
+    t2 = Test.from_dict(t.to_dict())
+    assert t2.str_e is MyStrEnum.B
+    assert t2.int_e is MyIntEnum.Z
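+
+# Implied by the assertions above (an assumption about the mapping rules):
+# enum members are matched by *value* on load and dumped back to their
+# value, and `enum.auto()` numbers an `IntEnum` starting at 1, so:
+#
+#     assert MyIntEnum(3) is MyIntEnum.Z
+#     assert MyStrEnum('the B') is MyStrEnum.B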
diff --git a/tests/unit/v1/test_union_as_type_alias_recursive.py b/tests/unit/v1/test_union_as_type_alias_recursive.py
new file mode 100644
index 00000000..80bf9e5f
--- /dev/null
+++ b/tests/unit/v1/test_union_as_type_alias_recursive.py
@@ -0,0 +1,35 @@
+from dataclasses import dataclass
+
+from dataclass_wizard import JSONWizard
+
+
+# noinspection PyCompatibility
+def test_union_as_type_alias_recursive():
+    """
+    Recursive or self-referential `Union` types, declared as a type
+    alias via the `type` statement (Python 3.12+), are supported.
+    """
+    type JSON = str | int | float | bool | dict[str, JSON] | list[JSON] | None
+
+    @dataclass
+    class MyTestClass(JSONWizard):
+
+        class _(JSONWizard.Meta):
+            v1 = True
+
+        name: str
+        meta: str
+        msg: JSON
+
+    x = MyTestClass.from_dict(
+        {
+            "name": "name",
+            "meta": "meta",
+            "msg": [{"x": {"x": [{"x": ["x", 1, 1.0, True, None]}]}}],
+        }
+    )
+    assert x == MyTestClass(
+        name="name",
+        meta="meta",
+        msg=[{"x": {"x": [{"x": ["x", 1, 1.0, True, None]}]}}],
+    )
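+
+    # Round-trip sketch (an assumption for illustration: dumping mirrors
+    # loading for recursive type aliases, just as it does for the plain
+    # `Union` spelling in `test_union_recursive`):
+    assert MyTestClass.from_dict(x.to_dict()) == x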