efro.dataclassio

Functionality for importing, exporting, and validating dataclasses.

This allows complex nested dataclasses to be flattened to json-compatible data and restored from said data. It also gracefully handles and preserves unrecognized attribute data, allowing older clients to interact with newer data formats in a nondestructive manner.

# Released under the MIT License. See LICENSE for details.
#
"""Functionality for importing, exporting, and validating dataclasses.

This allows complex nested dataclasses to be flattened to json-compatible
data and restored from said data. It also gracefully handles and preserves
unrecognized attribute data, allowing older clients to interact with newer
data formats in a nondestructive manner.
"""

from __future__ import annotations

from efro.util import set_canonical_module_names
from efro.dataclassio._base import Codec, IOAttrs, IOExtendedData
from efro.dataclassio._prep import (
    ioprep,
    ioprepped,
    will_ioprep,
    is_ioprepped_dataclass,
)
from efro.dataclassio._pathcapture import DataclassFieldLookup
from efro.dataclassio._api import (
    JsonStyle,
    dataclass_to_dict,
    dataclass_to_json,
    dataclass_from_dict,
    dataclass_from_json,
    dataclass_validate,
)

__all__ = [
    'JsonStyle',
    'Codec',
    'IOAttrs',
    'IOExtendedData',
    'ioprep',
    'ioprepped',
    'will_ioprep',
    'is_ioprepped_dataclass',
    'DataclassFieldLookup',
    'dataclass_to_dict',
    'dataclass_to_json',
    'dataclass_from_dict',
    'dataclass_from_json',
    'dataclass_validate',
]

# Have these things present themselves cleanly as 'thismodule.SomeClass'
# instead of 'thismodule._internalmodule.SomeClass'
set_canonical_module_names(globals())
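
A minimal usage sketch (the Player class here is a hypothetical example type): prep a dataclass with @ioprepped, then round-trip it through json with the functions documented below.

from dataclasses import dataclass

from efro.dataclassio import (
    ioprepped,
    dataclass_to_json,
    dataclass_from_json,
)


@ioprepped
@dataclass
class Player:
    name: str = ''
    score: int = 0


player = Player(name='Alice', score=10)
json_str = dataclass_to_json(player)

# Rebuild an equal instance from the json string.
restored = dataclass_from_json(Player, json_str)
assert restored == player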
class JsonStyle(enum.Enum):
class JsonStyle(Enum):
    """Different style types for json."""

    # Single line, no spaces, no sorting. Not deterministic.
    # Use this for most storage purposes.
    FAST = 'fast'

    # Single line, no spaces, sorted keys. Deterministic.
    # Use this when output may be hashed or compared for equality.
    SORTED = 'sorted'

    # Multiple lines, spaces, sorted keys. Deterministic.
    # Use this for pretty human readable output.
    PRETTY = 'pretty'

Different style types for json.

FAST = <JsonStyle.FAST: 'fast'>
SORTED = <JsonStyle.SORTED: 'sorted'>
PRETTY = <JsonStyle.PRETTY: 'pretty'>
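
Note that dataclass_to_json() (documented below) takes separate 'pretty' and 'sort_keys' args rather than a JsonStyle value; a sketch of how the three styles map onto those args, reusing the hypothetical 'player' instance from the quick-start above:

from efro.dataclassio import dataclass_to_json

fast = dataclass_to_json(player)                         # JsonStyle.FAST
sorted_out = dataclass_to_json(player, sort_keys=True)   # JsonStyle.SORTED
pretty = dataclass_to_json(player, pretty=True)          # JsonStyle.PRETTY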
class Codec(enum.Enum):
class Codec(Enum):
    """Specifies the expected data format being exported to or imported from."""

    # Use only types that will translate cleanly to/from json: lists,
    # dicts with str keys, bools, ints, floats, and None.
    JSON = 'json'

    # Mostly like JSON but passes bytes and datetime objects through
    # as-is instead of converting them to json-friendly types.
    FIRESTORE = 'firestore'

Specifies the expected data format being exported to or imported from.

JSON = <Codec.JSON: 'json'>
FIRESTORE = <Codec.FIRESTORE: 'firestore'>
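
A sketch of the difference (Event is a hypothetical type): with Codec.FIRESTORE, datetime values pass through to the output dict unconverted.

import datetime
from dataclasses import dataclass

from efro.dataclassio import Codec, ioprepped, dataclass_to_dict


@ioprepped
@dataclass
class Event:
    when: datetime.datetime


evt = Event(when=datetime.datetime.now(datetime.timezone.utc))

# FIRESTORE leaves the datetime object as-is in the output dict;
# JSON would instead convert it to json-friendly types.
doc = dataclass_to_dict(evt, codec=Codec.FIRESTORE)
assert isinstance(doc['when'], datetime.datetime)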
class IOAttrs:
class IOAttrs:
    """For specifying io behavior in annotations.

    'storagename', if passed, is the name used when storing to json/etc.
    'store_default' can be set to False to avoid writing values when equal
        to the default value. Note that this requires the dataclass field
        to define a default or default_factory or for its IOAttrs to
        define a soft_default value.
    'whole_days', if True, requires datetime values to be exactly on day
        boundaries (see efro.util.utc_today()).
    'whole_hours', if True, requires datetime values to lie exactly on hour
        boundaries (see efro.util.utc_this_hour()).
    'whole_minutes', if True, requires datetime values to lie exactly on minute
        boundaries (see efro.util.utc_this_minute()).
    'soft_default', if passed, injects a default value into dataclass
        instantiation when the field is not present in the input data.
        This allows dataclasses to add new non-optional fields while
        gracefully 'upgrading' old data. Note that when a soft_default is
        present it will take precedence over field defaults when determining
        whether to store a value for a field with store_default=False
        (since the soft_default value is what we'll get when reading that
        same data back in when the field is omitted).
    'soft_default_factory' is similar to 'default_factory' in dataclass
        fields; it should be used instead of 'soft_default' for mutable types
        such as lists to prevent a single default object from unintentionally
        changing over time.
    """

    # A sentinel object to detect if a parameter is supplied or not.  Use
    # a class to give it a better repr.
    class _MissingType:
        pass

    MISSING = _MissingType()

    storagename: str | None = None
    store_default: bool = True
    whole_days: bool = False
    whole_hours: bool = False
    whole_minutes: bool = False
    soft_default: Any = MISSING
    soft_default_factory: Callable[[], Any] | _MissingType = MISSING

    def __init__(
        self,
        storagename: str | None = storagename,
        store_default: bool = store_default,
        whole_days: bool = whole_days,
        whole_hours: bool = whole_hours,
        whole_minutes: bool = whole_minutes,
        soft_default: Any = MISSING,
        soft_default_factory: Callable[[], Any] | _MissingType = MISSING,
    ):
        # Only store values that differ from class defaults to keep
        # our instances nice and lean.
        cls = type(self)
        if storagename != cls.storagename:
            self.storagename = storagename
        if store_default != cls.store_default:
            self.store_default = store_default
        if whole_days != cls.whole_days:
            self.whole_days = whole_days
        if whole_hours != cls.whole_hours:
            self.whole_hours = whole_hours
        if whole_minutes != cls.whole_minutes:
            self.whole_minutes = whole_minutes
        if soft_default is not cls.soft_default:
            # Do what dataclasses does with its default types and
            # tell the user to use factory for mutable ones.
            if isinstance(soft_default, (list, dict, set)):
                raise ValueError(
                    f'mutable {type(soft_default)} is not allowed'
                    f' for soft_default; use soft_default_factory.'
                )
            self.soft_default = soft_default
        if soft_default_factory is not cls.soft_default_factory:
            self.soft_default_factory = soft_default_factory
            if self.soft_default is not cls.soft_default:
                raise ValueError(
                    'Cannot set both soft_default and soft_default_factory'
                )

    def validate_for_field(self, cls: type, field: dataclasses.Field) -> None:
        """Ensure the IOAttrs instance is ok to use with the provided field."""

        # Turning off store_default requires the field to have either
        # a default or a default_factory or for us to have soft equivalents.

        if not self.store_default:
            field_default_factory: Any = field.default_factory
            if (
                field_default_factory is dataclasses.MISSING
                and field.default is dataclasses.MISSING
                and self.soft_default is self.MISSING
                and self.soft_default_factory is self.MISSING
            ):
                raise TypeError(
                    f'Field {field.name} of {cls} has'
                    f' neither a default nor a default_factory'
                    f' and IOAttrs contains neither a soft_default'
                    f' nor a soft_default_factory;'
                    f' store_default=False cannot be set for it.'
                )

    def validate_datetime(
        self, value: datetime.datetime, fieldpath: str
    ) -> None:
        """Ensure a datetime value meets our value requirements."""
        if self.whole_days:
            if any(
                x != 0
                for x in (
                    value.hour,
                    value.minute,
                    value.second,
                    value.microsecond,
                )
            ):
                raise ValueError(
                    f'Value {value} at {fieldpath} is not a whole day.'
                )
        elif self.whole_hours:
            if any(
                x != 0 for x in (value.minute, value.second, value.microsecond)
            ):
                raise ValueError(
                    f'Value {value} at {fieldpath} is not a whole hour.'
                )
        elif self.whole_minutes:
            if any(x != 0 for x in (value.second, value.microsecond)):
                raise ValueError(
                    f'Value {value} at {fieldpath} is not a whole minute.'
                )

For specifying io behavior in annotations.

'storagename', if passed, is the name used when storing to json/etc.

'store_default' can be set to False to avoid writing values when equal to the default value. Note that this requires the dataclass field to define a default or default_factory or for its IOAttrs to define a soft_default value.

'whole_days', if True, requires datetime values to be exactly on day boundaries (see efro.util.utc_today()).

'whole_hours', if True, requires datetime values to lie exactly on hour boundaries (see efro.util.utc_this_hour()).

'whole_minutes', if True, requires datetime values to lie exactly on minute boundaries (see efro.util.utc_this_minute()).

'soft_default', if passed, injects a default value into dataclass instantiation when the field is not present in the input data. This allows dataclasses to add new non-optional fields while gracefully 'upgrading' old data. Note that when a soft_default is present it will take precedence over field defaults when determining whether to store a value for a field with store_default=False (since the soft_default value is what we'll get when reading that same data back in when the field is omitted).

'soft_default_factory' is similar to 'default_factory' in dataclass fields; it should be used instead of 'soft_default' for mutable types such as lists to prevent a single default object from unintentionally changing over time.
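
IOAttrs instances are attached to dataclass fields via typing.Annotated. A sketch covering storagename, store_default, and the soft-default options (Settings and its fields are hypothetical):

from dataclasses import dataclass
from typing import Annotated

from efro.dataclassio import IOAttrs, ioprepped, dataclass_to_dict


@ioprepped
@dataclass
class Settings:
    # Added after old data was written; soft_default lets stored
    # dicts lacking a 'theme' key still load.
    theme: Annotated[str, IOAttrs(soft_default='light')]

    # Mutable soft defaults must use the factory form.
    tags: Annotated[list[str], IOAttrs(soft_default_factory=list)]

    # Stored under the short key 'v' and omitted from output while
    # it still equals its dataclass default.
    volume: Annotated[float, IOAttrs('v', store_default=False)] = 1.0


out = dataclass_to_dict(Settings(theme='dark', tags=[]))
# Expected: {'theme': 'dark', 'tags': []} - volume is skipped since
# it matches its default.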

IOAttrs(storagename: str | None = None, store_default: bool = True, whole_days: bool = False, whole_hours: bool = False, whole_minutes: bool = False, soft_default: Any = MISSING, soft_default_factory: Callable[[], Any] | _MissingType = MISSING)
MISSING = _MissingType()
storagename: str | None = None
store_default: bool = True
whole_days: bool = False
whole_hours: bool = False
whole_minutes: bool = False
soft_default: Any = MISSING
soft_default_factory: Callable[[], Any] | _MissingType = MISSING
def validate_for_field(self, cls: type, field: dataclasses.Field) -> None:

Ensure the IOAttrs instance is ok to use with the provided field.

def validate_datetime(self, value: datetime.datetime, fieldpath: str) -> None:

Ensure a datetime value meets our value requirements.
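
A sketch of the whole-unit checks in isolation ('when' here is just an illustrative fieldpath):

import datetime

from efro.dataclassio import IOAttrs

attrs = IOAttrs(whole_hours=True)
utc = datetime.timezone.utc

# Exactly on an hour boundary; passes.
attrs.validate_datetime(datetime.datetime(2024, 1, 1, 10, tzinfo=utc), 'when')

# 10:30 is not a whole hour; raises ValueError.
try:
    attrs.validate_datetime(
        datetime.datetime(2024, 1, 1, 10, 30, tzinfo=utc), 'when'
    )
except ValueError as exc:
    print(exc)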

class IOExtendedData:
class IOExtendedData:
    """A class that data types can inherit from for extra functionality."""

    def will_output(self) -> None:
        """Called before data is sent to an outputter.

        Can be overridden to validate or filter data before
        sending it on its way.
        """

    @classmethod
    def will_input(cls, data: dict) -> None:
        """Called on raw data before a class instance is created from it.

        Can be overridden to migrate old data formats to new, etc.
        """

A class that data types can inherit from for extra functionality.

def will_output(self) -> None:

Called before data is sent to an outputter.

Can be overridden to validate or filter data before sending it on its way.

@classmethod
def will_input(cls, data: dict) -> None:

Called on raw data before a class instance is created from it.

Can be overridden to migrate old data formats to new, etc.
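
A sketch of a will_input() migration (Profile and the legacy 'name' key are hypothetical): old data stored a single name string which the current format splits into two fields.

from dataclasses import dataclass

from efro.dataclassio import IOExtendedData, ioprepped, dataclass_from_dict


@ioprepped
@dataclass
class Profile(IOExtendedData):
    first: str
    last: str

    @classmethod
    def will_input(cls, data: dict) -> None:
        # Migrate the legacy single-string form in place before
        # the instance gets created.
        if 'name' in data:
            first, _, last = data.pop('name').partition(' ')
            data['first'] = first
            data['last'] = last


profile = dataclass_from_dict(Profile, {'name': 'Ada Lovelace'})
assert (profile.first, profile.last) == ('Ada', 'Lovelace')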

def ioprep(cls: type, globalns: dict | None = None) -> None:
def ioprep(cls: type, globalns: dict | None = None) -> None:
    """Prep a dataclass type for use with this module's functionality.

    Prepping ensures that all types contained in a data class as well as
    the usage of said types are supported by this module and pre-builds
    necessary constructs needed for encoding/decoding/etc.

    Prepping will happen on-the-fly as needed, but a warning will be
    emitted in such cases, as it is better to explicitly prep all used types
    early in a process to ensure any invalid types or configuration are caught
    immediately.

    Prepping a dataclass involves evaluating its type annotations, which,
    as of PEP 563, are stored simply as strings. This evaluation is done
    with localns set to the class dict (so that types defined in the class
    can be used) and globalns set to the containing module's dict.
    It is possible to override globalns for special cases such as when
    prepping happens as part of an execed string instead of within a
    module.
    """
    PrepSession(explicit=True, globalns=globalns).prep_dataclass(
        cls, recursion_level=0
    )

Prep a dataclass type for use with this module's functionality.

Prepping ensures that all types contained in a data class as well as the usage of said types are supported by this module and pre-builds necessary constructs needed for encoding/decoding/etc.

Prepping will happen on-the-fly as needed, but a warning will be emitted in such cases, as it is better to explicitly prep all used types early in a process to ensure any invalid types or configuration are caught immediately.

Prepping a dataclass involves evaluating its type annotations, which, as of PEP 563, are stored simply as strings. This evaluation is done with localns set to the class dict (so that types defined in the class can be used) and globalns set to the containing module's dict. It is possible to override globalns for special cases such as when prepping happens as part of an execed string instead of within a module.
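
A sketch of explicit prepping (Node is hypothetical); calling ioprep() right at module import time surfaces unsupported types immediately instead of at first use:

from dataclasses import dataclass

from efro.dataclassio import ioprep


@dataclass
class Node:
    value: int = 0


# Prep explicitly at import time; any invalid types or configuration
# raise here rather than on the first encode/decode.
ioprep(Node)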

def ioprepped(cls: type[T]) -> type[T]:
def ioprepped(cls: type[T]) -> type[T]:
    """Class decorator for easily prepping a dataclass at definition time.

    Note that in some cases it may not be possible to prep a dataclass
    immediately (such as when its type annotations refer to forward-declared
    types). In these cases, ioprep() should be explicitly called for
    the class as soon as possible; ideally at module import time to expose any
    errors as early as possible in execution.
    """
    ioprep(cls)
    return cls

Class decorator for easily prepping a dataclass at definition time.

Note that in some cases it may not be possible to prep a dataclass immediately (such as when its type annotations refer to forward-declared types). In these cases, ioprep() should be explicitly called for the class as soon as possible; ideally at module import time to expose any errors as early as possible in execution.

def will_ioprep(cls: type[T]) -> type[T]:
def will_ioprep(cls: type[T]) -> type[T]:
    """Class decorator hinting that we will prep a class later.

    In some cases (such as recursive types) we cannot use the @ioprepped
    decorator and must instead call ioprep() explicitly later. However,
    some of our custom pylint checking behaves differently when the
    @ioprepped decorator is present, in that case requiring type annotations
    to be present and not simply forward declared under an "if TYPE_CHECKING"
    block (since they are used at runtime).

    The @will_ioprep decorator triggers the same pylint behavior
    differences as @ioprepped (which are necessary for the later ioprep() call
    to work correctly) but without actually running any prep itself.
    """
    return cls

Class decorator hinting that we will prep a class later.

In some cases (such as recursive types) we cannot use the @ioprepped decorator and must instead call ioprep() explicitly later. However, some of our custom pylint checking behaves differently when the @ioprepped decorator is present, in that case requiring type annotations to be present and not simply forward declared under an "if TYPE_CHECKING" block (since they are used at runtime).

The @will_ioprep decorator triggers the same pylint behavior differences as @ioprepped (which are necessary for the later ioprep() call to work correctly) but without actually running any prep itself.
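
A sketch with a recursive type (TreeNode is hypothetical): @will_ioprep keeps the pylint behavior of @ioprepped while deferring the actual prep until the class fully exists.

from __future__ import annotations

from dataclasses import dataclass, field

from efro.dataclassio import will_ioprep, ioprep


@will_ioprep
@dataclass
class TreeNode:
    value: int = 0
    # Self-referential, so prep can't run at decoration time.
    children: list[TreeNode] = field(default_factory=list)


# Now that TreeNode exists in the module globals, prep it.
ioprep(TreeNode)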

def is_ioprepped_dataclass(obj: Any) -> bool:
def is_ioprepped_dataclass(obj: Any) -> bool:
    """Return whether the obj is an ioprepped dataclass type or instance."""
    cls = obj if isinstance(obj, type) else type(obj)
    return dataclasses.is_dataclass(cls) and hasattr(cls, PREP_ATTR)

Return whether the obj is an ioprepped dataclass type or instance.
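
A sketch; works on both types and instances:

from dataclasses import dataclass

from efro.dataclassio import ioprepped, is_ioprepped_dataclass


@ioprepped
@dataclass
class Prepped:
    value: int = 0


@dataclass
class Unprepped:
    value: int = 0


assert is_ioprepped_dataclass(Prepped)
assert is_ioprepped_dataclass(Prepped())
assert not is_ioprepped_dataclass(Unprepped)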

class DataclassFieldLookup(typing.Generic[T]):
class DataclassFieldLookup(Generic[T]):
    """Get info about nested dataclass fields in a type-safe way."""

    def __init__(self, cls: type[T]) -> None:
        self.cls = cls

    def path(self, callback: Callable[[T], Any]) -> str:
        """Look up a path on child dataclass fields.

        example:
          DataclassFieldLookup(MyType).path(lambda obj: obj.foo.bar)

        The above example will return the string 'foo.bar' or something
        like 'f.b' if the dataclasses have custom storage names set.
        It will also be static-type-checked, triggering an error if
        MyType.foo.bar is not a valid path. Note, however, that the
        callback technically allows any return value but only nested
        dataclasses and their fields will succeed.
        """

        # We tell the type system that we are returning an instance
        # of our class, which allows it to perform type checking on
        # member lookups. In reality, however, we are providing a
        # special object which captures path lookups, so we can build
        # a string from them.
        if not TYPE_CHECKING:
            out = callback(_PathCapture(self.cls))
            if not isinstance(out, _PathCapture):
                raise TypeError(
                    f'Expected a valid path under'
                    f' the provided object; got a {type(out)}.'
                )
            return out.path
        return ''

    def paths(self, callback: Callable[[T], list[Any]]) -> list[str]:
        """Look up multiple paths on child dataclass fields.

        Functionality is identical to path() but for multiple paths at once.

        example:
          DataclassFieldLookup(MyType).paths(lambda obj: [obj.foo, obj.bar])
        """
        outvals: list[str] = []
        if not TYPE_CHECKING:
            outs = callback(_PathCapture(self.cls))
            assert isinstance(outs, list)
            for out in outs:
                if not isinstance(out, _PathCapture):
                    raise TypeError(
                        f'Expected a valid path under'
                        f' the provided object; got a {type(out)}.'
                    )
                outvals.append(out.path)
        return outvals

Get info about nested dataclass fields in a type-safe way.

DataclassFieldLookup(cls: type[T])
cls
def path(self, callback: Callable[[T], Any]) -> str:

Look up a path on child dataclass fields.

example: DataclassFieldLookup(MyType).path(lambda obj: obj.foo.bar)

The above example will return the string 'foo.bar' or something like 'f.b' if the dataclasses have custom storage names set. It will also be static-type-checked, triggering an error if MyType.foo.bar is not a valid path. Note, however, that the callback technically allows any return value but only nested dataclasses and their fields will succeed.

def paths(self, callback: Callable[[T], list[Any]]) -> list[str]:

Look up multiple paths on child dataclass fields.

Functionality is identical to path() but for multiple paths at once.

example: DataclassFieldLookup(MyType).paths(lambda obj: [obj.foo, obj.bar])
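
A sketch with nested hypothetical types; note how the custom storage name 'b' is expected to show up in the returned path:

from dataclasses import dataclass, field
from typing import Annotated

from efro.dataclassio import IOAttrs, ioprepped, DataclassFieldLookup


@ioprepped
@dataclass
class Inner:
    bar: Annotated[int, IOAttrs('b')] = 0


@ioprepped
@dataclass
class Outer:
    foo: Inner = field(default_factory=Inner)


lookup = DataclassFieldLookup(Outer)
print(lookup.path(lambda obj: obj.foo.bar))  # expected: 'foo.b'
print(lookup.paths(lambda obj: [obj.foo, obj.foo.bar]))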

def dataclass_to_dict(obj: Any, codec: Codec = Codec.JSON, coerce_to_float: bool = True) -> dict:
def dataclass_to_dict(
    obj: Any, codec: Codec = Codec.JSON, coerce_to_float: bool = True
) -> dict:
    """Given a dataclass object, return a json-friendly dict.

    All values will be checked to ensure they match the types specified
    on fields. Note that a limited set of types and data configurations is
    supported.

    Values with type Any will be checked to ensure they match types supported
    directly by json. This does not include types such as tuples which are
    implicitly translated by Python's json module (as this would break
    the ability to do a lossless round-trip with data).

    If coerce_to_float is True, integer values present on float typed fields
    will be converted to float in the dict output. If False, a TypeError
    will be triggered.
    """

    out = _Outputter(
        obj, create=True, codec=codec, coerce_to_float=coerce_to_float
    ).run()
    assert isinstance(out, dict)
    return out

Given a dataclass object, return a json-friendly dict.

All values will be checked to ensure they match the types specified on fields. Note that a limited set of types and data configurations is supported.

Values with type Any will be checked to ensure they match types supported directly by json. This does not include types such as tuples which are implicitly translated by Python's json module (as this would break the ability to do a lossless round-trip with data).

If coerce_to_float is True, integer values present on float typed fields will be converted to float in the dict output. If False, a TypeError will be triggered.
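
A sketch of the coerce_to_float behavior (Point is hypothetical):

from dataclasses import dataclass

from efro.dataclassio import ioprepped, dataclass_to_dict


@ioprepped
@dataclass
class Point:
    x: float = 0.0
    y: float = 0.0


# The int passed for 'x' is coerced to float by default.
print(dataclass_to_dict(Point(x=1, y=2.5)))  # expected: {'x': 1.0, 'y': 2.5}

# With coercion disabled, the same value triggers a TypeError.
try:
    dataclass_to_dict(Point(x=1, y=2.5), coerce_to_float=False)
except TypeError as exc:
    print(exc)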

def dataclass_to_json(obj: Any, coerce_to_float: bool = True, pretty: bool = False, sort_keys: bool | None = None) -> str:
def dataclass_to_json(
    obj: Any,
    coerce_to_float: bool = True,
    pretty: bool = False,
    sort_keys: bool | None = None,
) -> str:
    """Utility function; return a json string from a dataclass instance.

    Basically json.dumps(dataclass_to_dict(...)).
    By default, keys are sorted for pretty output and not otherwise, but
    this can be overridden by supplying a value for the 'sort_keys' arg.
    """
    import json

    jdict = dataclass_to_dict(
        obj=obj, coerce_to_float=coerce_to_float, codec=Codec.JSON
    )
    if sort_keys is None:
        sort_keys = pretty
    if pretty:
        return json.dumps(jdict, indent=2, sort_keys=sort_keys)
    return json.dumps(jdict, separators=(',', ':'), sort_keys=sort_keys)

Utility function; return a json string from a dataclass instance.

Basically json.dumps(dataclass_to_dict(...)). By default, keys are sorted for pretty output and not otherwise, but this can be overridden by supplying a value for the 'sort_keys' arg.

def dataclass_from_dict(cls: type[T], values: dict, codec: Codec = Codec.JSON, coerce_to_float: bool = True, allow_unknown_attrs: bool = True, discard_unknown_attrs: bool = False) -> T:
def dataclass_from_dict(
    cls: type[T],
    values: dict,
    codec: Codec = Codec.JSON,
    coerce_to_float: bool = True,
    allow_unknown_attrs: bool = True,
    discard_unknown_attrs: bool = False,
) -> T:
    """Given a dict, return a dataclass of a given type.

    The dict must be formatted to match the specified codec (generally
    json-friendly object types). This means that sequence values such as
    tuples or sets should be passed as lists, enums should be passed as their
    associated values, nested dataclasses should be passed as dicts, etc.

    All values are checked to ensure their types/values are valid.

    Data for attributes of type Any will be checked to ensure they match
    types supported directly by json. This does not include types such
    as tuples which are implicitly translated by Python's json module
    (as this would break the ability to do a lossless round-trip with data).

    If coerce_to_float is True, int values passed for float typed fields
    will be converted to float values. Otherwise, a TypeError is raised.

    If allow_unknown_attrs is False, AttributeErrors will be raised for
    attributes present in the dict but not on the data class. Otherwise, they
    will be preserved as part of the instance and included if it is
    exported back to a dict, unless discard_unknown_attrs is True, in which
    case they will simply be discarded.
    """
    return _Inputter(
        cls,
        codec=codec,
        coerce_to_float=coerce_to_float,
        allow_unknown_attrs=allow_unknown_attrs,
        discard_unknown_attrs=discard_unknown_attrs,
    ).run(values)

Given a dict, return a dataclass of a given type.

The dict must be formatted to match the specified codec (generally json-friendly object types). This means that sequence values such as tuples or sets should be passed as lists, enums should be passed as their associated values, nested dataclasses should be passed as dicts, etc.

All values are checked to ensure their types/values are valid.

Data for attributes of type Any will be checked to ensure they match types supported directly by json. This does not include types such as tuples which are implicitly translated by Python's json module (as this would break the ability to do a lossless round-trip with data).

If coerce_to_float is True, int values passed for float typed fields will be converted to float values. Otherwise, a TypeError is raised.

If allow_unknown_attrs is False, AttributeErrors will be raised for attributes present in the dict but not on the data class. Otherwise, they will be preserved as part of the instance and included if it is exported back to a dict, unless discard_unknown_attrs is True, in which case they will simply be discarded.
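
A sketch of the unknown-attr handling described above (Config is hypothetical):

from dataclasses import dataclass

from efro.dataclassio import ioprepped, dataclass_from_dict, dataclass_to_dict


@ioprepped
@dataclass
class Config:
    level: int = 0


# Data written by a newer client may contain attrs this class
# doesn't know about; by default they are preserved...
data = {'level': 3, 'new_shiny_attr': True}
cfg = dataclass_from_dict(Config, data)

# ...and included again when exported back out.
print(dataclass_to_dict(cfg))  # expected to include 'new_shiny_attr'

# Strict mode raises for unrecognized attrs instead.
try:
    dataclass_from_dict(Config, data, allow_unknown_attrs=False)
except AttributeError as exc:
    print(exc)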

def dataclass_from_json(cls: type[T], json_str: str, coerce_to_float: bool = True, allow_unknown_attrs: bool = True, discard_unknown_attrs: bool = False) -> T:
def dataclass_from_json(
    cls: type[T],
    json_str: str,
    coerce_to_float: bool = True,
    allow_unknown_attrs: bool = True,
    discard_unknown_attrs: bool = False,
) -> T:
    """Utility function; return a dataclass instance given a json string.

    Basically dataclass_from_dict(json.loads(...))
    """
    import json

    return dataclass_from_dict(
        cls=cls,
        values=json.loads(json_str),
        coerce_to_float=coerce_to_float,
        allow_unknown_attrs=allow_unknown_attrs,
        discard_unknown_attrs=discard_unknown_attrs,
    )

Utility function; return a dataclass instance given a json string.

Basically dataclass_from_dict(json.loads(...))

def dataclass_validate(obj: Any, coerce_to_float: bool = True, codec: Codec = Codec.JSON) -> None:
def dataclass_validate(
    obj: Any, coerce_to_float: bool = True, codec: Codec = Codec.JSON
) -> None:
    """Ensure that values in a dataclass instance are the correct types."""

    # Simply run an output pass but tell it not to generate data;
    # only run validation.
    _Outputter(
        obj, create=False, codec=codec, coerce_to_float=coerce_to_float
    ).run()

Ensure that values in a dataclass instance are the correct types.
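
A sketch (Score is hypothetical): validation runs an output pass without generating data, so a type mismatch introduced after construction is expected to surface as a TypeError.

from dataclasses import dataclass

from efro.dataclassio import ioprepped, dataclass_validate


@ioprepped
@dataclass
class Score:
    points: int = 0


score = Score()
dataclass_validate(score)  # Passes.

score.points = 'lots'  # type: ignore  # Wrong type assigned at runtime.
try:
    dataclass_validate(score)
except TypeError as exc:
    print(exc)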