My problem could be summarised by the following example:
from enum import Enum
import json
class FooBarType(Enum):
    standard = 0
    foo = 1
    bar = 2
dict = {'name': 'test', 'value': 'test', 'type': FooBarType.foo}
json.dumps(dict)
TypeError: <FooBarType.foo: 1> is not JSON serializable
I get a type error, because enums are not JSON serializable.
I primarily thought of implementing a custom JSONEncoder and passing it to the json.dumps() call, but I cannot change the line where the json.dumps() call is made.
So, my question is:
Is it possible to dump an enum to JSON without passing an encoder to json.dumps(), but instead by adding class method(s) to the FooBarType enum?
I expect to extract the following json:
{'name': 'test', 'value': 'test', 'type': 'foo'}
or
{'name': 'test', 'value': 'test', 'type': 1}
Try:
from enum import Enum
# class StrEnum(str, Enum):
# """Enum where members are also (and must be) strs"""
class Color(str, Enum):
    RED = 'red'
    GREEN = 'green'
    BLUE = 'blue'
data = [
    {
        'name': 'car',
        'color': Color.RED,
    },
    {
        'name': 'dog',
        'color': Color.BLUE,
    },
]
import json
print(json.dumps(data))
Result:
[
    {
        "name": "car",
        "color": "red"
    },
    {
        "name": "dog",
        "color": "blue"
    }
]
Sadly, there is no direct support for Enum in JSON.
The closest automatic support is to use IntEnum (which enum34 also supports); json will then treat your enums as ints. Of course, decoding them will give you an int back, but that is as good as it gets without specifying your own encoder/decoder.
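For instance, a minimal sketch of that IntEnum round trip (mirroring the question's FooBarType values):

from enum import IntEnum
import json

class FooBarType(IntEnum):
    standard = 0
    foo = 1
    bar = 2

payload = json.dumps({'type': FooBarType.foo})  # '{"type": 1}'
decoded = json.loads(payload)['type']           # plain int 1, not an enum member
restored = FooBarType(decoded)                  # FooBarType.foo, restored by hand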
Just adding method(s) to the FooBarType enum won't do what you want.
As I mentioned in my comment, you can however use part of my answer to the question Making object JSON serializable with regular encoder to monkey-patch the json module so it will return the name (or value) of Enum members. I'm assuming you're using the enum34 module by Ethan Furman et al., which was backported to Python 2.7 since that version doesn't come with it built-in; it became part of the standard library in Python 3.4.
Note this will work even though you can't change the line where the json.dumps() call occurs, as long as the patch is applied before that call. This is because Python normally caches imported modules in sys.modules, i.e. they aren't reloaded every time they are used in separate scripts, so any changes made to them are "sticky" and remain in effect.
So for what you want to do, first create your own module to make the patch. For example: make_enum_json_serializable.py.
""" Module that monkey-patches the json module when it's imported so
JSONEncoder.default() automatically checks to see if the object being encoded
is an instance of an Enum type and, if so, returns its name.
"""
from enum import Enum
from json import JSONEncoder
_saved_default = JSONEncoder().default # Save default method.
def _new_default(self, obj):
if isinstance(obj, Enum):
return obj.name # Could also be obj.value
else:
return _saved_default
JSONEncoder.default = _new_default # Set new default method.
Then, in your own script, all you need to do is essentially add one line:
from enum import Enum
import json
import make_enum_json_serializable # ADDED
class FooBarType(Enum):
    standard = 0
    foo = 1
    bar = 2
a_dict = {'name': 'spam', 'value': 42, 'type': FooBarType.foo}
print(json.dumps(a_dict))
Output:
{"type": "foo", "name": "spam", "value": 42}
UPDATE: Please read the answer from @gil9red, I think it's better than mine!
I don't think there is a great way for this and you will lose features of the Enum.
Simplest option: Don't subclass Enum:
class FooBarType:
    standard = 0
    foo = 1
    bar = 2
dict = {'type': FooBarType.foo}
json.dumps(dict)
What you could also do:
class EnumIntValue(int):
    def __new__(cls, name, value):
        c = int.__new__(cls, int(value))
        c.name = name
        return c
    def __repr__(self):
        return self.name
    def __str__(self):
        return self.name

class FooBarType:
    standard = EnumIntValue('standard', 0)
    foo = EnumIntValue('foo', 1)
    bar = EnumIntValue('bar', 2)
dict = {'type': FooBarType.foo}
json.dumps(dict)
This will actually give you
{"type": foo}
And therefore not really be valid json, but you can play around with it to fit your needs!
I've recently bumped into a situation where I had to serialize an object that has a couple of Enum types as members.
Basically, I've just added a helper function that maps enum types to their name.
from enum import Enum, auto
from json import dumps
class Status(Enum):
    OK = auto()
    NOT_OK = auto()

class MyObject:
    def __init__(self, status):
        self.status = status
obja = MyObject(Status.OK)
objb = MyObject(Status.NOT_OK)
print(dumps(obja))
print(dumps(objb))
This of course fails with the error TypeError: Object of type MyObject is not JSON serializable, as the status member of the MyObject instances is not serializable.
from enum import Enum, auto
from json import dumps
def _prepare_for_serialization(obj):
    serialized_dict = dict()
    for k, v in obj.__dict__.items():
        serialized_dict[k] = v.name if isinstance(v, Enum) else v
    return serialized_dict

class Status(Enum):
    OK = auto()
    NOT_OK = auto()

class MyObject:
    def __init__(self, status):
        self.status = status
obja = MyObject(Status.OK)
objb = MyObject(Status.NOT_OK)
print(dumps(_prepare_for_serialization(obja)))
print(dumps(_prepare_for_serialization(objb)))
This prints:
{"status": "OK"}
{"status": "NOT_OK"}
Later on, I've used the same helper function to cherry-pick keys for the serialized dict.
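For example, a sketch of what that cherry-picking could look like (the keys whitelist parameter here is my own addition, not part of the original code):

def _prepare_for_serialization(obj, keys=None):
    # keys is a hypothetical whitelist of attribute names to include
    attrs = obj.__dict__ if keys is None else {k: obj.__dict__[k] for k in keys}
    return {k: (v.name if isinstance(v, Enum) else v) for k, v in attrs.items()}

print(dumps(_prepare_for_serialization(obja, keys=['status'])))  # {"status": "OK"}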
You can use a metaclass instead of an Enum, avoiding multiple inheritance and its side effects.
https://gist.github.com/earonesty/81e6c29fa4c54e9b67d9979ddbd8489d
For example:
class FooBarType(metaclass=TypedEnum):
    standard = 0
    foo = 1
    bar = 2
That way every instance is an integer and is also a FooBarType.
Metaclass below.
class TypedEnum(type):
    """This metaclass creates an enumeration that preserves isinstance(element, type)."""

    def __new__(mcs, cls, bases, classdict):
        """Discover the enum members by removing all intrinsics and specials."""
        object_attrs = set(dir(type(cls, (object,), {})))
        member_names = set(classdict.keys()) - object_attrs
        member_names = member_names - set(name for name in member_names if name.startswith("_") and name.endswith("_"))
        new_class = None
        base = None
        for attr in member_names:
            value = classdict[attr]
            if new_class is None:
                # base class for all members is the type of the value
                base = type(classdict[attr])
                ext_bases = (*bases, base)
                new_class = super().__new__(mcs, cls, ext_bases, classdict)
                setattr(new_class, "__member_names__", member_names)
            else:
                if not base == type(classdict[attr]):  # noqa
                    raise SyntaxError("Cannot mix types in TypedEnum")
            new_val = new_class.__new__(new_class, value)
            setattr(new_class, attr, new_val)

        for parent in bases:
            new_names = getattr(parent, "__member_names__", set())
            member_names |= new_names
            for attr in new_names:
                value = getattr(parent, attr)
                if not isinstance(value, base):
                    raise SyntaxError("Cannot mix inherited types in TypedEnum: %s from %s" % (attr, parent))
                # convert all inherited values to the new class
                setattr(new_class, attr, new_class(value))

        return new_class

    def __call__(cls, arg):
        for name in cls.__member_names__:
            if arg == getattr(cls, name):
                return type.__call__(cls, arg)
        raise ValueError("Invalid value '%s' for %s" % (arg, cls.__name__))

    @property
    def __members__(cls):
        """Sufficient to make the @unique decorator work."""
        class FakeEnum:  # pylint: disable=too-few-public-methods
            """Object that looks a bit like an Enum instance."""
            def __init__(self, name, value):
                self.name = name
                self.value = value
        return {name: FakeEnum(name, getattr(cls, name)) for name in cls.__member_names__}

    def __iter__(cls):
        """List all enum values."""
        return (getattr(cls, name) for name in cls.__member_names__)

    def __len__(cls):
        """Get number of enum values."""
        return len(cls.__member_names__)
If you have a class model instead of a dict, you can convert it to JSON like this:
from enum import Enum
import json
class FooBarType(str, Enum):
    standard = 0
    foo = 1
    bar = 2

class ModelExample():
    def __init__(self, name: str, type: FooBarType) -> None:
        self.name = name
        self.type = type
# instantiate a class with your values
model_example = ModelExample(name= 'test', type= FooBarType.foo)
# vars -> get a dict of the class
json.loads(json.dumps(vars(model_example)))
Result:
{'name': 'test', 'type': '1'}
How do I serialise a Python Enum member to JSON, so that I can deserialise the resulting JSON back into a Python object?
For example, this code:
from enum import Enum
import json
class Status(Enum):
    success = 0
json.dumps(Status.success)
results in the error:
TypeError: <Status.success: 0> is not JSON serializable
How can I avoid that?
I know this is old, but I feel this will help people. I just went through this exact problem and discovered that if you're using string enums, declaring your enums as a subclass of str works well for almost all situations:
import json
from enum import Enum
class LogLevel(str, Enum):
    DEBUG = 'DEBUG'
    INFO = 'INFO'
print(LogLevel.DEBUG)
print(json.dumps(LogLevel.DEBUG))
print(json.loads('"DEBUG"'))
print(LogLevel('DEBUG'))
Will output:
LogLevel.DEBUG
"DEBUG"
DEBUG
LogLevel.DEBUG
As you can see, loading the JSON outputs the string DEBUG but it is easily castable back into a LogLevel object. A good option if you don't want to create a custom JSONEncoder.
The correct answer depends on what you intend to do with the serialized version.
If you are going to unserialize back into Python, see Zero's answer.
If your serialized version is going to another language then you probably want to use an IntEnum instead, which is automatically serialized as the corresponding integer:
from enum import IntEnum
import json
class Status(IntEnum):
    success = 0
    failure = 1
json.dumps(Status.success)
and this returns:
'0'
If you want to encode an arbitrary enum.Enum member to JSON and then decode
it as the same enum member (rather than simply the enum member's value attribute), you can do so by writing a custom JSONEncoder class, and a decoding function to pass as the object_hook argument to json.load() or json.loads():
PUBLIC_ENUMS = {
    'Status': Status,
    # ...
}

class EnumEncoder(json.JSONEncoder):
    def default(self, obj):
        if type(obj) in PUBLIC_ENUMS.values():
            return {"__enum__": str(obj)}
        return json.JSONEncoder.default(self, obj)

def as_enum(d):
    if "__enum__" in d:
        name, member = d["__enum__"].split(".")
        return getattr(PUBLIC_ENUMS[name], member)
    else:
        return d
The as_enum function relies on the JSON having been encoded using EnumEncoder, or something which behaves identically to it.
The restriction to members of PUBLIC_ENUMS is necessary to avoid a maliciously crafted text being used to, for example, trick calling code into saving private information (e.g. a secret key used by the application) to an unrelated database field, from where it could then be exposed (see https://chat.stackoverflow.com/transcript/message/35999686#35999686).
Example usage:
>>> data = {
... "action": "frobnicate",
... "status": Status.success
... }
>>> text = json.dumps(data, cls=EnumEncoder)
>>> text
'{"status": {"__enum__": "Status.success"}, "action": "frobnicate"}'
>>> json.loads(text, object_hook=as_enum)
{'status': <Status.success: 0>, 'action': 'frobnicate'}
In Python >= 3.7, you can just use
json.dumps(enum_obj, default=str)
If you want to use the enum value, you can do
json.dumps(enum_obj, default=lambda x: x.value)
or if you want to use the enum name,
json.dumps(enum_obj, default=lambda x: x.name)
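To make the difference concrete, here is what each of those produces (a quick sketch with a one-member Status enum):

import json
from enum import Enum

class Status(Enum):
    success = 0

json.dumps(Status.success, default=str)                 # '"Status.success"'
json.dumps(Status.success, default=lambda x: x.value)   # '0'
json.dumps(Status.success, default=lambda x: x.name)    # '"success"'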
You just need to inherit from str or int class:
from enum import Enum, unique

@unique
class StatusEnum(int, Enum):
    pending: int = 11
    approved: int = 15
    declined: int = 266
That's it, it will be serialised using any JSON encoder.
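A quick check of that claim with the plain standard-library encoder:

import json

print(json.dumps({"status": StatusEnum.approved}))  # {"status": 15}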
I liked Zero Piraeus' answer, but modified it slightly to work with the Amazon Web Services (AWS) API known as Boto.
class EnumEncoder(json.JSONEncoder):
    def default(self, obj):
        if isinstance(obj, Enum):
            return obj.name
        return json.JSONEncoder.default(self, obj)
I then added this method to my data model:
def ToJson(self) -> str:
    return json.dumps(self.__dict__, cls=EnumEncoder, indent=1, sort_keys=True)
I hope this helps someone.
If you are using jsonpickle, the easiest way looks like this:
from enum import Enum
import jsonpickle


@jsonpickle.handlers.register(Enum, base=True)
class EnumHandler(jsonpickle.handlers.BaseHandler):
    def flatten(self, obj, data):
        return obj.value  # Convert to json friendly format


if __name__ == '__main__':
    class Status(Enum):
        success = 0
        error = 1

    class SimpleClass:
        pass

    simple_class = SimpleClass()
    simple_class.status = Status.success
    json = jsonpickle.encode(simple_class, unpicklable=False)
    print(json)
After JSON serialization you will have, as expected, {"status": 0} instead of
{"status": {"__objclass__": {"py/type": "__main__.Status"}, "_name_": "success", "_value_": 0}}
You can even combine the solutions mentioned above with the automatic value creation for Enums. I use this in combination with Pydantic and FastAPI to provide lower case names for a REST API:
from enum import Enum, auto
import json
class StrEnum(str, Enum):
    pass

# this creates nice lowercase and JSON serializable names
# https://docs.python.org/3/library/enum.html#using-automatic-values
class AutoNameLower(StrEnum):
    def _generate_next_value_(name, start, count, last_values):
        return name.lower()

class AutoNameLowerStrEnum(AutoNameLower):
    pass

class MyActualEnum(AutoNameLowerStrEnum):
    THIS = auto()
    THAT = auto()
    FOO = auto()
    BAR = auto()
print(MyActualEnum.THIS)
print(json.dumps(MyActualEnum.THIS))
print(list(MyActualEnum))
Console:
>>> MyActualEnum.THIS
>>> "this"
>>> [<MyActualEnum.THIS: 'this'>, <MyActualEnum.THAT: 'that'>, <MyActualEnum.FOO: 'foo'>, <MyActualEnum.BAR: 'bar'>]
This worked for me:
class Status(Enum):
    success = 0

    def __json__(self):
        return self.value
Didn't have to change anything else. Obviously, you'll only get the value out of this and will need to do some other work if you want to convert the serialized value back into the enum later.
I want to convert JSON data into a Python object.
I receive JSON data objects from the Facebook API, which I want to store in my database.
My current View in Django (Python) (request.POST contains the JSON):
response = request.POST
user = FbApiUser(user_id = response['id'])
user.name = response['name']
user.username = response['username']
user.save()
This works fine, but how do I handle complex JSON data objects?
Wouldn't it be much better if I could somehow convert this JSON object into a Python object for easy use?
UPDATE
With Python3, you can do it in one line, using SimpleNamespace and object_hook:
import json
from types import SimpleNamespace
data = '{"name": "John Smith", "hometown": {"name": "New York", "id": 123}}'
# Parse JSON into an object with attributes corresponding to dict keys.
x = json.loads(data, object_hook=lambda d: SimpleNamespace(**d))
print(x.name, x.hometown.name, x.hometown.id)
OLD ANSWER (Python2)
In Python2, you can do it in one line, using namedtuple and object_hook (but it's very slow with many nested objects):
import json
from collections import namedtuple
data = '{"name": "John Smith", "hometown": {"name": "New York", "id": 123}}'
# Parse JSON into an object with attributes corresponding to dict keys.
x = json.loads(data, object_hook=lambda d: namedtuple('X', d.keys())(*d.values()))
print x.name, x.hometown.name, x.hometown.id
or, to reuse this easily:
def _json_object_hook(d): return namedtuple('X', d.keys())(*d.values())
def json2obj(data): return json.loads(data, object_hook=_json_object_hook)
x = json2obj(data)
If you want it to handle keys that aren't good attribute names, check out namedtuple's rename parameter.
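A brief illustration of that rename behaviour:

from collections import namedtuple

# rename=True replaces field names that aren't valid identifiers
# (or that are duplicates/keywords) with positional names like _0, _1, ...
X = namedtuple('X', ['name', '1_first_item'], rename=True)
print(X._fields)  # ('name', '_1')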
You could try this:
class User(object):
    def __init__(self, name, username):
        self.name = name
        self.username = username
import json
j = json.loads(your_json)
u = User(**j)
Just create a new object, and pass the parameters as a map.
You can have a JSON with objects too:
import json
class Address(object):
    def __init__(self, street, number):
        self.street = street
        self.number = number

    def __str__(self):
        return "{0} {1}".format(self.street, self.number)

class User(object):
    def __init__(self, name, address):
        self.name = name
        self.address = Address(**address)

    def __str__(self):
        return "{0} ,{1}".format(self.name, self.address)

if __name__ == '__main__':
    js = '''{"name":"Cristian", "address":{"street":"Sesame","number":122}}'''
    j = json.loads(js)
    print(j)
    u = User(**j)
    print(u)
Check out the section titled Specializing JSON object decoding in the json module documentation. You can use that to decode a JSON object into a specific Python type.
Here's an example:
class User(object):
    def __init__(self, name, username):
        self.name = name
        self.username = username

import json

def object_decoder(obj):
    if '__type__' in obj and obj['__type__'] == 'User':
        return User(obj['name'], obj['username'])
    return obj

json.loads('{"__type__": "User", "name": "John Smith", "username": "jsmith"}',
           object_hook=object_decoder)
print type(User) # -> <type 'type'>
Update
If you want to access data in a dictionary via the json module do this:
user = json.loads('{"__type__": "User", "name": "John Smith", "username": "jsmith"}')
print user['name']
print user['username']
Just like a regular dictionary.
This is not code golf, but here is my shortest trick, using types.SimpleNamespace as the container for JSON objects.
Compared to the leading namedtuple solution, it is:
probably faster/smaller as it does not create a class for each object
shorter
no rename option, and probably the same limitation on keys that are not valid identifiers (uses setattr under the covers)
Example:
from __future__ import print_function
import json
try:
    from types import SimpleNamespace as Namespace
except ImportError:
    # Python 2.x fallback
    from argparse import Namespace
data = '{"name": "John Smith", "hometown": {"name": "New York", "id": 123}}'
x = json.loads(data, object_hook=lambda d: Namespace(**d))
print (x.name, x.hometown.name, x.hometown.id)
Here's a quick and dirty json pickle alternative
import json
class User:
    def __init__(self, name, username):
        self.name = name
        self.username = username

    def to_json(self):
        return json.dumps(self.__dict__)

    @classmethod
    def from_json(cls, json_str):
        json_dict = json.loads(json_str)
        return cls(**json_dict)
# example usage
User("tbrown", "Tom Brown").to_json()
User.from_json(User("tbrown", "Tom Brown").to_json()).to_json()
For complex objects, you can use JSON Pickle
Python library for serializing any arbitrary object graph into JSON.
It can take almost any Python object and turn the object into JSON.
Additionally, it can reconstitute the object back into Python.
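A minimal sketch of that round trip with jsonpickle (reusing the User class from above):

import jsonpickle

class User:
    def __init__(self, name, username):
        self.name = name
        self.username = username

frozen = jsonpickle.encode(User("Tom Brown", "tbrown"))
thawed = jsonpickle.decode(frozen)   # reconstitutes a User instance
print(thawed.name, thawed.username)  # Tom Brown tbrown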
If you're using Python 3.5+, you can use jsons to serialize and deserialize to plain old Python objects:
import jsons
response = request.POST
# You'll need your class attributes to match your dict keys, so in your case do:
response['id'] = response.pop('user_id')
# Then you can load that dict into your class:
user = jsons.load(response, FbApiUser)
user.save()
You could also make FbApiUser inherit from jsons.JsonSerializable for more elegance:
user = FbApiUser.from_json(response)
These examples will work if your class consists of Python default types, like strings, integers, lists, datetimes, etc. The jsons lib will require type hints for custom types though.
If you are using Python 3.6+, you can use marshmallow-dataclass. Contrary to all the solutions listed above, it is both simple and type safe:
from marshmallow_dataclass import dataclass

@dataclass
class User:
    name: str

user = User.Schema().load({"name": "Ramirez"})
dacite may also be a solution for you; it supports the following features:
nested structures
(basic) types checking
optional fields (i.e. typing.Optional)
unions
forward references
collections
custom type hooks
https://pypi.org/project/dacite/
from dataclasses import dataclass
from dacite import from_dict
@dataclass
class User:
    name: str
    age: int
    is_active: bool

data = {
    'name': 'John',
    'age': 30,
    'is_active': True,
}
user = from_dict(data_class=User, data=data)
assert user == User(name='John', age=30, is_active=True)
Improving on lovasoa's very good answer.
If you are using python 3.6+, you can use:
pip install marshmallow-enum and
pip install marshmallow-dataclass
It's simple and type safe.
You can transform your class in a string-json and vice-versa:
From Object to String Json:
from marshmallow_dataclass import dataclass
user = User("Danilo","50","RedBull",15,OrderStatus.CREATED)
user_json = User.Schema().dumps(user)
user_json_str = user_json.data
From String Json to Object:
json_str = '{"name":"Danilo", "orderId":"50", "productName":"RedBull", "quantity":15, "status":"Created"}'
user, err = User.Schema().loads(json_str)
print(user,flush=True)
Class definitions:
class OrderStatus(Enum):
    CREATED = 'Created'
    PENDING = 'Pending'
    CONFIRMED = 'Confirmed'
    FAILED = 'Failed'

@dataclass
class User:
    def __init__(self, name, orderId, productName, quantity, status):
        self.name = name
        self.orderId = orderId
        self.productName = productName
        self.quantity = quantity
        self.status = status

    name: str
    orderId: str
    productName: str
    quantity: int
    status: OrderStatus
Since no one provided an answer quite like mine, I am going to post it here.
It is a robust class that can easily convert back and forth between a JSON str and a dict, which I have copied from my answer to another question:
import json
class PyJSON(object):
    def __init__(self, d):
        if type(d) is str:
            d = json.loads(d)
        self.from_dict(d)

    def from_dict(self, d):
        self.__dict__ = {}
        for key, value in d.items():
            if type(value) is dict:
                value = PyJSON(value)
            self.__dict__[key] = value

    def to_dict(self):
        d = {}
        for key, value in self.__dict__.items():
            if type(value) is PyJSON:
                value = value.to_dict()
            d[key] = value
        return d

    def __repr__(self):
        return str(self.to_dict())

    def __setitem__(self, key, value):
        self.__dict__[key] = value

    def __getitem__(self, key):
        return self.__dict__[key]
json_str = """... JSON string ..."""
py_json = PyJSON(json_str)
I have written a small (de)serialization framework called any2any that helps doing complex transformations between two Python types.
In your case, I guess you want to transform from a dictionary (obtained with json.loads) to a complex object with attributes such as response.name and nested structure such as response.education.id, etc.
So that's exactly what this framework is made for. The documentation is not great yet, but by using any2any.simple.MappingToObject, you should be able to do that very easily. Please ask if you need help.
JSON to Python object
The following code creates dynamic attributes from the object's keys, recursively.
JSON object - fb_data.json:
{
    "name": "John Smith",
    "hometown": {
        "name": "New York",
        "id": 123
    },
    "list": [
        "a",
        "b",
        "c",
        1,
        {
            "key": 1
        }
    ],
    "object": {
        "key": {
            "key": 1
        }
    }
}
On the conversion we have 3 cases:
lists
dicts (new object)
bool, int, float and str
import json
class AppConfiguration(object):
    def __init__(self, data=None):
        if data is None:
            with open("fb_data.json") as fh:
                data = json.loads(fh.read())
        else:
            data = dict(data)

        for key, val in data.items():
            setattr(self, key, self.compute_attr_value(val))

    def compute_attr_value(self, value):
        if isinstance(value, list):
            return [self.compute_attr_value(x) for x in value]
        elif isinstance(value, dict):
            return AppConfiguration(value)
        else:
            return value

if __name__ == "__main__":
    instance = AppConfiguration()

    print(instance.name)
    print(instance.hometown.name)
    print(instance.hometown.id)
    print(instance.list[4].key)
    print(instance.object.key.key)
Now the key-value pairs are attributes, and nested dicts become objects.
output:
John Smith
New York
123
1
1
Paste JSON as Code
Supports TypeScript, Python, Go, Ruby, C#, Java, Swift, Rust, Kotlin, C++, Flow, Objective-C, JavaScript, Elm, and JSON Schema.
Interactively generate types and (de-)serialization code from JSON, JSON Schema, and TypeScript
Paste JSON/JSON Schema/TypeScript as code
quicktype infers types from sample JSON data, then outputs strongly typed models and serializers for working with that data in your desired programming language.
output:
# Generated by https://quicktype.io
#
# To change quicktype's target language, run command:
#
# "Set quicktype target language"
from typing import List, Union


class Hometown:
    name: str
    id: int

    def __init__(self, name: str, id: int) -> None:
        self.name = name
        self.id = id


class Key:
    key: int

    def __init__(self, key: int) -> None:
        self.key = key


class Object:
    key: Key

    def __init__(self, key: Key) -> None:
        self.key = key


class FbData:
    name: str
    hometown: Hometown
    list: List[Union[Key, int, str]]
    object: Object

    def __init__(self, name: str, hometown: Hometown, list: List[Union[Key, int, str]], object: Object) -> None:
        self.name = name
        self.hometown = hometown
        self.list = list
        self.object = object
This extension is available for free in the Visual Studio Code Marketplace.
The lightest solution, I think, is:
import orjson  # faster than json =)
from typing import NamedTuple
_j = '{"name":"Иван","age":37,"mother":{"name":"Ольга","age":58},"children":["Маша","Игорь","Таня"],"married": true,' \
'"dog":null} '
class PersonNameAge(NamedTuple):
    name: str
    age: int

class UserInfo(NamedTuple):
    name: str
    age: int
    mother: PersonNameAge
    children: list
    married: bool
    dog: str
j = orjson.loads(_j)
u = UserInfo(**j)
print(u.name, u.age, u.mother, u.children, u.married, u.dog)
>>> Ivan 37 {'name': 'Olga', 'age': 58} ['Mary', 'Igor', 'Jane'] True None
Expanding on DS's answer a bit, if you need the object to be mutable (which namedtuple is not), you can use the recordclass library instead of namedtuple:
import json
from recordclass import recordclass
data = '{"name": "John Smith", "hometown": {"name": "New York", "id": 123}}'
# Parse into a mutable object
x = json.loads(data, object_hook=lambda d: recordclass('X', d.keys())(*d.values()))
The modified object can then be converted back to json very easily using simplejson:
x.name = "John Doe"
new_json = simplejson.dumps(x)
dataclass-wizard is a modern option that can similarly work for you. It supports automatic key casing transforms, such as camelCase or TitleCase, both of which are quite common in API responses.
The default key transform when dumping an instance to a dict/JSON is camelCase, but this can be easily overridden using a Meta config supplied on the main dataclass.
https://pypi.org/project/dataclass-wizard/
from dataclasses import dataclass
from dataclass_wizard import fromdict, asdict
@dataclass
class User:
    name: str
    age: int
    is_active: bool

data = {
    'name': 'John',
    'age': 30,
    'isActive': True,
}
user = fromdict(User, data)
assert user == User(name='John', age=30, is_active=True)
json_dict = asdict(user)
assert json_dict == {'name': 'John', 'age': 30, 'isActive': True}
Example of setting a Meta config, which converts fields to lisp-case when serializing to dict/JSON:
DumpMeta(key_transform='LISP').bind_to(User)
While searching for a solution, I stumbled upon this blog post: https://blog.mosthege.net/2016/11/12/json-deserialization-of-nested-objects/
It uses the same technique as stated in previous answers, but with the use of decorators.
Another thing I found useful is the fact that it returns a typed object at the end of deserialization.
import json

class JsonConvert(object):
    class_mappings = {}

    @classmethod
    def class_mapper(cls, d):
        for keys, klass in cls.class_mappings.items():
            if keys.issuperset(d.keys()):  # are all required arguments present?
                return klass(**d)
        # Raise exception instead of silently returning None
        raise ValueError('Unable to find a matching class for object: {!s}'.format(d))

    @classmethod
    def complex_handler(cls, Obj):
        if hasattr(Obj, '__dict__'):
            return Obj.__dict__
        else:
            raise TypeError('Object of type %s with value of %s is not JSON serializable' % (type(Obj), repr(Obj)))

    @classmethod
    def register(cls, claz):
        cls.class_mappings[frozenset(tuple([attr for attr, val in claz().__dict__.items()]))] = claz
        return claz

    @classmethod
    def to_json(cls, obj):
        return json.dumps(obj.__dict__, default=cls.complex_handler, indent=4)

    @classmethod
    def from_json(cls, json_str):
        return json.loads(json_str, object_hook=cls.class_mapper)
Usage:
@JsonConvert.register
class Employee(object):
    def __init__(self, Name: str = None, Age: int = None):
        self.Name = Name
        self.Age = Age
        return

@JsonConvert.register
class Company(object):
    def __init__(self, Name: str = "", Employees: [Employee] = None):
        self.Name = Name
        self.Employees = [] if Employees is None else Employees
        return
company = Company("Contonso")
company.Employees.append(Employee("Werner", 38))
company.Employees.append(Employee("Mary"))
as_json = JsonConvert.to_json(company)
from_json = JsonConvert.from_json(as_json)
as_json_from_json = JsonConvert.to_json(from_json)
assert(as_json_from_json == as_json)
print(as_json_from_json)
Modifying @DS's response a bit, to load from a file:

import json
from collections import namedtuple

def _json_object_hook(d): return namedtuple('X', d.keys())(*d.values())

def load_data(file_name):
    with open(file_name, 'r') as file_data:
        return file_data.read().replace('\n', '')

def json2obj(file_name): return json.loads(load_data(file_name), object_hook=_json_object_hook)
One thing: this cannot load items whose keys start with a number, like this:
{
    "1_first_item": {
        "A": "1",
        "B": "2"
    }
}
That fails because "1_first_item" is not a valid Python field name.
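A possible workaround (my own sketch, not part of the original answer) is to pass rename=True so namedtuple substitutes positional names for the invalid keys:

from collections import namedtuple
import json

def _json_object_hook(d):
    return namedtuple('X', d.keys(), rename=True)(*d.values())

data = '{"1_first_item": {"A": "1", "B": "2"}}'
obj = json.loads(data, object_hook=_json_object_hook)
print(obj._0.A)  # "1" -- the invalid key "1_first_item" became field _0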
The answers given here do not return the correct object type, and they fail if the class has fields that do not exist in the given JSON; hence I created the methods below:
import json
from typing import Any

def dict_to_class(class_name: Any, dictionary: dict) -> Any:
    instance = class_name()
    for key in dictionary.keys():
        setattr(instance, key, dictionary[key])
    return instance

def json_to_class(class_name: Any, json_string: str) -> Any:
    dict_object = json.loads(json_string)
    return dict_to_class(class_name, dict_object)
There are multiple viable answers already, but there are some minor libraries made by individuals that can do the trick for most users.
An example would be json2object. Given a defined class, it deserialises json data to your custom model, including custom attributes and child objects.
Its use is very simple. An example from the library wiki:
from json2object import jsontoobject as jo
class Student:
    def __init__(self):
        self.firstName = None
        self.lastName = None
        self.courses = [Course('')]

class Course:
    def __init__(self, name):
        self.name = name
data = '''{
"firstName": "James",
"lastName": "Bond",
"courses": [{
"name": "Fighting"},
{
"name": "Shooting"}
]
}
'''
model = Student()
result = jo.deserialize(data, model)
print(result.courses[0].name)
class SimpleClass:
    def __init__(self, **kwargs):
        for k, v in kwargs.items():
            if type(v) is dict:
                setattr(self, k, SimpleClass(**v))
            else:
                setattr(self, k, v)
json_dict = {'name': 'jane doe', 'username': 'jane', 'test': {'foo': 1}}
class_instance = SimpleClass(**json_dict)
print(class_instance.name, class_instance.test.foo)
print(vars(class_instance))
If you are looking for type-safe deserialization of JSON or any complex dict into a Python class, I would highly recommend pydantic for Python 3.7+. Not only does it have a succinct API (it does not require writing 'helper' boilerplate) and integrate with Python dataclasses, it has static and runtime type validation of complex and nested data structures.
Example usage:
from pydantic import BaseModel
from datetime import datetime
class Item(BaseModel):
    field1: str | int           # union
    field2: int | None = None   # optional
    field3: str = 'default'     # default values

class User(BaseModel):
    name: str | None = None
    username: str
    created: datetime           # default type converters
    items: list[Item] = []      # nested complex types

data = {
    'name': 'Jane Doe',
    'username': 'user1',
    'created': '2020-12-31T23:59:00+10:00',
    'items': [
        {'field1': 1, 'field2': 2},
        {'field1': 'b'},
        {'field1': 'c', 'field3': 'override'}
    ]
}
user: User = User(**data)
For more details and features, check out pydantic's rationale section in their documentation.
If you're using Python 3.6 or newer, you could have a look at squema - a lightweight module for statically typed data structures. It makes your code easy to read while at the same time providing simple data validation, conversion and serialization without extra work. You can think of it as a more sophisticated and opinionated alternative to namedtuples and dataclasses. Here's how you could use it:
from uuid import UUID
from squema import Squema
class FbApiUser(Squema):
    id: UUID
    age: int
    name: str

    def save(self):
        pass
user = FbApiUser(**json.loads(response))
user.save()
You can use
x = Map(json.loads(response))
x.__class__ = MyClass
where
class Map(dict):
    def __init__(self, *args, **kwargs):
        super(Map, self).__init__(*args, **kwargs)
        for arg in args:
            if isinstance(arg, dict):
                for k, v in arg.iteritems():
                    self[k] = v
                    if isinstance(v, dict):
                        self[k] = Map(v)

        if kwargs:
            # for python 3 use kwargs.items()
            for k, v in kwargs.iteritems():
                self[k] = v
                if isinstance(v, dict):
                    self[k] = Map(v)

    def __getattr__(self, attr):
        return self.get(attr)

    def __setattr__(self, key, value):
        self.__setitem__(key, value)

    def __setitem__(self, key, value):
        super(Map, self).__setitem__(key, value)
        self.__dict__.update({key: value})

    def __delattr__(self, item):
        self.__delitem__(item)

    def __delitem__(self, key):
        super(Map, self).__delitem__(key)
        del self.__dict__[key]
For a generic, future-proof solution: I was searching for something that works with recordclass.RecordClass, supports nested objects, and handles both JSON serialization and deserialization.
Expanding on DS's answer and on the solution from BeneStr, I came up with the following, which seems to work:
Code:
import json
import recordclass
class NestedRec(recordclass.RecordClass):
    a: int = 0
    b: int = 0

class ExampleRec(recordclass.RecordClass):
    x: int = None
    y: int = None
    nested: NestedRec = NestedRec()

class JsonSerializer:
    @staticmethod
    def dumps(obj, ensure_ascii=True, indent=None, sort_keys=False):
        return json.dumps(obj, default=JsonSerializer.__obj_to_dict, ensure_ascii=ensure_ascii, indent=indent, sort_keys=sort_keys)

    @staticmethod
    def loads(s, klass):
        return JsonSerializer.__dict_to_obj(klass, json.loads(s))

    @staticmethod
    def __obj_to_dict(obj):
        if hasattr(obj, "_asdict"):
            return obj._asdict()
        else:
            return json.JSONEncoder().default(obj)

    @staticmethod
    def __dict_to_obj(klass, s_dict):
        kwargs = {
            key: JsonSerializer.__dict_to_obj(cls, s_dict[key]) if hasattr(cls, '_asdict') else s_dict[key]
            for key, cls in klass.__annotations__.items()
            if s_dict is not None and key in s_dict
        }
        return klass(**kwargs)
Usage:
example_0 = ExampleRec(x = 10, y = 20, nested = NestedRec( a = 30, b = 40 ) )
#Serialize to JSON
json_str = JsonSerializer.dumps(example_0)
print(json_str)
#{
# "x": 10,
# "y": 20,
# "nested": {
# "a": 30,
# "b": 40
# }
#}
# Deserialize from JSON
example_1 = JsonSerializer.loads(json_str, ExampleRec)
example_1.x += 1
example_1.y += 1
example_1.nested.a += 1
example_1.nested.b += 1
json_str = JsonSerializer.dumps(example_1)
print(json_str)
#{
# "x": 11,
# "y": 21,
# "nested": {
# "a": 31,
# "b": 41
# }
#}
def load_model_from_dict(self, data: dict):
    for key, value in data.items():
        self.__dict__[key] = value
    return self
It helps return your own model, with unforeseeable variables from the dict.
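For context, a sketch of how that method might be used on a hypothetical model class (the class and JSON here are my own illustration):

import json

class UserModel:
    def load_model_from_dict(self, data: dict):
        for key, value in data.items():
            self.__dict__[key] = value
        return self

user = UserModel().load_model_from_dict(json.loads('{"name": "jane", "extra": 1}'))
print(user.name, user.extra)  # jane 1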
So I was hunting for a way to unmarshal any arbitrary type (think dict of dataclass, or dict of a dict of an array of dataclass) without a ton of custom deserialization code.
This is my approach:
import json
from dataclasses import dataclass, make_dataclass
from dataclasses_json import DataClassJsonMixin, dataclass_json
@dataclass_json
@dataclass
class Person:
    name: str

def unmarshal_json(data, t):
    Unmarshal = make_dataclass('Unmarshal', [('res', t)],
                               bases=(DataClassJsonMixin,))
    d = json.loads(data)
    out = Unmarshal.from_dict({"res": d})
    return out.res
unmarshalled = unmarshal_json('{"1": {"name": "john"} }', dict[str, Person])
print(unmarshalled)
Prints: {'1': Person(name='john')}
This appears to be an XY problem (asking A where the actual problem is B).
The root of the issue is: How to effectively reference/modify deep-nested JSON structures without having to do obj['foo']['bar'][42]['quux'], which poses a typing challenge, a code-bloat issue, a readability issue and an error-trapping issue?
Use glom
from glom import glom
# Basic deep get
data = {'a': {'b': {'c': 'd'}}}
print(glom(data, 'a.b.c'))
It will handle list items also: glom(data, 'a.b.c.42.d')
I've benchmarked it against a naive implementation:
def extract(J, levels):
    # Twice as fast as using glom
    for level in levels.split('.'):
        J = J[int(level) if level.isnumeric() else level]
    return J
... and it returns 0.14ms on a complex JSON object, compared with 0.06ms for the naive impl.
It can also handle complex queries, e.g. pulling out all foo.bar.records where .name == 'Joe Bloggs'.
EDIT:
Another performant approach is to recursively use a class that overrides __getitem__ and __getattr__:
class Ob:
    def __init__(self, J):
        self.J = J

    def __getitem__(self, index):
        return Ob(self.J[index])

    def __getattr__(self, attr):
        value = self.J.get(attr, None)
        return Ob(value) if type(value) in (list, dict) else value
Now you can do:
ob = Ob(J)
# if you're fetching a final raw value (not a list/dict)
ob.foo.bar[42].quux.leaf
# for intermediate values
ob.foo.bar[42].quux.J
This also benchmarks surprisingly well. Comparable with my previous naive impl. If anyone can spot a way to tidy up access for non-leaf queries, leave a comment!
Python3.x
The best approach I could come up with is this.
Note that this code handles set() too.
The approach is generic, requiring only that you extend the class (see the second example).
Note that I'm only doing it with files, but it's easy to modify the behavior to your taste.
However, this is a CoDec.
With a little more work you can construct your class in other ways.
I assume a default constructor to instantiate it, then I update the class dict.
import json
import collections.abc


class JsonClassSerializable(json.JSONEncoder):

    REGISTERED_CLASS = {}

    def register(ctype):
        JsonClassSerializable.REGISTERED_CLASS[ctype.__name__] = ctype

    def default(self, obj):
        if isinstance(obj, collections.abc.Set):
            return dict(_set_object=list(obj))
        if isinstance(obj, JsonClassSerializable):
            jclass = {}
            jclass["name"] = type(obj).__name__
            jclass["dict"] = obj.__dict__
            return dict(_class_object=jclass)
        else:
            return json.JSONEncoder.default(self, obj)

    def json_to_class(self, dct):
        if '_set_object' in dct:
            return set(dct['_set_object'])
        elif '_class_object' in dct:
            cclass = dct['_class_object']
            cclass_name = cclass["name"]
            if cclass_name not in self.REGISTERED_CLASS:
                raise RuntimeError(
                    "Class {} not registered in JSON Parser"
                    .format(cclass["name"])
                )
            instance = self.REGISTERED_CLASS[cclass_name]()
            instance.__dict__ = cclass["dict"]
            return instance
        return dct

    def encode_(self, file):
        with open(file, 'w') as outfile:
            json.dump(
                self.__dict__, outfile,
                cls=JsonClassSerializable,
                indent=4,
                sort_keys=True
            )

    def decode_(self, file):
        try:
            with open(file, 'r') as infile:
                self.__dict__ = json.load(
                    infile,
                    object_hook=self.json_to_class
                )
        except FileNotFoundError:
            print("Persistence load failed "
                  "'{}' does not exist".format(file)
                  )


class C(JsonClassSerializable):
    def __init__(self):
        self.mill = "s"

JsonClassSerializable.register(C)


class B(JsonClassSerializable):
    def __init__(self):
        self.a = 1230
        self.c = C()

JsonClassSerializable.register(B)


class A(JsonClassSerializable):
    def __init__(self):
        self.a = 1
        self.b = {1, 2}
        self.c = B()

JsonClassSerializable.register(A)
A().encode_("test")
b = A()
b.decode_("test")
print(b.a)
print(b.b)
print(b.c.a)
Edit
With some more research I found a way to generalize without needing the superclass register method call, by using a metaclass:
import json
import collections.abc

REGISTERED_CLASS = {}


class MetaSerializable(type):
    def __call__(cls, *args, **kwargs):
        if cls.__name__ not in REGISTERED_CLASS:
            REGISTERED_CLASS[cls.__name__] = cls
        return super(MetaSerializable, cls).__call__(*args, **kwargs)


class JsonClassSerializable(json.JSONEncoder, metaclass=MetaSerializable):

    def default(self, obj):
        if isinstance(obj, collections.abc.Set):
            return dict(_set_object=list(obj))
        if isinstance(obj, JsonClassSerializable):
            jclass = {}
            jclass["name"] = type(obj).__name__
            jclass["dict"] = obj.__dict__
            return dict(_class_object=jclass)
        else:
            return json.JSONEncoder.default(self, obj)

    def json_to_class(self, dct):
        if '_set_object' in dct:
            return set(dct['_set_object'])
        elif '_class_object' in dct:
            cclass = dct['_class_object']
            cclass_name = cclass["name"]
            if cclass_name not in REGISTERED_CLASS:
                raise RuntimeError(
                    "Class {} not registered in JSON Parser"
                    .format(cclass["name"])
                )
            instance = REGISTERED_CLASS[cclass_name]()
            instance.__dict__ = cclass["dict"]
            return instance
        return dct

    def encode_(self, file):
        with open(file, 'w') as outfile:
            json.dump(
                self.__dict__, outfile,
                cls=JsonClassSerializable,
                indent=4,
                sort_keys=True
            )

    def decode_(self, file):
        try:
            with open(file, 'r') as infile:
                self.__dict__ = json.load(
                    infile,
                    object_hook=self.json_to_class
                )
        except FileNotFoundError:
            print("Persistence load failed "
                  "'{}' does not exist".format(file)
                  )


class C(JsonClassSerializable):
    def __init__(self):
        self.mill = "s"


class B(JsonClassSerializable):
    def __init__(self):
        self.a = 1230
        self.c = C()


class A(JsonClassSerializable):
    def __init__(self):
        self.a = 1
        self.b = {1, 2}
        self.c = B()
A().encode_("test")
b = A()
b.decode_("test")
print(b.a)
# 1
print(b.b)
# {1, 2}
print(b.c.a)
# 1230
print(b.c.c.mill)
# s
This is not a very difficult thing. I saw the answers above; most of them had a performance problem with lists.
This code is much faster than the ones above:
import json
class jsonify:
    def __init__(self, data):
        self.jsonify = data

    def __getattr__(self, attr):
        value = self.jsonify.get(attr)
        if isinstance(value, (list, dict)):
            return jsonify(value)
        return value

    def __getitem__(self, index):
        value = self.jsonify[index]
        if isinstance(value, (list, dict)):
            return jsonify(value)
        return value

    def __setitem__(self, index, value):
        self.jsonify[index] = value

    def __delattr__(self, index):
        self.jsonify.pop(index)

    def __delitem__(self, index):
        self.jsonify.pop(index)

    def __repr__(self):
        return json.dumps(self.jsonify, indent=2, default=lambda x: str(x))
Example:
response = jsonify(
    {
        'test': {
            'test1': [{'ok': 1}]
        }
    }
)
response.test -> jsonify({'test1': [{'ok': 1}]})
response.test.test1 -> jsonify([{'ok': 1}])
response.test.test1[0] -> jsonify({'ok': 1})
response.test.test1[0].ok -> int(1)