I want to convert JSON data into a Python object.
I receive JSON data objects from the Facebook API, which I want to store in my database.
My current view in Django (Python) (request.POST contains the JSON):
response = request.POST
user = FbApiUser(user_id = response['id'])
user.name = response['name']
user.username = response['username']
user.save()
This works fine, but how do I handle complex JSON data objects?
Wouldn't it be much better if I could somehow convert this JSON object into a Python object for easy use?
UPDATE
With Python 3, you can do it in one line, using SimpleNamespace and object_hook:
import json
from types import SimpleNamespace
data = '{"name": "John Smith", "hometown": {"name": "New York", "id": 123}}'
# Parse JSON into an object with attributes corresponding to dict keys.
x = json.loads(data, object_hook=lambda d: SimpleNamespace(**d))
print(x.name, x.hometown.name, x.hometown.id)
OLD ANSWER (Python 2)
In Python 2, you can do it in one line, using namedtuple and object_hook (but it's very slow with many nested objects):
import json
from collections import namedtuple
data = '{"name": "John Smith", "hometown": {"name": "New York", "id": 123}}'
# Parse JSON into an object with attributes corresponding to dict keys.
x = json.loads(data, object_hook=lambda d: namedtuple('X', d.keys())(*d.values()))
print x.name, x.hometown.name, x.hometown.id
or, to reuse this easily:
def _json_object_hook(d): return namedtuple('X', d.keys())(*d.values())
def json2obj(data): return json.loads(data, object_hook=_json_object_hook)
x = json2obj(data)
If you want it to handle keys that aren't good attribute names, check out namedtuple's rename parameter.
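For example, a minimal sketch (Python 3 syntax; the payload is made up just to show keys that aren't valid attribute names):

import json
from collections import namedtuple

# Hypothetical payload: "class" is a Python keyword and "user name" contains a space.
data = '{"class": "premium", "user name": "jsmith"}'

# rename=True replaces invalid field names with positional names (_0, _1, ...).
x = json.loads(data, object_hook=lambda d: namedtuple('X', d.keys(), rename=True)(*d.values()))
print(x._0, x._1)  # -> premium jsmith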
You could try this:
class User(object):
    def __init__(self, name, username):
        self.name = name
        self.username = username

import json
j = json.loads(your_json)
u = User(**j)
Just create a new object, and pass the parameters as a map.
You can have a JSON with objects too:
import json

class Address(object):
    def __init__(self, street, number):
        self.street = street
        self.number = number

    def __str__(self):
        return "{0} {1}".format(self.street, self.number)

class User(object):
    def __init__(self, name, address):
        self.name = name
        self.address = Address(**address)

    def __str__(self):
        return "{0}, {1}".format(self.name, self.address)

if __name__ == '__main__':
    js = '''{"name":"Cristian", "address":{"street":"Sesame","number":122}}'''
    j = json.loads(js)
    print(j)
    u = User(**j)
    print(u)
Check out the section titled Specializing JSON object decoding in the json module documentation. You can use that to decode a JSON object into a specific Python type.
Here's an example:
class User(object):
    def __init__(self, name, username):
        self.name = name
        self.username = username

import json

def object_decoder(obj):
    if '__type__' in obj and obj['__type__'] == 'User':
        return User(obj['name'], obj['username'])
    return obj

json.loads('{"__type__": "User", "name": "John Smith", "username": "jsmith"}',
           object_hook=object_decoder)

print type(User)  # -> <type 'type'>
Update
If you want to access data in a dictionary via the json module, do this:
user = json.loads('{"__type__": "User", "name": "John Smith", "username": "jsmith"}')
print user['name']
print user['username']
Just like a regular dictionary.
This is not code golf, but here is my shortest trick, using types.SimpleNamespace as the container for JSON objects.
Compared to the leading namedtuple solution, it is:
probably faster/smaller as it does not create a class for each object
shorter
no rename option, and probably the same limitation on keys that are not valid identifiers (uses setattr under the covers)
Example:
from __future__ import print_function
import json

try:
    from types import SimpleNamespace as Namespace
except ImportError:
    # Python 2.x fallback
    from argparse import Namespace

data = '{"name": "John Smith", "hometown": {"name": "New York", "id": 123}}'

x = json.loads(data, object_hook=lambda d: Namespace(**d))

print(x.name, x.hometown.name, x.hometown.id)
Here's a quick and dirty JSON pickle alternative:
import json
class User:
    def __init__(self, name, username):
        self.name = name
        self.username = username

    def to_json(self):
        return json.dumps(self.__dict__)

    @classmethod
    def from_json(cls, json_str):
        json_dict = json.loads(json_str)
        return cls(**json_dict)
# example usage
User("tbrown", "Tom Brown").to_json()
User.from_json(User("tbrown", "Tom Brown").to_json()).to_json()
For complex objects, you can use JSON Pickle
Python library for serializing any arbitrary object graph into JSON.
It can take almost any Python object and turn the object into JSON.
Additionally, it can reconstitute the object back into Python.
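A minimal sketch of that round trip (the User class here is just for illustration):

import jsonpickle

class User(object):
    def __init__(self, name, username):
        self.name = name
        self.username = username

user = User("John Smith", "jsmith")

# Serialize the whole object graph to a JSON string...
frozen = jsonpickle.encode(user)

# ...and reconstitute it back into a User instance.
thawed = jsonpickle.decode(frozen)
print(thawed.name, thawed.username)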
If you're using Python 3.5+, you can use jsons to serialize and deserialize to plain old Python objects:
import jsons
response = request.POST
# You'll need your class attributes to match your dict keys, so in your case do:
response['id'] = response.pop('user_id')
# Then you can load that dict into your class:
user = jsons.load(response, FbApiUser)
user.save()
You could also make FbApiUser inherit from jsons.JsonSerializable for more elegance:
user = FbApiUser.from_json(response)
These examples will work if your class consists of Python default types, like strings, integers, lists, datetimes, etc. The jsons lib will require type hints for custom types though.
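For example, here is a small sketch of what those type hints might look like for a nested custom type (the classes are illustrative, not part of the question):

from dataclasses import dataclass

import jsons

@dataclass
class Hometown:
    name: str
    id: int

@dataclass
class User:
    name: str
    hometown: Hometown  # the type hint tells jsons what to deserialize the nested dict into

data = {'name': 'John Smith', 'hometown': {'name': 'New York', 'id': 123}}
user = jsons.load(data, User)
print(user.hometown.name)  # -> New York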
If you are using Python 3.6+, you can use marshmallow-dataclass. Contrary to all the solutions listed above, it is both simple and type-safe:
from marshmallow_dataclass import dataclass

@dataclass
class User:
    name: str

user = User.Schema().load({"name": "Ramirez"})
dacite may also be a solution for you; it supports the following features:
nested structures
(basic) types checking
optional fields (i.e. typing.Optional)
unions
forward references
collections
custom type hooks
https://pypi.org/project/dacite/
from dataclasses import dataclass
from dacite import from_dict

@dataclass
class User:
    name: str
    age: int
    is_active: bool

data = {
    'name': 'John',
    'age': 30,
    'is_active': True,
}

user = from_dict(data_class=User, data=data)
assert user == User(name='John', age=30, is_active=True)
Improving on lovasoa's very good answer.
If you are using Python 3.6+, you can use:
pip install marshmallow-enum and
pip install marshmallow-dataclass
It's simple and type-safe.
You can transform your class into a JSON string and vice versa:
From object to JSON string:
from marshmallow_dataclass import dataclass
user = User("Danilo","50","RedBull",15,OrderStatus.CREATED)
user_json = User.Schema().dumps(user)
user_json_str = user_json.data
From JSON string to object:
json_str = '{"name":"Danilo", "orderId":"50", "productName":"RedBull", "quantity":15, "status":"Created"}'
user, err = User.Schema().loads(json_str)
print(user,flush=True)
Class definitions:
from enum import Enum

class OrderStatus(Enum):
    CREATED = 'Created'
    PENDING = 'Pending'
    CONFIRMED = 'Confirmed'
    FAILED = 'Failed'

@dataclass
class User:
    name: str
    orderId: str
    productName: str
    quantity: int
    status: OrderStatus

    def __init__(self, name, orderId, productName, quantity, status):
        self.name = name
        self.orderId = orderId
        self.productName = productName
        self.quantity = quantity
        self.status = status
Since no one provided an answer quite like mine, I am going to post it here.
It is a robust class that can easily convert back and forth between a JSON string and a dict, copied from my answer to another question:
import json

class PyJSON(object):
    def __init__(self, d):
        if type(d) is str:
            d = json.loads(d)
        self.from_dict(d)

    def from_dict(self, d):
        self.__dict__ = {}
        for key, value in d.items():
            if type(value) is dict:
                value = PyJSON(value)
            self.__dict__[key] = value

    def to_dict(self):
        d = {}
        for key, value in self.__dict__.items():
            if type(value) is PyJSON:
                value = value.to_dict()
            d[key] = value
        return d

    def __repr__(self):
        return str(self.to_dict())

    def __setitem__(self, key, value):
        self.__dict__[key] = value

    def __getitem__(self, key):
        return self.__dict__[key]
json_str = """... JSON string ..."""
py_json = PyJSON(json_str)
I have written a small (de)serialization framework called any2any that helps with complex transformations between two Python types.
In your case, I guess you want to transform from a dictionary (obtained with json.loads) to a complex object with attributes such as response.name and response.education, with a nested structure like response.education.id, etc.
So that's exactly what this framework is made for. The documentation is not great yet, but by using any2any.simple.MappingToObject, you should be able to do that very easily. Please ask if you need help.
JSON to Python object
The following code creates dynamic attributes from the object's keys, recursively.
JSON object - fb_data.json:
{
    "name": "John Smith",
    "hometown": {
        "name": "New York",
        "id": 123
    },
    "list": [
        "a",
        "b",
        "c",
        1,
        {
            "key": 1
        }
    ],
    "object": {
        "key": {
            "key": 1
        }
    }
}
During the conversion we have three cases:
lists
dicts (new object)
bool, int, float and str
import json

class AppConfiguration(object):
    def __init__(self, data=None):
        if data is None:
            with open("fb_data.json") as fh:
                data = json.loads(fh.read())
        else:
            data = dict(data)

        for key, val in data.items():
            setattr(self, key, self.compute_attr_value(val))

    def compute_attr_value(self, value):
        if isinstance(value, list):
            return [self.compute_attr_value(x) for x in value]
        elif isinstance(value, dict):
            return AppConfiguration(value)
        else:
            return value

if __name__ == "__main__":
    instance = AppConfiguration()

    print(instance.name)
    print(instance.hometown.name)
    print(instance.hometown.id)
    print(instance.list[4].key)
    print(instance.object.key.key)
Now the key-value pairs are attributes, and nested dicts become objects.
output:
John Smith
New York
123
1
1
Paste JSON as Code
Supports TypeScript, Python, Go, Ruby, C#, Java, Swift, Rust, Kotlin, C++, Flow, Objective-C, JavaScript, Elm, and JSON Schema.
Interactively generate types and (de-)serialization code from JSON, JSON Schema, and TypeScript
Paste JSON/JSON Schema/TypeScript as code
quicktype infers types from sample JSON data, then outputs strongly typed models and serializers for working with that data in your desired programming language.
output:
# Generated by https://quicktype.io
#
# To change quicktype's target language, run command:
#
#   "Set quicktype target language"

from typing import List, Union

class Hometown:
    name: str
    id: int

    def __init__(self, name: str, id: int) -> None:
        self.name = name
        self.id = id

class Key:
    key: int

    def __init__(self, key: int) -> None:
        self.key = key

class Object:
    key: Key

    def __init__(self, key: Key) -> None:
        self.key = key

class FbData:
    name: str
    hometown: Hometown
    list: List[Union[Key, int, str]]
    object: Object

    def __init__(self, name: str, hometown: Hometown, list: List[Union[Key, int, str]], object: Object) -> None:
        self.name = name
        self.hometown = hometown
        self.list = list
        self.object = object
This extension is available for free in the Visual Studio Code Marketplace.
The lightest solution, I think, is:
import orjson  # faster than json =)
from typing import NamedTuple

_j = '{"name":"Ivan","age":37,"mother":{"name":"Olga","age":58},' \
     '"children":["Mary","Igor","Jane"],"married":true,"dog":null}'

class PersonNameAge(NamedTuple):
    name: str
    age: int

class UserInfo(NamedTuple):
    name: str
    age: int
    mother: PersonNameAge
    children: list
    married: bool
    dog: str

j = orjson.loads(_j)
u = UserInfo(**j)

print(u.name, u.age, u.mother, u.children, u.married, u.dog)
>>> Ivan 37 {'name': 'Olga', 'age': 58} ['Mary', 'Igor', 'Jane'] True None
Expanding on DS's answer a bit, if you need the object to be mutable (which namedtuple is not), you can use the recordclass library instead of namedtuple:
import json
from recordclass import recordclass
data = '{"name": "John Smith", "hometown": {"name": "New York", "id": 123}}'
# Parse into a mutable object
x = json.loads(data, object_hook=lambda d: recordclass('X', d.keys())(*d.values()))
The modified object can then be converted back to JSON very easily using simplejson:
import simplejson

x.name = "John Doe"
new_json = simplejson.dumps(x)
dataclass-wizard is a modern option that can similarly work for you. It supports auto key casing transforms, such as camelCase or TitleCase, both of which are quite common in API responses.
The default key transform when dumping an instance to a dict/JSON is camelCase, but this can be easily overridden using a Meta config supplied on the main dataclass.
https://pypi.org/project/dataclass-wizard/
from dataclasses import dataclass
from dataclass_wizard import fromdict, asdict

@dataclass
class User:
    name: str
    age: int
    is_active: bool

data = {
    'name': 'John',
    'age': 30,
    'isActive': True,
}

user = fromdict(User, data)
assert user == User(name='John', age=30, is_active=True)

json_dict = asdict(user)
assert json_dict == {'name': 'John', 'age': 30, 'isActive': True}
Example of setting a Meta config, which converts fields to lisp-case when serializing to dict/JSON:
from dataclass_wizard import DumpMeta

DumpMeta(key_transform='LISP').bind_to(User)
While searching for a solution, I've stumbled upon this blog post: https://blog.mosthege.net/2016/11/12/json-deserialization-of-nested-objects/
It uses the same technique as stated in previous answers, but with the use of decorators.
Another thing I found useful is the fact that it returns a typed object at the end of deserialization.
import json

class JsonConvert(object):
    class_mappings = {}

    @classmethod
    def class_mapper(cls, d):
        for keys, class_ in cls.class_mappings.items():
            if keys.issuperset(d.keys()):  # are all required arguments present?
                return class_(**d)
        # Raise exception instead of silently returning None
        raise ValueError('Unable to find a matching class for object: {!s}'.format(d))

    @classmethod
    def complex_handler(cls, Obj):
        if hasattr(Obj, '__dict__'):
            return Obj.__dict__
        else:
            raise TypeError('Object of type %s with value of %s is not JSON serializable' % (type(Obj), repr(Obj)))

    @classmethod
    def register(cls, claz):
        cls.class_mappings[frozenset(tuple([attr for attr, val in claz().__dict__.items()]))] = claz
        return claz

    @classmethod
    def to_json(cls, obj):
        return json.dumps(obj.__dict__, default=cls.complex_handler, indent=4)

    @classmethod
    def from_json(cls, json_str):
        return json.loads(json_str, object_hook=cls.class_mapper)
Usage:
@JsonConvert.register
class Employee(object):
    def __init__(self, Name: str = None, Age: int = None):
        self.Name = Name
        self.Age = Age
        return

@JsonConvert.register
class Company(object):
    def __init__(self, Name: str = "", Employees: [Employee] = None):
        self.Name = Name
        self.Employees = [] if Employees is None else Employees
        return

company = Company("Contonso")
company.Employees.append(Employee("Werner", 38))
company.Employees.append(Employee("Mary"))

as_json = JsonConvert.to_json(company)
from_json = JsonConvert.from_json(as_json)
as_json_from_json = JsonConvert.to_json(from_json)

assert as_json_from_json == as_json
print(as_json_from_json)
Modifying DS's response a bit, to load from a file:
import json
from collections import namedtuple

def _json_object_hook(d):
    return namedtuple('X', d.keys())(*d.values())

def load_data(file_name):
    with open(file_name, 'r') as file_data:
        return file_data.read().replace('\n', '')

def json2obj(file_name):
    return json.loads(load_data(file_name), object_hook=_json_object_hook)
One thing: this cannot load keys that start with a number, like this:
{
    "1_first_item": {
        "A": "1",
        "B": "2"
    }
}
Because "1_first_item" is not a valid python field name.
The answers given here do not return the correct object type, hence I created the methods below. The other answers also fail if you try to add fields to the class that do not exist in the given JSON:
import json
from typing import Any

def dict_to_class(class_name: Any, dictionary: dict) -> Any:
    instance = class_name()
    for key in dictionary.keys():
        setattr(instance, key, dictionary[key])
    return instance

def json_to_class(class_name: Any, json_string: str) -> Any:
    dict_object = json.loads(json_string)
    return dict_to_class(class_name, dict_object)
There are multiple viable answers already, but there are some minor libraries made by individuals that can do the trick for most users.
An example would be json2object. Given a defined class, it deserialises json data to your custom model, including custom attributes and child objects.
Its use is very simple. An example from the library wiki:
from json2object import jsontoobject as jo
class Student:
    def __init__(self):
        self.firstName = None
        self.lastName = None
        self.courses = [Course('')]

class Course:
    def __init__(self, name):
        self.name = name

data = '''{
    "firstName": "James",
    "lastName": "Bond",
    "courses": [{
        "name": "Fighting"},
        {
        "name": "Shooting"}
    ]
}
'''

model = Student()
result = jo.deserialize(data, model)
print(result.courses[0].name)
class SimpleClass:
    def __init__(self, **kwargs):
        for k, v in kwargs.items():
            if type(v) is dict:
                setattr(self, k, SimpleClass(**v))
            else:
                setattr(self, k, v)

json_dict = {'name': 'jane doe', 'username': 'jane', 'test': {'foo': 1}}

class_instance = SimpleClass(**json_dict)

print(class_instance.name, class_instance.test.foo)
print(vars(class_instance))
If you are looking for type-safe deserialization of JSON or any complex dict into a Python class, I would highly recommend pydantic for Python 3.7+. Not only does it have a succinct API (no 'helper' boilerplate required) and integrate with Python dataclasses, it also has static and runtime type validation of complex and nested data structures.
Example usage:
from pydantic import BaseModel
from datetime import datetime
class Item(BaseModel):
    field1: str | int          # union
    field2: int | None = None  # optional
    field3: str = 'default'    # default values

class User(BaseModel):
    name: str | None = None
    username: str
    created: datetime          # default type converters
    items: list[Item] = []     # nested complex types

data = {
    'name': 'Jane Doe',
    'username': 'user1',
    'created': '2020-12-31T23:59:00+10:00',
    'items': [
        {'field1': 1, 'field2': 2},
        {'field1': 'b'},
        {'field1': 'c', 'field3': 'override'}
    ]
}
user: User = User(**data)
For more details and features, check out the rationale section in pydantic's documentation.
If you're using Python 3.6 or newer, you could have a look at squema - a lightweight module for statically typed data structures. It makes your code easy to read while at the same time providing simple data validation, conversion and serialization without extra work. You can think of it as a more sophisticated and opinionated alternative to namedtuples and dataclasses. Here's how you could use it:
import json
from uuid import UUID
from squema import Squema

class FbApiUser(Squema):
    id: UUID
    age: int
    name: str

    def save(self):
        pass

user = FbApiUser(**json.loads(response))
user.save()
You can use
x = Map(json.loads(response))
x.__class__ = MyClass
where
class Map(dict):
    def __init__(self, *args, **kwargs):
        super(Map, self).__init__(*args, **kwargs)
        for arg in args:
            if isinstance(arg, dict):
                for k, v in arg.iteritems():
                    self[k] = v
                    if isinstance(v, dict):
                        self[k] = Map(v)

        if kwargs:
            # for python 3 use kwargs.items()
            for k, v in kwargs.iteritems():
                self[k] = v
                if isinstance(v, dict):
                    self[k] = Map(v)

    def __getattr__(self, attr):
        return self.get(attr)

    def __setattr__(self, key, value):
        self.__setitem__(key, value)

    def __setitem__(self, key, value):
        super(Map, self).__setitem__(key, value)
        self.__dict__.update({key: value})

    def __delattr__(self, item):
        self.__delitem__(item)

    def __delitem__(self, key):
        super(Map, self).__delitem__(key)
        del self.__dict__[key]
For a generic, future-proof solution:
I was searching for a solution that works with recordclass.RecordClass, supports nested objects, and works for both JSON serialization and deserialization.
Expanding on DS's answer, and on the solution from BeneStr, I came up with the following, which seems to work:
Code:
import json
import recordclass

class NestedRec(recordclass.RecordClass):
    a: int = 0
    b: int = 0

class ExampleRec(recordclass.RecordClass):
    x: int = None
    y: int = None
    nested: NestedRec = NestedRec()

class JsonSerializer:
    @staticmethod
    def dumps(obj, ensure_ascii=True, indent=None, sort_keys=False):
        return json.dumps(obj, default=JsonSerializer.__obj_to_dict,
                          ensure_ascii=ensure_ascii, indent=indent, sort_keys=sort_keys)

    @staticmethod
    def loads(s, klass):
        return JsonSerializer.__dict_to_obj(klass, json.loads(s))

    @staticmethod
    def __obj_to_dict(obj):
        if hasattr(obj, "_asdict"):
            return obj._asdict()
        else:
            return json.JSONEncoder().default(obj)

    @staticmethod
    def __dict_to_obj(klass, s_dict):
        kwargs = {
            key: JsonSerializer.__dict_to_obj(cls, s_dict[key]) if hasattr(cls, '_asdict') else s_dict[key]
            for key, cls in klass.__annotations__.items()
            if s_dict is not None and key in s_dict
        }
        return klass(**kwargs)
Usage:
example_0 = ExampleRec(x=10, y=20, nested=NestedRec(a=30, b=40))

# Serialize to JSON
json_str = JsonSerializer.dumps(example_0)
print(json_str)
# {
#     "x": 10,
#     "y": 20,
#     "nested": {
#         "a": 30,
#         "b": 40
#     }
# }

# Deserialize from JSON
example_1 = JsonSerializer.loads(json_str, ExampleRec)
example_1.x += 1
example_1.y += 1
example_1.nested.a += 1
example_1.nested.b += 1

json_str = JsonSerializer.dumps(example_1)
print(json_str)
# {
#     "x": 11,
#     "y": 21,
#     "nested": {
#         "a": 31,
#         "b": 41
#     }
# }
def load_model_from_dict(self, data: dict):
    for key, value in data.items():
        self.__dict__[key] = value
    return self
It helps return your own model, including unforeseen variables coming from the dict.
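A minimal usage sketch (the Profile class and the field names are made up for illustration):

import json

class Profile:
    def load_model_from_dict(self, data: dict):
        for key, value in data.items():
            self.__dict__[key] = value
        return self

payload = json.loads('{"name": "John Smith", "extra_field": 42}')
profile = Profile().load_model_from_dict(payload)
print(profile.name, profile.extra_field)  # attributes exist even though the class never declared them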
So I was hunting for a way to unmarshal any arbitrary type (think dict of dataclass, or dict of a dict of an array of dataclass) without a ton of custom deserialization code.
This is my approach:
import json
from dataclasses import dataclass, make_dataclass

from dataclasses_json import DataClassJsonMixin, dataclass_json

@dataclass_json
@dataclass
class Person:
    name: str

def unmarshal_json(data, t):
    Unmarshal = make_dataclass('Unmarshal', [('res', t)],
                               bases=(DataClassJsonMixin,))
    d = json.loads(data)
    out = Unmarshal.from_dict({"res": d})
    return out.res

unmarshalled = unmarshal_json('{"1": {"name": "john"} }', dict[str, Person])
print(unmarshalled)
Prints: {'1': Person(name='john')}
This appears to be an XY problem (asking A where the actual problem is B).
The root of the issue is: How to effectively reference/modify deep-nested JSON structures without having to do obj['foo']['bar'][42]['quux'], which poses a typing challenge, a code-bloat issue, a readability issue and an error-trapping issue?
Use glom
from glom import glom
# Basic deep get
data = {'a': {'b': {'c': 'd'}}}
print(glom(data, 'a.b.c'))
It will handle list items also: glom(data, 'a.b.c.42.d')
I've benchmarked it against a naive implementation:
def extract(J, levels):
    # Twice as fast as using glom
    for level in levels.split('.'):
        J = J[int(level) if level.isnumeric() else level]
    return J
... and it takes 0.14 ms on a complex JSON object, compared with 0.06 ms for the naive implementation.
It can also handle complex queries, e.g. pulling out all foo.bar.records where .name == 'Joe Bloggs'
EDIT:
Another performant approach is to recursively use a class that overrides __getitem__ and __getattr__:
class Ob:
    def __init__(self, J):
        self.J = J

    def __getitem__(self, index):
        return Ob(self.J[index])

    def __getattr__(self, attr):
        value = self.J.get(attr, None)
        return Ob(value) if type(value) in (list, dict) else value
Now you can do:
ob = Ob(J)
# if you're fetching a final raw value (not a list/dict)
ob.foo.bar[42].quux.leaf
# for intermediate values
ob.foo.bar[42].quux.J
This also benchmarks surprisingly well. Comparable with my previous naive impl. If anyone can spot a way to tidy up access for non-leaf queries, leave a comment!
Python 3.x
The best approach I could come up with, to my knowledge, is this.
Note that this code treats set() too.
This approach is generic, requiring only the extension of the class (see the second example).
Note that I'm only doing it with files, but it's easy to modify the behavior to your taste.
However, this is a codec.
With a little more work you can construct your class in other ways.
I assume a default constructor to instantiate it, then I update the class dict.
import json
import collections.abc

class JsonClassSerializable(json.JSONEncoder):

    REGISTERED_CLASS = {}

    def register(ctype):
        JsonClassSerializable.REGISTERED_CLASS[ctype.__name__] = ctype

    def default(self, obj):
        if isinstance(obj, collections.abc.Set):
            return dict(_set_object=list(obj))
        if isinstance(obj, JsonClassSerializable):
            jclass = {}
            jclass["name"] = type(obj).__name__
            jclass["dict"] = obj.__dict__
            return dict(_class_object=jclass)
        else:
            return json.JSONEncoder.default(self, obj)

    def json_to_class(self, dct):
        if '_set_object' in dct:
            return set(dct['_set_object'])
        elif '_class_object' in dct:
            cclass = dct['_class_object']
            cclass_name = cclass["name"]
            if cclass_name not in self.REGISTERED_CLASS:
                raise RuntimeError(
                    "Class {} not registered in JSON Parser"
                    .format(cclass["name"])
                )
            instance = self.REGISTERED_CLASS[cclass_name]()
            instance.__dict__ = cclass["dict"]
            return instance
        return dct

    def encode_(self, file):
        with open(file, 'w') as outfile:
            json.dump(
                self.__dict__, outfile,
                cls=JsonClassSerializable,
                indent=4,
                sort_keys=True
            )

    def decode_(self, file):
        try:
            with open(file, 'r') as infile:
                self.__dict__ = json.load(
                    infile,
                    object_hook=self.json_to_class
                )
        except FileNotFoundError:
            print("Persistence load failed: "
                  "'{}' does not exist".format(file))

class C(JsonClassSerializable):
    def __init__(self):
        self.mill = "s"

JsonClassSerializable.register(C)

class B(JsonClassSerializable):
    def __init__(self):
        self.a = 1230
        self.c = C()

JsonClassSerializable.register(B)

class A(JsonClassSerializable):
    def __init__(self):
        self.a = 1
        self.b = {1, 2}
        self.c = B()

JsonClassSerializable.register(A)

A().encode_("test")
b = A()
b.decode_("test")
print(b.a)
print(b.b)
print(b.c.a)
Edit
With some more research, I found a way to generalize without the need for the superclass register method call, by using a metaclass:
import json
import collections.abc

REGISTERED_CLASS = {}

class MetaSerializable(type):
    def __call__(cls, *args, **kwargs):
        if cls.__name__ not in REGISTERED_CLASS:
            REGISTERED_CLASS[cls.__name__] = cls
        return super(MetaSerializable, cls).__call__(*args, **kwargs)

class JsonClassSerializable(json.JSONEncoder, metaclass=MetaSerializable):

    def default(self, obj):
        if isinstance(obj, collections.abc.Set):
            return dict(_set_object=list(obj))
        if isinstance(obj, JsonClassSerializable):
            jclass = {}
            jclass["name"] = type(obj).__name__
            jclass["dict"] = obj.__dict__
            return dict(_class_object=jclass)
        else:
            return json.JSONEncoder.default(self, obj)

    def json_to_class(self, dct):
        if '_set_object' in dct:
            return set(dct['_set_object'])
        elif '_class_object' in dct:
            cclass = dct['_class_object']
            cclass_name = cclass["name"]
            if cclass_name not in REGISTERED_CLASS:
                raise RuntimeError(
                    "Class {} not registered in JSON Parser"
                    .format(cclass["name"])
                )
            instance = REGISTERED_CLASS[cclass_name]()
            instance.__dict__ = cclass["dict"]
            return instance
        return dct

    def encode_(self, file):
        with open(file, 'w') as outfile:
            json.dump(
                self.__dict__, outfile,
                cls=JsonClassSerializable,
                indent=4,
                sort_keys=True
            )

    def decode_(self, file):
        try:
            with open(file, 'r') as infile:
                self.__dict__ = json.load(
                    infile,
                    object_hook=self.json_to_class
                )
        except FileNotFoundError:
            print("Persistence load failed: "
                  "'{}' does not exist".format(file))

class C(JsonClassSerializable):
    def __init__(self):
        self.mill = "s"

class B(JsonClassSerializable):
    def __init__(self):
        self.a = 1230
        self.c = C()

class A(JsonClassSerializable):
    def __init__(self):
        self.a = 1
        self.b = {1, 2}
        self.c = B()

A().encode_("test")
b = A()
b.decode_("test")
print(b.a)
# 1
print(b.b)
# {1, 2}
print(b.c.a)
# 1230
print(b.c.c.mill)
# s
This is not a very difficult thing. I looked at the answers above, and most of them have a performance problem with lists.
This code is much faster than those above:
import json

class jsonify:
    def __init__(self, data):
        self.jsonify = data

    def __getattr__(self, attr):
        value = self.jsonify.get(attr)
        if isinstance(value, (list, dict)):
            return jsonify(value)
        return value

    def __getitem__(self, index):
        value = self.jsonify[index]
        if isinstance(value, (list, dict)):
            return jsonify(value)
        return value

    def __setitem__(self, index, value):
        self.jsonify[index] = value

    def __delattr__(self, index):
        self.jsonify.pop(index)

    def __delitem__(self, index):
        self.jsonify.pop(index)

    def __repr__(self):
        return json.dumps(self.jsonify, indent=2, default=lambda x: str(x))
Example:
response = jsonify(
    {
        'test': {
            'test1': [{'ok': 1}]
        }
    }
)

response.test -> jsonify({'test1': [{'ok': 1}]})
response.test.test1 -> jsonify([{'ok': 1}])
response.test.test1[0] -> jsonify({'ok': 1})
response.test.test1[0].ok -> int(1)