JSON serialization of dictionary with complex objects - python

I am trying to serialize the dictionary playersElo so that I can save/load it as/from JSON, but its values are not serializable objects and I can't find a way to do it.
playersElo = {}  # dictionary of {<int> : <PlayerElo>}
playersElo[1] = PlayerElo()
playersElo[2] = PlayerElo()
...

class PlayerElo:
    """
    A class to represent a player in the Elo Rating System
    """
    def __init__(self, name: str, id: str, rating):
        self.id = id
        self.name = name
        # comment the 2 lines below in order to start with a rating associated
        # to current player rank
        self.eloratings = {0: 1500}
        self.elomatches = {0: 0}
        self.initialrating = rating

Maybe this can be a starting spot for you. The serializer grabs the __dict__ attribute from the object and makes a new dict-of-dicts, then writes it to JSON. The deserializer creates a dummy object, then updates the __dict__ on the way in.
import json

class PlayerElo:
    """
    A class to represent a player in the Elo Rating System
    """
    def __init__(self, name: str, id: str, rating):
        self.id = id
        self.name = name
        self.eloratings = {0: 1500}
        self.elomatches = {0: 0}
        self.initialrating = rating

playersElo = {}  # dictionary of {<int> : <PlayerElo>}
playersElo[1] = PlayerElo('Joe', '123', 999)
playersElo[2] = PlayerElo('Bill', '456', 1999)

def serialize(ratings):
    # build a dict-of-dicts from each object's __dict__ and write it out as JSON
    newdict = {i: j.__dict__ for i, j in ratings.items()}
    json.dump(newdict, open('x.json', 'w'))

def deserialize():
    o = json.load(open('x.json'))
    pe = {}
    for k, v in o.items():
        # create a dummy object, then overwrite its attributes from the JSON data
        obj = PlayerElo('0', '0', 0)
        obj.__dict__.update(v)
        pe[int(k)] = obj
    return pe

print(playersElo)
serialize(playersElo)
pe = deserialize()
print(pe)

You can extend json.JSONEncoder to handle instances of your class. There are examples in the module's documentation, but here is one way of doing it with your PlayerElo class.
Note: Also see my answer to the question Making object JSON serializable with regular encoder for a more generic way of doing it.
import json

class PlayerElo:
    """ A class to represent a player in the Elo Rating System. """
    def __init__(self, name: str, id: str, rating):
        self.id = id
        self.name = name
        # comment the 2 lines below in order to start with a rating associated
        # to current player rank
        self.eloratings = {0: 1500}
        self.elomatches = {0: 0}
        self.initialrating = rating

class MyJSONEncoder(json.JSONEncoder):
    def default(self, obj):
        if isinstance(obj, PlayerElo):
            return dict(type='PlayerElo', name=obj.name, id=obj.id,
                        rating=obj.initialrating)
        return super().default(obj)

playersElo = {}  # dictionary of {<int> : <PlayerElo>}
playersElo[1] = PlayerElo('Bob', 'thx1138', 4)
playersElo[2] = PlayerElo('Sue', 'p-138', 3)

from pprint import pprint

my_encoder = MyJSONEncoder()
pprint(my_encoder.encode(playersElo))
Here is the JSON string it generated and printed:
('{"1": {"type": "PlayerElo", "name": "Bob", "id": "thx1138", "rating": 4}, '
'"2": {"type": "PlayerElo", "name": "Sue", "id": "p-138", "rating": 3}}')

Related

Is there a way to turn a many to many object relationship into a dictionary?

I have two classes:
Club, with properties id and name
Student, with properties id, name and clubs (a list)
One student can be part of multiple clubs, and one club can have multiple students.
class Club:
    def __init__(self, id, name):
        self.id = id
        self.name = name

class Student:
    def __init__(self, id, name, clubs):
        self.id = id
        self.name = name
        self.clubs = clubs
I want to have a dictionary where the key is club name and value is list of students.
I have around 30 different many to many relationships in the application.
Is there a way to do it in a generic way?
A crude inverter could be like below.
def reverse(elems, array_name, i_key):
    dic = {}
    for elem in elems:
        for elem_field in elem[array_name]:
            key = elem_field[i_key]
            if key not in dic:
                dic[key] = []
            dic[key].append(elem)
    return dic
club_a = { 'name': 'a'}
club_b = { 'name': 'b'}
club_c = { 'name': 'c'}
stud_a = { 'id': 1, 'clubs': [club_a, club_b]}
stud_b = { 'id': 2, 'clubs': [club_b, club_c]}
print(reverse([stud_a, stud_b], 'clubs', 'name'))
# a: stud_a
# b: stud_a, stud_b
# c: stud_b
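The question uses classes rather than plain dicts; the same inversion works on instances if the lookups go through getattr instead of indexing. A rough sketch along the same lines (my addition, not from the answer above):
def reverse_objects(elems, list_attr, key_attr):
    # Same crude inverter, but reading attributes from class instances.
    dic = {}
    for elem in elems:
        for member in getattr(elem, list_attr):
            key = getattr(member, key_attr)
            dic.setdefault(key, []).append(elem)
    return dic

club_a, club_b = Club(1, 'a'), Club(2, 'b')
students = [Student(1, 'Ann', [club_a, club_b]), Student(2, 'Bob', [club_b])]
print(reverse_objects(students, 'clubs', 'name'))
# club name -> list of Student instances (printed with their default reprs)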

How to access external object attributes from the items in a list in Python?

I have a City object that has some attributes, including a list of houses:
city1 = {
'name': "Tokio",
'houses': [
{
'code': 1,
'residents': 4
},
{
'code': 2,
'residents': 2
}
]
}
My problem is that, within each object in the houses list, I need a method mount_description(self) that can access the name attribute of the enclosing City object that this list of houses belongs to:
class House():
    code = None
    residents = 0

    def mount_description(self):
        # get 'name' attribute of the external object
        pass

class City():
    name = None
    houses = list()
I know the correct thing would be for the City object to pass its name to the items in the list of houses, but unfortunately, due to many details, I cannot do this. I really need each House object to be able to access the external object it is located in.
Does anyone know how to do this? Or if this is possible?
Give the house a reference to its host city:
class House():
    code = None
    residents = 0
    city = None

    def __init__(self, city, code, residents):
        self.city = city
        self.code = code
        self.residents = residents

    # Do what you want here
    def mount_description(self):
        print(self.city.name)

class City():
    name = None

    def __init__(self, json):
        self.houses = []  # per-instance list, so different cities don't share houses
        for house in json['houses']:
            self.houses.append(House(self, house['code'], house['residents']))
        self.name = json['name']

    def set_name(self, newname):
        self.name = newname

    def print_name(self):
        print(self.name)
Then you can do
city1_json = {
'name': "Tokio",
'houses': [
{
'code': 1,
'residents': 4
},
{
'code': 2,
'residents': 2
}
]
}
city = City(city1_json)
city.print_name()
house1 = city.houses[0]
house1.mount_description()
>>> Tokio
>>> Tokio
city.set_name("Paris")
city.print_name()
house1.mount_description()
>>> Paris
>>> Paris
I think you need to build something like a simple tree. The City keeps a list of houses (its children), and since a house doesn't know where it is located, each house needs a parent reference back to its city.
class House():
    def __init__(self, code=None, residents=0):
        self.code = code
        self.residents = residents
        self.parent = None

    @property
    def city(self):
        # get the 'name' attribute of the enclosing City via the parent reference
        if self.parent:
            return self.parent.name
        else:
            return None

class City():
    def __init__(self, name):
        self.name = name
        self.houses = list()

    def add_house(self, house):
        self.houses.append(house)
        house.parent = self

    def add_houses(self, houses):
        self.houses += houses
        for house in houses:
            house.parent = self
city_1 = City('Tokyo')
house_1 = House(code=1, residents=4)
house_2 = House(code=2, residents=2)
city_1.add_house(house_1)
city_1.add_house(house_2)
city_2 = City('Chicago')
house_3 = House(code=1, residents=3)
house_4 = House(code=2, residents=5)
city_2.add_houses([house_3, house_4])
>>> house_1.city, house_3.city
('Tokyo', 'Chicago')

pynamodb create Model based off existing Class object

I am using pynamodb in a Python Flask project and am starting to build my model(s) to define the objects that will be used with tables.
The documentation says you define a model as follows:
from pynamodb.models import Model
from pynamodb.attributes import UnicodeAttribute
class UserModel(Model):
    """
    A DynamoDB User
    """
    class Meta:
        table_name = "dynamodb-user"
    email = UnicodeAttribute(null=True)
    first_name = UnicodeAttribute(range_key=True)
    last_name = UnicodeAttribute(hash_key=True)
However I already have an existing class in another module, that I've already defined, see below:
class ActivityTask:
    def __init__(self, task_name, sequence_order):
        self.taskid = uuid.uuid4()
        self.taskcreated = datetime.datetime.now()
        self.taskname = task_name
        self.sequenceorder = sequence_order
Is there a way for me to somehow "port" my existing ActivityTask class object so that I can use it as a model? As it already matches the schema for the DynamoDB table in question.
Here is a class I made to automatically generate a PynamoDB Model from a Marshmallow class:
class PynamodbModel:
    def __init__(self, base_object):
        self.base_object = base_object
        attributes = self.make_attributes(self.base_object.schema)
        meta_attr = {"table_name": self.base_object.__name__}
        attributes['Meta'] = type("Meta", (), meta_attr)
        self.table: Model = type("Table", (Model,), attributes)

    def convert_attr(self, attr):
        if type(attr) == Nested:
            attr = attr.inner.nested

    def make_attributes(self, schema_obj):
        attributes = {}
        for name, elem in schema_obj._declared_fields.items():
            if name == 'id':
                attributes[name] = attibute_conversion[type(elem)](hash_key=True)
            elif type(elem) == List:
                if type(elem.inner) == Nested:
                    attributes[name] = attibute_conversion[type(elem)](of=self.make_nested_attr(elem.inner.nested))
                else:
                    attributes[name] = attibute_conversion[type(elem)]()
            elif type(elem) == Nested:
                attributes[name] = self.make_nested_attr(elem.nested)
            else:
                attributes[name] = attibute_conversion[type(elem)]()
        return attributes

    def make_nested_attr(self, nested_schema):
        attributes = self.make_attributes(nested_schema)
        return type(nested_schema.__class__.__name__, (MapAttribute,), attributes)
Go here to see the full example!
I basically just iterate over the Marshmallow Schema attributes and assign the corresponding PynamoDB attributes. Hope it helps!
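The snippet relies on an attibute_conversion lookup table that only appears in the linked full example. A minimal sketch of what such a mapping might look like, assuming Marshmallow field types on one side and PynamoDB attribute classes on the other (these pairings are my guess, not the answerer's actual table):
from marshmallow.fields import String, Integer, Boolean, List
from pynamodb.attributes import (UnicodeAttribute, NumberAttribute,
                                 BooleanAttribute, ListAttribute)

# Hypothetical Marshmallow-field-type -> PynamoDB-attribute-class mapping.
attibute_conversion = {
    String: UnicodeAttribute,
    Integer: NumberAttribute,
    Boolean: BooleanAttribute,
    List: ListAttribute,
}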

Insert Nested Objects into mongodb in python3 (Nested object is a class Instance)

I have a nested structure like this:
class Student(object):
    def __init__(self, name, age, address):
        self.name = name
        self.age = age
        self.address = address

class Address(object):
    def __init__(self, street, pcode, another_obj):
        self.street = street
        self.postal_code = pcode
        self.another_obj = another_obj

class AnotherObject(object):
    def __init__(self):
        self.someattr = 'some_init_value'

# Now I am going to save data like this
another_obj = AnotherObject()
address = Address('xyz', 'xyz', another_obj)
obj = Student('abc', 32, address)
The object obj is an instance of a class, and I am doing collection.insert_one(obj). Usually I would use obj.__dict__ to get a dict of the class instance, which is compatible with pymongo, but that does not convert the nested objects into dicts too.
The problem here is that address and another_obj are themselves class instances, and that causes a bson.errors.InvalidDocument exception on insertion.
Is there a way to convert nested class instances/documents (address & another_obj) into dicts or any other type acceptable to MongoDB?
The package I am using for mongodb communication is pymongo v3.9.0.
Error is TypeError:
document must be an instance of dict, bson.son.SON,
bson.raw_bson.RawBSONDocument, or a type that inherits from
collections.MutableMapping
Add in some to_dict() methods to your classes:
from pymongo import MongoClient

class Student(object):
    def __init__(self, name, age, address):
        self.name = name
        self.age = age
        self.address = address

    def to_dict(self) -> dict:
        record = {
            "name": self.name,
            "age": self.age,
            "address": self.address.to_dict() if self.address is not None else None
        }
        return record

class Address(object):
    def __init__(self, street, pcode, another_obj):
        self.street = street
        self.postal_code = pcode
        self.another_obj = another_obj

    def to_dict(self) -> dict:
        record = {
            "street": self.street,
            "postal_code": self.postal_code,
            "another_obj": self.another_obj.to_dict() if self.another_obj is not None else None
        }
        return record

class AnotherObject(object):
    def __init__(self):
        self.someattr = 'some_init_value'

    def to_dict(self) -> dict:
        record = {
            "someattr": self.someattr
        }
        return record

# Now I am going to save data like this
another_obj = AnotherObject()
address = Address('xyz', 'xyz', another_obj)
obj = Student('abc', 32, address)

db = MongoClient()['yourdatabase']
db.collection.insert_one(obj.to_dict())
print(db.collection.find_one({}))
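If writing a to_dict() on every class gets repetitive, one generic alternative is to walk __dict__ recursively before inserting. This is a rough sketch of my own, not part of the answer above:
def to_document(value):
    # Recursively convert class instances (and containers of them) into plain dicts/lists.
    if hasattr(value, '__dict__'):
        return {k: to_document(v) for k, v in value.__dict__.items()}
    if isinstance(value, (list, tuple)):
        return [to_document(v) for v in value]
    return value

db.collection.insert_one(to_document(obj))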

How to convert JSON data into a Python object?

I want to convert JSON data into a Python object.
I receive JSON data objects from the Facebook API, which I want to store in my database.
My current View in Django (Python) (request.POST contains the JSON):
response = request.POST
user = FbApiUser(user_id = response['id'])
user.name = response['name']
user.username = response['username']
user.save()
This works fine, but how do I handle complex JSON data objects?
Wouldn't it be much better if I could somehow convert this JSON object into a Python object for easy use?
UPDATE
With Python3, you can do it in one line, using SimpleNamespace and object_hook:
import json
from types import SimpleNamespace
data = '{"name": "John Smith", "hometown": {"name": "New York", "id": 123}}'
# Parse JSON into an object with attributes corresponding to dict keys.
x = json.loads(data, object_hook=lambda d: SimpleNamespace(**d))
print(x.name, x.hometown.name, x.hometown.id)
OLD ANSWER (Python2)
In Python2, you can do it in one line, using namedtuple and object_hook (but it's very slow with many nested objects):
import json
from collections import namedtuple
data = '{"name": "John Smith", "hometown": {"name": "New York", "id": 123}}'
# Parse JSON into an object with attributes corresponding to dict keys.
x = json.loads(data, object_hook=lambda d: namedtuple('X', d.keys())(*d.values()))
print x.name, x.hometown.name, x.hometown.id
or, to reuse this easily:
def _json_object_hook(d): return namedtuple('X', d.keys())(*d.values())
def json2obj(data): return json.loads(data, object_hook=_json_object_hook)
x = json2obj(data)
If you want it to handle keys that aren't good attribute names, check out namedtuple's rename parameter.
You could try this:
class User(object):
    def __init__(self, name, username):
        self.name = name
        self.username = username

import json
j = json.loads(your_json)
u = User(**j)
Just create a new object, and pass the parameters as a map.
You can have a JSON with objects too:
import json

class Address(object):
    def __init__(self, street, number):
        self.street = street
        self.number = number

    def __str__(self):
        return "{0} {1}".format(self.street, self.number)

class User(object):
    def __init__(self, name, address):
        self.name = name
        self.address = Address(**address)

    def __str__(self):
        return "{0} ,{1}".format(self.name, self.address)

if __name__ == '__main__':
    js = '''{"name":"Cristian", "address":{"street":"Sesame","number":122}}'''
    j = json.loads(js)
    print(j)
    u = User(**j)
    print(u)
Check out the section titled Specializing JSON object decoding in the json module documentation. You can use that to decode a JSON object into a specific Python type.
Here's an example:
class User(object):
    def __init__(self, name, username):
        self.name = name
        self.username = username

import json

def object_decoder(obj):
    if '__type__' in obj and obj['__type__'] == 'User':
        return User(obj['name'], obj['username'])
    return obj

json.loads('{"__type__": "User", "name": "John Smith", "username": "jsmith"}',
           object_hook=object_decoder)
print type(User) # -> <type 'type'>
Update
If you want to access data in a dictionary via the json module do this:
user = json.loads('{"__type__": "User", "name": "John Smith", "username": "jsmith"}')
print user['name']
print user['username']
Just like a regular dictionary.
This is not code golf, but here is my shortest trick, using types.SimpleNamespace as the container for JSON objects.
Compared to the leading namedtuple solution, it is:
probably faster/smaller as it does not create a class for each object
shorter
no rename option, and probably the same limitation on keys that are not valid identifiers (uses setattr under the covers)
Example:
from __future__ import print_function
import json
try:
from types import SimpleNamespace as Namespace
except ImportError:
# Python 2.x fallback
from argparse import Namespace
data = '{"name": "John Smith", "hometown": {"name": "New York", "id": 123}}'
x = json.loads(data, object_hook=lambda d: Namespace(**d))
print (x.name, x.hometown.name, x.hometown.id)
Here's a quick and dirty json pickle alternative
import json

class User:
    def __init__(self, name, username):
        self.name = name
        self.username = username

    def to_json(self):
        return json.dumps(self.__dict__)

    @classmethod
    def from_json(cls, json_str):
        json_dict = json.loads(json_str)
        return cls(**json_dict)

# example usage
User("Tom Brown", "tbrown").to_json()
User.from_json(User("Tom Brown", "tbrown").to_json()).to_json()
For complex objects, you can use JSON Pickle
Python library for serializing any arbitrary object graph into JSON.
It can take almost any Python object and turn the object into JSON.
Additionally, it can reconstitute the object back into Python.
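A minimal usage sketch (my own illustration, assuming the jsonpickle package is installed; encode and decode are its documented entry points):
import jsonpickle

class Hometown:
    def __init__(self, name, id):
        self.name = name
        self.id = id

class Person:
    def __init__(self, name, hometown):
        self.name = name
        self.hometown = hometown

frozen = jsonpickle.encode(Person("John Smith", Hometown("New York", 123)))
thawed = jsonpickle.decode(frozen)  # back to a Person instance, nested objects included
print(thawed.hometown.name)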
If you're using Python 3.5+, you can use jsons to serialize and deserialize to plain old Python objects:
import jsons
response = request.POST
# You'll need your class attributes to match your dict keys, so in your case do:
response['id'] = response.pop('user_id')
# Then you can load that dict into your class:
user = jsons.load(response, FbApiUser)
user.save()
You could also make FbApiUser inherit from jsons.JsonSerializable for more elegance:
user = FbApiUser.from_json(response)
These examples will work if your class consists of Python default types, like strings, integers, lists, datetimes, etc. The jsons lib will require type hints for custom types though.
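For example, here is a sketch of my own (not from the answer) of what those type hints could look like for a nested custom type; jsons infers what to build from the annotations on the constructor parameters:
import jsons

class Hometown:
    def __init__(self, name: str, id: int):
        self.name = name
        self.id = id

class Profile:
    def __init__(self, name: str, hometown: Hometown):
        self.name = name
        self.hometown = hometown

profile = jsons.load({'name': 'John Smith',
                      'hometown': {'name': 'New York', 'id': 123}}, Profile)
print(profile.hometown.name)  # the nested dict was loaded into a Hometown instance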
If you are using Python 3.6+, you can use marshmallow-dataclass. Contrary to all the solutions listed above, it is both simple and type safe:
from marshmallow_dataclass import dataclass

@dataclass
class User:
    name: str

user = User.Schema().load({"name": "Ramirez"})
dacite may also be a solution for you, it supports following features:
nested structures
(basic) types checking
optional fields (i.e. typing.Optional)
unions
forward references
collections
custom type hooks
https://pypi.org/project/dacite/
from dataclasses import dataclass
from dacite import from_dict

@dataclass
class User:
    name: str
    age: int
    is_active: bool

data = {
    'name': 'John',
    'age': 30,
    'is_active': True,
}

user = from_dict(data_class=User, data=data)
assert user == User(name='John', age=30, is_active=True)
Improving on lovasoa's very good answer.
If you are using Python 3.6+, you can use:
pip install marshmallow-enum and
pip install marshmallow-dataclass
It's simple and type-safe.
You can transform your class into a JSON string and vice versa:
From Object to String Json:
from marshmallow_dataclass import dataclass
user = User("Danilo","50","RedBull",15,OrderStatus.CREATED)
user_json = User.Schema().dumps(user)
user_json_str = user_json.data
From String Json to Object:
json_str = '{"name":"Danilo", "orderId":"50", "productName":"RedBull", "quantity":15, "status":"Created"}'
user, err = User.Schema().loads(json_str)
print(user,flush=True)
Class definitions:
from enum import Enum

class OrderStatus(Enum):
    CREATED = 'Created'
    PENDING = 'Pending'
    CONFIRMED = 'Confirmed'
    FAILED = 'Failed'

@dataclass
class User:
    name: str
    orderId: str
    productName: str
    quantity: int
    status: OrderStatus

    def __init__(self, name, orderId, productName, quantity, status):
        self.name = name
        self.orderId = orderId
        self.productName = productName
        self.quantity = quantity
        self.status = status
Since no one provided an answer quite like mine, I am going to post it here.
It is a robust class that can easily convert back and forth between JSON str and dict that I have copied from my answer to another question:
import json

class PyJSON(object):
    def __init__(self, d):
        if type(d) is str:
            d = json.loads(d)
        self.from_dict(d)

    def from_dict(self, d):
        self.__dict__ = {}
        for key, value in d.items():
            if type(value) is dict:
                value = PyJSON(value)
            self.__dict__[key] = value

    def to_dict(self):
        d = {}
        for key, value in self.__dict__.items():
            if type(value) is PyJSON:
                value = value.to_dict()
            d[key] = value
        return d

    def __repr__(self):
        return str(self.to_dict())

    def __setitem__(self, key, value):
        self.__dict__[key] = value

    def __getitem__(self, key):
        return self.__dict__[key]
json_str = """... JSON string ..."""
py_json = PyJSON(json_str)
I have written a small (de)serialization framework called any2any that helps with complex transformations between two Python types.
In your case, I guess you want to transform from a dictionary (obtained with json.loads) to a complex object with attributes such as response.education and response.name, and a nested structure like response.education.id, etc...
So that's exactly what this framework is made for. The documentation is not great yet, but by using any2any.simple.MappingToObject, you should be able to do that very easily. Please ask if you need help.
JSON to Python object
The following code recursively creates dynamic attributes from the object's keys.
JSON object - fb_data.json:
{
"name": "John Smith",
"hometown": {
"name": "New York",
"id": 123
},
"list": [
"a",
"b",
"c",
1,
{
"key": 1
}
],
"object": {
"key": {
"key": 1
}
}
}
On the conversion we have 3 cases:
lists
dicts (new object)
bool, int, float and str
import json

class AppConfiguration(object):
    def __init__(self, data=None):
        if data is None:
            with open("fb_data.json") as fh:
                data = json.loads(fh.read())
        else:
            data = dict(data)
        for key, val in data.items():
            setattr(self, key, self.compute_attr_value(val))

    def compute_attr_value(self, value):
        if isinstance(value, list):
            return [self.compute_attr_value(x) for x in value]
        elif isinstance(value, dict):
            return AppConfiguration(value)
        else:
            return value

if __name__ == "__main__":
    instance = AppConfiguration()
    print(instance.name)
    print(instance.hometown.name)
    print(instance.hometown.id)
    print(instance.list[4].key)
    print(instance.object.key.key)
Now the key, value pairs are attributes - objects.
output:
John Smith
New York
123
1
1
Paste JSON as Code
Supports TypeScript, Python, Go, Ruby, C#, Java, Swift, Rust, Kotlin, C++, Flow, Objective-C, JavaScript, Elm, and JSON Schema.
Interactively generate types and (de-)serialization code from JSON, JSON Schema, and TypeScript
Paste JSON/JSON Schema/TypeScript as code
quicktype infers types from sample JSON data, then outputs strongly typed models and serializers for working with that data in your desired programming language.
output:
# Generated by https://quicktype.io
#
# To change quicktype's target language, run command:
#
# "Set quicktype target language"
from typing import List, Union
class Hometown:
    name: str
    id: int

    def __init__(self, name: str, id: int) -> None:
        self.name = name
        self.id = id

class Key:
    key: int

    def __init__(self, key: int) -> None:
        self.key = key

class Object:
    key: Key

    def __init__(self, key: Key) -> None:
        self.key = key

class FbData:
    name: str
    hometown: Hometown
    list: List[Union[Key, int, str]]
    object: Object

    def __init__(self, name: str, hometown: Hometown, list: List[Union[Key, int, str]], object: Object) -> None:
        self.name = name
        self.hometown = hometown
        self.list = list
        self.object = object
This extension is available for free in the Visual Studio Code Marketplace.
The lightest solution, I think, is this:
import orjson  # faster than json =)
from typing import NamedTuple
_j = '{"name":"Иван","age":37,"mother":{"name":"Ольга","age":58},"children":["Маша","Игорь","Таня"],"married": true,' \
'"dog":null} '
class PersonNameAge(NamedTuple):
    name: str
    age: int

class UserInfo(NamedTuple):
    name: str
    age: int
    mother: PersonNameAge
    children: list
    married: bool
    dog: str
j = orjson.loads(_j)
u = UserInfo(**j)
print(u.name, u.age, u.mother, u.children, u.married, u.dog)
>>> Ivan 37 {'name': 'Olga', 'age': 58} ['Mary', 'Igor', 'Jane'] True None
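Note that the nested mother field stays a plain dict, as the output shows. If you want it as a PersonNameAge too, one small follow-up step (my addition, not part of the answer) is:
# Convert the nested dict into its NamedTuple type after the top-level load.
u = u._replace(mother=PersonNameAge(**u.mother))
print(u.mother.name, u.mother.age)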
Expanding on DS's answer a bit, if you need the object to be mutable (which namedtuple is not), you can use the recordclass library instead of namedtuple:
import json
from recordclass import recordclass
data = '{"name": "John Smith", "hometown": {"name": "New York", "id": 123}}'
# Parse into a mutable object
x = json.loads(data, object_hook=lambda d: recordclass('X', d.keys())(*d.values()))
The modified object can then be converted back to json very easily using simplejson:
import simplejson

x.name = "John Doe"
new_json = simplejson.dumps(x)
dataclass-wizard is a modern option that can similarly work for you. It supports auto key-casing transforms, such as camelCase or TitleCase, both of which are quite common in API responses.
The default key transform when dumping an instance to a dict/JSON is camelCase, but this can easily be overridden using a Meta config supplied on the main dataclass.
https://pypi.org/project/dataclass-wizard/
from dataclasses import dataclass
from dataclass_wizard import fromdict, asdict

@dataclass
class User:
    name: str
    age: int
    is_active: bool

data = {
    'name': 'John',
    'age': 30,
    'isActive': True,
}

user = fromdict(User, data)
assert user == User(name='John', age=30, is_active=True)

json_dict = asdict(user)
assert json_dict == {'name': 'John', 'age': 30, 'isActive': True}
Example of setting a Meta config, which converts fields to lisp-case when serializing to dict/JSON:
DumpMeta(key_transform='LISP').bind_to(User)
While searching for a solution, I stumbled upon this blog post: https://blog.mosthege.net/2016/11/12/json-deserialization-of-nested-objects/
It uses the same technique as stated in previous answers, but with the use of decorators.
Another thing I found useful is that it returns a typed object at the end of deserialization:
import json

class JsonConvert(object):
    class_mappings = {}

    @classmethod
    def class_mapper(cls, d):
        for keys, klass in cls.class_mappings.items():
            if keys.issuperset(d.keys()):  # are all required arguments present?
                return klass(**d)
        # Raise an exception instead of silently returning None
        raise ValueError('Unable to find a matching class for object: {!s}'.format(d))

    @classmethod
    def complex_handler(cls, Obj):
        if hasattr(Obj, '__dict__'):
            return Obj.__dict__
        else:
            raise TypeError('Object of type %s with value of %s is not JSON serializable' % (type(Obj), repr(Obj)))

    @classmethod
    def register(cls, claz):
        cls.class_mappings[frozenset(tuple([attr for attr, val in claz().__dict__.items()]))] = claz
        return claz

    @classmethod
    def to_json(cls, obj):
        return json.dumps(obj.__dict__, default=cls.complex_handler, indent=4)

    @classmethod
    def from_json(cls, json_str):
        return json.loads(json_str, object_hook=cls.class_mapper)
Usage:
@JsonConvert.register
class Employee(object):
    def __init__(self, Name: str = None, Age: int = None):
        self.Name = Name
        self.Age = Age
        return

@JsonConvert.register
class Company(object):
    def __init__(self, Name: str = "", Employees: [Employee] = None):
        self.Name = Name
        self.Employees = [] if Employees is None else Employees
        return

company = Company("Contonso")
company.Employees.append(Employee("Werner", 38))
company.Employees.append(Employee("Mary"))

as_json = JsonConvert.to_json(company)
from_json = JsonConvert.from_json(as_json)
as_json_from_json = JsonConvert.to_json(from_json)

assert(as_json_from_json == as_json)
print(as_json_from_json)
Modifying DS's response a bit, to load from a file:
import json
from collections import namedtuple

def _json_object_hook(d): return namedtuple('X', d.keys())(*d.values())

def load_data(file_name):
    with open(file_name, 'r') as file_data:
        return file_data.read().replace('\n', '')

def json2obj(file_name): return json.loads(load_data(file_name), object_hook=_json_object_hook)
One thing: this cannot load keys that start with a number, like this:
{
"1_first_item": {
"A": "1",
"B": "2"
}
}
Because "1_first_item" is not a valid python field name.
The answers given here do not return the correct object type, and they also fail if you try to add fields to the class that do not exist in the given JSON, hence I created these methods below:
import json
from typing import Any

def dict_to_class(class_name: Any, dictionary: dict) -> Any:
    instance = class_name()
    for key in dictionary.keys():
        setattr(instance, key, dictionary[key])
    return instance

def json_to_class(class_name: Any, json_string: str) -> Any:
    dict_object = json.loads(json_string)
    return dict_to_class(class_name, dict_object)
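A possible usage sketch (my example; it assumes the target class can be constructed with no arguments):
class User:
    name: str = ''
    username: str = ''

u = json_to_class(User, '{"name": "John Smith", "username": "jsmith"}')
print(u.name, u.username)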
There are multiple viable answers already, but there are some minor libraries made by individuals that can do the trick for most users.
An example would be json2object. Given a defined class, it deserialises json data to your custom model, including custom attributes and child objects.
Its use is very simple. An example from the library wiki:
from json2object import jsontoobject as jo

class Student:
    def __init__(self):
        self.firstName = None
        self.lastName = None
        self.courses = [Course('')]

class Course:
    def __init__(self, name):
        self.name = name

data = '''{
    "firstName": "James",
    "lastName": "Bond",
    "courses": [{
        "name": "Fighting"},
        {
        "name": "Shooting"}
    ]
}
'''

model = Student()
result = jo.deserialize(data, model)
print(result.courses[0].name)
class SimpleClass:
    def __init__(self, **kwargs):
        for k, v in kwargs.items():
            if type(v) is dict:
                setattr(self, k, SimpleClass(**v))
            else:
                setattr(self, k, v)

json_dict = {'name': 'jane doe', 'username': 'jane', 'test': {'foo': 1}}
class_instance = SimpleClass(**json_dict)

print(class_instance.name, class_instance.test.foo)
print(vars(class_instance))
If you are looking for type-safe deserialization of JSON or any complex dict into a Python class, I would highly recommend pydantic for Python 3.7+. Not only does it have a succinct API (no 'helper' boilerplate to write) and integrate with Python dataclasses, it also has static and runtime type validation for complex and nested data structures.
Example usage:
from pydantic import BaseModel
from datetime import datetime

class Item(BaseModel):
    field1: str | int          # union
    field2: int | None = None  # optional
    field3: str = 'default'    # default values

class User(BaseModel):
    name: str | None = None
    username: str
    created: datetime          # default type converters
    items: list[Item] = []     # nested complex types

data = {
    'name': 'Jane Doe',
    'username': 'user1',
    'created': '2020-12-31T23:59:00+10:00',
    'items': [
        {'field1': 1, 'field2': 2},
        {'field1': 'b'},
        {'field1': 'c', 'field3': 'override'}
    ]
}

user: User = User(**data)
For more details and features, check out the rationale section in pydantic's documentation.
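Going back the other way is just as short; the method name depends on the pydantic major version (a small sketch of my own):
# pydantic v1:
print(user.json())
# pydantic v2:
# print(user.model_dump_json())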
If you're using Python 3.6 or newer, you could have a look at squema - a lightweight module for statically typed data structures. It makes your code easy to read while at the same time providing simple data validation, conversion and serialization without extra work. You can think of it as a more sophisticated and opinionated alternative to namedtuples and dataclasses. Here's how you could use it:
from uuid import UUID
from squema import Squema

class FbApiUser(Squema):
    id: UUID
    age: int
    name: str

    def save(self):
        pass

user = FbApiUser(**json.loads(response))
user.save()
You can use
x = Map(json.loads(response))
x.__class__ = MyClass
where
class Map(dict):
    def __init__(self, *args, **kwargs):
        super(Map, self).__init__(*args, **kwargs)
        for arg in args:
            if isinstance(arg, dict):
                for k, v in arg.iteritems():
                    self[k] = v
                    if isinstance(v, dict):
                        self[k] = Map(v)
        if kwargs:
            # for python 3 use kwargs.items()
            for k, v in kwargs.iteritems():
                self[k] = v
                if isinstance(v, dict):
                    self[k] = Map(v)

    def __getattr__(self, attr):
        return self.get(attr)

    def __setattr__(self, key, value):
        self.__setitem__(key, value)

    def __setitem__(self, key, value):
        super(Map, self).__setitem__(key, value)
        self.__dict__.update({key: value})

    def __delattr__(self, item):
        self.__delitem__(item)

    def __delitem__(self, key):
        super(Map, self).__delitem__(key)
        del self.__dict__[key]
For a generic, future-proof solution:
I was searching for a solution that works with recordclass.RecordClass, supports nested objects, and handles both JSON serialization and deserialization.
Expanding on DS's answer and on the solution from BeneStr, I came up with the following, which seems to work:
Code:
import json
import recordclass

class NestedRec(recordclass.RecordClass):
    a: int = 0
    b: int = 0

class ExampleRec(recordclass.RecordClass):
    x: int = None
    y: int = None
    nested: NestedRec = NestedRec()

class JsonSerializer:
    @staticmethod
    def dumps(obj, ensure_ascii=True, indent=None, sort_keys=False):
        return json.dumps(obj, default=JsonSerializer.__obj_to_dict, ensure_ascii=ensure_ascii, indent=indent, sort_keys=sort_keys)

    @staticmethod
    def loads(s, klass):
        return JsonSerializer.__dict_to_obj(klass, json.loads(s))

    @staticmethod
    def __obj_to_dict(obj):
        if hasattr(obj, "_asdict"):
            return obj._asdict()
        else:
            return json.JSONEncoder().default(obj)

    @staticmethod
    def __dict_to_obj(klass, s_dict):
        kwargs = {
            key: JsonSerializer.__dict_to_obj(cls, s_dict[key]) if hasattr(cls, '_asdict') else s_dict[key]
            for key, cls in klass.__annotations__.items()
            if s_dict is not None and key in s_dict
        }
        return klass(**kwargs)
Usage:
example_0 = ExampleRec(x = 10, y = 20, nested = NestedRec( a = 30, b = 40 ) )
#Serialize to JSON
json_str = JsonSerializer.dumps(example_0)
print(json_str)
#{
# "x": 10,
# "y": 20,
# "nested": {
# "a": 30,
# "b": 40
# }
#}
# Deserialize from JSON
example_1 = JsonSerializer.loads(json_str, ExampleRec)
example_1.x += 1
example_1.y += 1
example_1.nested.a += 1
example_1.nested.b += 1
json_str = JsonSerializer.dumps(example_1)
print(json_str)
#{
# "x": 11,
# "y": 21,
# "nested": {
# "a": 31,
# "b": 41
# }
#}
def load_model_from_dict(self, data: dict):
    for key, value in data.items():
        self.__dict__[key] = value
    return self
This helps return your own model, including unforeseen variables coming from the dict.
So I was hunting for a way to unmarshal any arbitrary type (think dict of dataclass, or dict of a dict of an array of dataclass) without a ton of custom deserialization code.
This is my approach:
import json
from dataclasses import dataclass, make_dataclass
from dataclasses_json import DataClassJsonMixin, dataclass_json

@dataclass_json
@dataclass
class Person:
    name: str

def unmarshal_json(data, t):
    Unmarhsal = make_dataclass('Unmarhsal', [('res', t)],
                               bases=(DataClassJsonMixin,))
    d = json.loads(data)
    out = Unmarhsal.from_dict({"res": d})
    return out.res

unmarshalled = unmarshal_json('{"1": {"name": "john"} }', dict[str, Person])
print(unmarshalled)
Prints: {'1': Person(name='john')}
This appears to be an XY problem (asking A where the actual problem is B).
The root of the issue is: How to effectively reference/modify deep-nested JSON structures without having to do obj['foo']['bar'][42]['quux'], which poses a typing challenge, a code-bloat issue, a readability issue and an error-trapping issue?
Use glom
from glom import glom
# Basic deep get
data = {'a': {'b': {'c': 'd'}}}
print(glom(data, 'a.b.c'))
It will handle list items also: glom(data, 'a.b.c.42.d')
I've benchmarked it against a naive implementation:
def extract(J, levels):
    # Twice as fast as using glom
    for level in levels.split('.'):
        J = J[int(level) if level.isnumeric() else level]
    return J
... and it returns 0.14ms on a complex JSON object, compared with 0.06ms for the naive impl.
It can also handle complex queries, e.g. pulling out all foo.bar.records where .name == 'Joe Bloggs'.
EDIT:
Another performant approach is to recursively use a class that overrides __getitem__ and __getattr__:
class Ob:
    def __init__(self, J):
        self.J = J

    def __getitem__(self, index):
        return Ob(self.J[index])

    def __getattr__(self, attr):
        value = self.J.get(attr, None)
        return Ob(value) if type(value) in (list, dict) else value
Now you can do:
ob = Ob(J)
# if you're fetching a final raw value (not a list/dict)
ob.foo.bar[42].quux.leaf
# for intermediate values
ob.foo.bar[42].quux.J
This also benchmarks surprisingly well. Comparable with my previous naive impl. If anyone can spot a way to tidy up access for non-leaf queries, leave a comment!
Python 3.x
The best approach I could come up with was this.
Note that this code handles set() too.
The approach is generic, only requiring that your classes extend the base class (see the second example).
Note that I'm only doing it to files, but it's easy to modify the behavior to your taste.
However, this is a codec: with a little more work you can construct your class in other ways.
I assume a default constructor to instantiate it, then I update the instance's __dict__.
import json
import collections.abc

class JsonClassSerializable(json.JSONEncoder):
    REGISTERED_CLASS = {}

    def register(ctype):
        JsonClassSerializable.REGISTERED_CLASS[ctype.__name__] = ctype

    def default(self, obj):
        if isinstance(obj, collections.abc.Set):
            return dict(_set_object=list(obj))
        if isinstance(obj, JsonClassSerializable):
            jclass = {}
            jclass["name"] = type(obj).__name__
            jclass["dict"] = obj.__dict__
            return dict(_class_object=jclass)
        else:
            return json.JSONEncoder.default(self, obj)

    def json_to_class(self, dct):
        if '_set_object' in dct:
            return set(dct['_set_object'])
        elif '_class_object' in dct:
            cclass = dct['_class_object']
            cclass_name = cclass["name"]
            if cclass_name not in self.REGISTERED_CLASS:
                raise RuntimeError(
                    "Class {} not registered in JSON Parser"
                    .format(cclass["name"])
                )
            instance = self.REGISTERED_CLASS[cclass_name]()
            instance.__dict__ = cclass["dict"]
            return instance
        return dct

    def encode_(self, file):
        with open(file, 'w') as outfile:
            json.dump(
                self.__dict__, outfile,
                cls=JsonClassSerializable,
                indent=4,
                sort_keys=True
            )

    def decode_(self, file):
        try:
            with open(file, 'r') as infile:
                self.__dict__ = json.load(
                    infile,
                    object_hook=self.json_to_class
                )
        except FileNotFoundError:
            print("Persistence load failed: "
                  "'{}' does not exist".format(file)
                  )

class C(JsonClassSerializable):
    def __init__(self):
        self.mill = "s"

JsonClassSerializable.register(C)

class B(JsonClassSerializable):
    def __init__(self):
        self.a = 1230
        self.c = C()

JsonClassSerializable.register(B)

class A(JsonClassSerializable):
    def __init__(self):
        self.a = 1
        self.b = {1, 2}
        self.c = B()

JsonClassSerializable.register(A)

A().encode_("test")
b = A()
b.decode_("test")
print(b.a)
print(b.b)
print(b.c.a)
Edit
With some more research, I found a way to generalize this without needing the superclass register call, by using a metaclass:
import json
import collections.abc

REGISTERED_CLASS = {}

class MetaSerializable(type):
    def __call__(cls, *args, **kwargs):
        if cls.__name__ not in REGISTERED_CLASS:
            REGISTERED_CLASS[cls.__name__] = cls
        return super(MetaSerializable, cls).__call__(*args, **kwargs)

class JsonClassSerializable(json.JSONEncoder, metaclass=MetaSerializable):
    def default(self, obj):
        if isinstance(obj, collections.abc.Set):
            return dict(_set_object=list(obj))
        if isinstance(obj, JsonClassSerializable):
            jclass = {}
            jclass["name"] = type(obj).__name__
            jclass["dict"] = obj.__dict__
            return dict(_class_object=jclass)
        else:
            return json.JSONEncoder.default(self, obj)

    def json_to_class(self, dct):
        if '_set_object' in dct:
            return set(dct['_set_object'])
        elif '_class_object' in dct:
            cclass = dct['_class_object']
            cclass_name = cclass["name"]
            if cclass_name not in REGISTERED_CLASS:
                raise RuntimeError(
                    "Class {} not registered in JSON Parser"
                    .format(cclass["name"])
                )
            instance = REGISTERED_CLASS[cclass_name]()
            instance.__dict__ = cclass["dict"]
            return instance
        return dct

    def encode_(self, file):
        with open(file, 'w') as outfile:
            json.dump(
                self.__dict__, outfile,
                cls=JsonClassSerializable,
                indent=4,
                sort_keys=True
            )

    def decode_(self, file):
        try:
            with open(file, 'r') as infile:
                self.__dict__ = json.load(
                    infile,
                    object_hook=self.json_to_class
                )
        except FileNotFoundError:
            print("Persistence load failed: "
                  "'{}' does not exist".format(file)
                  )

class C(JsonClassSerializable):
    def __init__(self):
        self.mill = "s"

class B(JsonClassSerializable):
    def __init__(self):
        self.a = 1230
        self.c = C()

class A(JsonClassSerializable):
    def __init__(self):
        self.a = 1
        self.b = {1, 2}
        self.c = B()

A().encode_("test")
b = A()
b.decode_("test")
print(b.a)
# 1
print(b.b)
# {1, 2}
print(b.c.a)
# 1230
print(b.c.c.mill)
# s
This is not a very difficult thing. I looked at the answers above, and most of them had a performance problem with lists. This code is much faster than the above:
import json

class jsonify:
    def __init__(self, data):
        self.jsonify = data

    def __getattr__(self, attr):
        value = self.jsonify.get(attr)
        if isinstance(value, (list, dict)):
            return jsonify(value)
        return value

    def __getitem__(self, index):
        value = self.jsonify[index]
        if isinstance(value, (list, dict)):
            return jsonify(value)
        return value

    def __setitem__(self, index, value):
        self.jsonify[index] = value

    def __delattr__(self, index):
        self.jsonify.pop(index)

    def __delitem__(self, index):
        self.jsonify.pop(index)

    def __repr__(self):
        return json.dumps(self.jsonify, indent=2, default=lambda x: str(x))
Example:
response = jsonify(
{
'test': {
'test1': [{'ok': 1}]
}
}
)
response.test -> jsonify({'test1': [{'ok': 1}]})
response.test.test1 -> jsonify([{'ok': 1}])
response.test.test1[0] -> jsonify({'ok': 1})
response.test.test1[0].ok -> int(1)
