Variable name as class name in Python JSON

I have this piece of code:

import json

class Object:
    def toJSON(self):
        return json.dumps(self, default=lambda o: o.__dict__,
                          sort_keys=False, indent=4)

languages = {"pl_PL": "1", "en_US": "2", "de_DE": "4", "fr_FR": "10", "cs_CZ": "6"}

def generateJSON():
    product = Object()
    for lang in languages.keys():
        product.translations = Object()
        product.translations.lang = Object()
        product.translations.lang.name = "xxx"
    print(product.toJSON())

if __name__ == '__main__':
    generateJSON()
Which gives me this output:

{
    "translations": {
        "lang": {
            "name": "xxx"
        }
    }
}
How can I assign the value of lang as the attribute name on my Object to get this output:
{
    "translations": {
        "pl_PL": {
            "name": "xxx"
        },
        "en_US": {
            "name": "xxx"
        },
        etc...
    }
}

To use the string names in lang each time round the loop you need to use setattr() and getattr():
import json

class Object:
    def toJSON(self):
        return json.dumps(self, default=lambda o: o.__dict__,
                          sort_keys=False, indent=4)

languages = {"pl_PL": "1", "en_US": "2", "de_DE": "4", "fr_FR": "10", "cs_CZ": "6"}

def generateJSON():
    product = Object()
    product.translations = Object()
    for lang in languages.keys():
        setattr(product.translations, lang, Object())
        getattr(product.translations, lang).name = "xxx"
    print(product.toJSON())

if __name__ == '__main__':
    generateJSON()
Output as requested.
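As a side note (a sketch, not part of the original answer): if the only goal is the JSON output, plain dicts avoid dynamic attributes entirely, since json.dumps serializes them natively:

import json

languages = {"pl_PL": "1", "en_US": "2", "de_DE": "4", "fr_FR": "10", "cs_CZ": "6"}

def generate_json():
    # Build the nested structure as dicts; no setattr/getattr needed.
    product = {"translations": {lang: {"name": "xxx"} for lang in languages}}
    return json.dumps(product, indent=4)

print(generate_json())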

Related

FastAPI mocking dependencies

I am writing unit test cases for my fastapi project and unable to mock a dynamodb call.
File_1
This file has all the methods to perform DynamoDB actions using boto3 calls.
class DynamoDBRepository:
    # Insert Item - Inserts value
    # Get Item - Returns value
File_2
This file has an "AppConfig" class which will be used as a dependency in a later file.
from file_1 import DynamoDBRepository

class AppConfig:
    def __init__(self) -> None:
        """Constructor class to instantiate dynamodb"""
        self._job_table = "Dynamo_DB_Table"
        self._region = "Table_region"
        # Create an object of the DynamoDBRepository class defined in file_1.
        self._dynamodb_repository = DynamoDBRepository(table=self._job_table, region=self._region)
File_3:
This file has the FastAPI route decorator:

from file_2 import AppConfig

@router.get(
    "/object/{object_id}"
)
def get_request(
    object_id: str,
    objects: AppConfig = Depends(AppConfig),
) -> ObjectBody:
    try:
        object_detail = objects._dynamodb_repository.get_item({"object_id": object_id})
        return object_detail["Item"]
I am trying to mock the get_item method in my test file:
File_4
This is my test file:

client = TestClient(fast_api_app)

class MockAppConfig:
    def __init__(self) -> None:
        """Constructor class to instantiate dynamodb and lambda"""
        self._job_table = "Dynamo_DB_Table"
        self._region = "Table_region"
        self._dynamodb_repository = DynamoDBRepository(table=self._job_table, region=self._region)

def test_get_request():
    fast_api_app.dependency_overrides[AppConfig] = MockAppConfig
    MockAppConfig()._dynamodb_repository.get_item = {
        "id": "1234",
        "title": "Foo",
        "description": "Hello",
    }
    response = client.get("/objects/1234")
    assert response.status_code == 200
    assert response.json() == {
        "id": "foo",
        "title": "Foo",
        "description": "Hi",
    }
The mocking of get_item is not working; it still queries the original DB and fails the credential check.
I tried the monkeypatch and fastapi_dep fixtures, and also patching, but somehow the mocking of the get_item method isn't working.
Will mocking get_item method work?
class MockDynamoDBRepository():
    def get_item(*args, **kwargs):
        return {
            "Item": {
                "id": "foo",
                "title": "Foo",
                "description": "Hi",
            }
        }

class MockAppConfig:
    def __init__(self) -> None:
        """Constructor class to instantiate dynamodb and lambda"""
        self._job_table = "Dynamo_DB_Table"
        self._region = "Table_region"
        self._dynamodb_repository = MockDynamoDBRepository(table=self._job_table, region=self._region)

def test_get_request():
    fast_api_app.dependency_overrides[AppConfig] = MockAppConfig
    response = client.get("/objects/1234")
    assert response.status_code == 200
    assert response.json() == {
        "id": "foo",
        "title": "Foo",
        "description": "Hi",
    }
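One detail worth noting (my addition, not from either answer): app.dependency_overrides maps the original dependency callable to any zero-argument replacement, so the override has to be registered on the same app object the TestClient wraps, and it is good practice to clear it afterwards so it does not leak into other tests. A minimal sketch, reusing the names from above:

def test_get_request_with_cleanup():
    # Replace AppConfig with the mock for the duration of this test only.
    fast_api_app.dependency_overrides[AppConfig] = MockAppConfig
    try:
        response = client.get("/objects/1234")
        assert response.status_code == 200
    finally:
        # Remove the override so other tests see the real dependency again.
        fast_api_app.dependency_overrides.clear()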
Building on @svfat's answer, here is how you can do the test with fastapi_dep - pick either of the test approaches: the with clause or the indirect parameter:
class MockDynamoDBRepository():
    def __init__(self, *args, **kwargs):
        pass

    def get_item(self, *args, **kwargs):
        return {
            "Item": {
                "id": "foo",
                "title": "Foo",
                "description": "Hi",
            }
        }

class MockAppConfig:
    def __init__(self) -> None:
        """Constructor class to instantiate dynamodb and lambda"""
        self._job_table = "Mock Dynamo_DB_Table"
        self._region = "Mock Table_region"
        self._dynamodb_repository = MockDynamoDBRepository(table=self._job_table,
                                                           region=self._region)

def test_get_request_deps(fastapi_dep):
    with fastapi_dep(fast_api_app).override(
        {
            AppConfig: MockAppConfig,
        }
    ):
        response = client.get("/objects/1234")
        assert response.status_code == 200
        assert response.json() == {
            "id": "foo",
            "title": "Foo",
            "description": "Hi",
        }
@pytest.mark.parametrize(
    "fastapi_dep",
    [
        (
            fast_api_app,
            {AppConfig: MockAppConfig},
        )
    ],
    indirect=True,
)
def test_get_request_deps_indirect(fastapi_dep):
    response = client.get("/objects/1234")
    assert response.status_code == 200
    assert response.json() == {
        "id": "foo",
        "title": "Foo",
        "description": "Hi",
    }
If you don't want to create all the extra classes, you can use the pure mock approach like so:
from mock.mock import MagicMock

def test_get_request_deps_mock(fastapi_dep):
    my_mock = MagicMock()
    my_mock._dynamodb_repository.get_item.return_value = {
        "Item": {
            "id": "foo",
            "title": "Foo",
            "description": "Hi",
        }
    }
    with fastapi_dep(file_3.app).override(
        {
            AppConfig: lambda: my_mock,
        }
    ):
        response = client.get("/objects/1234")
        assert response.status_code == 200
        assert response.json() == {
            "id": "foo",
            "title": "Foo",
            "description": "Hi",
        }
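As a possible follow-up (not in the original answer), the MagicMock also lets you assert that the route actually called the repository as expected, for example inside the with block after the request:

# Verify the route hit the mocked repository with the path parameter.
my_mock._dynamodb_repository.get_item.assert_called_once_with({"object_id": "1234"})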

mocking S3 download file operation for gzipped files

I am trying to mock some S3 operations and after banging my head against the stubber object, I tried doing something as follows:
def mock_make_api_call(self, operation_name, kwarg):
    if operation_name == "ListObjectsV2":
        return {
            "KeyCount": 1,
            "Contents": [
                {"Key": "sensor_1", "LastModified": "2021-11-30T12:58:14+00:00"}
            ],
        }
    elif operation_name == "GetObjectTagging":
        return {"TagSet": []}
    elif operation_name == "HeadObject":
        return {
            "ContentLength": 10,
            "ContentType": "gzip",
            "ResponseMetadata": {
                "Bucket": "1",
            },
        }
    elif operation_name == "GetObject":
        content = get_object_response()
        return {
            "ContentLength": len(content),
            "ContentType": "xml",
            "ContentEncoding": "gzip",
            "Body": content,
            "ResponseMetadata": {
                "Bucket": "1",
            },
        }
It is the S3 download_file operation which is giving me a headache. As far as I can tell, it generates the HeadObject and GetObject calls.
My content generation method is as follows:
def get_object_response():
    content = b"<some-valid-xml>"
    buf = BytesIO()
    compressed = gzip.GzipFile(fileobj=buf, mode="wb")
    compressed.write(content)
    compressed.close()
    return buf.getvalue()
The way it gets used is:
with NamedTemporaryFile() as tmp:
    s3_client.download_file(Bucket=..., Key=..., Filename=tmp.name)
However, my test fails with:
self = <s3transfer.utils.StreamReaderProgress object at 0x116a77820>
args = (262144,), kwargs = {}

    def read(self, *args, **kwargs):
>       value = self._stream.read(*args, **kwargs)
E       AttributeError: 'bytes' object has no attribute 'read'
I simply cannot figure out how to encode the response so that the generated content can be saved.
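One possible direction (a sketch, assuming the mock is patching botocore's BaseClient._make_api_call as the function name suggests): the traceback shows that s3transfer calls .read() on the Body, so the GetObject response needs a file-like stream rather than raw bytes, e.g. botocore's StreamingBody:

import io
from botocore.response import StreamingBody

def get_object_body():
    content = get_object_response()  # the gzipped bytes from the helper above
    # download_file reads the Body as a stream, so wrap the bytes in a file-like object.
    return StreamingBody(io.BytesIO(content), len(content))

# In mock_make_api_call, for "GetObject", return "Body": get_object_body();
# the ContentLength values in HeadObject/GetObject should probably match the
# length of the wrapped bytes as well.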

Serialize multiple classes to JSON

I have a class A that stores a collection of objects of type B; how can I serialize class A to JSON properly?
example:
class A:
    def __init__(self):
        self.b_collection = []
        # ...

class B:
    def __init__(self):
        # ...
        pass
and add instances of B into the collection:
a = A()
a.b_collection = [B(), B(), B()]
When I try to serialize a with json.dumps(a) I get this error: Object of type A is not JSON serializable.
Is there a way to specify how the encoder should encode that class?
something like
def __encode__(self, encoder):
    encoder.start_obj()
    encoder.add_property('name', self.value)
    encoder.add_property('age', self.age)
    encoder.end_obj()
which would return something like
{
    name: 'Tomer',
    age: '19'
}
You can extend json.JSONEncoder to define how to serialize your objects. The default method of your subclass will take a Python object as an argument. You can return a new object that is (hopefully) encodable, or pass the object on to the parent in hopes that it knows how to encode the object.
For example,
class A:
    def __init__(self):
        self.b_collection = []

class B:
    def __init__(self, name, age):
        self.name = name
        self.age = age

class ABEncoder(json.JSONEncoder):
    def default(self, obj):
        if isinstance(obj, A):
            return {'__A__': obj.b_collection}
        elif isinstance(obj, B):
            return {'__B__': obj.__dict__}
        return super().default(obj)

a = A()
a.b_collection.append(B("Tomer", "19"))
a.b_collection.append(B("Bob", "21"))
a.b_collection.append(B("Alice", "23"))

print(json.dumps(a, cls=ABEncoder, indent=4))
would produce
{
    "__A__": [
        {
            "__B__": {
                "name": "Tomer",
                "age": "19"
            }
        },
        {
            "__B__": {
                "name": "Bob",
                "age": "21"
            }
        },
        {
            "__B__": {
                "name": "Alice",
                "age": "23"
            }
        }
    ]
}
Note that you can handle A and B separately; you don't have to first encode the B objects before returning the encodable form of A; the B objects will be encoded later when the list itself is encoded.
The extra objects make it easier to write a decoder; you don't have to make it this complicated if you don't want to be able to decode the JSON to an instance of A. Instead, you can just define
class ABEncoder(json.JSONEncoder):
    def default(self, obj):
        if isinstance(obj, A):
            return obj.b_collection
        elif isinstance(obj, B):
            return obj.__dict__
        return super().default(obj)
to get
[
    {
        "name": "Tomer",
        "age": "19"
    },
    {
        "name": "Bob",
        "age": "21"
    },
    {
        "name": "Alice",
        "age": "23"
    }
]
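For completeness, here is a sketch (not part of the original answer) of a matching decoder for the tagged form, using an object_hook and assuming the same A, B, and ABEncoder classes as above:

def ab_decode(d):
    # Reverse the tags produced by ABEncoder.
    if '__B__' in d:
        return B(**d['__B__'])
    if '__A__' in d:
        a = A()
        # Inner dicts are decoded first, so this is already a list of B objects.
        a.b_collection = d['__A__']
        return a
    return d

restored = json.loads(json.dumps(a, cls=ABEncoder), object_hook=ab_decode)
print(restored.b_collection[0].name)  # Tomer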
Maybe data classes can help:
from dataclasses import asdict, dataclass
from typing import List
import json

@dataclass
class Person:
    name: str
    age: str

@dataclass
class Group:
    persons: List[Person]

data = [
    {'name': 'Tomer', 'age': '19'},
    {'name': 'Ivan', 'age': '20'}
]

persons = [Person(**person) for person in data]
group = Group(persons=persons)

assert json.dumps(asdict(group)) == '{"persons": [{"name": "Tomer", "age": "19"}, {"name": "Ivan", "age": "20"}]}'

persons = [asdict(person) for person in group.persons]
assert persons == [{"name": "Tomer", "age": "19"}, {"name": "Ivan", "age": "20"}]
assert json.dumps(persons) == '[{"name": "Tomer", "age": "19"}, {"name": "Ivan", "age": "20"}]'

How to convert a JSON object into a tree in Python?

I have the following JSON:

{
    file1: {
        path_to_file: 'file1.txt',
        children : 'file2'
    },
    file2: {
        path_to_file: 'file1.txt',
        children : 'file3,file4'
    },
    file3: {
        path_to_file: 'a/file3.txt',
        children : ''
    },
    file4: {
        path_to_file: 'b/file4.txt',
        children : ''
    }
}
I want to construct a tree from this Json.
Each node should have: name (file1 etc.), path_to_file (which is just a data field), and the children converted into "pointers" to the next nodes.
I have the following code:
class Node(object):
    def __init__(self, name, path_to_file=None):
        self.name = name
        self.path_to_file = path_to_file
        self.children = []

    def add_child(self, obj):
        self.children.append(obj)
This can be used as:
>>> n = Node(5)
>>> p = Node(6)
>>> q = Node(7)
>>> n.add_child(p)
>>> n.add_child(q)
Now, I want to use properties from my JSON instead of the numbers above. So I have this code:

jsonObject = json.load(json_string)
for key in jsonObject:
    value = jsonObject[key]
    print("The key and value are ({}) = ({})".format(key, value))
This gives me:
json.decoder.JSONDecodeError: Expecting property name enclosed in double quotes: line 2 column 4 (char 7)
How can I extract the properties in the JSON object in order to construct the call to the Node class?
Your JSON is not in standard JSON format: property names and string values must be enclosed in double quotes, not single quotes (or no quotes at all).
import json

json_string = """
{
    "file1": {
        "path_to_file": "file1.txt",
        "children": "file2"
    },
    "file2": {
        "path_to_file": "file1.txt",
        "children": "file3,file4"
    },
    "file3": {
        "path_to_file": "a/file3.txt",
        "children": ""
    },
    "file4": {
        "path_to_file": "b/file4.txt",
        "children": ""
    }
}
"""

jsonObject = json.loads(json_string)
for key in jsonObject:
    value = jsonObject[key]
    print("The key and value are ({}) = ({})".format(key, value))
output as:
The key and value are (file1) = ({'path_to_file': 'file1.txt', 'children': 'file2'})
The key and value are (file2) = ({'path_to_file': 'file1.txt', 'children': 'file3,file4'})
The key and value are (file3) = ({'path_to_file': 'a/file3.txt', 'children': ''})
The key and value are (file4) = ({'path_to_file': 'b/file4.txt', 'children': ''})
Updated answer
For better display, I added the dump method.
import json

json_string = """
{
    "file1": {
        "path_to_file": "file1.txt",
        "children": "file2"
    },
    "file2": {
        "path_to_file": "file1.txt",
        "children": "file3,file4"
    },
    "file3": {
        "path_to_file": "a/file3.txt",
        "children": ""
    },
    "file4": {
        "path_to_file": "b/file4.txt",
        "children": ""
    }
}
"""

class Node(object):
    def __init__(self, name, path_to_file=None):
        self.name = name
        self.path_to_file = path_to_file
        self.children = []

    def add_child(self, obj):
        self.children.append(obj)

    def dump(self, indent=0):
        """dump tree to string"""
        tab = ' '*(indent-1) + ' |- ' if indent > 0 else ''
        print('%s%s' % (tab, self.name))
        for obj in self.children:
            obj.dump(indent + 1)

name2info = json.loads(json_string)

def get_tree(name):
    info = name2info[name]
    root = Node(name, info['path_to_file'])
    for child in info['children'].split(","):
        if child:
            root.add_child(get_tree(child))
    return root

root = get_tree('file1')

# get children info
print(root.name, root.children[0].name, root.children[0].children[1].path_to_file)

root.dump()
It outputs:

file1 file2 b/file4.txt
file1
 |- file2
  |- file3
  |- file4

Search JSON file from python

I am trying to search for a variable in a JSON file.
Current JSON file (devices.json) is:
{
    "NYC": {
        "Floor1": [
            {
                "name": "server1",
                "ip": "1.1.1.1"
            },
            {
                "name": "server2",
                "ip": "1.1.1.2"
            }
        ],
        "Floor2": [
            ...
        ],
        "sitenum": 1
    },
    "Boston": {
        ...
        "sitenum": 2
    }
    ...
}
Two questions:
1. I am new to JSON, so is this formatted correctly for lists/dictionaries?
2. I'd like to perform a Python query to display the Floor(s) {name, ip} for sitenum (x).
Current Python file is:
import json

with open('devices.json') as jsonfile:
    data = json.load(jsonfile)
Thanks!
This Python script will return the floor details as a list of JSON values for a given sitenum and floor.
import json

def get_floor_details(sitenum, floor, data):
    for k, v in data.items():
        if v['sitenum'] == sitenum:
            return v[floor]

with open('sample.json') as json_file:
    data = json.load(json_file)

sitenum = 1
floor = 'Floor1'
floor_details = get_floor_details(sitenum, floor, data)
print(floor_details)
Output:
[{'name': 'server1', 'ip': '1.1.1.1'}, {'name': 'server2', 'ip': '1.1.1.2'}]
def findFloor(site_num_val, data):
    return_val = {}
    for each_loc in data:
        if site_num_val == data[each_loc]["sitenum"]:
            return_val = data[each_loc].copy()
            del return_val["sitenum"]
            break
    else:
        # for-else: runs only if no matching sitenum was found
        print("sitenum not found")
    return return_val
I hope this solves your problem in trying to get the information.
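For example (a hypothetical usage, with data loaded from devices.json as in the question):

floors = findFloor(1, data)
print(floors)
# {'Floor1': [{'name': 'server1', 'ip': '1.1.1.1'}, {'name': 'server2', 'ip': '1.1.1.2'}], 'Floor2': [...]}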
