Adding a JSON element on top of the other keys - Python

I have a JSON file that looks something like this:
{
    "SomethingA": {
        "SomethingB": {
            "SomethingC": {
                "C-property": "something",
                "C-property2": {}
            }
        }
    }
}
I want to add some new data on top of "SomethingC", as "NEWSomethingC", so it should be:
{
    "SomethingA": {
        "SomethingB": {
            "NEWSomethingC": {
                "NEWC-property": "NEWsomething",
                "NEWC-property2": {}
            },
            "SomethingC": {
                "C-property": "something",
                "C-property2": {}
            }
        }
    }
}
Okay, here is the problem: I can't add the new value on top of the existing keys. "NEWSomethingC" always ends up below "SomethingC".
The code I use for adding:
import json

with open(credantials.init['config'], 'r+') as f:
    data = json.load(f)
    try:
        old_data = data['SomethingA']['SomethingB']
        append_data = data['SomethingA']['SomethingB']
        old_data = {'NEWSomethingC': {'C-property': 'something', 'C-Property2': {}}}
    except KeyError:
        print('There is no key you want to search here')
    append_data.update(old_data)
    print(append_data)
    f.seek(0)
    json.dump(data, f, indent=4)
    f.truncate()

As already pointed out, dictionaries in Python (before 3.7) are unordered, so we have to use an OrderedDict.
As explained in this answer, we can use the object_pairs_hook argument of json.load() to load the file as OrderedDicts. Then we can add a new dictionary to our ordered structure and use move_to_end() to move the added key to the beginning.
import json
from collections import OrderedDict

with open(credantials.init['config'], 'r+') as f:
    data = json.load(f, object_pairs_hook=OrderedDict)
    new_data = {'NEWSomethingC': {
            "NEWC-property": "NEWsomething",
            "NEWC-property2": {}
        }
    }
    data["SomethingA"]["SomethingB"].update(new_data)
    # last=False moves the new key to the beginning
    data["SomethingA"]["SomethingB"].move_to_end(list(new_data.keys())[0], last=False)
    f.seek(0)
    json.dump(data, f, indent=4)
    f.truncate()
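For what it's worth, on Python 3.7 and later plain dicts already preserve insertion order, so the same effect can be had without OrderedDict by rebuilding the inner dictionary with the new key placed first. A minimal sketch of that idea (the file name is made up for illustration):

import json

# hypothetical config file containing the structure from the question
with open('config.json', 'r+') as f:
    data = json.load(f)  # plain dicts keep insertion order on Python 3.7+
    inner = data['SomethingA']['SomethingB']
    new_entry = {'NEWSomethingC': {'NEWC-property': 'NEWsomething', 'NEWC-property2': {}}}
    # rebuild the inner dict with the new key first, followed by the existing keys
    data['SomethingA']['SomethingB'] = {**new_entry, **inner}
    f.seek(0)
    json.dump(data, f, indent=4)
    f.truncate()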

So what you would want to do is read the data and search to the point where you wish to make the insertion. Then:
1. Write that data to a new file
2. Add your new insertion to the new file
3. Add the rest of the file contents to the new file
4. Delete the old file
So in order to write to the file you would want to insert the following into your code.
outfile = open('file.json', 'w')
json.dump(data, outfile)
outfile.close()
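A minimal sketch of those four steps applied to a JSON file (file names are made up for illustration; the insertion itself is done on the parsed dictionary):

import json
import os

with open('config.json') as infile:            # read the existing data
    data = json.load(infile)

# ... make the insertion into `data` here ...

with open('config_new.json', 'w') as outfile:  # write everything to a new file
    json.dump(data, outfile, indent=4)

os.replace('config_new.json', 'config.json')   # replace the old file with the new one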

Related

How to delete an element in a JSON file in Python

I am trying to delete an element in a JSON file.
Here is my JSON file, before:
{
    "names": [
        {
            "PrevStreak": false,
            "Streak": 0,
            "name": "Brody B#3719",
            "points": 0
        },
        {
            "PrevStreak": false,
            "Streak": 0,
            "name": "XY_MAGIC#1111",
            "points": 0
        }
    ]
}
after running the script:
{
    "names": [
        {
            "PrevStreak": false,
            "Streak": 0,
            "name": "Brody B#3719",
            "points": 0
        }
    ]
}
How would I do this in Python? The file is stored locally, and I am deciding which element to delete by the name in each element.
Thanks
I would load the file, remove the item, and then save it again. Example:
import json

with open("filename.json") as f:
    data = json.load(f)

data["names"].pop(1)  # or iterate through the entries to find the matching name

with open("filename.json", "w") as f:
    json.dump(data, f)
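Since the question decides which element to delete by its name, here is a sketch of the "iterate through entries to find the matching name" variant mentioned in the comment above (the name is taken from the question's example):

import json

with open("filename.json") as f:
    data = json.load(f)

# keep every entry whose name is not the one we want to delete
data["names"] = [entry for entry in data["names"] if entry["name"] != "XY_MAGIC#1111"]

with open("filename.json", "w") as f:
    json.dump(data, f, indent=4)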
You will have to read the file, convert it to a native Python data type (e.g. a dictionary), then delete the element and save the file. In your case something like this could work:
import json

filepath = 'data.json'
with open(filepath, 'r') as fp:
    data = json.load(fp)

del data['names'][1]

with open(filepath, 'w') as fp:
    json.dump(data, fp)
Try this:
# importing the module
import ast

# reading the data from the file
with open('dictionary.txt') as f:
    data = f.read()
print("Data type before reconstruction : ", type(data))

# reconstructing the data as a dictionary
# (note: ast.literal_eval expects Python literal syntax such as True/False/None,
#  so it will fail on JSON booleans written as true/false)
a_dict = ast.literal_eval(data)

filtered = {"names": [a for a in a_dict["names"] if a.get("name") != "XY_MAGIC#1111"]}
import json

with open("test.json", 'r') as f:
    data = json.loads(f.read())

names = data.get('names')
for idx, name in enumerate(names):
    if name['name'] == 'XY_MAGIC#1111':
        del names[idx]
        break
print(names)
In order to read the file, the best approach is the with statement, after which you can use Python's json library to convert the JSON string into a Python dict. Once you have the dict you can access the values and do whatever operations you require. You can then convert it back to JSON with json.dumps() and save it.
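The snippet above modifies the data in memory but never writes it back. A minimal sketch of the save step described in the paragraph, reusing the test.json name from above:

import json

with open("test.json", 'r') as f:
    data = json.loads(f.read())

# ... modify `data` here, e.g. remove an entry from data['names'] ...

# convert back to a JSON string with json.dumps() and overwrite the file
with open("test.json", 'w') as f:
    f.write(json.dumps(data, indent=4))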
This does the right thing using the Python json module, and pretty-prints the JSON back to the file afterwards:
import json

jsonpath = '/path/to/json/file.json'

with open(jsonpath) as file:
    j = json.loads(file.read())

names_to_remove = ['XY_MAGIC#1111']

# iterate over a copy of the list so that removing items does not skip elements
for element in j['names'][:]:
    if element['name'] in names_to_remove:
        j['names'].remove(element)

with open(jsonpath, 'w') as file:
    file.write(json.dumps(j, indent=4))

How do I maintain the same structure when reading from, modifying and writing back to a JSON file?

I am currently reading in a JSON file, adding a key, and writing it back out to the same file using this procedure:
import json

with open('data.json', 'r+') as f:
    data = json.load(f)
    temp_key = {"test": "val"}
    data["test"]["new_key"] = temp_key
    f.seek(0)  # <--- should reset file position to the beginning.
    json.dump(data, f, indent=2)
    f.truncate()  # remove remaining part
(adapted from here)
but the issue is that it does not maintain order. For instance, if I read in:
{
  "test": {
    "something": "something_else"
  },
  "abc": {
    "what": "huh"
  }
}
the output turns out as:
{
  "abc": {
    "what": "huh"
  },
  "test": {
    "something": "something_else",
    "new_key": {
      "test": "val"
    }
  }
}
When I would like it to be:
{
  "test": {
    "something": "something_else",
    "new_key": {
      "test": "val"
    }
  },
  "abc": {
    "what": "huh"
  }
}
I realise that JSON is a key/value based structure and the order does not matter, but is there a way of making the modification and maintaining the original structure?
As I said in a comment, you can use a collections.OrderedDict along with the optional object_pairs_hook keyword argument accepted by json.load() (in Python 2.7) to preserve the order of the original data when you rewrite the file.
This is what I meant:
#!/usr/bin/env python2
from collections import OrderedDict
import json

with open('ordered_data.json', 'r+') as f:
    data = json.load(f, object_pairs_hook=OrderedDict)
    temp_key = {"test": "val"}
    data["test"]["new_key"] = temp_key
    f.seek(0)  # Reset file position to the beginning.
    json.dump(data, f, indent=2)
    f.truncate()  # Remove remaining part.
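For what it's worth, on Python 3.7 and later the built-in dict preserves insertion order, so the same result can be had without OrderedDict at all. A minimal sketch under that assumption:

import json

with open('ordered_data.json', 'r+') as f:
    data = json.load(f)  # plain dicts preserve key order on Python 3.7+
    data["test"]["new_key"] = {"test": "val"}
    f.seek(0)  # Reset file position to the beginning.
    json.dump(data, f, indent=2)
    f.truncate()  # Remove remaining part.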

Python JSON add Key-Value pair

I'm trying to add key-value pairs to an existing JSON file. I am able to add to the parent level, but how do I add a value to the child items?
JSON file:
{
    "students": [
        {
            "name": "Hendrick"
        },
        {
            "name": "Mikey"
        }
    ]
}
Code:
import json

with open("input.json") as json_file:
    json_decoded = json.load(json_file)

json_decoded['country'] = 'UK'

with open("output.json", 'w') as json_file:
    for d in json_decoded['students']:
        json.dump(json_decoded, json_file)
Expected Results:
{
    "students": [
        {
            "name": "Hendrick",
            "country": "UK"
        },
        {
            "name": "Mikey",
            "country": "UK"
        }
    ]
}
You can do the following in order to manipulate the dict the way you want:
for s in json_decoded['students']:
    s['country'] = 'UK'
json_decoded['students'] is a list of dictionaries that you can simply iterate and update in a loop. Now you can dump the entire object:
with open("output.json", 'w') as json_file:
json.dump(json_decoded, json_file)
import json

with open("input.json", 'r') as json_file:
    json_decoded = json.load(json_file)

    for element in json_decoded['students']:
        element['country'] = 'UK'

    with open("output.json", 'w') as json_out_file:
        json.dump(json_decoded, json_out_file)
opened a JSON file, i.e. input.json
iterated through each of its elements
added a key named "country" with the value "UK" to each element
opened a new JSON file and wrote the modified JSON to it
Edit:
Moved the writing of the output file inside the first with block. The issue with the earlier implementation is that json_decoded will not be defined if opening input.json fails, and writing to the output would then raise an exception: NameError: name 'json_decoded' is not defined.
This gives [None, None] but updates the dict:
a = {'students': [{'name': 'Hendrick'}, {'name': 'Mikey'}]}
[i.update({'country': 'UK'}) for i in a['students']]
print(a)

How can I use jsonpath in Python to change an element value in a JSON object

I have the following JSON object (say car_details.json):
{
    "name": "John",
    "age": 30,
    "cars":
    [
        {
            "car_model": "Mustang",
            "car_brand": "Ford"
        },
        {
            "car_model": "cx-5",
            "car_brand": "Mazda"
        }
}
I want to change the value of car_model from cx-5 to cx-9 through Python code.
I am providing the JSON path to this element through an external file. The JSONPath expression is represented as a string, something like this:
'cars[2].car_model'
And the new value is also provided through an external file as a string:
'cx-9'
Now how do I parse through car_details.json using the JSONPath expression, change the value to the one provided as a string, and finally return the modified JSON object?
P.S. I want to do this through Python code.
This is an approach without using the json module. Load your data into a variable, then iterate over the key/value pairs under cars. If you find the key whose value is the one you are looking for, set it to the new value.
Also note: you need to close your array block, otherwise your JSON above is not valid. Generally I use an online JSON parser to check whether my data is valid (may be helpful in the future).
data = {
    "name": "John",
    "age": 30,
    "cars":
    [
        {
            "car_model": "Mustang",
            "car_brand": "Ford"
        },
        {
            "car_model": "cx-5",
            "car_brand": "Mazda"
        }
    ]
}

for cars in data['cars']:
    for key, value in cars.items():
        if key == "car_model" and value == "cx-5":
            cars[key] = "cx-9"

print(data)
If you want to load your JSON object from a file, let's assume it is called "data.json" and is in the same directory as the Python script you are going to run:
import json

with open('data.json') as json_data:
    data = json.load(json_data)

for cars in data['cars']:
    for key, value in cars.items():
        if key == "car_model" and value == "cx-5":
            cars[key] = "cx-9"

print(data)
Now if you'd like to write the content to the original file or a new file, in this case I am writing to a file called "newdata.json":
import json

with open('data.json') as json_data:
    data = json.load(json_data)
print(data)

with open('external.txt') as f:
    content = f.read()
print(content)

for cars in data['cars']:
    for key, value in cars.items():
        if key == "car_model" and value == "cx-5":
            cars[key] = content

with open('newdata.json', 'w') as outfile:
    json.dump(data, outfile)
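None of the snippets above actually consume the path string from the external file. A minimal sketch of applying a simple dotted/indexed path such as 'cars[1].car_model' by hand (the set_by_path helper and file handling are made up for illustration; note that list indices here are zero-based, and a dedicated JSONPath library such as jsonpath-ng could be used instead):

import json
import re

def set_by_path(obj, path, value):
    # split a path like 'cars[1].car_model' into dict keys and list indices
    tokens = re.findall(r'([^.\[\]]+)|\[(\d+)\]', path)
    parts = [int(idx) if idx else key for key, idx in tokens]
    for part in parts[:-1]:
        obj = obj[part]
    obj[parts[-1]] = value

# assumes car_details.json contains the (corrected, valid) JSON from the question
with open('car_details.json') as f:
    data = json.load(f)

set_by_path(data, 'cars[1].car_model', 'cx-9')  # zero-based index for the second car

with open('car_details.json', 'w') as f:
    json.dump(data, f, indent=4)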

Mass creation of JSON config files

I have code that uses a JSON file as input; each entry in the JSON file is a unique configuration for a simulation run. Below is a simplified sample of one row (expanded) of the JSON file. You can have thousands of these rows, each with different unique values.
{
    "1": {
        "description": "unique configuration 1",
        "attribute to change": 1750,
        "object type 1": {
            "object name": {
                "number": 10,
                "object attribute 1": 5
            }
        },
        "object type 2": {
            "object name": {
                "number": 5,
                "object attribute 1": 50
            }
        }
    }
}
It works well. However, whenever I wish to make a change to the configuration files I need to do it manually, which, if you have thousands of entries, can be tedious. I want to be able to load a default JSON structure (the one above) and automatically create the variations required.
I have created the code below, which is nearly there.
import json

def generate_config_file(input_data, attribute, new_value):
    for key, value in input_data.items():
        if isinstance(value, dict):
            if attribute in value:
                value[attribute] = new_value
            else:
                generate_config_file(value, attribute, new_value)
        elif key == attribute:
            input_data[attribute] = new_value

file_name = input('Enter file name: ')
if len(file_name) < 1:
    file_name = 'default structure.JSON'

id_num = 1
out_file = open('new config file.JSON', "a")

# so here create a new file with multiple rows
# where "attribute to change" is the attribute modified
# here between 5 and 95 in steps of 5
for i in range(5, 100, 5):
    with open(file_name) as data_file:
        data = json.load(data_file)
    # give the new row a unique id_num
    data[str(id_num)] = data.pop('1')
    generate_config_file(data[str(id_num)], 'attribute to change', i)
    json.dump(data, out_file, sort_keys=True, indent=4)
    id_num += 1

out_file.close()
I would like the output to look like the below (except with 19 rows). I have collapsed it to the top level, but within each row (1, 2, 3, ..., 19) the structure should match the default above, the only difference between the rows being the value associated with the attribute to be changed.
{
    "1": {},
    "2": {},
    "3": {}
}
However it produces:
{
    "1": {}
}{
    "2": {}
}{
    "3": {}
}
I've tried various things, such as converting the output to a string and trying to strip out the extra {} and replace them, and also removing the outer {} when dumping each section of output. Neither worked and I am now not sure what to try.
Any help appreciated.
What you are doing is dumping the JSON data inside the for loop, which writes a complete dictionary on every iteration and will always produce the output you are getting now. To get around this, I suggest you create a new dictionary (new_data) like this,
new_data = {}

# so here create a new file with multiple rows
# where "attribute to change" is the attribute modified
# here between 5 and 95 in steps of 5
for i in range(5, 100, 5):
    with open(file_name) as data_file:
        data = json.load(data_file)
    # give the new row a unique id_num
    data[str(id_num)] = data.pop('1')
    generate_config_file(data[str(id_num)], 'attribute to change', i)
    new_data[str(id_num)] = data[str(id_num)]
    # json.dump(data, out_file, sort_keys=True, indent=4)
    id_num += 1

json.dump(new_data, out_file, sort_keys=True, indent=4)
and dump it afterwards.
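As a small refinement of the same idea (a sketch reusing file_name, generate_config_file and out_file from the code above), the default configuration can be read once and copied with copy.deepcopy inside the loop, instead of re-reading the file from disk on every iteration:

import copy
import json

with open(file_name) as data_file:
    default_row = json.load(data_file)['1']  # the single default row

new_data = {}
id_num = 1
for i in range(5, 100, 5):
    row = copy.deepcopy(default_row)  # fresh copy of the default structure
    generate_config_file(row, 'attribute to change', i)
    new_data[str(id_num)] = row
    id_num += 1

json.dump(new_data, out_file, sort_keys=True, indent=4)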
You are talking about "rows", but you expect a dictionary structure like this (which is a valid JSON file):
{
    "1": {},
    "2": {},
    "3": {}
}
So I think it is better to forget about "rows" and always think in terms of dictionary key/value pairs, mainly because "rows" aren't part of the JSON standard; check also the validator.
Calling this inside a loop:
json.dump(data, out_file, sort_keys=True, indent=4)
having opened the output file in append mode here:
out_file = open('new config file.JSON', "a")
translates to stacking multiple JSON objects into the output file, which creates an invalid JSON file like the one you pointed out.
In order to avoid this you may write your dictionary structure to the file all at once. To do this you can change the second part of your example code like this:
# parse old config
with open(file_name, "r") as data_file:
    data = json.load(data_file)

# set the new value of "attribute to change" for the first object in the json
i = 5

# loop through the top level objects, or what you call rows
for key in sorted(data.keys()):
    # update the attribute with the recursive function on each top level
    # object with an increasing value i
    generate_config_file(data[key], 'attribute to change', i)
    i += 5
    # if you have 19 objects inside your root object in the input json
    # the value will span from 5 to 95

# save the whole modified "data" dictionary in one shot
out_file_name = 'new config file.JSON'
with open(out_file_name, "w") as out_file:
    json.dump(data, out_file, sort_keys=True, indent=4)
