Uconvert json response to CSV file - python

i have the below json response.
{"userId":"vendor","sessionId":"3d716be43d094fefa2261b5347a6127f","channelName":"/omsevents/usession/3d716be43d094fefa2261b5347a6127f","fadList":["NERemoteConfig","ShowSWStatus","esm_modify_erp","ManageStaticLink","ForceActivateSw","GetNeTime","AlarmDebounceModify","SetMeTime","DeleteME","esm_prov_tunnel","CreateJob","GlobalAlignDownwards","esm_prov_mclag","removeSubnet","MapNodeMovement","ModifyRNEIndex","ViewSwdlJobReport","GetSwPkg","AdvancedConstruct","wdm_performanceManagementFunction","Map_FUNCTION_admin_operator","ManageNEInventory","GetNeList","OSAddressRW","ShowInformations","SwStatusSwdlJob","CreateSwdlSession","RunPerfMon_15m","cpb_operationFunction","AbortSwdlSession","Cmd_MapInv_SbnListNodes","F_DELETE_CUSTOMER_TEMPLATE","esm_prov_npa","InternalCmd_MapView_View_ZoomIn","OSAddressRO","Cmd_TM_FUNCTION_allUser","esm_modify_lag","AlignUpwards","DeleteSwdlNes","InternalCmd_MapView_Obj_Undo","ShowBkupSession","CreateDynamicLink","ViewRestoreJobReport","DisplayRI","PathBuild","ManageMonitoring","ShowRestoreNes","ManageMeTime","SysMonRead","DeleteBkpNes","DeleteSwdlSession","RemoveGeneric","ShowSwdlNes","ManageNTP","Cmd_CLOG_Admin_Header_P","ShowEqp","DeleteBkpSession","DeleteRestoreSession","Cmd_CLOG_Admin_Header","ManageNEPopulation","GetSwStatusByType","ShowIsaSWStatus","MEAddressRW","ActivateSw","esm_generate_pm_report","Cmd_Inv_LinkListConnections","Cmd_MapNav_ObjPtrToNPAview","Cmd_CLOG_Ack","MSGUI_FUNCTION_ADMIN_TOPOLOGICALVIEWS","PingNE","esm_modify_mclag","AbortJob","ManualAlignDownwards","Cmd_MapInv_ListTopLevelNtws_PHY_TOPVIEW","Cmd_MapInv_SbnListChildSbns","ShowRestoreSession","Cmd_MapAct_SbnRemoveNode","F_ADD_CUSTOMER_TEMPLATE","ManageAccess","CreateBkupSessionWizard","CreateRestoreSessionWizard","ShowJobList","MigrateME","OpenJobWizard","Cmd_TM_FUNCTION_admin_construct","SysMonAdmin","ModifyLocation","wdm_neManagementFunction","OpenGetSwByTypeWizard","ModifyME","Cmd_MapNav_OpcToMap","NavigationToELM","Cmd_MapAct_SbnDeleteSbnAndSubordinates","AbortNe","Cmd_CLOG_User_Header","MultipleNeDownload","Cmd_TM_FUNCTION_admin_construct_operator","Admin","AddSwdlNes","saveSubnet","wdm_legacyManagementFunction","GetSwdl","Cmd_MapInv_SbnCreateMap","Cmd_MapAct_SbnCommitMapModify","Cmd_MapInv_ListTopLevelNtws_PHY_Test","esm_prov_customer","physconnInNet","UploadRemoteInventory","GetSwDetail","ActiveJob","NEInventory","ManageResynchro","RunPerfMon_hour","Operations","Cmd_MapWiz_ModifySubnet","AbortBkupSession","GetNeLabel","SetNeTime","InternalCmd_MapView_View_ZoomOut","NavigationToEquipments","ManageFilter","ViewBkupJobReport","DeleteSw","wdm_inventoryFunction","ManageLocalAccess","NetworkAddressRW","esm_job_reschedule","AddBkpNes","SysMonView","esm_design_template","Cmd_MapInv_PopulateSbnsInTree_PHY","addSubnet","InternalCmd_MapView_Obj_Redo","Cmd_MapInv_ShowOpcView","NetworkAddressRO","esm_design_feature","InternalCmd_MapView_View_ZoomBestFit","AlarmDebounceView","EditSwdlJobWizard","CreatePartialSwdlJob","ModifyACD","NavigationToURLs","EMLInventory","viewMap","Cmd_TM_FUNCTION_admin","esm_prov_service","Cmd_MapAct_SbnAddNode","DeleteRestoreNes","ActiveSoftware","NavigationToHostNe","AdvancedViewer","wdm_alarmManagementFunction","AllUsers","CreateSUBNE","ShowSwdlSession","EditJob","ManageSupervision","CreateNE","esm_modify_tunnel","ModifyUserLabel","Cmd_MapWiz_SbnCreateSimple","esm_modify_npa","GetNeType","Cmd_MapAct_DataSynchronize","OpenGetSwByNameWizard","InternalCmd_MapView_View_ChangeBackground","ResynchroAll","RunPerfMon_day","MulitpleNeBackup","ModifyComments","CreatePartialSwdlJobFromNeList","F_MODIFY_CUSTOMER_TEMPLATE","ManageLinkInventory","GetFtServer","esm_modify_service","ManageMib","EditRestorJobWizard","esm_deploy_networkConfig","Cmd_EventParameter","ShowJobStatus","ShowStatuses","addNode","Cmd_CLOG_User_Header_P","NavigationToAlarms","InternalCmd_MapView_Obj_Save","EditBkupJobWizard","InternalCmd_MapView_View_SwitchLayer","CreateRestoreSession","esm_job_delete","esm_prov_erp","Cmd_MapCrt_SbnCreate","Cmd_MapInv_ListObjsForCreateSbn","GetSwStatusByName","CreateSwdlSessionWizard","DirInventory","removeNode","ManageAdmin","esm_prov_lag","DeleteJob","CreateBkupSession","wdm_provisionFunction","ManageClone","modifySubnet","CommitSw","CreateME","ShowAlarms","InternalCmd_MapView_View_NormalSize","CreateRNE","InternalCmd_MapView_View_Miniature","ShowBackupNes","Construct","Cmd_Inv_SbnListPhyconnections","NERemoteConfig","ShowSWStatus","esm_modify_erp","ManageStaticLink","ForceActivateSw","GetNeTime","AlarmDebounceModify","SetMeTime","DeleteME","esm_prov_tunnel","CreateJob","GlobalAlignDownwards","esm_prov_mclag","removeSubnet","MapNodeMovement","ModifyRNEIndex","ViewSwdlJobReport","GetSwPkg","AdvancedConstruct","wdm_performanceManagementFunction","Map_FUNCTION_admin_operator","ManageNEInventory","GetNeList","OSAddressRW","ShowInformations","SwStatusSwdlJob","CreateSwdlSession","RunPerfMon_15m","cpb_operationFunction","AbortSwdlSession","Cmd_MapInv_SbnListNodes","F_DELETE_CUSTOMER_TEMPLATE","esm_prov_npa","InternalCmd_MapView_View_ZoomIn","OSAddressRO","Cmd_TM_FUNCTION_allUser","esm_modify_lag","AlignUpwards","DeleteSwdlNes","InternalCmd_MapView_Obj_Undo","ShowBkupSession","CreateDynamicLink","ViewRestoreJobReport","DisplayRI","PathBuild","ManageMonitoring","ShowRestoreNes","ManageMeTime","SysMonRead","DeleteBkpNes","DeleteSwdlSession","RemoveGeneric","ShowSwdlNes","ManageNTP","Cmd_CLOG_Admin_Header_P","ShowEqp","DeleteBkpSession","DeleteRestoreSession","Cmd_CLOG_Admin_Header","ManageNEPopulation","GetSwStatusByType","ShowIsaSWStatus","MEAddressRW","ActivateSw","esm_generate_pm_report","Cmd_Inv_LinkListConnections","Cmd_MapNav_ObjPtrToNPAview","Cmd_CLOG_Ack","MSGUI_FUNCTION_ADMIN_TOPOLOGICALVIEWS","PingNE","esm_modify_mclag","AbortJob","ManualAlignDownwards","Cmd_MapInv_ListTopLevelNtws_PHY_TOPVIEW","Cmd_MapInv_SbnListChildSbns","ShowRestoreSession","Cmd_MapAct_SbnRemoveNode","F_ADD_CUSTOMER_TEMPLATE","ManageAccess","CreateBkupSessionWizard","CreateRestoreSessionWizard","ShowJobList","MigrateME","OpenJobWizard","Cmd_TM_FUNCTION_admin_construct","SysMonAdmin","ModifyLocation","wdm_neManagementFunction","OpenGetSwByTypeWizard","ModifyME","Cmd_MapNav_OpcToMap","NavigationToELM","Cmd_MapAct_SbnDeleteSbnAndSubordinates","AbortNe","Cmd_CLOG_User_Header","MultipleNeDownload","Cmd_TM_FUNCTION_admin_construct_operator","Admin","AddSwdlNes","saveSubnet","wdm_legacyManagementFunction","GetSwdl","Cmd_MapInv_SbnCreateMap","Cmd_MapAct_SbnCommitMapModify","Cmd_MapInv_ListTopLevelNtws_PHY_Test","esm_prov_customer","physconnInNet","UploadRemoteInventory","GetSwDetail","ActiveJob","NEInventory","ManageResynchro","RunPerfMon_hour","Operations","Cmd_MapWiz_ModifySubnet","AbortBkupSession","GetNeLabel","SetNeTime","InternalCmd_MapView_View_ZoomOut","NavigationToEquipments","ManageFilter","ViewBkupJobReport","DeleteSw","wdm_inventoryFunction","ManageLocalAccess","NetworkAddressRW","esm_job_reschedule","AddBkpNes","SysMonView","esm_design_template","Cmd_MapInv_PopulateSbnsInTree_PHY","addSubnet","InternalCmd_MapView_Obj_Redo","Cmd_MapInv_ShowOpcView","NetworkAddressRO","esm_design_feature","InternalCmd_MapView_View_ZoomBestFit","AlarmDebounceView","EditSwdlJobWizard","CreatePartialSwdlJob","ModifyACD","NavigationToURLs","EMLInventory","viewMap","Cmd_TM_FUNCTION_admin","esm_prov_service","Cmd_MapAct_SbnAddNode","DeleteRestoreNes","ActiveSoftware","NavigationToHostNe","AdvancedViewer","wdm_alarmManagementFunction","AllUsers","CreateSUBNE","ShowSwdlSession","EditJob","ManageSupervision","CreateNE","esm_modify_tunnel","ModifyUserLabel","Cmd_MapWiz_SbnCreateSimple","esm_modify_npa","GetNeType","Cmd_MapAct_DataSynchronize","OpenGetSwByNameWizard","InternalCmd_MapView_View_ChangeBackground","ResynchroAll","RunPerfMon_day","MulitpleNeBackup","ModifyComments","CreatePartialSwdlJobFromNeList","F_MODIFY_CUSTOMER_TEMPLATE","ManageLinkInventory","GetFtServer","esm_modify_service","ManageMib","EditRestorJobWizard","esm_deploy_networkConfig","Cmd_EventParameter","ShowJobStatus","ShowStatuses","addNode","Cmd_CLOG_User_Header_P","NavigationToAlarms","InternalCmd_MapView_Obj_Save","EditBkupJobWizard","InternalCmd_MapView_View_SwitchLayer","CreateRestoreSession","esm_job_delete","esm_prov_erp","Cmd_MapCrt_SbnCreate","Cmd_MapInv_ListObjsForCreateSbn","GetSwStatusByName","CreateSwdlSessionWizard","DirInventory","removeNode","ManageAdmin","esm_prov_lag","DeleteJob","CreateBkupSession","wdm_provisionFunction","ManageClone","modifySubnet","CommitSw","CreateME","ShowAlarms","InternalCmd_MapView_View_NormalSize","CreateRNE","InternalCmd_MapView_View_Miniature","ShowBackupNes","Construct","Cmd_Inv_SbnListPhyconnections"],"nadString":"Voda unknown","userNadRole":"GLOBAL"}
i need to convert it to a csv file, each row has the header and below it the value. like this
userId sessionId channelName fadList NEremote ...
vendor 3d716be.. /omsevents/usession so on so on ...
i tried this solution but not working.
def json_csv() :
file = input("Please Enter new CSV file new :")
# Opening JSON file and loading the data
# into the variable data
with open(r"D:\json.txt") as json_file:
data = json.load(json_file)
employee_data = data
# now we will open a file for writing
data_file = open(file,'w')
# create the csv writer object
csv_writer = csv.writer(data_file)
# Counter variable used for writing
# headers to the CSV file
count = 0
for emp in data:
if count == 0 :
# Writing headers of CSV file
header = emp.keys()
csv_writer.writerow(header)
count += 1
# Writing data of CSV file
csv_writer.writerow(emp.values())
data_file.close()
json_csv()
UPDATE 1 :
Thanks to Hozafya , he provided below solution:
from pandas.io.json import json_normalize
x = open("test.txt").readline()
df = json_normalize(x)
df.to_csv("file.csv")
the script is working for the first 3 items. starting from the list, all the entire data in a one column. so i need the data to be as following for example.:
excel sheet example image
UPDATE 2 :
the solution which provided from Hozayfa solved my issue. just one more last thing, when i attach the json data directly, the script is work fine like below :
x = {"userId":"vendor","sessionId":"3d716be43d094fefa2261b5347a6127f","channelName":"/omsevents/usession/3d716be43d094fefa2261b5347a6127f","fadList":["NERemoteConfig","ShowSWStatus","esm_modify_erp","ManageStaticLink","ForceActivateSw","GetNeTime","AlarmDebounceModify","SetMeTime","DeleteME","esm_prov_tunnel","CreateJob","GlobalAlignDownwards","esm_prov_mclag","removeSubnet","MapNodeMovement","ModifyRNEIndex","ViewSwdlJobReport","GetSwPkg","AdvancedConstruct","wdm_performanceManagementFunction","Map_FUNCTION_admin_operator","ManageNEInventory","GetNeList","OSAddressRW","ShowInformations","SwStatusSwdlJob","CreateSwdlSession","RunPerfMon_15m","cpb_operationFunction","AbortSwdlSession","Cmd_MapInv_SbnListNodes","F_DELETE_CUSTOMER_TEMPLATE","esm_prov_npa","InternalCmd_MapView_View_ZoomIn","OSAddressRO","Cmd_TM_FUNCTION_allUser","esm_modify_lag","AlignUpwards","DeleteSwdlNes","InternalCmd_MapView_Obj_Undo","ShowBkupSession","CreateDynamicLink","ViewRestoreJobReport","DisplayRI","PathBuild","ManageMonitoring","ShowRestoreNes","ManageMeTime","SysMonRead","DeleteBkpNes","DeleteSwdlSession","RemoveGeneric","ShowSwdlNes","ManageNTP","Cmd_CLOG_Admin_Header_P","ShowEqp","DeleteBkpSession","DeleteRestoreSession","Cmd_CLOG_Admin_Header","ManageNEPopulation","GetSwStatusByType","ShowIsaSWStatus","MEAddressRW","ActivateSw","esm_generate_pm_report","Cmd_Inv_LinkListConnections","Cmd_MapNav_ObjPtrToNPAview","Cmd_CLOG_Ack","MSGUI_FUNCTION_ADMIN_TOPOLOGICALVIEWS","PingNE","esm_modify_mclag","AbortJob","ManualAlignDownwards","Cmd_MapInv_ListTopLevelNtws_PHY_TOPVIEW","Cmd_MapInv_SbnListChildSbns","ShowRestoreSession","Cmd_MapAct_SbnRemoveNode","F_ADD_CUSTOMER_TEMPLATE","ManageAccess","CreateBkupSessionWizard","CreateRestoreSessionWizard","ShowJobList","MigrateME","OpenJobWizard","Cmd_TM_FUNCTION_admin_construct","SysMonAdmin","ModifyLocation","wdm_neManagementFunction","OpenGetSwByTypeWizard","ModifyME","Cmd_MapNav_OpcToMap","NavigationToELM","Cmd_MapAct_SbnDeleteSbnAndSubordinates","AbortNe","Cmd_CLOG_User_Header","MultipleNeDownload","Cmd_TM_FUNCTION_admin_construct_operator","Admin","AddSwdlNes","saveSubnet","wdm_legacyManagementFunction","GetSwdl","Cmd_MapInv_SbnCreateMap","Cmd_MapAct_SbnCommitMapModify","Cmd_MapInv_ListTopLevelNtws_PHY_Test","esm_prov_customer","physconnInNet","UploadRemoteInventory","GetSwDetail","ActiveJob","NEInventory","ManageResynchro","RunPerfMon_hour","Operations","Cmd_MapWiz_ModifySubnet","AbortBkupSession","GetNeLabel","SetNeTime","InternalCmd_MapView_View_ZoomOut","NavigationToEquipments","ManageFilter","ViewBkupJobReport","DeleteSw","wdm_inventoryFunction","ManageLocalAccess","NetworkAddressRW","esm_job_reschedule","AddBkpNes","SysMonView","esm_design_template","Cmd_MapInv_PopulateSbnsInTree_PHY","addSubnet","InternalCmd_MapView_Obj_Redo","Cmd_MapInv_ShowOpcView","NetworkAddressRO","esm_design_feature","InternalCmd_MapView_View_ZoomBestFit","AlarmDebounceView","EditSwdlJobWizard","CreatePartialSwdlJob","ModifyACD","NavigationToURLs","EMLInventory","viewMap","Cmd_TM_FUNCTION_admin","esm_prov_service","Cmd_MapAct_SbnAddNode","DeleteRestoreNes","ActiveSoftware","NavigationToHostNe","AdvancedViewer","wdm_alarmManagementFunction","AllUsers","CreateSUBNE","ShowSwdlSession","EditJob","ManageSupervision","CreateNE","esm_modify_tunnel","ModifyUserLabel","Cmd_MapWiz_SbnCreateSimple","esm_modify_npa","GetNeType","Cmd_MapAct_DataSynchronize","OpenGetSwByNameWizard","InternalCmd_MapView_View_ChangeBackground","ResynchroAll","RunPerfMon_day","MulitpleNeBackup","ModifyComments","CreatePartialSwdlJobFromNeList","F_MODIFY_CUSTOMER_TEMPLATE","ManageLinkInventory","GetFtServer","esm_modify_service","ManageMib","EditRestorJobWizard","esm_deploy_networkConfig","Cmd_EventParameter","ShowJobStatus","ShowStatuses","addNode","Cmd_CLOG_User_Header_P","NavigationToAlarms","InternalCmd_MapView_Obj_Save","EditBkupJobWizard","InternalCmd_MapView_View_SwitchLayer","CreateRestoreSession","esm_job_delete","esm_prov_erp","Cmd_MapCrt_SbnCreate","Cmd_MapInv_ListObjsForCreateSbn","GetSwStatusByName","CreateSwdlSessionWizard","DirInventory","removeNode","ManageAdmin","esm_prov_lag","DeleteJob","CreateBkupSession","wdm_provisionFunction","ManageClone","modifySubnet","CommitSw","CreateME","ShowAlarms","InternalCmd_MapView_View_NormalSize","CreateRNE","InternalCmd_MapView_View_Miniature","ShowBackupNes","Construct","Cmd_Inv_SbnListPhyconnections","NERemoteConfig","ShowSWStatus","esm_modify_erp","ManageStaticLink","ForceActivateSw","GetNeTime","AlarmDebounceModify","SetMeTime","DeleteME","esm_prov_tunnel","CreateJob","GlobalAlignDownwards","esm_prov_mclag","removeSubnet","MapNodeMovement","ModifyRNEIndex","ViewSwdlJobReport","GetSwPkg","AdvancedConstruct","wdm_performanceManagementFunction","Map_FUNCTION_admin_operator","ManageNEInventory","GetNeList","OSAddressRW","ShowInformations","SwStatusSwdlJob","CreateSwdlSession","RunPerfMon_15m","cpb_operationFunction","AbortSwdlSession","Cmd_MapInv_SbnListNodes","F_DELETE_CUSTOMER_TEMPLATE","esm_prov_npa","InternalCmd_MapView_View_ZoomIn","OSAddressRO","Cmd_TM_FUNCTION_allUser","esm_modify_lag","AlignUpwards","DeleteSwdlNes","InternalCmd_MapView_Obj_Undo","ShowBkupSession","CreateDynamicLink","ViewRestoreJobReport","DisplayRI","PathBuild","ManageMonitoring","ShowRestoreNes","ManageMeTime","SysMonRead","DeleteBkpNes","DeleteSwdlSession","RemoveGeneric","ShowSwdlNes","ManageNTP","Cmd_CLOG_Admin_Header_P","ShowEqp","DeleteBkpSession","DeleteRestoreSession","Cmd_CLOG_Admin_Header","ManageNEPopulation","GetSwStatusByType","ShowIsaSWStatus","MEAddressRW","ActivateSw","esm_generate_pm_report","Cmd_Inv_LinkListConnections","Cmd_MapNav_ObjPtrToNPAview","Cmd_CLOG_Ack","MSGUI_FUNCTION_ADMIN_TOPOLOGICALVIEWS","PingNE","esm_modify_mclag","AbortJob","ManualAlignDownwards","Cmd_MapInv_ListTopLevelNtws_PHY_TOPVIEW","Cmd_MapInv_SbnListChildSbns","ShowRestoreSession","Cmd_MapAct_SbnRemoveNode","F_ADD_CUSTOMER_TEMPLATE","ManageAccess","CreateBkupSessionWizard","CreateRestoreSessionWizard","ShowJobList","MigrateME","OpenJobWizard","Cmd_TM_FUNCTION_admin_construct","SysMonAdmin","ModifyLocation","wdm_neManagementFunction","OpenGetSwByTypeWizard","ModifyME","Cmd_MapNav_OpcToMap","NavigationToELM","Cmd_MapAct_SbnDeleteSbnAndSubordinates","AbortNe","Cmd_CLOG_User_Header","MultipleNeDownload","Cmd_TM_FUNCTION_admin_construct_operator","Admin","AddSwdlNes","saveSubnet","wdm_legacyManagementFunction","GetSwdl","Cmd_MapInv_SbnCreateMap","Cmd_MapAct_SbnCommitMapModify","Cmd_MapInv_ListTopLevelNtws_PHY_Test","esm_prov_customer","physconnInNet","UploadRemoteInventory","GetSwDetail","ActiveJob","NEInventory","ManageResynchro","RunPerfMon_hour","Operations","Cmd_MapWiz_ModifySubnet","AbortBkupSession","GetNeLabel","SetNeTime","InternalCmd_MapView_View_ZoomOut","NavigationToEquipments","ManageFilter","ViewBkupJobReport","DeleteSw","wdm_inventoryFunction","ManageLocalAccess","NetworkAddressRW","esm_job_reschedule","AddBkpNes","SysMonView","esm_design_template","Cmd_MapInv_PopulateSbnsInTree_PHY","addSubnet","InternalCmd_MapView_Obj_Redo","Cmd_MapInv_ShowOpcView","NetworkAddressRO","esm_design_feature","InternalCmd_MapView_View_ZoomBestFit","AlarmDebounceView","EditSwdlJobWizard","CreatePartialSwdlJob","ModifyACD","NavigationToURLs","EMLInventory","viewMap","Cmd_TM_FUNCTION_admin","esm_prov_service","Cmd_MapAct_SbnAddNode","DeleteRestoreNes","ActiveSoftware","NavigationToHostNe","AdvancedViewer","wdm_alarmManagementFunction","AllUsers","CreateSUBNE","ShowSwdlSession","EditJob","ManageSupervision","CreateNE","esm_modify_tunnel","ModifyUserLabel","Cmd_MapWiz_SbnCreateSimple","esm_modify_npa","GetNeType","Cmd_MapAct_DataSynchronize","OpenGetSwByNameWizard","InternalCmd_MapView_View_ChangeBackground","ResynchroAll","RunPerfMon_day","MulitpleNeBackup","ModifyComments","CreatePartialSwdlJobFromNeList","F_MODIFY_CUSTOMER_TEMPLATE","ManageLinkInventory","GetFtServer","esm_modify_service","ManageMib","EditRestorJobWizard","esm_deploy_networkConfig","Cmd_EventParameter","ShowJobStatus","ShowStatuses","addNode","Cmd_CLOG_User_Header_P","NavigationToAlarms","InternalCmd_MapView_Obj_Save","EditBkupJobWizard","InternalCmd_MapView_View_SwitchLayer","CreateRestoreSession","esm_job_delete","esm_prov_erp","Cmd_MapCrt_SbnCreate","Cmd_MapInv_ListObjsForCreateSbn","GetSwStatusByName","CreateSwdlSessionWizard","DirInventory","removeNode","ManageAdmin","esm_prov_lag","DeleteJob","CreateBkupSession","wdm_provisionFunction","ManageClone","modifySubnet","CommitSw","CreateME","ShowAlarms","InternalCmd_MapView_View_NormalSize","CreateRNE","InternalCmd_MapView_View_Miniature","ShowBackupNes","Construct","Cmd_Inv_SbnListPhyconnections"],"nadString":"Voda unknown","userNadRole":"GLOBAL"}
but if i did that :
x = open(r'D:\json.txt').readline()
i get this error :
D:\Python\My Projects\venv\Scripts\python.exe" C:/Users/ahmedabd/AppData/Roaming/JetBrains/PyCharmCE2020.1/scratches/scratch_9.py
C:/Users/ahmedabd/AppData/Roaming/JetBrains/PyCharmCE2020.1/scratches/scratch_9.py:4: FutureWarning: pandas.io.json.json_normalize is deprecated, use pandas.json_normalize instead
df = json_normalize(x)
Traceback (most recent call last):
File "C:/Users/ahmedabd/AppData/Roaming/JetBrains/PyCharmCE2020.1/scratches/scratch_9.py", line 4, in <module>
df = json_normalize(x)
File "D:\Python\My Projects\venv\lib\site-packages\pandas\util\_decorators.py", line 66, in wrapper
return alternative(*args, **kwargs)
File "D:\Python\My Projects\venv\lib\site-packages\pandas\io\json\_normalize.py", line 274, in _json_normalize
if any([isinstance(x, dict) for x in y.values()] for y in data):
File "D:\Python\My Projects\venv\lib\site-packages\pandas\io\json\_normalize.py", line 274, in <genexpr>
if any([isinstance(x, dict) for x in y.values()] for y in data):
AttributeError: 'str' object has no attribute 'values'
Process finished with exit code 1

you can easily do that using pandas:
from pandas.io.json import json_normalize
import json
x = open("test.txt").readline()
x = json.loads(x)
df = json_normalize(x)
df = df.explode('fadList')
df.iloc[1:, 0:3] = ""
df.iloc[1:, 4:] = ""
df.to_csv("file.csv")

Related

Parsing JSON into CSV in Python

I'm trying to parse JSON files into CSV. I've been able to get the headers of the JSON file to be output into the CSV but I can't figure out how to get the data into the file.
# Python program to convert
# JSON file to CSV
import json
import csv
# Opening JSON file and loading the data
# into the variable data
with open('test1.json') as json_file:
data = json.load(json_file)
for i in range(len(data)):
training_data = data[i]['profile']
# now we will open a file for writing
data_file = open('data_file.csv', 'w')
# create the csv writer object
csv_writer = csv.writer(data_file)
# Counter variable used for writing
# headers to the CSV file
count = 0
#type(training_data)
for profile in training_data:
if count == 0:
header = training_data.keys()
csv_writer.writerow(header)
count += 1
csv_writer.writerow(training_data.values())
data_file.close()
This is the file im trying to parse
https://textdoc.co/OuphoV5saiwWYS8g
If someone could help me out I'd be eternally grateful
would something like this work for you?
import pandas as pd
df = pd.read_json(json_file)
df.to_csv('data_file.csv')
or for more complex nested json, you may have to load as a dictionary and manipulate:
data = json.loads(json_str)
data_transformed = [i for i in data['data']]
df = pd.DataFrame(data_transformed )
df.to_csv('data_file.csv')

How can I convert JSON format text to dataframe?

I am trying to convert below JSON format text to pandas or spark data frame, but it is giving below error.
ERROR: JSONDecodeError: Expecting property name enclosed in double quotes: line 1 column 2 (char 1)
Python CODE:
# import pandas to read json file
import json
path = "sample.json"
with open(path, 'r') as myfile:
data=myfile.read()
data = data.replace('\t','')
data = data.replace('\n','')
data = data.replace(',}','}')
data = data.replace(',]',']')
obj = json.loads(data)
JSON file format
Output of data after reading .json file by using open function
How can I convert above text as a data frame?
I got, I added few lines of code
path = "sample.json"
with open(path, 'r') as myfile:
data=myfile.read()
data = data.replace('\t','')
data = data.replace('\n','')
data = data.replace(',}','}')
data = data.replace(',]',']')
data = data.replace("null", "''")
liss = []
data1 = data[1:-1]
data2 = data1.split("},")
for i in data2:
last_value = i[len(i)-1]
if last_value != "}":
new_text = i+"}"
liss.append(new_text)
else:
new_text = i
liss.append(new_text)
sample_df = pd.DataFrame({"Col1":liss})
sample_df["Col1"] = sample_df["Col1"].apply(lambda x : dict(eval(x)) )
df3 = sample_df["Col1"].apply(pd.Series )
df3
I think you can read the json and save it in a dictionary.
Once you have this dictionary you can create a spark dataframe with the following line of code
df = spark.createDataFrame(dict)

iterating new pandas dataframe to a new excel sheet in the same workbook

I'm attempting to create a web scraper that that get information of a similar table (with different values) from multiple pages (though in my case it's scrapping html stored in text documents)
The following code is a snippet from my program where a data frame is made from a previously made list along with the header. It then opens a new excel workbook and writes the data frame to the workbook in a worksheet with a specified name (which changes with each iteration)
import os
from bs4 import BeautifulSoup # imports BeautifulSoup
import pandas # imports pandas
from pandas import ExcelWriter
#creates the lists
list_of_rows = []
list_of_lists = []
list_of_headers = []
for File in range(len(os.listdir())):
if os.listdir()[File].endswith('.txt'):
file = open(os.listdir()[File])
data = file.read()
file.close()
#Converts the text file into something the program can use
soup = BeautifulSoup(data,'lxml')
tables = soup.find_all(class_="overthrow table_container") #Creates a resutset that will show all of the tables with this class name
#grabs from the header
find_header = tables[2].thead
header = find_header.find_all("th")
#grabs from the table
find_table = tables[2].tbody #creates a tag element from the desired table and highlights the tbody section
rows = find_table.find_all("tr") #creates another resultset signle out the elements with a tr ta
#for loop that creates the list for the header
header_list = []
for i in range(len(header)):
if i < 14:
pass
else:
list_of_headers.insert(i,header[i].get_text())
#for loop that creates the lists for data frame table
for j in range(len(rows)):
row_finder = rows[j]
tag_row = row_finder.find_all("td")
for i in range(len(tag_row)):
list_of_rows.insert(i,tag_row[i].get_text())
list_of_lists.append(list_of_rows)
list_of_rows = []
#creates the DataFrame
df = pandas.DataFrame(list_of_lists,columns=list_of_headers)
writer = ExcelWriter('testing document.xlsx', engine='xlsxwriter')
df.to_excel(writer,sheet_name=os.listdir()[File])
writer.save()
df.drop(df.index)
print("worked once")
else:
pass
and I get the following error code.
worked once
Traceback (most recent call last):
File "test3.py", line 47, in <module>
df = pandas.DataFrame(list_of_lists,columns=list_of_headers)
File "C:\Python3.6\lib\site-packages\pandas\core\frame.py", line 314, in __init__
arrays, columns = _to_arrays(data, columns, dtype=dtype)
File "C:\Python3.6\lib\site-packages\pandas\core\frame.py", line 5617, in _to_arrays
dtype=dtype)
File "C:\Python3.6\lib\site-packages\pandas\core\frame.py", line 5696, in _list_to_arrays
coerce_float=coerce_float)
File "C:\Python3.6\lib\site-packages\pandas\core\frame.py", line 5755, in _convert_object_array
'columns' % (len(columns), len(content)))
AssertionError: 56 columns passed, passed data had 28 columns
So between the first line printing the code "this worked once" and the fact that it does, in fact, create the excel file, my guess is the problem is it's not creating a new worksheet? THough this code could not be doing at all what I think it is.

python writing program to iterate a csv file to match field and save the result in a different data file

I am trying to write a program to do the following :
specify a field from a record in a csv file called data.
specify a field from a record in a csv file called log.
compare the position of the two in the data and in the log. If they are on the same line proceed to write the record in the file called log in a new file called result.
If the field does not match the record position in the log file proceed to move to the next record in the log file and compare it until a matching record is found and then the record is saved in the file called result.
reset the index of the log file
go to the next line in the data file and proceed to do the verification until the data file reaches the end.
This is whay i was able to do but i am stuck
import csv
def main():
datafile_csv = open('data.txt')
logfile_csv = open('log.txt')
row_data = []
row_log = []
row_log_temp = []
index_data = 1
index_log = 1
index_log_temp = index_log
counter = 0
data = ''
datareader = ''
logreader = ''
log = ''
# row = 0
logfile_len = sum (1 for lines in open('log.txt'))
with open('resultfile.csv','w') as csvfile:
out_write = csv.writer(csvfile, delimiter=',',quotechar='"')
with open('data.txt','r') as (data):
row_data = csv.reader(csvfile, delimiter=',', quotechar='"')
row_data = next(data)
print(row_data)
with open ('log.txt','r') as (log):
row_log = next(log)
print(row_log)
while counter != logfile_len:
comp_data = row_data[index_data:]
comp_log = row_log[index_log:]
comp_data = comp_data.strip('"')
comp_log = comp_log.strip('"')
print(row_data[1])
print(comp_data)
print(comp_log)
if comp_data != comp_log:
while comp_data != comp_log:
row_log = next(log)
comp_log = row_log[index_log]
out_write.writerow(row_log)
row_data = next(data)
else :
out_write.writerow(row_log)
row_data = next(data)
log.seek(0)
counter +=1
The problem i have are the following :
I cannot convert the data line in a string properly and i cannot compare correctly.
Also i need to be able to reset the pointer in the log file but seek does not seem to be working....
This is the content of the data file
"test1","test2","test3"
"1","2","3"
"4","5","6"
This is the content of the log file
"test1","test2","test3"
"4","5","6"
"1","2","3"
This is what the compiler return me
t
"test1","test2","test3"
t
test1","test2","test3"
test1","test2","test3"
1
1","2","3"
test1","test2","test3"
Traceback (most recent call last):
File "H:/test.py", line 100, in <module>
main()
File "H:/test.py", line 40, in main
comp_log = row_log[index_log]
IndexError: string index out of range
Thank you very much for the help
Regards
Danilo
Joining two files by columns (rowcount and a Specific Column[not defined]), and returning the results limited to the columns of the left/first file.
import petl
log = petl.fromcsv('log.txt').addrownumbers() # Load csv/txt file into PETL table, and add row numbers
log_columns = len(petl.header(log)) # Get the amount of columns in the log file
data = petl.fromcsv('data.txt').addrownumbers() # Load csv/txt file into PETL table, and add row numbers
joined_files = petl.join(log, data, key=['row', 'SpecificField']) # Join the tables using row and a specific field
joined_files = petl.cut(joined_files, *range(1, log_columns)) # Remove the extra columns obtained from right table
petl.tocsv(joined_files, 'resultfile.csv') # Output results to csv file
log.txt
data.txt
resultfile.csv
Also Do not forget to pip install (version used for this example):
pip install petl==1.0.11

Edit CSV file in python which reads values from another json file in python

I wanted to edit a csv file which reads the value from one of my another json file in python 2.7
my csv is : a.csv
a,b,c,d
,10,12,14
,11,14,15
my json file is a.json
{"a":20}
i want my where the column 'a' will try to match in json file. if their is a match. it should copy that value from json and paste it to my csv file and the final output of my csv file should be looks like this.
a,b,c,d
20,10,12,14
20,11,14,15
Till now I what I have tried is
fileCSV = open('a.csv', 'a')
fileJSON = open('a.json', 'r')
jsonData = fileJSON.json()
for k in range(jsonData):
for i in csvRow:
for j in jsonData.keys():
if i == j:
if self.count == 0:
self.data = jsonData[j]
self.count = 1
else:
self.data = self.data + "," + jsonData[j]
self.count = 0
fileCSV.write(self.data)
fileCSV.write("\n")
k += 1
fileCSV.close()
print("File created successfully")
I will be really thankful if anyone can help me for this.
please ignore any syntactical and indentation error.
Thank You.
Some basic string parsing will get you here.. I wrote a script which works for the simple scenario which you refer to.
check if this solves your problem:
import json
from collections import OrderedDict
def list_to_csv(listdat):
csv = ""
for val in listdat:
csv = csv+","+str(val)
return csv[1:]
lines = []
csvfile = "csvfile.csv"
outcsvfile = "outcsvfile.csv"
jsonfile = "jsonfile.json"
with open(csvfile, encoding='UTF-8') as a_file:
for line in a_file:
lines.append(line.strip())
columns = lines[0].split(",")
data = lines[1:]
whole_data = []
for row in data:
fields = row.split(",")
i = 0
rowData = OrderedDict()
for column in columns:
rowData[columns[i]] = fields[i]
i += 1
whole_data.append(rowData)
with open(jsonfile) as json_file:
jsondata = json.load(json_file)
keys = list(jsondata.keys())
for key in keys:
value = jsondata[key]
for each_row in whole_data:
each_row[key] = value
with open(outcsvfile, mode='w', encoding='UTF-8') as b_file:
b_file.write(list_to_csv(columns)+'\n')
for row_data in whole_data:
row_list = []
for ecolumn in columns:
row_list.append(row_data.get(ecolumn))
b_file.write(list_to_csv(row_list)+'\n')
CSV output is not written to the source file but to a different file.
The output file is also always truncated and written, hence the 'w' mode.
I would recommend using csv.DictReader and csv.DictWriter classes which will read into and out of python dicts. This would make it easier to modify the dict values that you read in from the JSON file.

Categories

Resources