I'm making an API call and I'm getting a None value as I loop through my file.
I would paste my CSV here, but it's over 100k records.
My code:
import csv
import xml.etree.ElementTree as ET
import xml.dom.minidom
import requests
import json
import pandas as pd
from pandas.io.json import json_normalize
#Storing results
api_results = []
error_results = []
none_results = []
print('Starting XML loop')
with open("C:/Users/template_v2.csv") as f:
reader = csv.DictReader(f)
# WRITING TO XML NODES
for i, row in enumerate(reader, start=1):
# INITIALIZING XML FILE
root = ET.Element('cbcalc')
icdNode = ET.SubElement(root, "icdcodes")
#handling first level ICD codes
for code in row['D'].split('~'):
ET.SubElement(icdNode, "code").text = code
#handling Client ID, state, country, age, job class and output
ET.SubElement(root, "clientid").text = row['CLAIM_NUM']
ET.SubElement(root, "state").text = row['BEN_ST']
ET.SubElement(root, "country").text = "US"
ET.SubElement(root, "age").text = row['AGE']
ET.SubElement(root, "jobclass").text = "1"
ET.SubElement(root, "fulloutput").text ="Y"
#handling the cfactors:
cfNode = ET.SubElement(root, "cfactors")
for k in ['legalrep', 'depression', 'diabetes',
'hypertension', 'obesity', 'smoker', 'subabuse']:
ET.SubElement(cfNode, k.lower()).text = str(row[k])
psNode = ET.SubElement(root, "prosummary")
psicdNode = ET.SubElement(psNode, "icd")
for code in row['P'].split('~'):
ET.SubElement(psNode, "code").text = code
psndcNode = ET.SubElement(psNode, "ndc")
for code in row['NDC_codes'].split('~'):
ET.SubElement(psNode, "code").text = code
cptNode = ET.SubElement(psNode, "cpt")
for code in row['CPT_codes'].split('~'):
ET.SubElement(cptNode, "code").text = code
ET.SubElement(psNode, "hcpcs")
doc = ET.tostring(root, method='xml', encoding="UTF-8")
response = requests.post(target_url, data=doc, headers=login_details)
response_data = json.loads(response.text)
if type(response_data)==dict and 'error' in response_data.keys():
error_results.append(response_data)
elif response_data == None or response_data == '':
none_results.append(response_data)
else:
api_results.append(response_data)
print('creating dataframe')
strategic_df = pd.json_normalize(api_results)
print("Writing out csv file")
strategic_df.to_csv(r'C:\Users\_dataframe2.csv', index = False, header=True)
Here is my error message:
Traceback (most recent call last):
File "c:\Users\Python\sc_ras_api.py", line 66, in <module>
response_data = json.loads(response.text)
File "C:\Users\AppData\Local\Programs\Python\Python38-32\lib\json\__init__.py", line 357, in loads
return _default_decoder.decode(s)
File "C:\Users\AppData\Local\Programs\Python\Python38-32\lib\json\decoder.py", line 337, in decode
obj, end = self.raw_decode(s, idx=_w(s, 0).end())
File "C:\Users\AppData\Local\Programs\Python\Python38-32\lib\json\decoder.py", line 355, in raw_decode
raise JSONDecodeError("Expecting value", s, err.value) from None
json.decoder.JSONDecodeError: Expecting value: line 1 column 1 (char 0)
What did I break and how do I fix it?
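For reference, "Expecting value: line 1 column 1 (char 0)" from json.loads usually means response.text was empty or not JSON at all (an HTML error page, a rate-limit message, and so on). Below is a minimal sketch of how the parse inside the loop could be guarded so bad responses get recorded instead of crashing the run; target_url, login_details, doc, i and the three result lists are assumed to be defined exactly as in the script above:

import json
import requests

response = requests.post(target_url, data=doc, headers=login_details)

try:
    response_data = json.loads(response.text)
except json.JSONDecodeError:
    # Body was empty or not JSON: keep the status code and a snippet of the
    # raw body so the failing rows can be inspected afterwards.
    none_results.append({'row': i, 'status': response.status_code,
                         'body': response.text[:200]})
else:
    if isinstance(response_data, dict) and 'error' in response_data:
        error_results.append(response_data)
    else:
        api_results.append(response_data)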
Related
So I was making a poll/voting system for fun using JSON, and I encountered this error when I was coding it:
Traceback (most recent call last):
File "C:\Users\ooich\PycharmProjects\16dec2022\main.py", line 48, in <module>
poll()
File "C:\Users\ooich\PycharmProjects\16dec2022\main.py", line 32, in poll
fp = json.load(f)
File "C:\Python310\lib\json\__init__.py", line 293, in load
return loads(fp.read(),
File "C:\Python310\lib\json\__init__.py", line 346, in loads
return _default_decoder.decode(s)
File "C:\Python310\lib\json\decoder.py", line 337, in decode
obj, end = self.raw_decode(s, idx=_w(s, 0).end())
File "C:\Python310\lib\json\decoder.py", line 355, in raw_decode
raise JSONDecodeError("Expecting value", s, err.value) from None
json.decoder.JSONDecodeError: Expecting value: line 1 column 1 (char 0)
I don't really understand 100% of what it means, given I haven't touched code for over 6 months.
Here's my code:
import json
def poll():
    with open("poll.json", "r") as f:
        fp = json.load(f)
        le = fp.keys()
        print(f"What would you like to vote?\n {le}")
        i = input("Please write: ")
        if fp.get(i) is None:
            print("Key not found.")
            return False
        with open("poll.json", "w") as fe:
            fep = json.load(fe)
            fep[i] += 1
            json.dump(fep, fe)
            print("Voted!")
            return True

poll()
Please do let me know if I missed something or you know how to solve it. Thanks.
I am guessing your JSON file is empty - I get the same error as you when my "poll.json" file is empty.
With this JSON in poll.json:
{"plop": 0}
and this Python:
import json
def poll():
    with open("poll.json", "r") as f:
        fp = json.load(f)
        le = fp.keys()
        print(f"What would you like to vote?\n {le}")
        i = input("Please write: ")
        if fp.get(i) is None:
            print("Key not found.")
            return False
        with open("poll.json", "w") as f:
            fp[i] += 1
            json.dump(fp, f)
            print("Voted!")
            return True

poll()
Result when the key exists:
What would you like to vote?
dict_keys(['plop'])
Please write: plop
Voted!
Result when the key is not present in the JSON:
What would you like to vote?
dict_keys(['plop'])
Please write: not
Key not found.
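A side note on the original version: with open("poll.json", "w") truncates the file the moment it is opened, so json.load(fe) on that handle can only ever see an empty file, and poll.json is left empty on disk afterwards - which is exactly what produces the "Expecting value: line 1 column 1 (char 0)" error on the next run. That is why the working version above reuses the already-loaded fp dict instead of calling json.load on the handle opened with "w". A minimal sketch of the read-then-write pattern on its own, assuming poll.json already contains the {"plop": 0} example from above:

import json

# Read first, with the file opened for reading only.
with open("poll.json", "r") as f:
    votes = json.load(f)

votes["plop"] += 1  # the key is assumed to exist, as in the example above

# Only now reopen for writing; "w" truncates, but the data is already in memory.
with open("poll.json", "w") as f:
    json.dump(votes, f)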
I'm learning how to update, write and read JSON files in Python.
When I update my JSON file with exception handling, it gives an error:
Exception in Tkinter callback
Traceback (most recent call last):
File "/Library/Frameworks/Python.framework/Versions/3.10/lib/python3.10/tkinter/__init__.py", line 1921, in __call__
return self.func(*args)
File "/Users/montekkundan/Downloads/coding/python/password-manager/main.py", line 53, in save
data = json.load(data_file)
File "/Library/Frameworks/Python.framework/Versions/3.10/lib/python3.10/json/__init__.py", line 293, in load
return loads(fp.read(),
File "/Library/Frameworks/Python.framework/Versions/3.10/lib/python3.10/json/__init__.py", line 346, in loads
return _default_decoder.decode(s)
File "/Library/Frameworks/Python.framework/Versions/3.10/lib/python3.10/json/decoder.py", line 337, in decode
obj, end = self.raw_decode(s, idx=_w(s, 0).end())
File "/Library/Frameworks/Python.framework/Versions/3.10/lib/python3.10/json/decoder.py", line 355, in raw_decode
raise JSONDecodeError("Expecting value", s, err.value) from None
json.decoder.JSONDecodeError: Expecting value: line 1 column 1 (char 0)
Process finished with exit code 0
Python function:
def save():
    website = website_entry.get()
    email = email_entry.get()
    password = password_entry.get()
    new_data = {
        website: {
            "email": email,
            "password": password,
        }
    }
    if len(website) == 0 or len(password) == 0:
        messagebox.showerror(title="Oops!", message="Please make sure you haven't left any fields empty.")
    else:
        try:
            with open("data.json", "r") as data_file:
                # Reading old data
                data = json.load(data_file)
        except FileNotFoundError:
            with open("data.json", "w") as data_file:
                json.dump(new_data, data_file, indent=4)
        else:
            # Updating old data with new data
            data.update(new_data)
            with open("data.json", "w") as data_file:
                # Saving updated data
                json.dump(data, data_file, indent=4)
        finally:
            website_entry.delete(0, END)
            password_entry.delete(0, END)
Check what you have in the file - it seems to be empty.
An empty file/string is not valid JSON, so it raises this error.
You should instead create a new empty dict for data when the file can't be found or can't be parsed.
try:
    with open("data.json", "r") as data_file:
        # Reading old data
        data = json.load(data_file)
except FileNotFoundError:
    print("Problem: FileNotFoundError")
    data = dict()
except json.JSONDecodeError:
    print("Problem: JSONDecodeError")
    data = dict()
finally:
    # --- always ---
    data.update(new_data)
    with open("data.json", "w") as data_file:
        # Saving updated data
        json.dump(data, data_file, indent=4)
    website_entry.delete(0, END)
    password_entry.delete(0, END)
This script is designed to convert a CSV to JSON for use with Auth0, and was previously working until I moved to a new machine - after multiple Python upgrades, module installs, and attempted fixes, I've reached the end of my bug-whacking prowess.
import csv, json, bcrypt, sys

csvPath = sys.argv[1]
jsonPath = sys.argv[2]

data = []

f = open( csvPath, 'r' )
reader = csv.DictReader( f, fieldnames = ( "name","email","password" ))
next(reader)

for row in reader:
    entry = {}
    sub1 = {}
    sub2 = {}
    pwd = row['password']
    password = pwd.encode('utf-8')
    salt = bcrypt.gensalt(rounds=10)
    sub2['value'] = bcrypt.hashpw(password, salt)
    entry['name'] = row['name']
    entry['email'] = row['email']
    entry['email_verified'] = True
    sub1['algorithm'] = 'bcrypt'
    sub1['hash'] = sub2
    entry['custom_password_hash'] = sub1
    data.append(entry)

out = json.dumps( data )
print ("JSON parsed!")

f = open( jsonPath , 'w')
f.write(out)
print ("JSON saved")
I was initially getting a TypeError: Unicode-objects must be encoded before hashing, which prompted me to add the .encode('utf-8') call on the password line.
That changed the error to this:
Traceback (most recent call last):
File "python-auth0.py", line 28, in <module>
out = json.dumps( data )
File "/Library/Developer/CommandLineTools/Library/Frameworks/Python3.framework/Versions/3.8/lib/python3.8/json/__init__.py", line 231, in dumps
return _default_encoder.encode(obj)
File "/Library/Developer/CommandLineTools/Library/Frameworks/Python3.framework/Versions/3.8/lib/python3.8/json/encoder.py", line 199, in encode
chunks = self.iterencode(o, _one_shot=True)
File "/Library/Developer/CommandLineTools/Library/Frameworks/Python3.framework/Versions/3.8/lib/python3.8/json/encoder.py", line 257, in iterencode
return _iterencode(o, 0)
File "/Library/Developer/CommandLineTools/Library/Frameworks/Python3.framework/Versions/3.8/lib/python3.8/json/encoder.py", line 179, in default
raise TypeError(f'Object of type {o.__class__.__name__} '
TypeError: Object of type bytes is not JSON serializable
I attempted to add a .decode() call to data on the json.dumps line, which led to AttributeError: 'list' object has no attribute 'decode'
So I'm clearly just making wild stabs in the dark here - any help to get this working again would be appreciated!
I think what you can do is decode the hash back into a Python string:
import json, bcrypt
pwd = "mypassword"
password = pwd.encode('utf-8')
salt = bcrypt.gensalt(rounds=10)
hashed_pwd = bcrypt.hashpw(password, salt)
decoded_hash = hashed_pwd.decode('utf-8')
# this will raise exception
print(json.dumps(hashed_pwd))
# this will not
print(json.dumps(decoded_hash))
in your case, try:
import csv, json, bcrypt, sys

csvPath = sys.argv[1]
jsonPath = sys.argv[2]

data = []

f = open( csvPath, 'r' )
reader = csv.DictReader( f, fieldnames = ( "name","email","password" ))
next(reader)

for row in reader:
    entry = {}
    sub1 = {}
    sub2 = {}
    pwd = row['password']
    password = pwd.encode('utf-8')
    salt = bcrypt.gensalt(rounds=10)
    sub2['value'] = bcrypt.hashpw(password, salt).decode('utf-8')  # <-- here
    entry['name'] = row['name']
    entry['email'] = row['email']
    entry['email_verified'] = True
    sub1['algorithm'] = 'bcrypt'
    sub1['hash'] = sub2
    entry['custom_password_hash'] = sub1
    data.append(entry)

out = json.dumps( data )
print ("JSON parsed!")

f = open( jsonPath , 'w')
f.write(out)
print ("JSON saved")
Hello, I am reading and saving a dict of dicts in JSON format, but when I use json.load I get this error. I still need to figure out what the problem is - what's going wrong here? Thank you!!
JSON example:
data = {'multiplayer.it': {'news1.it': (title, date), 'news2.it': (title, date)},
        'site2.it': {'news2.it': (title, date), 'news3.it': (title, date)}}
@tasks.loop(minutes=30)
async def get_gamesnews():
    sites = ['https://multiplayer.it/articoli/notizie/']
    for site in sites:
        async with aiohttp.ClientSession() as ses:
            async with ses.get(site) as response:
                if response.status == 200:
                    text = await response.text()
                    soup = BeautifulSoup(text, 'lxml')
                    if site == 'https://multiplayer.it/articoli/notizie/':
                        div_news = soup.find_all('div', class_='media-body')
                        for news in div_news:
                            titles = news.find_all('a', class_='text-decoration-none')
                            for title in titles:
                                title_news = title.text.strip()
                                link_news = 'https://multiplayer.it' + title['href']
                                with open('dictionary_news.json', 'r+') as f:
                                    dict_news = json.load(f)
                                    dictvalues_news = dict_news.get('multiplayer.it')
                                    if link_news not in dictvalues_news:
                                        date_news = datetime.date.today().strftime('%Y-%m-%d')
                                        dict_news['multiplayer.it'][link_news] = (title_news, date_news)
                                        print((title_news, link_news, date_news))
                                        channel = client.get_channel(855220263917191228)
                                        await channel.send(f'{title_news} {link_news}')
                                        json.dump(dict_news, f)
That's how I create the JSON file:
import json
data = {'multiplayer.it': {}}
with open('dictionary_news.json', 'w') as fp:
    json.dump(data, fp)
Traceback:
Unhandled exception in internal background task 'get_gamesnews'.
Traceback (most recent call last):
File "C:\Users\Thund\AppData\Local\Programs\Python\Python39\lib\site-packages\discord\ext\tasks\__init__.py", line 101, in _loop
await self.coro(*args, **kwargs)
File "C:\Users\Thund\Desktop\RepoBitbucket\DiscordBot\main.py", line 75, in get_gamesnews
dict_news = json.load(f)
File "C:\Users\Thund\AppData\Local\Programs\Python\Python39\lib\json\__init__.py", line 293, in load
return loads(fp.read(),
File "C:\Users\Thund\AppData\Local\Programs\Python\Python39\lib\json\__init__.py", line 346, in loads
return _default_decoder.decode(s)
File "C:\Users\Thund\AppData\Local\Programs\Python\Python39\lib\json\decoder.py", line 337, in decode
obj, end = self.raw_decode(s, idx=_w(s, 0).end())
File "C:\Users\Thund\AppData\Local\Programs\Python\Python39\lib\json\decoder.py", line 355, in raw_decode
raise JSONDecodeError("Expecting value", s, err.value) from None
json.decoder.JSONDecodeError: Expecting value: line 1 column 1 (char 0)
The problem is that you're running json.dump() on the same handle you've just read with json.load(): after the read, the file position is at the end, so the dump is appended after the original content instead of replacing it (at least to my knowledge, there might be workarounds).
Also remember that "r+" opens the file for reading and writing without truncating it, so that code would probably cause other problems later.
I would recommend changing the code like this:
with open('dictionary_news.json', 'r') as f:
    dict_news = json.load(f)

for title in titles:
    title_news = title.text.strip()
    link_news = 'https://multiplayer.it' + title['href']
    dictvalues_news = dict_news.get('multiplayer.it')
    if link_news not in dictvalues_news:
        date_news = datetime.date.today().strftime('%Y-%m-%d')
        dict_news['multiplayer.it'][link_news] = (title_news, date_news)
        print((title_news, link_news, date_news))
        channel = client.get_channel(855220263917191228)
        await channel.send(f'{title_news} {link_news}')

with open('dictionary_news.json', 'w') as f:
    json.dump(dict_news, f)
Since json.dump() writes out the entire JSON document, you should use mode 'w' when writing the file.
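To see why reading and then dumping on the same 'r+' handle corrupts the file, here is a small self-contained illustration (demo.json is just a throwaway example file, not part of the original code):

import json

# Start with a valid JSON document on disk.
with open('demo.json', 'w') as f:
    json.dump({'multiplayer.it': {}}, f)

with open('demo.json', 'r+') as f:
    data = json.load(f)                 # the file position is now at the end
    data['multiplayer.it']['news1.it'] = ('title', '2021-06-01')
    json.dump(data, f)                  # written after the original content

# The file now holds two JSON documents back to back, so the next
# json.load() on it fails with a JSONDecodeError.
with open('demo.json', 'r') as f:
    print(f.read())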
I've got a class for uploading my CSV files with holidays to my fullcalendar. It looks like this:
class UploadVacationsView(APIView):

    def put(self, request, *args, **kwargs):
        try:
            # check file type
            mime = MimeTypes()
            url = urllib.pathname2url(request.FILES['file']._name)
            mime_type = mime.guess_type(url)
            if 'text/csv' not in mime_type:
                raise APIException(code=400, detail='File type must be CSV')

            vacations_list = []
            csv_file = StringIO(request.data.get('file', None).read().decode('utf-8'))
            user_tz = pytz.timezone(request.user.common_settings.time_zone)
            schedule_file = ScheduleFile.objects.create(user=request.user)

            instance_hebcal = HebcalService()
            events = instance_hebcal.process_csv(csv_file, user_tz)
            ...
And in the other class, I've got a method that works with the CSV files:
class HebcalService(...):

    def process_csv(self, csv_file, user_tz):
        events = []
        csv_input = csv.reader(csv_file.readlines(), dialect=csv.excel)
        curr_row = 1
        start_date = None
        end_date = None
        start_name = None
        holiday_name = ''
        last_event = {'subject': '',
                      'date': '',
                      }
        for row in list(csv_input)[1:]:
            subject, date, time, _, _, _, _ = row[:7]
            curr_row += 1
            row = [unicode(cell.strip(), 'utf-8') for cell in row]
            if 'lighting' in subject and not start_date:
                start_date = user_tz.localize(format_datetime(date, time))
                if date == last_event['date']:
                    start_name = last_event['subject']
Everything is OK when working with English holiday names, but when I encounter Hebrew names it throws an error:
Traceback (most recent call last):
File "/home/stas/work/vacation/vmode/apps/marketplaces/base/api/views.py", line 47, in put
events = instance_hebcal.process_csv(csv_file, user_tz)
File "/home/stas/work/vacation/vmode/apps/marketplaces/base/services/hebcal.py", line 106, in process_csv
for row in list(csv_input)[1:]:
UnicodeEncodeError: 'ascii' codec can't encode characters in position 19-23: ordinal not in range(128)
I've read about converting all strings to unicode, but I don't understand where that default ASCII encoding comes from. How can I handle it and save the holiday_name string from the CSV file?
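For reference, this looks like Python 2 code (unicode(), urllib.pathname2url), and Python 2's csv module only works on byte strings; feeding it the unicode text produced by .decode('utf-8') in the view forces an implicit ASCII encode of each line, which is most likely where the UnicodeEncodeError comes from. Below is a minimal sketch of the commonly recommended workaround (re-encode to UTF-8 before csv.reader and decode each cell back to unicode afterwards); the iter_unicode_rows helper is purely illustrative and not part of the original classes:

# -*- coding: utf-8 -*-
# Python 2 sketch: csv.reader wants byte strings, so hand it UTF-8-encoded
# lines and decode the individual cells back to unicode afterwards.
import csv


def iter_unicode_rows(unicode_text):
    utf8_lines = unicode_text.encode('utf-8').splitlines()
    for row in csv.reader(utf8_lines, dialect=csv.excel):
        yield [cell.decode('utf-8').strip() for cell in row]

In process_csv that would mean building csv_input from the re-encoded lines and dropping the later unicode(cell.strip(), 'utf-8') conversion, since each cell would already be unicode.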