I am looking to take a list that contains data and append to it so that it is saved for the next time I execute the code. So far I have this, which works, but when I execute the code again it does not include the new data that was entered. Any suggestions?
def the_list():
    data = ['data1', 'data2', 'data3 ', 'data4', 'data5']
    for i in data:
        print(i)
    print(' would you like to add')
    a = input()
    if a == 'yes':
        b = input()
        data.append(b)
        print(data)

the_list()
If you mean to save data across executions, everything in memory of the "live" program gets discarded at the end of execution, so you must save data to a file (or some other persistent medium), then read it back in.
You can save to a file as simple strings, or you can use pickle to easily serialize objects like lists.
Using simple strings
Wrap your program with code to load and save data to a file:
data = []
try:
    old_data = open("save_data", "r").read().split('\n')
    data.extend(old_data)
except:
    print("Unable to load old data!")
    data = ['data1', 'data2', 'data3 ', 'data4', 'data5']

#YOUR PROGRAM HERE

try:
    with open("save_data", "w") as out:
        out.write('\n'.join(data))
except:
    print("Unable to write data!")
Of course, you will have to do some extra work to prevent duplicates in data if they must not appear.
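For example, a minimal sketch, assuming the entries are plain strings and their order should be preserved, is to deduplicate just before saving:

# drop duplicates while keeping the original order
data = list(dict.fromkeys(data))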
Using pickle
import pickle

data_fn = "save_data"
data = []
try:
    with open(data_fn, "rb") as data_in:
        old_data = pickle.load(data_in)
        data.extend(old_data)
except:
    print("Unable to load last data")
    data = ['data1', 'data2', 'data3 ', 'data4', 'data5']

#REST OF YOUR PROGRAM

try:
    with open(data_fn, "wb") as data_out:  # pickle needs a binary-mode file
        pickle.dump(data, data_out)
except:
    print("Unable to save data")
def the_list():
    try:
        with open('data.txt', 'r') as f:
            data = [line.strip() for line in f]
    except:
        data = []
    for i in data:
        print(i)
    print(' would you like to add')
    a = input()
    if a == 'yes':
        b = input()
        data.append(b)
        print(data)
    with open('data.txt', 'w') as nf:  # open for writing, not reading
        nf.write('\n'.join(data))

the_list()
but when i execute the code again it does not include the new data
that was entered
Of course, because the list is in memory. You would have to save it to a file or to a database in order for it to persist.
For example, read about Input and Output in Python.
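For instance, a minimal sketch of that idea (the file name data.txt is an assumption, not something from the question):

def load_data(path="data.txt"):
    # return the saved lines, or an empty list on the first run
    try:
        with open(path) as f:
            return [line.rstrip("\n") for line in f]
    except FileNotFoundError:
        return []

def save_data(data, path="data.txt"):
    # overwrite the file with the current contents of the list
    with open(path, "w") as f:
        f.write("\n".join(data))

data = load_data()
data.append("new entry")
save_data(data)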
Related
I am reading some .json files. Some of the files are empty, so I am using a try-except block to read them.
If a file is empty, the except branch is executed. My code snippet looks like this:
exec = True
for jsonidx, jsonpath in enumerate(jsonList):
    print("\n")
    print(">>> Reading {} of {} json file: {}".format(jsonidx, len(jsonList), os.path.basename(jsonpath)))
    try:
        with open(jsonpath, "r") as read_file:
            jsondata = json.load(read_file)
        outPath = r'{}'.format(outPath)
        dicomPath = os.path.join(outPath, 'dicoms')
        nrrdPath = os.path.join(outPath, 'nrrds')
        if exec:  # if you want to execute the move
            if not os.path.isdir(outPath):  # if the outPath directory doesn't exist
                os.mkdir(outPath)
                os.mkdir(dicomPath)
                os.mkdir(nrrdPath)
        thisJsonDcm = []
        for widx, jw in enumerate(jsondata['workItemList']):
            # print('\n')
            print('-------------------- Extracting workitem #{} --------------------'.format(widx))
            seriesName = jw['imageSeriesSet'][0]['seriesLocalFolderName']  # this is the dicom folder whole path
            thisJsonDcm.append(seriesName)
    except:
        print("Json empty")
The code ran perfectly the first couple of times, iterating the inner for loop over jsondata["workItemList"].
But when I run it again later, the inner for loop doesn't iterate and every iteration just prints the except message "Json empty".
Does a try-except block have any state or specific behavior? Do I need to delete or refresh something after the first run to be able to run it again?
All you need is to catch the json.decoder.JSONDecodeError exception instead of using a bare except; that way, other errors raised inside the try block are not silently reported as an empty JSON file.
It looks like this:
try:
    pass  # some stuff with json...
except json.decoder.JSONDecodeError:
    print("Json empty")
More about it in the json docs.
Or you can handle the error only when loading the JSON:
exec = True
for jsonidx, jsonpath in enumerate(jsonList):
    print("\n")
    print(">>> Reading {} of {} json file: {}".format(jsonidx, len(jsonList), os.path.basename(jsonpath)))
    with open(jsonpath, "r") as read_file:
        try:
            jsondata = json.load(read_file)
        except json.decoder.JSONDecodeError:
            print("Json empty")
            continue
    outPath = r'{}'.format(outPath)
    dicomPath = os.path.join(outPath, 'dicoms')
    nrrdPath = os.path.join(outPath, 'nrrds')
    if exec:  # if you want to execute the move
        if not os.path.isdir(outPath):  # if the outPath directory doesn't exist
            os.mkdir(outPath)
            os.mkdir(dicomPath)
            os.mkdir(nrrdPath)
    thisJsonDcm = []
    for widx, jw in enumerate(jsondata['workItemList']):
        # print('\n')
        print('-------------------- Extracting workitem #{} --------------------'.format(widx))
        seriesName = jw['imageSeriesSet'][0]['seriesLocalFolderName']  # this is the dicom folder whole path
        thisJsonDcm.append(seriesName)
You can read more about try/except in the Python documentation.
I have the code below:
import requests, json

r = requests.get('http://someurl.com/api')
data = r.json()
flights = []
for flight in data:
    # print(flight['identifier'])
    if flight['STDudt'].strip() < flight['ETDudt'].strip() and flight['OUTudt'].strip() == '':
        flights.append(flight)
print(flights)
How can I convert it to a CSV file with the keys of the objects as the column names?
The output I get from the code looks like this:
[{'numGMTDate': '20180515', 'STDudt': '1405', 'STAudt': '1545', 'ETDudt': '1415', 'OUTudt': '', 'OFFudt': ''},{'numGMTDate': '20180515', 'STDudt': '1530', 'STAudt': '1831', 'ETDudt': '1540', 'OUTudt': '', 'OFFudt': ''}]
One way that I know to go about this problem requires a bit of work:
first, save the Python script somewhere you can easily create a file alongside it.
This is a fairly lengthy approach and there are probably other ways, but:
outfile = open("STRING NAME.csv", "w")
will create the CSV file named "STRING NAME"; the .csv extension makes the file easier to open.
outfile.write(value)
outfile.write(",")
will separate the values with commas, and at the end of each line that you want to write,
outfile.write("\n")
will start a new line in the CSV. Finally,
outfile.close()
finishes it off.
Hope this helps. Here is an example of where I have used this before:
while h <= math.sqrt(float(d)):
    if float(d/(h*h)) != float(int(d/(h*h))):
        h = h + 1
    else:
        j = h
        h = h + 1

if j == int(1) or int(d/(j*j)) == 1:
    a = a
else:
    if j/2 == int(j/2):
        a = a
    else:
        outfile.write(str(int(b)))
        outfile.write(",")
        outfile.write(str(int(c)))
        outfile.write(",")
        outfile.write(str(int(d)))
        outfile.write(",")
        outfile.write(str(int(g)))
        outfile.write(",")
        outfile.write(str(int(j)))
        outfile.write(",")
        outfile.write(str(int(d/(j*j))))
        outfile.write(",")
        print("b=", int(b), " and c=", c)
        print(float(d), " has a square root of ", float(j), " rad ", float(d/(j*j)))
        a = a + 1
        if a == 16:
            z = input("SO....")
            if z == str("good"):
                a = 16
            else:
                a = 1
                print("Fui")
                outfile.write("Bad")
            outfile.write("\n")
            while a == 16 and p < 10:
                outfile.write("\n")
                p = p + 1
                print("new line")
            a = 1

outfile.close()
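Putting those pieces together for the flight dictionaries from the question, a rough sketch might look like this (the file name flights.csv and the column order are assumptions):

flights = [
    {'numGMTDate': '20180515', 'STDudt': '1405', 'STAudt': '1545',
     'ETDudt': '1415', 'OUTudt': '', 'OFFudt': ''},
]

outfile = open("flights.csv", "w")
keys = list(flights[0].keys())
outfile.write(",".join(keys))    # header row with the dictionary keys
outfile.write("\n")
for flight in flights:
    outfile.write(",".join(flight[k] for k in keys))
    outfile.write("\n")
outfile.close()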
I'm almost there, based on code found in the suggested duplicate. The only problem is that it leaves a blank row between each row.
import requests, json
import csv

r = requests.get('http://someurl.com/api')
data = r.json()
flights = []
for flight in data:
    # print(flight['identifier'])
    if flight['STDudt'].strip() < flight['ETDudt'].strip() and flight['OUTudt'].strip() == '':
        flights.append(flight)

keys = flights[0].keys()
with open('filteredData.csv', 'w') as output_file:
    dict_writer = csv.DictWriter(output_file, keys)
    dict_writer.writeheader()
    dict_writer.writerows(flights)
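The blank rows are the usual symptom of opening the output file without newline='' on Windows: the csv module then writes \r\r\n line endings. A minimal sketch of the fix, keeping the file name and fieldnames from the snippet above (the sample data is made up):

import csv

flights = [
    {'numGMTDate': '20180515', 'STDudt': '1405', 'ETDudt': '1415', 'OUTudt': ''},
    {'numGMTDate': '20180515', 'STDudt': '1530', 'ETDudt': '1540', 'OUTudt': ''},
]

keys = flights[0].keys()
# newline='' lets the csv module control the line endings itself
with open('filteredData.csv', 'w', newline='') as output_file:
    dict_writer = csv.DictWriter(output_file, keys)
    dict_writer.writeheader()
    dict_writer.writerows(flights)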
I'm a Python noob, to get that out of the way, and I am writing these functions using the OOP method, just as an FYI. My save_roster function works correctly: it saves the whole player_roster dictionary to my text file roster.txt, which I confirmed by looking at the file. But when I call the load_roster function it only loads the first key and value and none of the rest, and I can't figure out why. Any help on how to load the entire dictionary, or on what I am doing wrong, would be greatly appreciated.
def save_roster(player_roster):
    print("Saving data...")
    file = open("roster.txt", "wt")
    import csv
    w = csv.writer(open("roster.txt", "w"))
    for key, val in player_roster.items():
        w.writerow([key, val])
    print("Data saved.")
    file.close()

def load_roster(player_roster):
    print("Loading data...")
    import csv
    file = open("roster.txt", "rt")
    for key, val in csv.reader(file):
        player_roster[key] = eval(val)
        file.close()
        print("Data Loaded Successfully.")
        return (player_roster)
Your return (player_roster) statement is inside the for loop, which means it reads only the first line before returning. You need to move it (along with file.close() and the print) outside the loop, like so:
def load_roster(player_roster):
    print("Loading data...")
    import csv
    file = open("roster.txt", "rt")
    for key, val in csv.reader(file):
        player_roster[key] = eval(val)
    file.close()
    print("Data Loaded Successfully.")
    return (player_roster)
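For completeness, a quick usage sketch of the two functions above (the roster contents are made up for illustration):

roster = {"Alice": [10, 20], "Bob": [5]}
save_roster(roster)       # writes roster.txt

loaded = load_roster({})  # with the fix, both players come back
print(loaded)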
So here is the code. It runs in a loop but stores only the very last record:
try:
    with open('records.csv', 'a') as csv_file:
        for url in urls:
            if url.strip() != '':
                print('Processing URL:- '+url)
                data = fetch_data(url)
                csv_file.write(data)
                status = 'OK'
        csv_file.close()
except Exception,e:
    status = 'FAIL'
    print str(e)
finally:
    with open('process.log', 'a') as the_file:
        ts = time.strftime("%H:%M")
        the_file.write(ts+'\t'+url+'\t'+status+'\n')
    if driver is not None:
        driver.quit()
records.csv ends up with only the last record.
file.write does not add newlines to the file, so it is likely processing every URL but writing everything onto a single line, which can look like only one record.
Some nitpicks:
You don't need csv_file.close() since you're using a with block.
You don't need to set status = 'OK' in every iteration of the loop; just set it once above the loop.
It might be faster to process the data separately from the I/O, as #bruno suggests below:
datalines = []
for url in urls:
    if url.strip():
        datalines.append(fetch_data(url))
csv_file.write("\n".join(datalines))
I have a list of strings to write to a field in a CSV file, and I read this list back in another program. However, when I read it back, it comes out as a string and not a list. Code so far:
import csv
import codecs

def write_csv(outputcsv, rows, fieldnames):
    try:
        restval = ""
        extrasaction = "ignore"
        dialect = "excel"
        outputfile = codecs.open(outputcsv + ".csv", 'w')
        csv_writer = csv.DictWriter(outputfile, fieldnames, restval, extrasaction, dialect, quoting=csv.QUOTE_NONNUMERIC)
        csv_writer.writeheader()
        csv_writer.writerows(rows)
        outputfile.close()
    except csv.Error:
        print "outputcsverror"

def read_csv(inputcsv):
    try:
        inputfile = codecs.open(inputcsv + ".csv", 'r')
        result = list(csv.DictReader(inputfile))
        return result
    except csv.Error:
        print "inputcsverror"

if __name__ == '__main__':
    rowdict = dict()
    Allrows = list()
    rowdict["stringlist"] = ["Abc", "Def"]
    Allrows.append(rowdict)
    fieldnames = ["stringlist"]
    outputcsv = "teststrings"
    write_csv(outputcsv, Allrows, fieldnames)
    listRows = read_csv(outputcsv)
    print listRows[0]["stringlist"][0]
When I access listRows[0]["stringlist"], it returns a string, so the print above gives
[
instead of Abc.
Your ["Abc", "Def"] are in the same field under the name "stringlist". If you want the list result. Try
your_list = listRows[0]["stringlist"].split(',')
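If the field really holds the list's repr (which is what DictWriter wrote above), a sketch using ast.literal_eval recovers the original list, assuming the values are plain Python literals:

import ast

raw = listRows[0]["stringlist"]      # e.g. the string "['Abc', 'Def']"
stringlist = ast.literal_eval(raw)   # back to the list ['Abc', 'Def']
print(stringlist[0])                 # Abc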