Python Matplotlib: Plot Hourly Data Saved as defaultdict Lines in a File
I have a file that contains a few thousand lines of data that looks like this:
defaultdict(<type 'int'>, {'2012021310': 76422, '2012021311': 94188, '2012021323': 139363, '2012021312': 111817, '2012021307': 71316, '2012021306': 82418, '2012021305': 65217, '2012021313': 127002, '2012021314': 141099, '2012021315': 147830, '2012021316': 136330, '2012021317': 122252, '2012021318': 118619, '2012021319': 115763, '2012021322': 137658, '2012021321': 130022, '2012021320': 121393, '2012021309': 69406, '2012021308': 66833})
defaultdict(<type 'int'>, {'2012021408': 139745, '2012021409': 143658, '2012021414': 288286, '2012021418': 31216, '2012021416': 268214, '2012021400': 207878, '2012021401': 269296, '2012021402': 270258, '2012021403': 275882, '2012021404': 232521, '2012021405': 195062, '2012021406': 166669, '2012021407': 142855, '2012021417': 245582, '2012021411': 194360, '2012021413': 262078, '2012021410': 158954, '2012021415': 296457, '2012021412': 237083})
defaultdict(<type 'int'>, {'2012021523': 676350, '2012021522': 670147, '2012021521': 650984, '2012021520': 617401, '2012021501': 170448, '2012021503': 246600, '2012021502': 250013, '2012021505': 363866, '2012021504': 300809, '2012021507': 333080, '2012021506': 370454, '2012021509': 343671, '2012021508': 330452, '2012021512': 549736, '2012021513': 622690, '2012021510': 387871, '2012021511': 456171, '2012021516': 647559, '2012021517': 600969, '2012021514': 692257, '2012021515': 706377, '2012021518': 579669, '2012021519': 587969})
defaultdict(<type 'int'>, {'2012021608': 333986, '2012021609': 344126, '2012021602': 651692, '2012021603': 676458, '2012021600': 664484, '2012021601': 686408, '2012021620': 932692, '2012021621': 1065501, '2012021604': 589033, '2012021605': 465191, '2012021623': 1316907, '2012021606': 389669, '2012021607': 342613, '2012021619': 828190, '2012021618': 617836, '2012021622': 1111334, '2012021611': 467532, '2012021610': 387220, '2012021613': 634585, '2012021612': 560227, '2012021615': 718498, '2012021614': 704008, '2012021617': 606396, '2012021616': 665030})
The keys are date/hour stamps: '2012021310', for example, means "02-13-2012 10am". The values are simply counts of an event that occurred during that hour.
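To be explicit, each key is just YYYYMMDDHH, so it can be converted to a real timestamp with strptime (shown only to illustrate the format, not part of my plotting code):

from datetime import datetime

key = '2012021310'
ts = datetime.strptime(key, '%Y%m%d%H')   # datetime(2012, 2, 13, 10, 0)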
I have a decent understanding of plotting with matplotlib, but I'm not sure how to get at this data. Is there an easy way to read in each of these lines and use them in a plot, similar to the way I would use csv2rec to read in two columns? Or is there a better way?
edit:
This replaces the defaultdict(<type 'int'>, prefix with "data =":
import fileinput

filein = 'list.txt'
for line in fileinput.input([filein]):
    line = line.replace("defaultdict(<type 'int'>,", "data =")
    line = line.replace(")", " ")
    print line
this results in:
data = {'2012021310': 76422, '2012021311': 94188, '2012021323': 139363, '2012021312': 111817, '2012021307': 71316, '2012021306': 82418, '2012021305': 65217, '2012021313': 127002, '2012021314': 141099, '2012021315': 147830, '2012021316': 136330, '2012021317': 122252, '2012021318': 118619, '2012021319': 115763, '2012021322': 137658, '2012021321': 130022, '2012021320': 121393, '2012021309': 69406, '2012021308': 66833}
data = {'2012021408': 139745, '2012021409': 143658, '2012021414': 288286, '2012021418': 31216, '2012021416': 268214, '2012021400': 207878, '2012021401': 269296, '2012021402': 270258, '2012021403': 275882, '2012021404': 232521, '2012021405': 195062, '2012021406': 166669, '2012021407': 142855, '2012021417': 245582, '2012021411': 194360, '2012021413': 262078, '2012021410': 158954, '2012021415': 296457, '2012021412': 237083}
data = {'2012021523': 676350, '2012021522': 670147, '2012021521': 650984, '2012021520': 617401, '2012021501': 170448, '2012021503': 246600, '2012021502': 250013, '2012021505': 363866, '2012021504': 300809, '2012021507': 333080, '2012021506': 370454, '2012021509': 343671, '2012021508': 330452, '2012021512': 549736, '2012021513': 622690, '2012021510': 387871, '2012021511': 456171, '2012021516': 647559, '2012021517': 600969, '2012021514': 692257, '2012021515': 706377, '2012021518': 579669, '2012021519': 587969}
data = {'2012021608': 333986, '2012021609': 344126, '2012021602': 651692, '2012021603': 676458, '2012021600': 664484, '2012021601': 686408, '2012021620': 932692, '2012021621': 1065501, '2012021604': 589033, '2012021605': 465191, '2012021623': 1316907, '2012021606': 389669, '2012021607': 342613, '2012021619': 828190, '2012021618': 617836, '2012021622': 1111334, '2012021611': 467532, '2012021610': 387220, '2012021613': 634585, '2012021612': 560227, '2012021615': 718498, '2012021614': 704008, '2012021617': 606396, '2012021616': 665030}
I'm still not sure where to go from here.
edit:
I've gotten much closer with a hybrid of everyone's suggestions:
import fileinput
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
from pylab import *
from matplotlib.mlab import csv2rec

filein = 'list.txt'
output_name = "image.png"
dicts = []
line = ""
for line in fileinput.input([filein]):
    line = line.replace("defaultdict(<type 'int'>,", "data = ")
    line = line.replace(")", "")
    line = line.strip()
    exec(line)
    objects = data.items()
    print objects
which returns:
[('2012021307', 71316), ('2012021306', 82418), ('2012021305', 65217), ('2012021309', 69406), ('2012021310', 76422), ('2012021311', 94188), ('2012021312', 111817), ('2012021313', 127002), ('2012021314', 141099), ('2012021315', 147830), ('2012021316', 136330), ('2012021317', 122252), ('2012021318', 118619), ('2012021319', 115763), ('2012021308', 66833), ('2012021321', 130022), ('2012021320', 121393), ('2012021323', 139363), ('2012021322', 137658)]
[('2012021408', 139745), ('2012021409', 143658), ('2012021403', 275882), ('2012021418', 31216), ('2012021400', 207878), ('2012021416', 268214), ('2012021402', 270258), ('2012021414', 288286), ('2012021404', 232521), ('2012021405', 195062), ('2012021406', 166669), ('2012021407', 142855), ('2012021417', 245582), ('2012021411', 194360), ('2012021401', 269296), ('2012021413', 262078), ('2012021410', 158954), ('2012021415', 296457), ('2012021412', 237083)]
[('2012021523', 676350), ('2012021522', 670147), ('2012021521', 650984), ('2012021520', 617401), ('2012021501', 170448), ('2012021503', 246600), ('2012021502', 250013), ('2012021505', 363866), ('2012021504', 300809), ('2012021507', 333080), ('2012021506', 370454), ('2012021509', 343671), ('2012021508', 330452), ('2012021512', 549736), ('2012021513', 622690), ('2012021510', 387871), ('2012021511', 456171), ('2012021516', 647559), ('2012021517', 600969), ('2012021514', 692257), ('2012021515', 706377), ('2012021518', 579669), ('2012021519', 587969)]
[('2012021605', 465191), ('2012021608', 333986), ('2012021609', 344126), ('2012021602', 651692), ('2012021603', 676458), ('2012021600', 664484), ('2012021601', 686408), ('2012021606', 389669), ('2012021607', 342613), ('2012021622', 1111334), ('2012021623', 1316907), ('2012021620', 932692), ('2012021621', 1065501), ('2012021619', 828190), ('2012021618', 617836), ('2012021604', 589033), ('2012021611', 467532), ('2012021610', 387220), ('2012021613', 634585), ('2012021612', 560227), ('2012021615', 718498), ('2012021614', 704008), ('2012021617', 606396), ('2012021616', 665030)]
So I'm definitely getting closer.
edit:
And further yet I have:
import fileinput
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
from pylab import *
from matplotlib.mlab import csv2rec

filein = 'list.txt'
output_name = "image.png"
dicts = []
times = []
values = []
line = ""
for line in fileinput.input([filein]):
    line = line.replace("defaultdict(<type 'int'>,", "data = ")
    line = line.replace(")", "")
    line = line.strip()
    exec(line)
    for k in sorted(data.iterkeys()):
        times.append(k)
        values.append(data[k])

fig = plt.figure()
ax = fig.add_subplot(111)
ax.plot(times, values)
hours = mdates.HourLocator()
fmt = mdates.DateFormatter('%Y - %M:%D:%H')
ax.xaxis.set_major_locator(hours)
ax.xaxis.set_major_formatter(fmt)
fig.autofmt_xdate(bottom=0.2, rotation=90, ha='left')
ax.grid()
plt.savefig(output_name)
This gets me to the point where I've at least generated a plot. The issue now is that the x-axis doesn't generate labels properly from the input data. I believe this is due to the somewhat unconventional timestamp format. Any last suggestions?
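I suspect the problem is that times holds plain strings, so the mdates locator and formatter have nothing date-like to work with; converting the keys to datetime objects first should help. A minimal sketch of that conversion, assuming the YYYYMMDDHH key format above and the data/ax variables from the script (the formatter string is only an example):

from datetime import datetime

for k in sorted(data.iterkeys()):
    times.append(datetime.strptime(k, '%Y%m%d%H'))   # real datetimes instead of strings
    values.append(data[k])

ax.plot(times, values)
ax.xaxis.set_major_formatter(mdates.DateFormatter('%m-%d %Hh'))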
I would find a way to get that output into something more useful if at all possible. If not, you could replace this
defaultdict(<type 'int'>,
with
data =
while looping over the lines, using the string replace() method, and then use exec() to execute the dict literal that is now on that line.
Try:
dicts = []
with open(filename,'r') as f:
    for l in f:
        exec("dicts.append(" + l[l.index('{'):l.index('}')+1] + ")")
which should read in all of the dicts and store them in the list dicts. You could then construct a list of key and value pairs with:
tsvals = []
for d in dicts:
    tsvals.append(d.items())
and if you wanted them sorted, you could of course sort each list once you were done constructing it. I would probably save the key/value pairs once I had them in that form; a CSV file would probably suffice for that purpose.
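A minimal sketch of dumping those pairs to a CSV file (the output name pairs.csv is just a placeholder):

import csv

with open('pairs.csv', 'wb') as out:        # 'wb' for the csv module on Python 2
    writer = csv.writer(out)
    for pairs in tsvals:
        for ts, count in sorted(pairs):
            writer.writerow([ts, count])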
The exec command should only be used on trusted data, though. It is very insecure, but if you are the one generating the data, or if you trust the person generating the data to give you only good data, then I think that is the best way to do this.
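If you want to avoid exec altogether, a safer alternative (not part of the original suggestion, but standard library) is ast.literal_eval, which only evaluates Python literals such as the dict between the braces:

import ast

dicts = []
with open(filename, 'r') as f:
    for l in f:
        dicts.append(ast.literal_eval(l[l.index('{'):l.index('}') + 1]))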
Related
How to get rid of the rest of the text after getting the results I want?
import urllib.request import json from collections import Counter def count_coauthors(author_id): coauthors_dict = {} url_str = ('https://api.semanticscholar.org/graph/v1/author/47490276?fields=name,papers.authors') respons = urllib.request.urlopen(url_str) text = respons.read().decode() for line in respons: print(line.decode().rstip()) data = json.loads(text) print(type(data)) print(list(data.keys())) print(data["name"]) print(data["authorId"]) name = [] for lines in data["papers"]: for authors in lines["authors"]: name.append(authors.get("name")) print(name) count = dict() names = name for i in names: if i not in count: count[i] = 1 else: count[i] += 1 print(count) c = Counter(count) top = c.most_common(10) print(top) return coauthors_dict author_id = '47490276' cc = count_coauthors(author_id) top_coauthors = sorted(cc.items(), key=lambda item: item[1], reverse=True) for co_author in top_coauthors[:10]: print(co_author) This is how my code looks this far, there are no error. I need to get rid of the rest of the text when I run it, so it should look like this: ('Diego Calvanese', 47) ('D. Lanti', 28) ('Martín Rezk', 21) ('Elem Güzel Kalayci', 18) ('B. Cogrel', 17) ('E. Botoeva', 16) ('E. Kharlamov', 16) ('I. Horrocks', 12) ('S. Brandt', 11) ('V. Ryzhikov', 11) I have tried using rstrip and split on my 'c' variable but it doesn't work. Im only allowed importing what I already have imported and must use the link which is included. Tips on simplifying or bettering the code is also appreciated! ("Extend the program below so that it prints the names of the top-10 coauthors together with the numbers of the coauthored publications")
From what I understand you are not quite sure where your successful output originates from. It is not the 5 lines at the end. Your result is printed by the print(top) on line 39. This top variable is what you want to return from the function, as the coauthors_dict you are currently returning never actually gets any data written to it. You will also have to slightly adjust your sorted(...) as you now have a list and not a dictionary, but you should then get the correct result.
If I understand correctly you are wanting this function to return a count of each distinct co-author (excluding the author), which it seems like you already have in your count variable, which you don't return. The variable you DO return is empty. Instead consider: import urllib.request import json from collections import Counter def count_coauthors(author_id): url_str = (f'https://api.semanticscholar.org/graph/v1/author/{author_id}?fields=name,papers.authors') response = urllib.request.urlopen(url_str) text = response.read().decode() data = json.loads(text) names = [a.get("name") for l in data["papers"] for a in l["authors"] if a['authorId'] != author_id] #The statement above can be written long-hand like: #names=[] #for l in data["papers"]: # for a in l["authors"]: # if a['authorId'] != author_id: # names.append(a.get("name")) return list(Counter(names).items()) author_id = '47490276' cc = count_coauthors(author_id) top_coauthors = sorted(cc, key=lambda item: item[1], reverse=True) for co_author in top_coauthors[:10]: print(co_author) ('Diego Calvanese', 47) ('D. Lanti', 28) ('Martín Rezk', 21) ('Elem Güzel Kalayci', 18) ('B. Cogrel', 17) ('E. Botoeva', 16) ('E. Kharlamov', 16) ('I. Horrocks', 12) ('S. Brandt', 11) ('V. Ryzhikov', 11) You might also consider moving the top N logic into the function as an optional paramter: import urllib.request import json from collections import Counter def count_coauthors(author_id, top=0): url_str = (f'https://api.semanticscholar.org/graph/v1/author/{author_id}?fields=name,papers.authors') response = urllib.request.urlopen(url_str) text = response.read().decode() data = json.loads(text) names = [a.get("name") for l in data["papers"] for a in l["authors"] if a['authorId'] != author_id] name_count = list(Counter(names).items()) top = top if top!=0 else len(name_count) return sorted(name_count, key=lambda x: x[1], reverse=True)[:top] author_id = '47490276' for auth in count_coauthors(author_id, top=10): print(auth)
How to build Numpy array from a String written in file in Python
I had to write an array of 1024 dimensional vectors in files, where it is a string. [[[-1.94079906e-03 -2.31655642e-01 2.79239640e-02 1.65049836e-01 -2.41711065e-02 4.76662189e-01 1.43999630e-03 2.74327975e-02 1.42574485e-03 -5.95342405e-02 7.44391233e-02 -2.52876729e-01 -1.00990515e-02 -3.12404502e-02 -3.15531623e-03 -1.05645694e-02 5.35479194e-05 -2.71148677e-03 1.39582576e-02 1.48318922e-02 2.73350552e-02 1.10329792e-03 1.87656947e-03 7.87315845e-01 1.48554507e-03 2.52872050e-01 9.04035103e-03 2.23065093e-02 -7.66102970e-02 -1.07561275e-02 -8.81098136e-02 -4.76480462e-03 -4.59164307e-02 9.71463993e-02 5.88618889e-02 1.50974870e-01 5.95004633e-02 -3.18388380e-02 -7.35895988e-03 -1.16585912e-02 -1.20033743e-02 2.28719711e-02 1.69246215e-02 4.68009058e-03 4.62086290e-01 3.05133080e-03 8.51295609e-03 5.41299023e-03 -3.86441469e-01 5.54564409e-03 -6.44444255e-04 -2.51195673e-03 -2.19698269e-02 -5.54086491e-02 -9.91180446e-03 -3.82097751e-01 -6.40135631e-02 5.74917234e-02 -6.93778619e-02 -4.82289121e-02 -8.80530046e-04 -1.46750783e-04 -7.59039745e-02 -4.49791476e-02 2.48764846e-02 3.42623852e-02 -1.47593305e-01 -1.06661461e-01 -4.47107572e-03 -1.18165351e-01 -5.77774234e-02 -1.18526910e-02 5.31119347e-01 8.07319826e-04 -1.33625632e-02 5.63439801e-02 1.42297670e-01 8.73860158e-03 2.81544123e-02 4.02801447e-02 -6.31053932e-04 -2.51916498e-02 7.66465114e-03 -1.44701991e-02 -2.28392556e-01 -1.52886540e-01 -2.64241938e-02 2.32990598e-03 -4.51011993e-02 -1.23286285e-01 -2.73374505e-02 4.57140617e-02 -3.10144224e-03 1.67872999e-02 2.82388210e-01 2.89291646e-02 1.07920721e-01 -2.94156536e-03 8.14718530e-02 1.23365335e-02 5.25736660e-02 1.07018501e-01 1.12216305e-02 -6.42671343e-03 -6.08792575e-03 -2.75880508e-02 -4.29682955e-02 2.29907827e-03 -1.71477318e-01 1.06728065e-03 -8.91154166e-03 -6.74131513e-02 1.85923085e-01 6.31039217e-02 -3.01594753e-03 -8.02652538e-03 2.99957022e-03 3.97651680e-02 7.88619071e-02 -3.88861895e-02 5.77857941e-02 -2.28491463e-02 -1.84029698e-01 1.88755430e-02 9.35062673e-03 -7.68110529e-03 -1.34624686e-04 -1.70862395e-02 2.90840361e-02 3.08215886e-01 9.31615767e-04 8.56576301e-03 -1.04340032e-01 5.97063676e-02 -5.19387908e-02 -2.51019020e-02 4.86317575e-02 -9.02486816e-02 -7.01272041e-02 -7.94084519e-02 3.42463702e-02 1.30526712e-02 -3.89413163e-02 7.12394621e-03 -7.80236796e-02 6.86016073e-03 -7.09471107e-03 -8.76432285e-04 2.70982862e-01 -6.52055591e-02 -1.06929066e-02 1.09912697e-02 5.81025705e-02 5.25720865e-02 -1.09306075e-01 -6.08567521e-02 1.57058761e-02 -2.72464026e-02 -5.55893872e-03 -1.02705369e-02 7.71081820e-02 1.22725129e-01 -8.34318772e-02 -1.34621300e-02 3.10156047e-02 -2.27223411e-02 -2.04452332e-02 8.44530482e-03 1.39629871e-01 4.65201735e-02 -1.80061292e-02 2.62141787e-02 1.14859547e-02 3.29010077e-02 -6.33681715e-02 -6.66326750e-03 2.45806739e-01 6.82016835e-03 1.53852955e-01 -9.26824214e-05 -2.24408537e-01 3.46144172e-03 -3.13628651e-02 5.43741090e-03 4.37655784e-02 7.23502412e-02 -2.27418393e-02 -5.73312230e-02 -9.63647943e-03 -3.06532886e-02 -1.83402985e-01 1.51326170e-03 7.01990649e-02 5.20174392e-04 3.25092584e-01 8.53244448e-04 -6.61516309e-01 -2.40109339e-02 7.80446269e-03 -3.96349991e-04 2.09737703e-01 2.01223418e-02 -3.24580610e-01 1.17111066e-02 -2.12647036e-01 -1.07400909e-01 -2.13052216e-03 7.19884411e-02 6.29373044e-02 2.70579964e-01 2.31924746e-03 2.31584962e-02 6.68760389e-02 6.02776278e-03 1.25936240e-01 9.87441279e-03 -1.55544477e-02 9.21403050e-01 5.95429866e-03 1.69753373e-01 -2.77957093e-04 -3.74301686e-03 -8.01488161e-02 
-2.92979274e-02 2.70357099e-03 2.04204336e-01 -2.55553108e-02 -4.37098294e-02 -9.27719846e-02 3.31938616e-03 -6.86550094e-03 -5.29704131e-02 4.67646532e-02 -4.76779230e-03 -2.47423232e-01 -1.95020065e-01 -1.37039032e-02 -2.88726222e-02 4.78997901e-02 -1.67194288e-02 -2.44984671e-01 4.14241031e-02 2.18765303e-01 -1.29683837e-02 -1.29538756e-02 -2.40630388e-01 -1.47552008e-03 1.04058897e-02 8.08705166e-02 2.98772845e-02 1.09445721e-01 -2.72222068e-02 3.15629435e-03 2.02676475e-01 -5.48119098e-03 1.11927085e-01 -8.10229045e-04 4.88724336e-02 -2.04598755e-02 7.28014559e-02 -7.50734936e-03 1.12967454e-01 -1.02381155e-01 -3.93344797e-02 -5.76012488e-03 -8.27285368e-03 4.68877656e-03 3.56422178e-02 7.47172013e-02 -4.50611580e-04 3.87510844e-02 5.87241072e-03 8.89619961e-02 3.71131860e-02 -1.27488390e-01 -2.87959836e-02 2.09520772e-01 6.84321998e-03 -1.79145802e-02 -4.92445230e-01 -1.11257724e-01 -3.41908028e-03 4.99965390e-03 -9.43376590e-03 1.45663433e-02 -1.44766182e-01 -7.85617332e-04 2.50194788e-01 -1.84243411e-01 -8.52207094e-02 4.91553724e-01 6.98826567e-04 9.67463851e-02 -9.53106880e-02 1.34932743e-02 9.78818908e-03 1.42090872e-01 -1.44538991e-02 3.38558876e-03 7.06599429e-02 1.05294399e-01 -1.04073426e-02 8.84406921e-03 -1.04884133e-02 1.08525844e-03 2.73697806e-04 5.48925027e-02 -1.37888163e-01 -7.36754341e-03 -1.68831810e-01 -1.55891664e-02 -1.46280183e-02 -4.65287874e-03 -4.55598114e-03 -2.31690466e-01 8.52278993e-02 3.69785540e-02 -5.14281213e-01 -3.15423198e-02 6.99804872e-02 -6.80299819e-01 5.70312254e-02 -4.68888041e-03 9.24175314e-04 1.46322310e-01 -2.26290636e-02 -1.17635960e-02 -1.97508056e-02 -5.63608184e-02 -2.35153502e-03 -3.02714780e-02 -7.00966269e-02 1.34322822e-01 -5.83322406e-01 6.99004158e-02 6.90660765e-03 -3.59833129e-02 -4.24275696e-02 -1.13462089e-02 1.26277313e-01 -2.11074743e-02 -2.75125448e-02 -1.09875022e-04 -1.36324633e-02 -1.61086857e-01 1.45391980e-03 8.35529938e-02 2.60230571e-01 -2.66209077e-02 -5.58552158e-04 -6.46227747e-02 -1.03871346e-01 7.65479207e-02 5.50469197e-03 -8.30342695e-02 -1.41060511e-02 5.78260748e-04 -7.48543143e-02 4.00875658e-02 -8.43505200e-04 1.30373640e-02 -3.70042515e-03 2.34432235e-01 -5.03952876e-02 -8.23894609e-03 5.68602094e-03 2.00182339e-03 2.67363917e-02 -1.65066063e-01 1.12882899e-02 -2.45540170e-03 -5.58771845e-03 -3.17897797e-02 3.54408443e-01 -2.19827844e-03 1.96579453e-02 -2.13856101e-01 -9.39935893e-02 -4.10447363e-03 5.64558394e-02 -7.74325207e-02 1.12799453e-02 5.74401207e-03 1.17533533e-02 3.13126147e-01 -9.65978801e-02 4.09295522e-02 -7.19228340e-03 1.61319543e-02 1.38026938e-01 -1.03539340e-02 2.02257689e-02 -1.22227691e-01 2.85219820e-03 -1.38053551e-01 1.04186237e-01 -3.85751538e-02 5.40270396e-02 2.01541111e-02 1.11547910e-01 8.12843814e-03 -2.46938601e-01 2.63076667e-02 1.17913038e-02 -3.11973467e-02 -1.92823902e-01 1.21179201e-01 -1.25329243e-02 -8.06584358e-02 1.80483628e-02 -1.24767497e-01 -9.79684740e-02 1.42583912e-02 -2.01247111e-01 1.80912524e-01 -7.99218717e-04 4.43766803e-01 -1.50329890e-02 -5.67825418e-03 -1.08014653e-02 -5.36615476e-02 -2.61940453e-02 -4.53092828e-02 1.35193672e-02 4.11850251e-02 -6.01443984e-02 -2.50824913e-02 1.62832197e-02 -3.54980752e-02 1.94167010e-02 3.73407416e-02 -1.39249265e-02 1.24431606e-02 -2.71414319e-04 2.07198528e-03 -3.13255489e-02 -9.13586060e-04 -6.02687523e-03 -2.45338219e-04 3.06239817e-02 4.73723561e-03 -5.37761077e-02 1.07429676e-01 -3.07083246e-04 1.83322672e-02 1.11524528e-02 -3.02185658e-02 8.59791264e-02 1.52892053e-01 -5.19732898e-03 3.35007943e-02 
-5.18686930e-03 4.59660180e-02 -5.97646236e-02 -5.91781130e-03 -9.71765146e-02 -5.03088348e-03 -2.63293982e-02 2.98104137e-02 1.78570561e-02 -1.43652499e-01 -9.18011591e-02 -1.30667742e-02 5.08407876e-02 -1.08531557e-01 4.97847283e-03 8.63077492e-03 -9.12626833e-02 7.65377134e-02 -1.38562605e-01 1.26607893e-02 2.06802897e-02 -4.38775197e-02 -1.66922156e-02 1.40064796e-02 4.57174098e-03 -1.03261333e-03 5.39288446e-02 -2.95640323e-02 3.11757829e-02 -6.83855964e-03 4.09243908e-03 1.86475474e-04 -5.27539887e-02 -2.06441600e-02 1.07906722e-02 -3.73281129e-02 4.83028293e-02 7.07745552e-03 4.19170335e-02 4.73949760e-02 -3.53640225e-03]] [[ -8.35226402e-02 9.30680893e-03 -1.19281253e-02 6.61145672e-02 3.26909008e-03 4.63765323e-01 3.27814883e-03 3.66053022e-02 1.09708719e-02 7.35405181e-03 -1.55491112e-02 1.80269778e-02 -3.51096364e-03 3.14430892e-02 1.74052753e-02 5.27430698e-02 1.55686750e-03 8.57205018e-02 -4.71130945e-03 -1.66305136e-02 -1.67975724e-02 -3.89260314e-02 6.36396930e-02 7.63485655e-02 -4.48105129e-04 -6.54933006e-02 2.95349136e-02 -2.45081261e-02 4.96003777e-02 6.64148927e-02 -2.33270358e-02 3.41189057e-02 1.60509553e-02 -5.32507300e-02 1.29752964e-01 1.31994952e-03 7.20134820e-04 -1.98553465e-02 -9.42229573e-03 -1.28459960e-01 9.08048358e-03 -7.04445392e-02 2.49016993e-02 -2.02601706e-03 2.32810881e-02 8.85246042e-03 -6.18432183e-03 4.91291285e-02 -1.68560803e-01 -1.95360109e-01 1.66883413e-02 4.46498543e-01 8.62153433e-03 -2.90389471e-02 -1.62739735e-02 1.17358692e-01 -7.42233777e-03 1.05169125e-01 1.42013326e-01 -8.94571014e-04 -3.26711014e-02 -1.27530540e-03 6.44282848e-02 4.75694202e-02 1.49227241e-02 -5.22350743e-02 2.38056909e-02 2.92309709e-02 -2.19096199e-01 1.55504672e-02 -1.53067306e-01 2.74171904e-02 -7.00602890e-04 6.28591049e-03 -5.55272587e-03 -1.07895516e-01 -5.65112336e-03 -5.24670519e-02 -4.89811152e-02 5.01033179e-02 6.93804584e-03 1.05955578e-01 -7.62372569e-04 8.43433514e-02 4.34672460e-03 -4.59782854e-02 8.87977891e-03 -1.24188691e-01 -1.83345191e-02 1.53444866e-02 -3.22986394e-02 6.25251094e-04 9.15350902e-05 -4.84672701e-03 -2.58674123e-03 3.35796690e-03 4.89366241e-02 4.02635749e-04 1.59860756e-02 7.69474208e-01 4.08853181e-02 4.34358567e-02 2.25415756e-03 1.78180281e-02 -1.54250145e-01 -8.23742151e-03 -2.62587331e-03 -5.17571764e-03 -1.17875308e-01 -2.17287224e-02 2.03241524e-03 3.59281600e-02 7.83091187e-02 -6.30748495e-02 -4.43855999e-03 1.38513371e-01 -1.49395345e-02 1.73051998e-01 -1.08843846e-02 -4.09610756e-03 1.47934854e-01 2.77046226e-02 9.71657410e-03 -4.75595258e-02 -8.04194286e-02 -6.39762543e-03 -1.78411976e-03 -2.07632020e-05 -1.20369075e-02 -5.30476645e-02 -2.65281554e-02 -1.27506420e-01 1.61716379e-02 -1.23544305e-03 5.12566231e-02 -2.99007017e-02 -2.24648491e-02 -1.08299348e-04 -2.56778561e-02 5.01158787e-03 1.46491397e-02 8.26944504e-03 9.98148024e-02 -1.25672504e-01 -1.02828294e-02 -1.48208633e-01 -5.26465550e-02 -1.39929978e-02 5.14968075e-02 -7.84798432e-03 1.69349450e-03 1.39911417e-02 9.87590104e-02 8.83786604e-02 -2.10539266e-01 1.15139121e-02 -2.02425104e-03 -2.96202842e-02 2.34789047e-02 -2.69730925e-03 -1.28652789e-02 -9.96383606e-04 -1.49479583e-02 2.74939630e-02 2.21133493e-02 5.40200956e-02 -1.40580133e-01 -1.88481361e-02 5.64605594e-02 8.01192969e-03 2.08442245e-04 -7.04166517e-02 -6.91418424e-02 3.14796274e-03 2.43515684e-03 5.66407405e-02 7.94060528e-03 -9.53438058e-02 1.21964351e-03 1.47800058e-01 -1.13360971e-01 2.98349423e-05 1.53450280e-01 1.23541877e-02 7.23110735e-02 -3.53798419e-02 2.02055108e-02 2.04957579e-03 
2.06349175e-02 -3.23418826e-02 1.54947313e-02 3.00867227e-03 6.27451837e-02 -6.16709888e-03 -4.44535079e-04 -8.95639881e-03 -9.29646849e-05 4.07954591e-04 1.03105552e-01 -1.26244530e-01 -1.03752740e-01 7.28853717e-02 1.41500132e-02 -1.69651657e-02 -7.86132237e-04 -1.02092149e-02 -4.33625951e-02 4.45875973e-02 3.16701494e-02 -5.97860694e-01 -1.04297372e-02 -3.53601612e-02 -5.31432092e-01 1.20885514e-01 -4.41514747e-03 -1.37506695e-02 7.41118193e-02 -1.80932339e-02 -2.18909793e-02 -1.78918764e-02 -4.53138798e-02 -9.50210169e-03 -2.68109199e-02 -4.37963568e-02 2.57522136e-01 -1.55035645e-01 8.70719627e-02 2.36497819e-02 4.50039990e-02 -1.05908541e-02 -6.30994067e-02 5.25860228e-02 1.30218547e-02 -4.60572615e-02 -1.27857761e-03 -2.23273393e-02 -1.01790786e-01 3.97628173e-03 4.75442037e-03 1.58620045e-01 -1.53025975e-02 -5.05035557e-02 -1.91611588e-01 -5.24769202e-02 2.29781922e-02 1.23675652e-02 -1.08054029e-02 -5.12798950e-02 -3.13344353e-05 3.84442252e-03 2.34415475e-02 -6.06570218e-04 -8.98333564e-02 5.93989454e-02 2.30413303e-02 -1.63186044e-02 -4.73568274e-04 1.26233101e-02 -2.33891979e-02 -3.67069454e-03 -3.11445054e-02 6.57939492e-03 -7.66414171e-03 8.54648836e-03 -5.53270467e-02 6.56915456e-02 5.93487732e-02 5.15440246e-03 -9.67373177e-02 -6.51745126e-02 -3.94938029e-02 5.34577072e-02 -1.40887564e-02 -3.21616903e-02 4.82407846e-02 2.16054339e-02 1.52342409e-01 -1.26091624e-02 -3.95356715e-02 -3.49778347e-02 1.04348501e-02 7.99501091e-02 -5.54051399e-02 1.87601745e-02 -1.87667310e-01 8.61896726e-04 -3.23137492e-02 5.16451187e-02 2.22476479e-02 -8.02607462e-03 -2.70199757e-02 5.83411716e-02 1.13461642e-02 -6.43412471e-02 1.39366373e-01 5.69017865e-02 -2.33416390e-02 -2.85612494e-01 5.10325246e-02 4.62737009e-02 -1.91203970e-02 1.32384151e-03 -6.35378435e-02 3.24819842e-03 -1.32869254e-03 1.75510924e-02 1.41231939e-01 1.28516573e-02 1.02439635e-02 -4.39045802e-02 -1.10270549e-02 7.96073973e-02 5.91337262e-03 -7.50695616e-02 1.84723474e-02 9.38338228e-03 -8.35675339e-04 5.00328392e-02 -4.32583364e-03 1.02049168e-02 -5.58866095e-03 -4.91249859e-02 5.04319072e-02 4.64591458e-02 -6.06061965e-02 2.36203405e-03 6.63211709e-03 1.16420723e-02 -1.95871443e-02 -1.81973800e-02 4.64237249e-03 2.86748866e-03 5.38999680e-03 -8.34256224e-03 9.33683477e-03 -5.39150741e-03 -1.86683554e-02 2.48070788e-02 5.92112215e-03 8.56748223e-03 3.22964415e-02 -5.00624850e-02 -2.52336301e-02 9.17474180e-03 -8.92325118e-03 4.67876205e-03 -5.74534759e-02 3.73787177e-03 1.28581151e-02 -1.67223103e-02 7.50076920e-02 -3.98502016e-04 -1.61230732e-02 -2.75442414e-02 3.94548639e-04 -1.37498451e-03 -4.96492116e-03 2.53268499e-02 5.55252992e-02 -5.05645424e-02 -5.79147274e-03 -1.39349727e-02 1.05990777e-02 -4.14465889e-02 1.90671217e-02 2.38927081e-03 1.91597678e-02 7.26094469e-03 5.62381647e-05 -3.14109512e-02 -7.03963730e-03 5.72685339e-03 -4.51433298e-04 -1.14829629e-03 1.45282391e-02 -2.11534575e-02 1.55422445e-02 5.09766936e-02 -2.77528632e-02 -7.16304639e-03 -1.41595118e-02 -2.29488108e-02 -2.09072828e-02 2.64024753e-02 3.75947580e-02 3.53328399e-02 5.87556325e-03 -4.44848882e-03 3.95338349e-02 -2.64461972e-02 2.43276753e-03 -1.63620003e-02 3.14164497e-02 -8.08956474e-03 1.49463089e-02 2.09438596e-02 -8.90694279e-03 3.24987583e-02 6.67429902e-03 -1.50191346e-02 -1.16796941e-02 3.64522748e-02 -8.61222763e-03 1.09857088e-02 -3.57604139e-02 -5.16717369e-03 2.68327910e-02 3.17539498e-02 -1.13831903e-03 2.37232214e-03 8.16115085e-03 6.65428936e-02 1.27308011e-01 1.32651571e-02 -1.16856256e-03 1.92778313e-03 -5.13796732e-02 
2.23806910e-02 2.64357291e-02 -9.60358977e-03 -1.81771331e-02 -2.23696785e-04 1.13886436e-02 3.41330506e-02 2.79273577e-02 3.79165076e-02 2.16118526e-03 -1.12676609e-03 -2.00524926e-03 -1.37742283e-02 -2.34187655e-02 3.20340917e-02 2.10321099e-02 4.94092591e-02 -2.68395413e-02 4.29884112e-03 5.21777458e-02 -3.28805707e-02 -2.18146648e-02 1.72189157e-03 1.45826461e-02 3.89755853e-02 2.16644220e-02 1.45681109e-02 -5.07474365e-03 2.73492723e-03 1.24841267e-02 1.98827442e-02 -5.53916628e-03 -1.07634841e-02 1.36590758e-02 -9.26163197e-02 -3.95117737e-02 -1.71744905e-03 7.74151552e-03 -1.19977084e-03 -1.42000178e-02 -3.76450969e-03 2.26605702e-02 -6.73604533e-02 -2.03648806e-02 3.09449919e-02 -3.69136743e-02 1.27421897e-02 -1.31009584e-02 -1.37193482e-02 1.25257829e-02 1.81069430e-02 -8.70318525e-03 2.53016800e-02 4.50089462e-02 -1.51383178e-02 3.90487462e-02 1.48353223e-02 -5.49035370e-02 1.52793583e-02 6.09628372e-02 2.26467788e-01 -1.58030409e-02 6.35875948e-03 -7.22567504e-03 2.31286865e-02 8.45125783e-03 9.40664485e-03 -7.69937038e-03 -4.84708603e-03 -5.00676176e-03 -1.35368295e-02 -1.90379925e-03 2.88588181e-02 1.81501191e-02 1.80631205e-02 1.93565357e-02 5.02644433e-03 6.69117644e-03 -1.11023095e-02 -1.24723874e-02 -1.13726296e-02 6.56644180e-02 2.00492796e-02 4.66159452e-03 -3.40204779e-03 1.02386642e-02 2.80533340e-02 -3.09105241e-03 6.22903183e-03 -4.17059404e-04 2.77249347e-02 -4.01747860e-02 2.38689836e-02 4.76544015e-02 3.70072108e-03 1.06910598e-02 -2.36127879e-02 3.88308056e-02 2.19260529e-03 -3.29560637e-02 -2.21122336e-02 -4.79903491e-03 1.51326843e-02 -3.50229093e-03 2.24676784e-02 1.43503872e-04 -3.17934118e-02 -1.51474942e-02 2.43635513e-02 1.99209489e-02 1.78901535e-02 -2.50796489e-02 -3.82525399e-02 -7.11742649e-03 1.94894355e-02 -2.41929526e-03 3.09266243e-02 -3.60688046e-02 1.30333030e-03 -1.69621110e-02 -4.34306217e-03 3.22814146e-03 1.35722384e-02 -3.22210952e-03 6.81779860e-03 5.64415827e-02 -4.67799557e-03 -3.43072243e-05 1.47441979e-02 1.27382390e-02 2.21132208e-02 1.00761396e-03 5.81906037e-03 -2.46797632e-02 2.25442611e-02 5.67185059e-02 1.56729892e-02 -1.27938166e-02 -4.06436957e-02 -7.34348316e-03 4.15970478e-03 5.33090569e-02 2.27547772e-02 -4.65768166e-02 -8.26780498e-03 -1.76717900e-02 1.14618763e-02 -6.11296529e-03 -9.91329923e-03 -1.77789368e-02 1.49894347e-02 2.00315397e-02 4.59903367e-02 -9.15393308e-02 -1.06830876e-02 -3.23898084e-02 -1.45315717e-03 -1.91982687e-02 5.13540730e-02 5.07519208e-03 1.16187809e-02 -7.04900697e-02 1.31804580e-02 9.46480874e-03 1.66993719e-02 9.73339472e-03 4.89528617e-03 1.25560202e-02 9.88730043e-03 2.34009465e-03 -1.93884056e-02 1.21425718e-01 1.60742402e-01 1.74952727e-02 -2.61049587e-02 2.36830972e-02 -2.52299607e-02 -3.02438326e-02 1.08230580e-02 -1.32970130e-02 -4.28082272e-02 -1.06386631e-03 -1.67271737e-02 -2.24054903e-02 2.09099753e-03 9.29628033e-03 -1.35044642e-02 5.04099466e-02 -2.71457713e-02 1.73648577e-02 2.48547047e-02 -1.89860701e-03 1.70966927e-02 -2.60377172e-02 4.39567082e-02 7.85375014e-04 1.94283966e-02 -1.33992946e-02 -2.70982515e-02 1.87142137e-02 3.46861593e-03 3.57702821e-02 2.04772372e-02 1.90474056e-02 4.41925647e-03 -7.10287225e-03 1.49543295e-02 1.38295190e-02 2.13973895e-02 2.09906921e-02 6.11540861e-03]]] I am trying to extract it using regex (or any other method if possible). So far I tried solutions like Extract values between square brackets ignoring single quotes but they are not working. How could I extract these arrays and use them as numpy vectors?
You can use the numpy.fromstring() method to accomplish this task once you have read in the file's contents. Below is a quick and dirty implementation of this (no error detection), which works with your example array.

import numpy as np

with open("test.txt", 'r') as f:
    arr = np.fromstring(f.read(), np.dtype(np.float), 1024)
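One caveat: with the default sep='', np.fromstring treats the input as raw binary bytes, so for whitespace-separated text like the dump above you will likely need to strip the brackets and pass a separator. A sketch of that variant, reusing the assumed test.txt filename:

import re
import numpy as np

with open("test.txt", 'r') as f:
    cleaned = re.sub(r'[\[\]]', '', f.read())       # drop the square brackets
arr = np.fromstring(cleaned, dtype=float, sep=' ')  # parse whitespace-separated floats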
This would also get you there, using regular expressions:

import numpy as np
import re

with open("somefile.txt") as f:
    content = f.read()
content = re.sub('[\[\n\]]', '', content)
arr = re.split(r'\s+', content)
np_arr = np.array([float(i) for i in arr])
Replacing characters in python
I wrote a compressor based on the Huffman algorithm to compress text: # compressor Huffman quijote from collections import Counter import math import pickle import re quijote = open("quijote.txt", encoding="utf8") num_lletres = 0 llistaQ = [] for linia in quijote: for lletra in linia: llistaQ.append(lletra) num_lletres = num_lletres+1 c = Counter(llistaQ) c_ordenat = c.most_common() c_final=c_ordenat[::-1] for i,j in enumerate(c_final): c_final[i]=list(c_final[i]) diccionari=[] for i,j in c_final: diccionari.append([i,None]) while len(c_final) > 1: petit1=c_final[0] petit2=c_final[1] c_final.append([petit1[0]+petit2[0],petit1[1]+petit2[1]]) for i in petit1[0]: for pos,x in enumerate(diccionari): if x[0]==i: val_antic=diccionari[pos][1] diccionari[pos].pop(1) if val_antic==None: diccionari[pos].insert(1,"1") else: diccionari[pos].insert(1,"1"+val_antic) for i in petit2[0]: for pos,x in enumerate(diccionari): if x[0]==i: val_antic=diccionari[pos][1] diccionari[pos].pop(1) if val_antic==None: diccionari[pos].insert(1,"0") else: diccionari[pos].insert(1,"0"+val_antic) del c_final[0] del c_final[0] c_final.sort(key = lambda f: f[1]) s=0 entropia=0 llarg=0 llistat=c_ordenat[::-1] for i,j in llistat: freq=float(j)/float(num_lletres) s=freq*math.log(freq,2.0) entropia=entropia+s for pos,z in enumerate(diccionari): if z[0]==i: llarg=llarg+(len(z[1])*freq) entropia=-(entropia) with open("taula_Huffman.txt", "wb") as taula_final: pickle.dump(diccionari, taula_final) cadena='' with open("quijote.txt", encoding="utf8") as entrada, open('Huffman sortida', 'wb') as sortida: for line in entrada: for x, y in diccionari: line = line.replace(x, y) cadena=cadena+line cadena=str(1)+cadena bits=re.findall('........',cadena) for i in bits: sortida.write(bytes([int(i,2)])) ll=len(cadena) sob=ll%8 a=len(cadena)/8 inta=(int(a)) nc='' for n in range(1,sob+1): nc=nc+cadena[(inta*8)+n-1] penultim=nc+str(1)*(8-sob) ultim=str('{0:08b}'.format(sob)) sortida.write(bytes([int(penultim,2)])) sortida.write(bytes([int(ultim,2)])) print("entropia=",entropia) print("expected length=",llarg) quijote.close() And the decompressor: #Descompressor Huffman import binascii import pickle with open("Huffman sortida", "rb") as entrada, open('quijote descomprimit.txt','w',encoding='utf-8') as sortida, open("taula_Huffman.txt", "rb") as diccionari: byte = entrada.read() hexadecimal = binascii.hexlify(byte).decode() binary=bin(int(hexadecimal, 16))[2:].zfill(8) cadena=binary[1:] ultim=cadena[(len(cadena)-8):(len(cadena))] cadena=cadena[:(len(cadena)-8)] penultim=cadena[(len(cadena)-8):(len(cadena))] cadena=cadena[:(len(cadena)-8)] ultim=int(ultim,2) afegit=penultim[:ultim] cadena=cadena+afegit dic = pickle.load(diccionari) dicc={} for i in dic: dicc[i[0]] = i[1] dicci = {v: k for k, v in dicc.items()} temporal='' text='' for i in cadena: temporal+=i if temporal in dicci: text+=dicci[temporal] temporal='' sortida.write(text) The problem is that the decompressed file is fine except for the numbers, I mean, where on the original file were letters in the final file there are the same letters, but where on the original file were numbers in the final file apears a series of 1111 Csicauicau,11dunrdunr dunrdunrdunrdunr 111 1111dunrdunr dunrdunrdunrdunr or whatever. 
So I found that the part of the compressor that failed was the replace instruction (lines 74-75):

for line in entrada:
    for x, y in diccionari:
        line = line.replace(x, y)
    cadena=cadena+line

I replaced those four lines with the following:

for line in entrada:
    for ch in line:
        for x, y in diccionari:
            if ch==x:
                cadena=cadena+y
                break

The problem is that this makes the program very slow (~40 seconds to run on my PC, versus about 9 seconds for the first version). Is there a way to make this triple-nested for loop faster? Or is there a fix for my .replace(x, y) approach, which fails with the numbers?
My code doesn't produce any output -- Python
I have two columns of data (sample data) and I want to calculate total users for each week day. For instance, I'd want my output like this (dict/list, anything will do):

Monday: 25, Tuesday: 30, Wednesday: 45, Thursday: 50, Friday: 24, Saturday: 22, Sunday: 21

Here's my attempt:

def rider_ship (filename):
    with open('./data/Washington-2016-Summary.csv','r') as f_in:
        Sdict = []
        Cdict = []
        reader = csv.DictReader(f_in)
        for row in reader:
            if row['user_type']=="Subscriber":
                if row['day_of_week'] in Sdict:
                    Sdict[row['day_of_week']]+=1
                else:
                    Sdict [row['day_of_week']] = row['day_of_week']
            else:
                if row ['day_of_week'] in Cdict:
                    Cdict[row['day_of_week']] +=1
                else:
                    Cdict[row['day_of_week']] = row['day_of_week']
        return Sdict, Cdict
        print (Sdict)
        print (Cdict)

t= rider_ship ('./data/Washington-2016-Summary.csv')
print (t)

It fails with:

TypeError: list indices must be integers or slices, not str
How about using pandas? Let's first create a file-like object with the io library:

import io

s = u"""day_of_week,user_type
Monday,subscriber
Tuesday,customer
Tuesday,subscriber
Tuesday,subscriber"""

file = io.StringIO(s)

Now to the actual code:

import pandas as pd

df = pd.read_csv(file)  # "path/to/file.csv"
Sdict = df[df["user_type"] == "subscriber"]["day_of_week"].value_counts().to_dict()
Cdict = df[df["user_type"] == "customer"]["day_of_week"].value_counts().to_dict()

Now we have:

Sdict = {'Tuesday': 2, 'Monday': 1}
Cdict = {'Tuesday': 1}
'can only join iterable' when fetching data using tia.bbg.datamgr (Python 2.7)
I'm writing a script that fetches data from Bloomberg using the TIA toolkit. I'm trying to place the PX_VALUE from the start date for each equity in stocks in a dictionary called dict1 so that i can manipulate those values later. Here is my script so far without the calculations: from __future__ import division import numpy as np import pandas as pd import datetime import tia import tia.bbg.datamgr as dm from operator import itemgetter start = datetime.date(2017, 1, 3) end = datetime.date(2017, 7, 25) diffdays = ((end - start).days)/365 resolution = 0.01 diff2dp = int(np.round(diffdays/resolution))*resolution diff = 1/diff2dp dict1 = {} stocks = ('GOOGL US Equity','MSFT US Equity', 'IBM US Equity') mgr = dm.BbgDataManager() eqt = mgr[stocks] for eq in eqt: df = eq.get_historical(['PX_LAST'], start, end) k = df.loc[start]['PX_LAST'] dict1 [stocks] = k print dict1 And here is the actual Output: Traceback (most recent call last): File "C:\Users\bloomberg\Desktop\examples\CAGR by LouisV2 BROKEN.py", line 23, in <module> for eq in eqt: File "C:\Python27\lib\site-packages\tia\bbg\datamgr.py", line 94, in __getitem__ return self.get_attributes(flds, **self.overrides) File "C:\Python27\lib\site-packages\tia\bbg\datamgr.py", line 90, in get_attributes frame = self.mgr.get_attributes(self.sids, flds, **overrides) File "C:\Python27\lib\site-packages\tia\bbg\datamgr.py", line 148, in get_attributes return self.terminal.get_reference_data(sids, flds, **overrides).as_frame() File "C:\Python27\lib\site-packages\tia\bbg\v3api.py", line 745, in get_reference_data return self.execute(req) File "C:\Python27\lib\site-packages\tia\bbg\v3api.py", line 711, in execute self.logger.info('executing request: %s' % repr(request)) File "C:\Python27\lib\site-packages\tia\bbg\v3api.py", line 432, in __repr__ fields=','.join(self.fields), TypeError: can only join an iterable >>> I have also written a script that works for 1 equity with the calculations: from __future__ import division import numpy as np import pandas as pd import datetime import tia import tia.bbg.datamgr as dm start = datetime.date(2017, 1, 3) end = datetime.date(2017, 7, 25) diffdays = ((end - start).days)/365 resolution = 0.01 diff2dp = int(np.round(diffdays/resolution))*resolution diff = 1/diff2dp mgr = dm.BbgDataManager() eqt = mgr['GOOGL US Equity'] datafetch = eqt.get_historical(['PX_LAST'], start, end) calc1 = ((datafetch.loc[end]['PX_LAST'])/(datafetch.loc[start]['PX_LAST'])) calc2 = (pow(calc1,diff))-1 calc22dp = int(np.round(calc2/resolution))*resolution print calc22dp
Your single-security solution does this:

eqt = mgr['GOOGL US Equity']

but your multiple-security solution does (in effect) this:

eqt = mgr[('GOOGL US Equity','MSFT US Equity', 'IBM US Equity')]

Now, I obviously cannot test this without a Bloomberg installation, but it is clear from the error message that your problem is with eqt. Are you 100% sure you can pass a tuple of BBG ids as a key to dm.BbgDataManager()? The results you are getting suggest that you can't. Follow the line of your working one-security solution, but loop through the stocks of interest:

stocks = ('GOOGL US Equity','MSFT US Equity', 'IBM US Equity')
mgr = dm.BbgDataManager()

for stock in stocks:
    eqt = mgr[stock]
    datafetch = eqt.get_historical(['PX_LAST'], start, end)
    calc1 = ((datafetch.loc[end]['PX_LAST'])/(datafetch.loc[start]['PX_LAST']))
    calc2 = (pow(calc1,diff))-1
    calc22dp = int(np.round(calc2/resolution))*resolution
    print calc22dp