I'm struggling with FuncAnimation from matplotlib.animation, and I could not find any example or post similar to my problem (yes, there are posts about contourf used with FuncAnimation, but in those posts the authors manage to delete the PathCollection objects, whereas in my case something is not working).
Context:
For a school project on the One-vs-All approach (multiple binary classifiers), I want to implement functions to animate a figure that has 3 Axes and contains multiple Line2D objects, a PathCollection object from the scatter method and a QuadContourSet from the contourf method.
Here is a screenshot of how it looks (obtained when I plot the data at the end of the One-vs-All training):
Representation of the static graph
Legend:
Left: decision boundary in the (Herbology)-(Defense against the Dark Arts) plane,
Top right: loss function of each binary classifier,
Bottom right: precision and recall metrics of each classifier.
Methods:
I am trying to make an animated version of the plot using FuncAnimation from the matplotlib.animation module. The animated version is a bonus feature of my project, so the animation core is written as functions; you can see a simplified (bare-bones) representation below:
def anim_visu(models, data):
    # initialization of the figure and of the objects representing the data
    ...
    def f_anim():
        # function which updates the data at each frame
        ...
    visu = FuncAnimation(fig, f_anim, ...)
    return fig
[...]
if __name__ == "__main__":
    [...]
    if bool_dynamic: # activation of the dynamic visualization
        anim_visu(models, data)
And here is a minimal, more-or-less working example:
# =========================================================================== #
# |Importation des lib/packages| #
# =========================================================================== #
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
from matplotlib.animation import FuncAnimation
from matplotlib.gridspec import GridSpec
dct_palet = {"C1":"dodgerblue",
"C2":"red",
"C3":"green",
"C4":"goldenrod"}
fps = 15
# =========================================================================== #
# | Definition des fonctions | #
# =========================================================================== #
def one_vs_all_prediction(classifiers:list, X:np.array) -> np.array:
"""
... Docstring ...
"""
preds = np.zeros((X.shape[0],1))
for clf in classifiers:
tmp = clf.predict(X)
mask = preds == 0
preds[mask] = tmp[mask]
return preds
def one_vs_all_class_onehot(class_pred:np.array):
"""
... Docstring ...
"""
house = {"C1":1., "C2":2., "C3":3., "C4":4.}
onehot_pred = np.chararray((class_pred.shape[0],1), itemsize=2)
for key, item in house.items():
mask = class_pred == item
onehot_pred[mask] = key
return onehot_pred
def do_animation(clfs:list, data:np.ndarray):
""" Core function for the animated vizualisation.
The function defines all the x/y_labels, the titles.
"""
global idx, cost_clf1, cost_clf2, cost_clf3, cost_clf4, \
met1_clf1, met1_clf2, met1_clf3, met1_clf4, \
met2_clf1, met2_clf2, met2_clf3, met2_clf4, \
boundary, axes, \
l_cost_clf1, l_cost_clf2, l_cost_clf3, l_cost_clf4, \
l_met1_clf1, l_met1_clf2, l_met1_clf3, l_met1_clf4, \
l_met2_clf1, l_met2_clf2, l_met2_clf3, l_met2_clf4
plt.style.use('seaborn-pastel')
# -- Declaring the figure and the axes -- #
fig = plt.figure(figsize=(15,9.5))
gs = GridSpec(2, 2, figure=fig)
axes = [fig.add_subplot(gs[:, 0]), fig.add_subplot(gs[0, 1]), fig.add_subplot(gs[1, 1])]
# --formatting the different axes -- #
axes[0].set_xlabel("X_1")
axes[0].set_ylabel("X_2")
axes[0].set_title("Decision boundary")
axes[1].set_xlabel("i: iteration")
axes[1].set_xlim(-10, 1000)
axes[1].set_ylim(-10, 350)
axes[1].set_ylabel(r"$\mathcal{L}_{\theta_0,\theta_1}$")
axes[1].grid()
axes[2].set_xlabel("i: iteration")
axes[2].set_ylabel("Scores (metric_1 & metric_2)")
axes[2].set_xlim(-10, 1000)
axes[2].set_ylim(0.0,1.01)
axes[2].grid()
# -- Reading min and max values along X dimensions-- #
X = data[:,0:2]
X = X.astype(np.float64)
Y = data[:,2].reshape(-1,1)
idx = np.array([0])
X_min, X_max = X[:,:2].min(axis=0), X[:,:2].max(axis=0)
# -- Generate a grid of points with distance h between them -- #
h = 0.01
XX_1, XX_2 = np.meshgrid(np.arange(X_min[0], X_max[0], h),
np.arange(X_min[1], X_max[1], h))
zeros_arr = np.zeros((XX_1.shape[0] * XX_1.shape[1], 1))
XX = np.c_[XX_1.ravel(), XX_2.ravel(),
zeros_arr.ravel(), zeros_arr.ravel(), zeros_arr.ravel()]
# -- Predict the function value for the whole grid -- #
preds = one_vs_all_prediction(clfs, XX)
Z = preds.reshape(XX_1.shape)
## Initialisation of the PathCollection for the Axes[0] objects
boundary = axes[0].contourf(XX_1, XX_2, Z, 3,
colors=["red", "green", "goldenrod", "dodgerblue"], alpha=0.5)
lst_colors = np.array([dct_palet[house] for house in data[:,2]])
raw_data = axes[0].scatter(X[:,0], X[:,1], c=lst_colors, edgecolor="k")
## Initialisation of the Line2D object for the Axes[1] objects
cost_clf1 = clfs[0].cost()
cost_clf2 = clfs[1].cost()
cost_clf3 = clfs[2].cost()
cost_clf4 = clfs[3].cost()
l_cost_clf1, = axes[1].plot(idx, cost_clf1,
ls='-', marker='o', ms=2, lw=1.2, color=dct_palet[clfs[0].house])
l_cost_clf2, = axes[1].plot(idx, cost_clf2,
ls='-', marker='o', ms=2, lw=1.2, color=dct_palet[clfs[1].house])
l_cost_clf3, = axes[1].plot(idx, cost_clf3,
ls='-', marker='o', ms=2, lw=1.2, color=dct_palet[clfs[2].house])
l_cost_clf4, = axes[1].plot(idx, cost_clf4,
ls='-', marker='o', ms=2, lw=1.2, color=dct_palet[clfs[3].house])
## Initialisation of the Line2D object for the Axes[2] objects
met1_clf1 = clfs[0].dummy_metric1()
met1_clf2 = clfs[1].dummy_metric1()
met1_clf3 = clfs[2].dummy_metric1()
met1_clf4 = clfs[3].dummy_metric1()
met2_clf1 = clfs[0].dummy_metric2()
met2_clf2 = clfs[1].dummy_metric2()
met2_clf3 = clfs[2].dummy_metric2()
met2_clf4 = clfs[3].dummy_metric2()
l_met1_clf1, = axes[2].plot(idx, met1_clf1,
ls='-', marker='o', ms=2, lw=1.2, color=dct_palet[clfs[0].house])
l_met1_clf2, = axes[2].plot(idx, met1_clf2,
ls='-', marker='o', ms=2, lw=1.2, color=dct_palet[clfs[1].house])
l_met1_clf3, = axes[2].plot(idx, met1_clf3,
ls='-', marker='o', ms=2, lw=1.2, color=dct_palet[clfs[2].house])
l_met1_clf4, = axes[2].plot(idx, met1_clf4,
ls='-', marker='o', ms=2, lw=1.2, color=dct_palet[clfs[3].house])
l_met2_clf1, = axes[2].plot(idx, met2_clf1,
ls='--', marker='o', ms=2, lw=1.2, color=dct_palet[clfs[0].house])
l_met2_clf2, = axes[2].plot(idx, met2_clf2,
ls='--', marker='o', ms=2, lw=1.2, color=dct_palet[clfs[1].house])
l_met2_clf3, = axes[2].plot(idx, met2_clf3,
ls='--', marker='o', ms=2, lw=1.2, color=dct_palet[clfs[2].house])
l_met2_clf4, = axes[2].plot(idx, met2_clf4,
ls='--', marker='o', ms=2, lw=1.2, color=dct_palet[clfs[3].house])
fig.canvas.mpl_connect('close_event', f_close)
anim_fig = FuncAnimation(fig, f_animate, fargs=(XX_1, XX_2, XX,), frames=int(1000/fps), repeat=False, cache_frame_data = False, blit=False)
plt.waitforbuttonpress()
return fig
def f_animate(i, XX_1, XX_2, XX):
"""
... Docstring ...
"""
global clfs, idx, \
cost_clf1, cost_clf2, cost_clf3, cost_clf4, \
met1_clf1, met1_clf2, met1_clf3, met1_clf4, \
met2_clf1, met2_clf2, met2_clf3, met2_clf4, \
boundary, axes, l_cost_clf1, l_cost_clf2, l_cost_clf3, l_cost_clf4, \
l_met1_clf1, l_met1_clf2, l_met1_clf3, l_met1_clf4, \
l_met2_clf1, l_met2_clf2, l_met2_clf3, l_met2_clf4
n_cycle = 100
clfs[0].fit(n_cycle)
clfs[1].fit(n_cycle)
clfs[2].fit(n_cycle)
clfs[3].fit(n_cycle)
idx = np.concatenate((idx, np.array([i * n_cycle])))
preds = one_vs_all_prediction(clfs, XX)
Z = preds.reshape(XX_1.shape)
cost_clf1 = np.concatenate((cost_clf1, clfs[0].cost()))
cost_clf2 = np.concatenate((cost_clf2, clfs[1].cost()))
cost_clf3 = np.concatenate((cost_clf3, clfs[2].cost()))
cost_clf4 = np.concatenate((cost_clf4, clfs[3].cost()))
tmp_met1_clf1 = clfs[0].dummy_metric1()
tmp_met1_clf2 = clfs[1].dummy_metric1()
tmp_met1_clf3 = clfs[2].dummy_metric1()
tmp_met1_clf4 = clfs[3].dummy_metric1()
tmp_met2_clf1 = clfs[0].dummy_metric2()
tmp_met2_clf2 = clfs[1].dummy_metric2()
tmp_met2_clf3 = clfs[2].dummy_metric2()
tmp_met2_clf4 = clfs[3].dummy_metric2()
met1_clf1 = np.concatenate((met1_clf1, tmp_met1_clf1))
met1_clf2 = np.concatenate((met1_clf2, tmp_met1_clf2))
met1_clf3 = np.concatenate((met1_clf3, tmp_met1_clf3))
met1_clf4 = np.concatenate((met1_clf4, tmp_met1_clf4))
met2_clf1 = np.concatenate((met2_clf1, tmp_met2_clf1))
met2_clf2 = np.concatenate((met2_clf2, tmp_met2_clf2))
met2_clf3 = np.concatenate((met2_clf3, tmp_met2_clf3))
met2_clf4 = np.concatenate((met2_clf4, tmp_met2_clf4))
# -- Plot the contour and training examples -- #
# Update the plot objects: remove the previous collections to save memory.
#l = len(boundary.collections)
for coll in boundary.collections:
# Remove the existing contours
boundary.collections.remove(coll)
boundary = axes[0].contourf(XX_1, XX_2, Z, 3, colors=["red", "green", "goldenrod", "dodgerblue"], alpha=0.5)
l_cost_clf1.set_data(idx, cost_clf1)
l_cost_clf2.set_data(idx, cost_clf2)
l_cost_clf3.set_data(idx, cost_clf3)
l_cost_clf4.set_data(idx, cost_clf4)
l_met1_clf1.set_data(idx, met1_clf1)
l_met1_clf2.set_data(idx, met1_clf2)
l_met1_clf3.set_data(idx, met1_clf3)
l_met1_clf4.set_data(idx, met1_clf4)
l_met2_clf1.set_data(idx, met2_clf1)
l_met2_clf2.set_data(idx, met2_clf2)
l_met2_clf3.set_data(idx, met2_clf3)
l_met2_clf4.set_data(idx, met2_clf4)
return boundary.collections, l_cost_clf1, l_cost_clf2, l_cost_clf3, l_cost_clf4, \
l_met1_clf1, l_met1_clf2, l_met1_clf3, l_met1_clf4, \
l_met2_clf1, l_met2_clf2, l_met2_clf3, l_met2_clf4
def f_close(event):
""" Functions called when the graphical window is closed.
It prints the last value of the theta vector and the last value of the
cost function.
"""
plt.close()
class DummyBinary():
def __init__(self, house, theta0, theta1, alpha=1e-3):
self.house = house
self.theta0 = theta0
self.theta1 = theta1
self.alpha = alpha
if self.house == "C1":
self.border_x = 6
self.border_y = 6
if self.house == "C2":
self.border_x = 6
self.border_y = 13
if self.house == "C3":
self.border_x = 13
self.border_y = 6
if self.house == "C4":
self.border_x = 13
self.border_y = 13
def fit(self, n_cycle:int):
for _ in range(n_cycle):
self.theta0 = self.theta0 + self.alpha * (self.border_x - self.theta0)
self.theta1 = self.theta1 + self.alpha * (self.border_y - self.theta1)
def cost(self) -> float:
cost = (self.theta0 - self.border_x)**2 + (self.theta1 - self.border_y)**2
return cost
def predict(self, X:np.array) -> np.array:
if self.house == 'C1':
mask = (X[:,0] < self.theta0) & (X[:,1] < self.theta1)
if self.house == 'C2':
mask = (X[:,0] < self.theta0) & (X[:,1] > self.theta1)
if self.house == 'C3':
mask = (X[:,0] > self.theta0) & (X[:,1] < self.theta1)
if self.house == 'C4':
mask = (X[:,0] > self.theta0) & (X[:,1] > self.theta1)
pred =np.zeros((X.shape[0], 1))
pred[mask] = int(self.house[1])
return pred
def dummy_metric1(self):
return np.array([0.5 * (self.theta0 / self.border_x + self.theta1 / self.border_y)])
def dummy_metric2(self):
return np.array([0.5 * ((self.theta0 / self.border_x)**2 + (self.theta1 / self.border_y)**2)])
# =========================================================================== #
# _________________________________ MAIN __________________________________ #
# =========================================================================== #
if __name__ == "__main__":
# -- Dummy data -- #
x1 = np.random.randn(60,1) * 2.5 + 3.5
x2 = np.random.randn(60,1) * 2.5 + 3.5
x3 = np.random.randn(60,1) * 2.5 + 15.5
x4 = np.random.randn(60,1) * 2.5 + 15.5
stud_house = 60 * ['C1'] + 60 * ['C2'] + 60 * ['C3'] + 60 * ['C4']
c_house = [dct_palet[house] for house in stud_house]
y1 = np.random.randn(60,1) * 2.5 + 3.5
y2 = np.random.randn(60,1) * 2.5 + 15.5
y3 = np.random.randn(60,1) * 2.5 + 3.5
y4 = np.random.randn(60,1) * 2.5 + 15.5
X = np.concatenate((x1, x2, x3, x4)) # shape: (240,1)
Y = np.concatenate((y1, y2, y3, y4)) # shape: (240,1)
data = np.concatenate((X, Y, np.array(stud_house).reshape(-1,1)), axis=1) # shape: (240,3)
clf1 = DummyBinary("C1", np.random.rand(1), np.random.rand(1))
clf2 = DummyBinary("C2", np.random.rand(1), np.random.rand(1))
clf3 = DummyBinary("C3", np.random.rand(1), np.random.rand(1))
clf4 = DummyBinary("C4", np.random.rand(1), np.random.rand(1))
clfs = [clf1, clf2, clf3, clf4]
## Visualize the raw dummy data.
#plt.scatter(X, Y, c=c_house, s=5)
#plt.show()
do_animation(clfs, data)
The DummyBinary class mimics, in a simplified way, what my One-vs-All class can do.
You can see a bunch of global statements in anim_visu and f_anim; this way the code "works", but I'm aware there is something very wrong with it.
Attempts:
No global variables: everything was passed to f_anim via fargs, but when returning from f_anim, all the modifications made to the variables in the f_anim scope were lost (normal behavior, obviously),
Moving the definition of f_anim inside the body of anim_visu, making f_anim an inner function. I'm not experienced enough, so I did not manage to make it work this way; it appeared that it is not possible to modify variables declared in the anim_visu scope from within the inner function,
Declaring all the variables I need as global. It works, in a way, but as you can see when running the code, the PathCollections in axes[0] are not cleared/deleted (despite the loop with boundary.collections.remove(coll)) and the number of PathCollections in axes[0] keeps increasing, which slows down the frame updates (a stripped-down sketch of both of these issues follows this list).
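For reference, here is a minimal sketch (illustrative names only, not my project code) of the two mechanisms the last two attempts seem to run into: an inner function needs nonlocal to rebind a name defined in the enclosing function, and removing items from a list while iterating over it skips elements, so the usual fix is to iterate over a copy:

import numpy as np
import matplotlib.pyplot as plt
from matplotlib.animation import FuncAnimation

def anim_visu_sketch():
    fig, ax = plt.subplots()
    xx, yy = np.meshgrid(np.linspace(0, 1, 50), np.linspace(0, 1, 50))
    boundary = ax.contourf(xx, yy, xx + yy)

    def f_anim(i):
        nonlocal boundary                        # rebind the name from the enclosing scope
        for coll in list(boundary.collections):  # iterate over a *copy* before removing
            coll.remove()
        boundary = ax.contourf(xx, yy, xx + (i + 1) * yy)
        return boundary.collections

    anim = FuncAnimation(fig, f_anim, frames=20, blit=False)
    return fig, anim  # keep a reference to the FuncAnimation so it is not garbage collected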
Looking forward to your advice (and, I hope, a solution with an explanation).
And thank you for your time and neurons.
I have been trying to build a chart that shows both candlesticks and Ichimoku (additionally, I also want to draw support and resistance lines, channels, harmonic patterns etc. on the chart). I have already taken help from various resources...
I have built two scripts: the first builds candlestick charts with RSI and MACD; the second builds the Ichimoku components (Python/Pandas calculate Ichimoku chart components).
First script
# THIS VERSION IS FOR PYTHON 3 #
import urllib.request, urllib.error, urllib.parse
import time
import datetime
import numpy as np
from datetime import timedelta
import pandas as pd
import matplotlib
matplotlib.use('agg')
import matplotlib.pyplot as plt
import matplotlib.ticker as mticker
import matplotlib.dates as mdates
#from matplotlib.finance import candlestick_ohlc
from mpl_finance import candlestick_ohlc
import matplotlib
import pylab
matplotlib.rcParams.update({'font.size': 9})
def rsiFunc(prices, n=14):
deltas = np.diff(prices)
seed = deltas[:n+1]
up = seed[seed>=0].sum()/n
down = -seed[seed<0].sum()/n
rs = up/down
rsi = np.zeros_like(prices)
rsi[:n] = 100. - 100./(1.+rs)
for i in range(n, len(prices)):
delta = deltas[i-1] # cause the diff is 1 shorter
if delta>0:
upval = delta
downval = 0.
else:
upval = 0.
downval = -delta
up = (up*(n-1) + upval)/n
down = (down*(n-1) + downval)/n
rs = up/down
rsi[i] = 100. - 100./(1.+rs)
return rsi
def movingaverage(values,window):
weigths = np.repeat(1.0, window)/window
smas = np.convolve(values, weigths, 'valid')
return smas # as a numpy array
def ExpMovingAverage(values, window):
weights = np.exp(np.linspace(-1., 0., window))
weights /= weights.sum()
a = np.convolve(values, weights, mode='full')[:len(values)]
a[:window] = a[window]
return a
def computeMACD(x, slow=26, fast=12):
"""
compute the MACD (Moving Average Convergence/Divergence) using a fast and slow exponential moving avg'
return value is emaslow, emafast, macd which are len(x) arrays
"""
emaslow = ExpMovingAverage(x, slow)
emafast = ExpMovingAverage(x, fast)
return emaslow, emafast, emafast - emaslow
def bytespdate2num(fmt, encoding='utf-8'):
strconverter = mdates.strpdate2num(fmt)
def bytesconverter(b):
s = b.decode(encoding)
return strconverter(s)
return bytesconverter
def graphData(stock,MA1,MA2):
filepath = 'S:/Perl64/lambda/Litmus/data/daily/' + stock + '.csv'
print ('filepath = ' + filepath)
try:
with open(filepath) as f:
lines = (line for line in f if not line.startswith('D'))
date, openp, highp, lowp, closep, volume = np.loadtxt(lines,delimiter=',', unpack=True,
converters={ 0: bytespdate2num('%Y-%m-%d')})
x = 0
y = len(date)
newAr = []
while x < y:
appendLine = date[x],openp[x],highp[x],lowp[x],closep[x],volume[x]
print (appendLine)
newAr.append(appendLine)
x+=1
#Av1 = movingaverage(closep, MA1)
#Av2 = movingaverage(closep, MA2)
SP = len(date[MA2-1:])
fig = plt.figure(figsize=(14,8))
ax1 = plt.subplot2grid((6,4), (1,0), rowspan=4, colspan=4)
candlestick_ohlc(ax1, newAr[-SP:], width=.5, colorup='#ff1717', colordown='#53c156')
#Label1 = str(MA1)+' SMA'
#Label2 = str(MA2)+' SMA'
#ax1.plot(date[-SP:],Av1[-SP:],'blue',label=Label1, linewidth=1.5)
#ax1.plot(date[-SP:],Av2[-SP:],'orange',label=Label2, linewidth=1.5)
ax1.grid(True, color='#E8E8E8')
ax1.xaxis.set_major_locator(mticker.MaxNLocator(10))
ax1.xaxis.set_major_formatter(mdates.DateFormatter('%Y-%m-%d'))
ax1.yaxis.label.set_color("w")
ax1.spines['bottom'].set_color("#E8E8E8")
ax1.spines['top'].set_color("#E8E8E8")
ax1.spines['left'].set_color("#E8E8E8")
ax1.spines['right'].set_color("#E8E8E8")
ax1.tick_params(axis='y', colors='black')
plt.gca().yaxis.set_major_locator(mticker.MaxNLocator(prune='upper'))
ax1.tick_params(axis='x', colors='black')
plt.ylabel('Stock price and Volume',color='black')
# --- legends
#maLeg = plt.legend(loc=9, ncol=2, prop={'size':7},
# fancybox=True, borderaxespad=0.)
#maLeg.get_frame().set_alpha(0.4)
#textEd = pylab.gca().get_legend().get_texts()
#pylab.setp(textEd[0:5], color = 'b')
volumeMin = 0
ax0 = plt.subplot2grid((6,4), (0,0), sharex=ax1, rowspan=1, colspan=4) #axisbg='#07000d'
rsi = rsiFunc(closep)
rsiCol = 'black'
posCol = '#386d13'
negCol = '#8f2020'
ax0.plot(date[-SP:], rsi[-SP:], rsiCol, linewidth=1.5)
ax0.axhline(70, color=negCol)
ax0.axhline(30, color=posCol)
ax0.fill_between(date[-SP:], rsi[-SP:], 70, where=(rsi[-SP:]>=70), facecolor=negCol, edgecolor=negCol, alpha=0.5)
ax0.fill_between(date[-SP:], rsi[-SP:], 30, where=(rsi[-SP:]<=30), facecolor=posCol, edgecolor=posCol, alpha=0.5)
ax0.set_yticks([30,70])
ax0.yaxis.label.set_color("w")
ax0.spines['bottom'].set_color("#7F7F7F")
ax0.spines['top'].set_color("#7F7F7F")
ax0.spines['left'].set_color("#7F7F7F")
ax0.spines['right'].set_color("#7F7F7F")
ax0.tick_params(axis='y', colors='black')
ax0.tick_params(axis='x', colors='black')
plt.ylabel('RSI', color='b')
ax1v = ax1.twinx()
ax1v.fill_between(date[-SP:],volumeMin, volume[-SP:], facecolor='#7F7F7F', alpha=.4)
ax1v.axes.yaxis.set_ticklabels([])
ax1v.grid(False)
###Edit this to 3, so it's a bit larger
ax1v.set_ylim(0, 3*volume.max())
ax1v.spines['bottom'].set_color("#7F7F7F")
ax1v.spines['top'].set_color("#7F7F7F")
ax1v.spines['left'].set_color("#7F7F7F")
ax1v.spines['right'].set_color("#7F7F7F")
ax1v.tick_params(axis='x', colors='black')
ax1v.tick_params(axis='y', colors='black')
ax2 = plt.subplot2grid((6,4), (5,0), sharex=ax1, rowspan=1, colspan=4) #axisbg='#07000d'
fillcolor = '#7F7F7F'
nslow = 26
nfast = 12
nema = 9
emaslow, emafast, macd = computeMACD(closep)
ema9 = ExpMovingAverage(macd, nema)
ax2.plot(date[-SP:], macd[-SP:], color='blue', lw=1)
ax2.plot(date[-SP:], ema9[-SP:], color='red', lw=1)
ax2.fill_between(date[-SP:], macd[-SP:]-ema9[-SP:], 0, alpha=0.5, facecolor=fillcolor, edgecolor=fillcolor)
plt.gca().yaxis.set_major_locator(mticker.MaxNLocator(prune='upper'))
ax2.spines['bottom'].set_color("#7F7F7F")
ax2.spines['top'].set_color("#7F7F7F")
ax2.spines['left'].set_color("#7F7F7F")
ax2.spines['right'].set_color("#7F7F7F")
ax2.tick_params(axis='x', colors='black')
ax2.tick_params(axis='y', colors='black')
plt.ylabel('MACD', color='black')
ax2.yaxis.set_major_locator(mticker.MaxNLocator(nbins=5, prune='upper'))
for label in ax2.xaxis.get_ticklabels():
label.set_rotation(30)
plt.suptitle(stock.upper(),color='black')
plt.setp(ax0.get_xticklabels(), visible=False)
plt.setp(ax1.get_xticklabels(), visible=False)
plt.subplots_adjust(left=.09, bottom=.14, right=.94, top=.95, wspace=.20, hspace=0)
#plt.show()
print ("File saved")
fig.savefig('example.png',facecolor=fig.get_facecolor())
except Exception as e:
print('main loop',str(e))
graphData('RELIANCE',20,50)
Second script
import re
import time
import datetime
from datetime import datetime, timedelta
import pandas as pd
import numpy as np
import matplotlib
matplotlib.use('agg')
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
from mpl_finance import candlestick_ohlc
import pylab
def bytespdate2num(fmt, encoding='utf-8'):
strconverter = mdates.strpdate2num(fmt)
def bytesconverter(b):
s = b.decode(encoding)
return strconverter(s)
return bytesconverter
filepath = 'S:/Perl64/lambda/Litmus/data/daily/TATASTEEL.csv'
df = pd.read_csv(filepath, header=0)
df.columns = ["Date", "Open", "High", "Low", "Close", "Volume"]
df = df.reset_index(drop=True)
df = df.set_index("Date", inplace = False)
df.sort_index(inplace=True) ## Sort in chronological order or as earlier dates first
df = df.tail(250)
CL_period = 9 # length of Tenkan Sen or Conversion Line
BL_period = 26 # length of Kijun Sen or Base Line
Lead_span_B_period = 52 # length of Senkou Sen B or Leading Span B
Lag_span_period = 52 # length of Chikou Span or Lagging Span
# add to the dataframe, different components of the Ichimoku
# use shift function to shift a time series forward by the given value
high_9 = df['High'].rolling(window=CL_period).max()
low_9 = df['Low'].rolling(window=CL_period).min()
df['Conv_line'] = (high_9 + low_9) /2
high_26 = df['High'].rolling(window=BL_period).max()
low_26 = df['Low'].rolling(window=BL_period).min()
df['Base_line'] = (high_26 + low_26) /2
df['SpanA'] = ((df['Conv_line'] + df['Base_line'])/2)
#df['SpanA'] = df['SpanA'].shift(26)
high_52 = df['High'].rolling(window=Lead_span_B_period).max()
low_52 = df['Low'].rolling(window=Lead_span_B_period).min()
df['SpanB'] = (high_52 + low_52)/2
#df['SpanB'] = df['SpanB'].shift(26)
df['Lagging_span'] = df['Close']
#df.dropna(inplace=True) # drop NA values from Dataframe
# plot the data using matplotlib's functionality
#add figure and axis objects
fig,ax = plt.subplots(1,1,sharex=True,figsize = (14,8)) #share x axis and set a figure size
fn='S:/Perl64/lambda/scripts/reliance_coi.csv'
df.to_csv(fn, index=1, sep=',', mode='a')
df['Close'] = df['Close'].shift(-26)
ax.plot(df.index, df.Close,linewidth=2) # plot Close with index on x-axis with a line thickness of 4
try:
#candlestick_ohlc(ax, data.values, width=0.6, colorup='g', colordown='r')
candlestick_ohlc(ax=ax, opens=df['Open'], highs=df['High'],lows=df['Low'],closes=df['Close'],width=0.4, colorup='#ff1717', colordown='#53c156')
#candlestick_ohlc(ax, newAr[-SP:], width=.5, colorup='#ff1717', colordown='#53c156')
except Exception as e:
print('main loop',str(e))
df['Conv_line'] = df['Conv_line'].shift(-26)
ax.plot(df.index, df.Conv_line, color="blue") # Tenken
df['Base_line'] = df['Base_line'].shift(-26)
ax.plot(df.index, df.Base_line, color="red") # Kijun
df['Lagging_span'] = df['Lagging_span'].shift(-52)
ax.plot(df.index, df.Lagging_span, color="grey") # Tenken
ax.plot(df.index, df.SpanA) # plot Lead Span A with index on the shared x-axis
ax.plot(df.index, df.SpanB) # plot Lead Span B with index on the sahred x-axis
# use the fill_between call of ax object to specify where to fill the chosen color
# pay attention to the conditions specified in the fill_between call
ax.fill_between(df.index,df.SpanA,df.SpanB,where = df.SpanA >= df.SpanB, color = 'lightgreen')
ax.fill_between(df.index,df.SpanA,df.SpanB,where = df.SpanA < df.SpanB, color = 'lightcoral')
plt.legend(loc=0) #Let matplotlib choose best location for legend
plt.tight_layout()
#plt.grid() # display the major grid
fig.savefig('ichimoku.png')
The stock prices CSV file that I use is below:
2019-01-01,1125.25,1127.30,1110.10,1121.00,4455850
2019-01-02,1114.50,1127.00,1101.00,1106.40,7144970
2019-01-03,1107.50,1114.60,1090.10,1092.75,7446457
2019-01-04,1097.40,1104.45,1081.10,1098.65,8465141
2019-01-07,1107.00,1118.45,1101.00,1104.75,5513559
2019-01-08,1105.10,1109.95,1096.00,1104.65,5625153
2019-01-09,1112.00,1117.00,1098.70,1110.75,5766805
2019-01-10,1107.75,1111.00,1103.00,1107.50,4080283
2019-01-11,1107.60,1113.80,1088.60,1098.05,6463903
2019-01-14,1095.00,1100.50,1086.40,1096.80,4111782
2019-01-15,1105.00,1132.00,1105.00,1129.65,10062875
2019-01-16,1135.00,1145.00,1130.35,1135.90,6382777
2019-01-17,1144.45,1147.90,1130.00,1134.45,7487963
2019-01-18,1148.80,1189.90,1135.25,1184.35,25684142
2019-01-21,1194.00,1239.95,1188.65,1237.70,22038534
2019-01-22,1232.85,1246.95,1219.60,1235.15,16552819
2019-01-23,1233.30,1244.20,1222.00,1226.30,8829502
2019-01-24,1225.00,1253.20,1220.10,1247.45,13155185
2019-01-25,1250.45,1264.70,1235.40,1246.00,8550836
2019-01-28,1250.50,1255.95,1222.40,1229.55,8569265
2019-01-29,1231.00,1231.65,1201.35,1210.65,9328866
2019-01-30,1215.00,1225.00,1191.10,1195.70,7846940
2019-01-31,1202.00,1229.70,1201.00,1227.15,10185347
2019-02-01,1234.00,1255.00,1227.05,1249.95,9228965
2019-02-04,1247.00,1296.95,1242.05,1290.90,11670570
2019-02-05,1292.00,1304.40,1278.60,1291.55,9362406
2019-02-06,1296.25,1317.65,1294.25,1310.25,9411585
2019-02-07,1310.25,1321.20,1286.10,1290.40,9212227
2019-02-08,1284.80,1300.50,1272.25,1277.70,6505502
2019-02-11,1275.90,1276.00,1251.00,1253.25,7523999
2019-02-12,1251.50,1272.35,1251.50,1256.40,6399485
2019-02-14,1241.00,1241.00,1218.00,1224.20,6627360
2019-02-15,1229.75,1249.90,1214.00,1244.45,9597961
2019-02-18,1250.00,1252.50,1215.00,1220.10,9649017
2019-02-19,1218.00,1239.70,1211.20,1216.10,6244189
2019-02-20,1223.85,1240.00,1219.00,1234.35,6298179
2019-02-21,1236.00,1257.80,1229.35,1246.90,10580178
2019-02-22,1244.60,1245.30,1226.00,1232.35,8755865
2019-02-25,1236.00,1243.00,1220.65,1232.30,7852528
2019-02-26,1209.50,1234.80,1206.00,1220.25,10131050
2019-02-27,1228.05,1244.90,1209.00,1223.50,11113182
2019-02-28,1233.75,1239.85,1226.55,1231.05,11286916
2019-03-01,1237.00,1242.35,1222.25,1226.05,7922513
2019-03-05,1223.40,1239.80,1218.60,1237.65,7121509
2019-03-06,1239.80,1273.10,1235.10,1264.80,12038231
2019-03-07,1264.00,1279.80,1258.15,1270.25,8109259
2019-03-08,1266.05,1274.45,1262.00,1267.10,6040052
2019-03-11,1270.05,1312.00,1268.00,1304.10,9718840
2019-03-12,1316.90,1334.00,1314.25,1331.35,11228736
2019-03-13,1337.00,1360.00,1328.10,1347.30,11236048
2019-03-14,1349.75,1362.00,1336.10,1341.55,10402048
2019-03-15,1345.00,1358.80,1311.20,1321.65,15893093
2019-03-18,1331.00,1357.95,1329.00,1350.05,10105234
2019-03-19,1360.00,1380.00,1343.10,1376.55,9805318
2019-03-20,1377.80,1388.00,1364.00,1375.45,9892823
2019-03-22,1372.50,1380.90,1336.70,1341.75,11465112
2019-03-25,1330.60,1336.85,1316.70,1324.45,7951992
2019-03-26,1330.30,1371.60,1330.00,1367.25,9479288
2019-03-27,1377.95,1377.95,1344.25,1349.25,10094174
2019-03-28,1350.25,1369.80,1342.80,1360.00,10482938
2019-04-01,1370.00,1406.80,1362.55,1391.85,10098281
2019-04-02,1398.00,1403.10,1380.10,1389.70,8012636
2019-04-03,1392.75,1403.00,1372.00,1375.20,7849461
2019-04-04,1379.00,1383.70,1347.25,1353.05,8375674
2019-04-05,1360.95,1363.90,1343.00,1353.90,6728239
2019-04-08,1356.00,1357.50,1323.70,1329.25,8723577
2019-04-09,1328.90,1340.70,1321.00,1334.45,9497621
2019-04-10,1337.95,1348.00,1326.50,1331.40,7612711
2019-04-11,1332.95,1353.00,1329.00,1346.80,5741333
2019-04-12,1350.00,1356.90,1336.65,1343.10,5919742
2019-04-15,1345.00,1348.95,1335.00,1340.15,4245127
2019-04-16,1345.00,1360.00,1340.00,1343.75,7936553
2019-04-18,1375.00,1389.75,1365.00,1385.95,17960482
2019-04-22,1360.00,1367.00,1341.30,1345.35,10792748
2019-04-23,1348.00,1373.00,1346.00,1363.85,9055300
2019-04-24,1370.30,1394.80,1366.25,1389.50,7360887
2019-04-25,1389.10,1412.40,1362.60,1372.40,13929820
2019-04-26,1375.00,1395.95,1370.70,1392.80,6889444
2019-04-30,1396.40,1396.40,1366.80,1392.80,10217019
2019-05-02,1392.00,1413.90,1382.10,1405.05,8682505
2019-05-03,1407.95,1417.50,1402.65,1408.85,6510169
2019-05-06,1398.00,1402.80,1378.10,1384.90,7237910
2019-05-07,1394.80,1395.00,1340.20,1343.50,8876945
2019-05-08,1340.00,1340.00,1292.20,1299.45,14610543
2019-05-09,1288.80,1288.80,1251.75,1256.45,19507368
2019-05-10,1265.00,1277.70,1245.00,1251.15,11226831
2019-05-13,1247.90,1260.80,1227.50,1232.05,8047801
2019-05-14,1236.50,1269.35,1231.50,1260.45,13001004
2019-05-15,1273.00,1278.00,1250.60,1256.90,11163801
2019-05-16,1259.95,1271.90,1258.10,1265.35,6606652
2019-05-17,1267.00,1276.95,1252.00,1267.40,7898440
2019-05-20,1313.60,1337.70,1303.50,1325.90,12333937
2019-05-21,1332.20,1367.00,1330.05,1339.80,13872055
2019-05-22,1345.65,1359.70,1335.10,1340.40,11287400
2019-05-23,1372.00,1392.00,1325.00,1333.90,17722514
2019-05-24,1348.00,1353.80,1316.50,1336.85,10180759
2019-05-27,1337.10,1337.50,1307.00,1310.65,7349720
2019-05-28,1319.80,1334.80,1313.35,1323.75,19472659
2019-05-29,1321.00,1333.30,1304.15,1313.05,7112830
2019-05-30,1316.25,1342.00,1316.25,1329.75,10740841
2019-05-31,1337.90,1341.90,1320.20,1330.15,11760178
2019-06-03,1335.00,1367.25,1321.20,1360.20,8483610
2019-06-04,1357.45,1374.25,1348.10,1351.65,7059911
2019-06-06,1361.90,1361.90,1321.10,1327.35,7664319
2019-06-07,1325.95,1327.25,1305.60,1314.90,6730595
2019-06-10,1320.90,1327.00,1310.10,1319.15,5380148
2019-06-11,1321.85,1334.50,1318.00,1329.15,5253790
2019-06-12,1334.70,1338.40,1325.00,1332.15,4707716
2019-06-13,1330.00,1334.70,1308.65,1327.25,7171189
2019-06-14,1321.90,1325.00,1309.40,1317.55,6831331
2019-06-17,1320.00,1320.00,1278.50,1282.30,6815554
2019-06-18,1278.90,1287.95,1269.10,1281.00,7679193
2019-06-19,1286.90,1302.00,1262.60,1277.35,6625604
2019-06-20,1280.00,1300.00,1278.00,1296.75,4914012
2019-06-21,1295.95,1296.00,1275.55,1279.50,10623098
2019-06-24,1272.15,1276.45,1257.10,1262.40,5150998
2019-06-25,1258.90,1298.00,1254.25,1295.85,6842363
2019-06-26,1291.00,1304.60,1286.35,1294.15,5299942
2019-06-27,1293.15,1296.90,1271.00,1274.15,11385972
2019-06-28,1277.10,1282.85,1248.65,1253.10,8659721
2019-07-01,1258.05,1272.65,1246.45,1268.85,6162080
2019-07-02,1273.95,1281.00,1263.30,1278.50,4638751
2019-07-03,1282.90,1286.50,1275.05,1282.55,4026032
2019-07-04,1281.40,1291.00,1280.00,1284.00,4275148
2019-07-05,1285.10,1290.50,1260.00,1263.35,4995344
2019-07-08,1258.00,1268.10,1248.20,1252.05,6404544
2019-07-09,1248.95,1283.50,1245.50,1280.10,8016757
2019-07-10,1280.05,1289.35,1268.70,1278.85,5494315
2019-07-11,1287.00,1289.80,1279.30,1281.55,3935460
2019-07-12,1283.00,1300.00,1278.05,1280.50,7174054
2019-07-15,1285.00,1289.50,1270.35,1276.10,4873164
2019-07-16,1279.95,1294.90,1277.05,1293.00,4604019
2019-07-17,1294.30,1297.00,1280.00,1281.85,4334958
2019-07-18,1282.00,1286.40,1258.00,1261.85,5459896
2019-07-19,1268.20,1272.95,1242.70,1249.00,7468515
2019-07-22,1251.00,1284.50,1227.30,1280.50,13300153
2019-07-23,1285.00,1293.90,1260.40,1273.55,9287951
2019-07-24,1273.50,1278.80,1253.55,1259.10,6943982
2019-07-25,1264.00,1269.05,1227.00,1231.50,9968545
2019-07-26,1231.50,1242.50,1210.00,1213.80,9320481
2019-07-29,1216.90,1222.00,1205.10,1210.95,8058035
2019-07-30,1213.95,1220.00,1175.95,1180.90,9533344
2019-07-31,1175.75,1185.00,1162.40,1166.25,9705619
2019-08-01,1163.40,1188.20,1150.35,1180.25,10344862
2019-08-02,1175.70,1198.25,1162.10,1184.35,10865385
2019-08-05,1167.00,1167.00,1128.00,1143.35,13897461
2019-08-06,1134.75,1149.60,1122.00,1128.30,13288909
2019-08-07,1126.50,1138.55,1103.10,1109.40,11588879
2019-08-08,1108.35,1158.00,1095.30,1152.35,14262936
2019-08-09,1161.85,1175.50,1152.30,1162.10,10043949
2019-08-13,1233.15,1302.80,1226.00,1274.75,47923444
2019-08-14,1304.00,1304.45,1280.35,1288.25,14487137
2019-08-16,1291.20,1291.80,1273.00,1278.00,10047496
2019-08-19,1281.05,1296.80,1280.00,1292.60,7459859
2019-08-20,1289.80,1292.60,1272.60,1275.95,6843460
2019-08-21,1275.65,1278.65,1266.50,1270.95,4881553
2019-08-22,1270.95,1271.00,1238.90,1246.75,6414937
2019-08-23,1239.00,1284.00,1226.50,1275.85,9741262
2019-08-26,1294.00,1294.00,1259.00,1266.80,8778739
2019-08-27,1285.00,1285.00,1261.20,1274.85,12984396
2019-08-28,1273.75,1281.00,1256.05,1263.30,5305639
2019-08-29,1256.45,1260.25,1235.30,1241.75,8635974
2019-08-30,1245.50,1254.40,1221.00,1248.55,11308120
2019-09-03,1242.25,1243.00,1200.00,1206.40,8563009
2019-09-04,1200.55,1205.25,1186.05,1201.15,15063355
2019-09-05,1206.80,1213.20,1193.30,1198.60,10512763
2019-09-06,1203.00,1229.00,1195.25,1222.50,10600234
2019-09-09,1220.65,1233.00,1213.15,1222.20,5370758
2019-09-11,1222.50,1240.00,1222.50,1234.40,5544468
2019-09-12,1235.00,1240.45,1205.70,1210.35,5431139
2019-09-13,1212.00,1228.50,1206.90,1225.60,5919260
2019-09-16,1189.00,1219.10,1186.10,1210.75,9393731
2019-09-17,1211.00,1211.00,1193.50,1197.45,7150435
2019-09-18,1204.95,1216.30,1197.20,1205.70,6827281
2019-09-19,1207.85,1209.70,1172.65,1179.05,6293454
2019-09-20,1187.95,1269.90,1174.30,1254.35,22019674
2019-09-23,1274.15,1281.00,1235.00,1239.20,9879751
2019-09-24,1243.60,1298.80,1242.75,1278.70,15982067
2019-09-25,1284.00,1295.00,1268.85,1279.55,8316894
2019-09-26,1292.00,1298.80,1283.50,1296.80,8389212
2019-09-27,1292.50,1315.00,1284.00,1309.05,8712980
2019-09-30,1310.00,1335.75,1305.55,1332.25,11549746
2019-10-01,1337.00,1342.00,1293.30,1304.90,8192597
2019-10-03,1286.00,1314.70,1281.30,1311.05,6183107
2019-10-04,1319.90,1328.60,1303.85,1308.10,6853954
2019-10-07,1308.10,1320.80,1301.70,1310.10,4599818
2019-10-09,1308.70,1329.95,1292.50,1324.75,8040938
2019-10-10,1325.00,1369.00,1321.00,1362.75,16003744
2019-10-11,1363.70,1365.60,1336.55,1352.60,7587648
2019-10-14,1364.95,1364.95,1350.85,1358.00,6123412
2019-10-15,1362.50,1370.00,1354.30,1364.15,4422075
2019-10-16,1369.90,1379.65,1363.70,1372.35,8870701
2019-10-17,1375.00,1399.00,1372.00,1396.50,7332464
2019-10-18,1404.00,1427.90,1398.70,1416.35,12856410
2019-10-22,1425.00,1436.85,1403.35,1414.15,12703054
2019-10-23,1416.30,1425.95,1383.15,1392.40,8432964
2019-10-24,1401.00,1441.40,1386.55,1436.45,10351384
2019-10-25,1441.10,1441.45,1411.25,1431.20,6190013
2019-10-29,1445.50,1480.00,1442.10,1467.05,11780494
2019-10-30,1480.00,1484.55,1460.30,1479.10,7470723
2019-10-31,1484.00,1489.65,1461.70,1464.35,8898168
2019-11-01,1455.00,1461.80,1441.00,1456.90,6356579
2019-11-04,1465.90,1471.00,1445.10,1457.65,6429329
2019-11-05,1463.10,1468.95,1441.00,1447.30,5799318
2019-11-06,1442.70,1446.45,1428.50,1434.90,6686289
2019-11-07,1435.00,1463.00,1432.20,1458.60,6438749
2019-11-08,1449.00,1459.65,1441.30,1445.50,5494844
2019-11-11,1439.10,1444.25,1422.55,1427.80,5192423
2019-11-13,1430.00,1475.90,1430.00,1472.30,11532364
2019-11-14,1476.00,1481.60,1455.80,1462.75,6518339
2019-11-15,1465.65,1486.80,1463.15,1470.85,7173674
2019-11-18,1472.65,1486.00,1455.40,1459.20,6435097
2019-11-19,1467.00,1514.90,1465.00,1509.75,13795569
2019-11-20,1555.05,1572.40,1543.20,1547.65,19904164
2019-11-21,1545.00,1556.00,1528.55,1537.60,6810577
2019-11-22,1542.10,1569.50,1537.60,1546.50,10218978
2019-11-25,1551.15,1564.80,1551.00,1561.55,6924313
2019-11-26,1568.10,1576.35,1556.00,1560.25,16152137
2019-11-27,1559.95,1575.50,1556.10,1569.85,4408336
2019-11-28,1572.65,1584.15,1563.95,1580.30,6284885
2019-11-29,1581.95,1581.95,1547.85,1551.15,8484822
2019-12-02,1600.00,1614.45,1577.00,1586.50,14275186
2019-12-03,1592.75,1594.00,1572.60,1578.90,5938786
2019-12-04,1573.00,1577.50,1533.75,1552.70,9594828
2019-12-05,1574.00,1579.75,1544.00,1550.85,9117022
2019-12-06,1553.00,1568.00,1541.10,1554.90,5982129
2019-12-09,1556.15,1577.40,1546.50,1572.60,5779807
2019-12-10,1572.05,1573.60,1554.15,1561.95,4650906
2019-12-11,1555.60,1574.50,1550.60,1562.40,5652698
2019-12-12,1570.25,1573.85,1556.65,1568.20,4720977
2019-12-13,1580.00,1590.00,1572.40,1582.90,5791522
2019-12-16,1592.00,1593.90,1564.35,1566.60,5436951
2019-12-17,1566.75,1579.00,1555.55,1562.70,9291724
2019-12-18,1563.00,1580.00,1562.00,1575.85,6739582
2019-12-19,1573.70,1614.90,1571.80,1609.95,9375484
2019-12-20,1615.00,1617.55,1596.10,1599.10,9724619
2019-12-23,1560.10,1577.55,1557.80,1571.40,11478429
2019-12-24,1568.90,1572.05,1542.50,1546.45,8251144
2019-12-26,1541.65,1552.95,1510.15,1515.40,13605737
2019-12-27,1527.00,1546.20,1521.30,1542.35,8081591
2019-12-30,1545.95,1547.65,1528.05,1544.20,7828402
2019-12-31,1542.00,1543.70,1508.05,1514.05,10150467
2020-01-01,1518.00,1527.10,1505.50,1509.60,6402372
2020-01-02,1512.00,1540.95,1512.00,1535.30,8096561
2020-01-03,1533.00,1541.65,1523.00,1537.15,9593498
2020-01-06,1520.00,1527.90,1498.00,1501.50,11209343
2020-01-07,1519.00,1534.50,1513.50,1524.60,7627191
2020-01-08,1515.00,1534.45,1510.00,1513.15,7336561
2020-01-09,1538.60,1550.00,1531.25,1548.00,6849606
2020-01-10,1551.90,1557.95,1539.65,1547.65,5704686
2020-01-13,1545.05,1558.70,1538.40,1543.70,8358090
2020-01-14,1540.00,1550.00,1521.85,1529.40,7230788
2020-01-15,1535.85,1539.90,1518.25,1523.85,7231393
2020-01-16,1529.00,1543.35,1528.00,1537.90,5873662
2020-01-17,1553.50,1584.95,1553.20,1581.00,13469708
2020-01-20,1609.00,1609.00,1526.40,1532.35,14878868
2020-01-21,1528.60,1545.85,1522.00,1533.90,8650831
2020-01-22,1544.00,1546.75,1531.10,1533.35,4719245
2020-01-23,1536.50,1541.95,1520.70,1526.85,5142037
2020-01-24,1527.00,1536.35,1518.55,1521.55,6687633
2020-01-27,1514.90,1524.45,1505.00,1506.55,6120429
2020-01-28,1508.60,1510.00,1463.60,1471.75,11215313
2020-01-29,1474.05,1494.40,1464.05,1479.85,11313297
2020-01-30,1479.00,1479.70,1440.00,1443.75,10241756
2020-01-31,1453.00,1453.25,1407.20,1411.65,15886673
2020-02-01,1405.30,1426.95,1317.10,1383.35,10718667
The problem with the second script above is that I am getting an error at this line:
candlestick_ohlc(ax=ax, opens=df['Open'], highs=df['High'],lows=df['Low'],closes=df['Close'],width=0.4, colorup='#ff1717', colordown='#53c156')
It gives me this error (you can see I have tried various versions of the code to build candlestick_ohlc charts):
main loop candlestick_ohlc() got an unexpected keyword argument 'opens'
I have the following questions to ask:
What is wrong with the second script?
How can I draw support/resistance lines and harmonic patterns using matplotlib? The support lines and harmonic patterns are stored in a pandas DataFrame which needs to be plotted on the chart (see the sketch after the note below).
Note: support and resistance are price levels on a stock chart at which you can draw a line, and they act as support or resistance for the stock.
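Roughly, what I have in mind for the support/resistance part is something like the following (a hypothetical sketch: levels and its columns are made-up names, not something produced by my scripts):

import pandas as pd
import matplotlib.pyplot as plt

# Hypothetical DataFrame of pre-computed levels: one price per row,
# plus a flag saying whether it acts as support or resistance.
levels = pd.DataFrame({'price': [1250.0, 1400.0, 1550.0],
                       'kind':  ['support', 'resistance', 'resistance']})

fig, ax = plt.subplots(figsize=(14, 8))
# ... candlesticks / Ichimoku would be drawn on ax here ...
for _, row in levels.iterrows():
    ax.axhline(row['price'],
               color='green' if row['kind'] == 'support' else 'red',
               linestyle='--', linewidth=1)
fig.savefig('levels.png')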
matplotlib.use('agg')
This is a non-interactive backend. Use an interactive backend instead, e.g. GTK3Agg, GTK3Cairo, MacOSX, nbAgg, Qt4Agg, Qt4Cairo, Qt5Agg, Qt5Cairo, TkAgg, TkCairo, WebAgg, WX, WXAgg or WXCairo.
Try one of the above; maybe this helps.
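For example, a minimal sketch of that change (assuming TkAgg is available on your system):

import matplotlib
matplotlib.use('TkAgg')   # choose an interactive backend *before* importing pyplot
import matplotlib.pyplot as plt

# ... build the figure exactly as in the scripts above ...
plt.show()                # opens an interactive window instead of only saving a PNG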
I am trying to animate a scatter plot and a bivariate Gaussian distribution from a set of x/y coordinates. I'll show the specific code that draws the scatter and the distribution first, and then the code that computes the distribution.
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import scipy.stats as sts
import matplotlib.animation as animation
''' Below is a section of the script that generates the scatter and contour '''
fig, ax = plt.subplots(figsize = (10,4))
def plotmvs(df, xlim=None, ylim=None, fig=fig, ax=ax):
if xlim is None: xlim = datalimits(df['X'])
if ylim is None: ylim = datalimits(df['Y'])
PDFs = []
for (group,gdf),color in zip(df.groupby('group'), ('red', 'blue')):
ax.plot(*gdf[['X','Y']].values.T, '.', c=color, alpha = 0.5)
kwargs = {
'xlim': xlim,
'ylim': ylim
}
X, Y, PDF = mvpdfs(gdf['X'].values, gdf['Y'].values, **kwargs)
PDFs.append(PDF)
PDF = PDFs[0] - PDFs[1]
normPDF = PDF - PDF.min()
normPDF = normPDF/normPDF.max()
cfs = ax.contourf(X, Y, normPDF, levels=100, cmap='jet')
return fig, ax
n = 10
time = [1]
d = ({
'A1_Y' : [10,20,15,20,25,40,50,60,61,65],
'A1_X' : [15,10,15,20,25,25,30,40,60,61],
'A2_Y' : [10,13,17,10,20,24,29,30,33,40],
'A2_X' : [10,13,15,17,18,19,20,21,26,30],
'A3_Y' : [11,12,15,17,19,20,22,25,27,30],
'A3_X' : [15,18,20,21,22,28,30,32,35,40],
'A4_Y' : [15,20,15,20,25,40,50,60,61,65],
'A4_X' : [16,20,15,30,45,30,40,10,11,15],
'B1_Y' : [18,10,11,13,18,10,30,40,31,45],
'B1_X' : [17,20,15,10,25,20,10,12,14,25],
'B2_Y' : [13,10,14,20,21,12,30,20,11,35],
'B2_X' : [12,20,16,22,15,20,10,20,16,15],
'B3_Y' : [15,20,15,20,25,10,20,10,15,25],
'B3_X' : [18,15,13,20,21,10,20,10,11,15],
'B4_Y' : [19,12,15,18,14,19,13,12,11,18],
'B4_X' : [20,10,12,18,17,15,13,14,19,13],
})
tuples = [((t, k.split('_')[0][0], int(k.split('_')[0][1:]), k.split('_')[1]), v[i]) for k,v in d.items() for i,t in enumerate(time)]
df = pd.Series(dict(tuples)).unstack(-1)
df.index.names = ['time', 'group', 'id']
for time,tdf in df.groupby('time'):
plotmvs(tdf)
'''MY ATTEMPT AT ANIMATING THE PLOT '''
def animate(i) :
tdf.set_offsets([[tdf.iloc[0:,1][0+i][0], tdf.iloc[0:,0][0+i][0]], [tdf.iloc[0:,1][0+i][1], tdf.iloc[0:,0][0+i][1]], [tdf.iloc[0:,1][0+i][2], tdf.iloc[0:,0][0+i][2]], [tdf.iloc[0:,1][0+i][3], tdf.iloc[0:,0][0+i][3]], [tdf.iloc[0:,1][0+i][4], tdf.iloc[0:,0][0+i][4]]])
normPDF = n[i,:,0,:].T
cfs.set_data(X, Y, normPDF)
ani = animation.FuncAnimation(fig, animate, np.arange(0,10),# init_func = init,
interval = 10, blit = False)
Here is the full working code showing how the distribution is generated and plotted for a single frame:
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import scipy.stats as sts
import matplotlib.animation as animation
def datalimits(*data, pad=.15):
dmin,dmax = min(d.min() for d in data), max(d.max() for d in data)
spad = pad*(dmax - dmin)
return dmin - spad, dmax + spad
def rot(theta):
theta = np.deg2rad(theta)
return np.array([
[np.cos(theta), -np.sin(theta)],
[np.sin(theta), np.cos(theta)]
])
def getcov(radius=1, scale=1, theta=0):
cov = np.array([
[radius*(scale + 1), 0],
[0, radius/(scale + 1)]
])
r = rot(theta)
    return r @ cov @ r.T
def mvpdf(x, y, xlim, ylim, radius=1, velocity=0, scale=0, theta=0):
X,Y = np.meshgrid(np.linspace(*xlim), np.linspace(*ylim))
XY = np.stack([X, Y], 2)
    x,y = rot(theta) @ (velocity/2, 0) + (x, y)
cov = getcov(radius=radius, scale=scale, theta=theta)
PDF = sts.multivariate_normal([x, y], cov).pdf(XY)
return X, Y, PDF
def mvpdfs(xs, ys, xlim, ylim, radius=None, velocity=None, scale=None, theta=None):
PDFs = []
for i,(x,y) in enumerate(zip(xs,ys)):
kwargs = {
'xlim': xlim,
'ylim': ylim
}
X, Y, PDF = mvpdf(x, y,**kwargs)
PDFs.append(PDF)
return X, Y, np.sum(PDFs, axis=0)
fig, ax = plt.subplots(figsize = (10,4))
def plotmvs(df, xlim=None, ylim=None, fig=fig, ax=ax):
if xlim is None: xlim = datalimits(df['X'])
if ylim is None: ylim = datalimits(df['Y'])
PDFs = []
for (group,gdf),color in zip(df.groupby('group'), ('red', 'blue')):
#Animate this scatter
ax.plot(*gdf[['X','Y']].values.T, '.', c=color, alpha = 0.5)
kwargs = {
'xlim': xlim,
'ylim': ylim
}
X, Y, PDF = mvpdfs(gdf['X'].values, gdf['Y'].values, **kwargs)
PDFs.append(PDF)
PDF = PDFs[0] - PDFs[1]
normPDF = PDF - PDF.min()
normPDF = normPDF/normPDF.max()
#Animate this contour
cfs = ax.contourf(X, Y, normPDF, levels=100, cmap='jet')
return fig, ax
n = 10
time = [1]
d = ({
'A1_Y' : [10,20,15,20,25,40,50,60,61,65],
'A1_X' : [15,10,15,20,25,25,30,40,60,61],
'A2_Y' : [10,13,17,10,20,24,29,30,33,40],
'A2_X' : [10,13,15,17,18,19,20,21,26,30],
'A3_Y' : [11,12,15,17,19,20,22,25,27,30],
'A3_X' : [15,18,20,21,22,28,30,32,35,40],
'A4_Y' : [15,20,15,20,25,40,50,60,61,65],
'A4_X' : [16,20,15,30,45,30,40,10,11,15],
'B1_Y' : [18,10,11,13,18,10,30,40,31,45],
'B1_X' : [17,20,15,10,25,20,10,12,14,25],
'B2_Y' : [13,10,14,20,21,12,30,20,11,35],
'B2_X' : [12,20,16,22,15,20,10,20,16,15],
'B3_Y' : [15,20,15,20,25,10,20,10,15,25],
'B3_X' : [18,15,13,20,21,10,20,10,11,15],
'B4_Y' : [19,12,15,18,14,19,13,12,11,18],
'B4_X' : [20,10,12,18,17,15,13,14,19,13],
})
tuples = [((t, k.split('_')[0][0], int(k.split('_')[0][1:]), k.split('_')[1]), v[i]) for k,v in d.items() for i,t in enumerate(time)]
df = pd.Series(dict(tuples)).unstack(-1)
df.index.names = ['time', 'group', 'id']
for time,tdf in df.groupby('time'):
plotmvs(tdf)
I essentially want to animate this code by iterating over each row of xy coordinates.
Here's a very quick and dirty modification of the OP's code, fixing the scatter animation and adding (a form of) contour animation.
Basically, you start by creating the artists for your animation (in this case Line2D objects, as returned by plot()). Subsequently, you create an update function (and, optionally, an initialization function). In that function, you update the existing artists. I think the example in the matplotlib docs explains it all.
In this case, I modified the OP's plotmvs function to be used as the update function (instead of the OP's proposed animate function).
The QuadContourSet returned by contourf (i.e. your cfs) cannot be used as an artist in itself, but you can make it work using cfs.collections (props to this SO answer). However, you still need to create a new contour plot and remove the old one, instead of just updating the contour data. Personally I would prefer a lower level approach: try to get the contour-data without calling contourf, then initialize and update the contour lines just like you do for the scatter.
Nevertheless, the approach above is implemented in the OP's code below (just copy, paste, and run):
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import scipy.stats as sts
from matplotlib.animation import FuncAnimation
# quick and dirty override of datalimits(), to get a fixed contour-plot size
DATA_LIMITS = [0, 70]
def datalimits(*data, pad=.15):
# dmin,dmax = min(d.min() for d in data), max(d.max() for d in data)
# spad = pad*(dmax - dmin)
return DATA_LIMITS # dmin - spad, dmax + spad
def rot(theta):
theta = np.deg2rad(theta)
return np.array([
[np.cos(theta), -np.sin(theta)],
[np.sin(theta), np.cos(theta)]
])
def getcov(radius=1, scale=1, theta=0):
cov = np.array([
[radius*(scale + 1), 0],
[0, radius/(scale + 1)]
])
r = rot(theta)
    return r @ cov @ r.T
def mvpdf(x, y, xlim, ylim, radius=1, velocity=0, scale=0, theta=0):
X,Y = np.meshgrid(np.linspace(*xlim), np.linspace(*ylim))
XY = np.stack([X, Y], 2)
    x,y = rot(theta) @ (velocity/2, 0) + (x, y)
cov = getcov(radius=radius, scale=scale, theta=theta)
PDF = sts.multivariate_normal([x, y], cov).pdf(XY)
return X, Y, PDF
def mvpdfs(xs, ys, xlim, ylim, radius=None, velocity=None, scale=None, theta=None):
PDFs = []
for i,(x,y) in enumerate(zip(xs,ys)):
kwargs = {
'xlim': xlim,
'ylim': ylim
}
X, Y, PDF = mvpdf(x, y,**kwargs)
PDFs.append(PDF)
return X, Y, np.sum(PDFs, axis=0)
fig, ax = plt.subplots(figsize = (10,4))
ax.set_xlim(DATA_LIMITS)
ax.set_ylim(DATA_LIMITS)
# Initialize empty lines for the scatter (increased marker size to make them more visible)
line_a, = ax.plot([], [], '.', c='red', alpha = 0.5, markersize=20, animated=True)
line_b, = ax.plot([], [], '.', c='blue', alpha = 0.5, markersize=20, animated=True)
cfs = None
# Modify the plotmvs function so it updates the lines
# (might as well rename the function to "update")
def plotmvs(tdf, xlim=None, ylim=None):
global cfs # as noted: quick and dirty...
if cfs:
for tp in cfs.collections:
# Remove the existing contours
tp.remove()
# Get the data frame for time t
df = tdf[1]
if xlim is None: xlim = datalimits(df['X'])
if ylim is None: ylim = datalimits(df['Y'])
PDFs = []
for (group, gdf), group_line in zip(df.groupby('group'), (line_a, line_b)):
#Animate this scatter
#ax.plot(*gdf[['X','Y']].values.T, '.', c=color, alpha = 0.5)
# Update the scatter line data
group_line.set_data(*gdf[['X','Y']].values.T)
kwargs = {
'xlim': xlim,
'ylim': ylim
}
X, Y, PDF = mvpdfs(gdf['X'].values, gdf['Y'].values, **kwargs)
PDFs.append(PDF)
PDF = PDFs[0] - PDFs[1]
normPDF = PDF - PDF.min()
normPDF = normPDF / normPDF.max()
# Plot a new contour
cfs = ax.contourf(X, Y, normPDF, levels=100, cmap='jet')
# Return the artists (the trick is to return cfs.collections instead of cfs)
return cfs.collections + [line_a, line_b]
n = 10
time = range(n) # assuming n represents the length of the time vector...
d = ({
'A1_Y' : [10,20,15,20,25,40,50,60,61,65],
'A1_X' : [15,10,15,20,25,25,30,40,60,61],
'A2_Y' : [10,13,17,10,20,24,29,30,33,40],
'A2_X' : [10,13,15,17,18,19,20,21,26,30],
'A3_Y' : [11,12,15,17,19,20,22,25,27,30],
'A3_X' : [15,18,20,21,22,28,30,32,35,40],
'A4_Y' : [15,20,15,20,25,40,50,60,61,65],
'A4_X' : [16,20,15,30,45,30,40,10,11,15],
'B1_Y' : [18,10,11,13,18,10,30,40,31,45],
'B1_X' : [17,20,15,10,25,20,10,12,14,25],
'B2_Y' : [13,10,14,20,21,12,30,20,11,35],
'B2_X' : [12,20,16,22,15,20,10,20,16,15],
'B3_Y' : [15,20,15,20,25,10,20,10,15,25],
'B3_X' : [18,15,13,20,21,10,20,10,11,15],
'B4_Y' : [19,12,15,18,14,19,13,12,11,18],
'B4_X' : [20,10,12,18,17,15,13,14,19,13],
})
tuples = [((t, k.split('_')[0][0], int(k.split('_')[0][1:]), k.split('_')[1]), v[i])
for k,v in d.items() for i,t in enumerate(time)]
df = pd.Series(dict(tuples)).unstack(-1)
df.index.names = ['time', 'group', 'id']
# Use the modified plotmvs as the update function, and supply the data frames
interval_ms = 200
delay_ms = 1000
ani = FuncAnimation(fig, plotmvs, frames=df.groupby('time'),
blit=True, interval=interval_ms, repeat_delay=delay_ms)
# Start the animation
plt.show()
I am trying to use a log scale for the marginal plots of my seaborn jointplot. I am using set_xticks() and set_yticks(), but my changes do not appear. Here is my code, followed by the resulting graph:
import matplotlib.pyplot as plt
%matplotlib inline
import numpy as np
import seaborn as sns
import pandas as pd
tips = sns.load_dataset('tips')
female_waiters = tips[tips['sex']=='Female']
def graph_joint_histograms(df1):
g=sns.jointplot(x = 'total_bill',y = 'tip', data = tips, space = 0.3,ratio = 3)
g.ax_joint.cla()
g.ax_marg_x.cla()
g.ax_marg_y.cla()
for xlabel_i in g.ax_marg_x.get_xticklabels():
xlabel_i.set_visible(False)
for ylabel_i in g.ax_marg_y.get_yticklabels():
ylabel_i.set_visible(False)
x_labels = g.ax_joint.get_xticklabels()
x_labels[0].set_visible(False)
x_labels[-1].set_visible(False)
y_labels = g.ax_joint.get_yticklabels()
y_labels[0].set_visible(False)
y_labels[-1].set_visible(False)
g.ax_joint.set_xlim(0,200)
g.ax_marg_x.set_xlim(0,200)
g.ax_joint.scatter(x = df1['total_bill'],y = df1['tip'],data = df1,c = 'y',edgecolors= '#080808',zorder = 2)
g.ax_joint.scatter(x = tips['total_bill'],y = tips['tip'],data = tips, c= 'c',edgecolors= '#080808')
ax1 =g.ax_marg_x.get_axes()
ax2 = g.ax_marg_y.get_axes()
ax1.set_yscale('log')
ax2.set_xscale('log')
ax1.set_yscale('log')
ax2.set_xscale('log')
ax2.set_xlim(1e0, 1e4)
ax1.set_ylim(1e0, 1e3)
ax2.xaxis.set_ticks([1e0,1e1,1e2,1e3])
ax2.xaxis.set_ticklabels(("1","10","100","1000"), visible = True)
plt.setp(ax2.get_xticklabels(), visible = True)
colors = ['y','c']
ax1.hist([df1['total_bill'],tips['total_bill']],bins = 10, stacked=True,log = True,color = colors, ec='black')
ax2.hist([df1['tip'],tips['tip']],bins = 10,orientation = 'horizontal', stacked=True,log = True,color = colors, ec='black')
ax2.set_ylabel('')
Any ideas would be much appreciated.
Here is the resulting graph:
You should actually get an error from the line g.ax_marg_y.get_axes() since an axes does not have a get_axes() method.
Correcting for that,
ax1 = g.ax_marg_x
ax2 = g.ax_marg_y
should give you the desired plot. The tick labels for the log axis are unfortunately overwritten by the histogram's log=True argument. So you can either leave that argument out (since you already set the axes to log scale anyway) or set the labels after calling hist (a sketch of that variant follows the code below).
import matplotlib.pyplot as plt
import seaborn as sns
tips = sns.load_dataset('tips')
def graph_joint_histograms(tips):
g=sns.jointplot(x = 'total_bill',y = 'tip', data = tips, space = 0.3,ratio = 3)
g.ax_joint.cla()
g.ax_marg_x.cla()
g.ax_marg_y.cla()
for xlabel_i in g.ax_marg_x.get_xticklabels():
xlabel_i.set_visible(False)
for ylabel_i in g.ax_marg_y.get_yticklabels():
ylabel_i.set_visible(False)
x_labels = g.ax_joint.get_xticklabels()
x_labels[0].set_visible(False)
x_labels[-1].set_visible(False)
y_labels = g.ax_joint.get_yticklabels()
y_labels[0].set_visible(False)
y_labels[-1].set_visible(False)
g.ax_joint.set_xlim(0,200)
g.ax_marg_x.set_xlim(0,200)
g.ax_joint.scatter(x = tips['total_bill'],y = tips['tip'],data = tips,
c = 'y',edgecolors= '#080808',zorder = 2)
g.ax_joint.scatter(x = tips['total_bill'],y = tips['tip'],data = tips,
c= 'c',edgecolors= '#080808')
ax1 =g.ax_marg_x
ax2 = g.ax_marg_y
ax1.set_yscale('log')
ax2.set_xscale('log')
ax2.set_xlim(1e0, 1e4)
ax1.set_ylim(1e0, 1e3)
ax2.xaxis.set_ticks([1e0,1e1,1e2,1e3])
ax2.xaxis.set_ticklabels(("1","10","100","1000"), visible = True)
plt.setp(ax2.get_xticklabels(), visible = True)
colors = ['y','c']
ax1.hist([tips['total_bill'],tips['total_bill']],bins = 10,
stacked=True, color = colors, ec='black')
ax2.hist([tips['tip'],tips['tip']],bins = 10,orientation = 'horizontal',
stacked=True, color = colors, ec='black')
ax2.set_ylabel('')
graph_joint_histograms(tips)
plt.show()
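Alternatively, if you want to keep log=True in the hist() call, a sketch of the second option (re-applying the ticks after calling hist; this would replace the last ax2.hist call inside graph_joint_histograms above):

ax2.hist([tips['tip'], tips['tip']], bins=10, orientation='horizontal',
         stacked=True, log=True, color=colors, ec='black')
# re-apply the tick positions and labels that log=True overwrote
ax2.xaxis.set_ticks([1e0, 1e1, 1e2, 1e3])
ax2.xaxis.set_ticklabels(("1", "10", "100", "1000"), visible=True)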
I am trying to plot the following code, but on my Y axis I want labels such as feature_1, feature_2, etc. When I plot, the labels are not clear; they are all congested. Can anyone help me out?
"""
Simple demo of a horizontal bar chart.
"""
import matplotlib.pyplot as plt; plt.rcdefaults()
import numpy as np
import matplotlib.pyplot as plt
# Example data
important_features = [(0.0457, 'feat_34'), (0.042, 'feat_11'), (0.0359, 'feat_25'), (0.0355, 'feat_60'), (0.0316, 'feat_40'), (0.0298, 'feat_14'), (0.0272, 'feat_80'), (0.0252, 'feat_42'), (0.0241, 'feat_86'), (0.0236, 'feat_67'), (0.0232, 'feat_15'), (0.0217, 'feat_62'), (0.02, 'feat_24'), (0.0196, 'feat_26'), (0.0191, 'feat_54'), (0.0189, 'feat_36'), (0.0183, 'feat_48'), (0.0177, 'feat_75'), (0.016, 'feat_88'), (0.0155, 'feat_64'), (0.0147, 'feat_90'), (0.014, 'feat_72'), (0.014, 'feat_69'), (0.0131, 'feat_9'), (0.0131, 'feat_8'), (0.0131, 'feat_57'), (0.0129, 'feat_43'), (0.0126, 'feat_32'), (0.0126, 'feat_16'), (0.012, 'feat_76'), (0.0117, 'feat_33'), (0.0109, 'feat_68'), (0.0109, 'feat_4'), (0.0107, 'feat_39'), (0.0106, 'feat_70'), (0.0104, 'feat_78'), (0.0103, 'feat_59'), (0.0101, 'feat_53'), (0.0096, 'feat_85'), (0.0087, 'feat_38'), (0.0086, 'feat_47'), (0.0083, 'feat_50'), (0.0083, 'feat_46'), (0.0079, 'feat_92'), (0.0078, 'feat_71'), (0.0078, 'feat_30'), (0.0076, 'feat_17'), (0.0073, 'feat_66'), (0.007, 'feat_41'), (0.0069, 'feat_13'), (0.0068, 'feat_83'), (0.0068, 'feat_73'), (0.0068, 'feat_56'), (0.0066, 'feat_20'), (0.0064, 'feat_1'), (0.0062, 'feat_37'), (0.0061, 'feat_27'), (0.006, 'feat_87'), (0.006, 'feat_2'), (0.0058, 'feat_58'), (0.0058, 'feat_35'), (0.0058, 'feat_22'), (0.0056, 'feat_18'), (0.0055, 'feat_55'), (0.0054, 'feat_74'), (0.0053, 'feat_91'), (0.0053, 'feat_79'), (0.005, 'feat_29'), (0.0049, 'feat_44'), (0.0046, 'feat_89'), (0.0045, 'feat_45'), (0.0041, 'feat_10'), (0.0039, 'feat_19'), (0.0037, 'feat_21'), (0.0035, 'feat_3'), (0.0033, 'feat_65'), (0.0031, 'feat_7'), (0.003, 'feat_23'), (0.0029, 'feat_63'), (0.0029, 'feat_52'), (0.0028, 'feat_77'), (0.0027, 'feat_49'), (0.0025, 'feat_12'), (0.0022, 'feat_93'), (0.0021, 'feat_28'), (0.002, 'feat_61'), (0.002, 'feat_5'), (0.0019, 'feat_81'), (0.0017, 'feat_31'), (0.0011, 'feat_84'), (0.0011, 'feat_51'), (0.0009, 'feat_82'), (0.0009, 'feat_6')]
x_value = []
y_feat = []
for (a, b) in important_features:
y_feat.append(b)
x_value.append(a)
features = y_feat
y_pos = range(1,2*len(features),2)
performance = x_value
plt.barh(y_pos, performance, align='center', alpha=0.4)
plt.yticks(y_pos, features)
plt.ylim(0,200)
plt.xlabel('Values')
plt.title('Feature Plot')
plt.show()
Here is what I would do. There are simply too many bars, so the labels end up on top of each other. My solution is to put half of them on the left of the bars and the other half on the right of the bars (you can change that to wherever you want them; the idea is simply to put half of the labels in one place and the other half somewhere else). Also, making the figure bigger helps.
"""
Simple demo of a horizontal bar chart.
"""
import matplotlib.pyplot as plt; plt.rcdefaults()
import numpy as np
import matplotlib.pyplot as plt
# Example data
important_features = [(0.0457, 'feat_34'), (0.042, 'feat_11'), (0.0359, 'feat_25'), (0.0355, 'feat_60'), (0.0316, 'feat_40'), (0.0298, 'feat_14'), (0.0272, 'feat_80'), (0.0252, 'feat_42'), (0.0241, 'feat_86'), (0.0236, 'feat_67'), (0.0232, 'feat_15'), (0.0217, 'feat_62'), (0.02, 'feat_24'), (0.0196, 'feat_26'), (0.0191, 'feat_54'), (0.0189, 'feat_36'), (0.0183, 'feat_48'), (0.0177, 'feat_75'), (0.016, 'feat_88'), (0.0155, 'feat_64'), (0.0147, 'feat_90'), (0.014, 'feat_72'), (0.014, 'feat_69'), (0.0131, 'feat_9'), (0.0131, 'feat_8'), (0.0131, 'feat_57'), (0.0129, 'feat_43'), (0.0126, 'feat_32'), (0.0126, 'feat_16'), (0.012, 'feat_76'), (0.0117, 'feat_33'), (0.0109, 'feat_68'), (0.0109, 'feat_4'), (0.0107, 'feat_39'), (0.0106, 'feat_70'), (0.0104, 'feat_78'), (0.0103, 'feat_59'), (0.0101, 'feat_53'), (0.0096, 'feat_85'), (0.0087, 'feat_38'), (0.0086, 'feat_47'), (0.0083, 'feat_50'), (0.0083, 'feat_46'), (0.0079, 'feat_92'), (0.0078, 'feat_71'), (0.0078, 'feat_30'), (0.0076, 'feat_17'), (0.0073, 'feat_66'), (0.007, 'feat_41'), (0.0069, 'feat_13'), (0.0068, 'feat_83'), (0.0068, 'feat_73'), (0.0068, 'feat_56'), (0.0066, 'feat_20'), (0.0064, 'feat_1'), (0.0062, 'feat_37'), (0.0061, 'feat_27'), (0.006, 'feat_87'), (0.006, 'feat_2'), (0.0058, 'feat_58'), (0.0058, 'feat_35'), (0.0058, 'feat_22'), (0.0056, 'feat_18'), (0.0055, 'feat_55'), (0.0054, 'feat_74'), (0.0053, 'feat_91'), (0.0053, 'feat_79'), (0.005, 'feat_29'), (0.0049, 'feat_44'), (0.0046, 'feat_89'), (0.0045, 'feat_45'), (0.0041, 'feat_10'), (0.0039, 'feat_19'), (0.0037, 'feat_21'), (0.0035, 'feat_3'), (0.0033, 'feat_65'), (0.0031, 'feat_7'), (0.003, 'feat_23'), (0.0029, 'feat_63'), (0.0029, 'feat_52'), (0.0028, 'feat_77'), (0.0027, 'feat_49'), (0.0025, 'feat_12'), (0.0022, 'feat_93'), (0.0021, 'feat_28'), (0.002, 'feat_61'), (0.002, 'feat_5'), (0.0019, 'feat_81'), (0.0017, 'feat_31'), (0.0011, 'feat_84'), (0.0011, 'feat_51'), (0.0009, 'feat_82'), (0.0009, 'feat_6')]
x_value = []
y_feat = []
for (a, b) in important_features:
y_feat.append(b)
x_value.append(a)
features = y_feat
y_pos = range(1,2*len(features),2)
performance = x_value
fig = plt.figure(figsize=(16, 12))
plt.barh(y_pos, performance, align='center', alpha=0.4)
# no ticks
plt.yticks([])
j = 0
for x, y, label in zip(performance, y_pos, features):
if j % 2 == 0: # half of them on the left of the axes
plt.text(-0.0003, y, label, horizontalalignment='right', verticalalignment='center')
else: # half of them on the right of the bars
plt.text(x+0.0003, y, label, horizontalalignment='left', verticalalignment='center')
j +=1
plt.ylim(0,200)
plt.xlabel('Values')
plt.title('Feature Plot')
plt.show()