Matplotlib: contour plot with data interpolation - python

I use scipy.interpolate.griddata to interpolate my data for a contour plot. The data have very different scales on the x and y axes:
import numpy as np
import matplotlib.pyplot as plt
from scipy.interpolate import griddata
xx = [0.0493, 0.0458, 0.0425, 0.0394, 0.0365, 0.0337, 0.0311, 0.0286, 0.0262, 0.024, 0.0219, 0.0198, 0.0179, 0.016, 0.0143, 0.0126, 0.0109, 0.0094, 0.0079, 0.0064, 0.005, 0.0037, 0.0024, 0.0012, 0.0, 0.0663, 0.0637, 0.0613, 0.059, 0.0567, 0.0546, 0.0525, 0.0506, 0.0487, 0.0469, 0.0451, 0.0434, 0.0418, 0.0402, 0.0387, 0.0373, 0.0359, 0.0345, 0.0332, 0.0319, 0.0307, 0.0295, 0.0283, 0.0272, 0.0261, 0.0792, 0.0774, 0.0756, 0.0739, 0.0722, 0.0706, 0.0691, 0.0676, 0.0661, 0.0647, 0.0633, 0.062, 0.0607, 0.0594, 0.0582, 0.057, 0.0559, 0.0547, 0.0536, 0.0526, 0.0515, 0.0505, 0.0495, 0.0486, 0.0477, 0.0919, 0.0905, 0.0891, 0.0878, 0.0865, 0.0852, 0.084, 0.0828, 0.0816, 0.0805, 0.0794, 0.0783, 0.0772, 0.0762, 0.0752, 0.0742, 0.0732, 0.0723, 0.0714, 0.0705, 0.0696, 0.0688, 0.0679, 0.0671, 0.0663, 0.1044, 0.1033, 0.1022, 0.1011, 0.1, 0.099, 0.098, 0.097, 0.096, 0.0951, 0.0942, 0.0933, 0.0924, 0.0915, 0.0907, 0.0898, 0.089, 0.0882, 0.0874, 0.0867, 0.0859, 0.0852, 0.0845, 0.0837, 0.083, 0.1168, 0.1159, 0.1149, 0.114, 0.1132, 0.1123, 0.1114, 0.1106, 0.1098, 0.109, 0.1082, 0.1074, 0.1066, 0.1059, 0.1052, 0.1045, 0.1037, 0.1031, 0.1024, 0.1017, 0.1011, 0.1004, 0.0998, 0.0992, 0.0985, 0.1291, 0.1283, 0.1275, 0.1267, 0.126, 0.1252, 0.1245, 0.1238, 0.123, 0.1223, 0.1217, 0.121, 0.1203, 0.1197, 0.119, 0.1184, 0.1178, 0.1172, 0.1166, 0.116, 0.1154, 0.1148, 0.1143, 0.1137, 0.1132]
yy = [0.6137, 0.8211, 1.0277, 1.2338, 1.4393, 1.6444, 1.8489, 2.053, 2.2567, 2.4601, 2.6631, 2.8658, 3.0682, 3.2703, 3.4722, 3.6738, 3.8752, 4.0763, 4.2773, 4.4781, 4.6787, 4.8791, 5.0794, 5.2795, 5.4795, 0.3217, 0.5059, 0.694, 0.8859, 1.0812, 1.2799, 1.4816, 1.6861, 1.8934, 2.1033, 2.3155, 2.5301, 2.7467, 2.9655, 3.1861, 3.4086, 3.6328, 3.8586, 4.0861, 4.315, 4.5453, 4.777, 5.01, 5.2442, 5.4795, 0.2447, 0.4154, 0.5919, 0.7737, 0.9606, 1.1524, 1.3488, 1.5496, 1.7547, 1.9638, 2.1767, 2.3932, 2.6133, 2.8367, 3.0633, 3.293, 3.5257, 3.7611, 3.9993, 4.2401, 4.4833, 4.729, 4.977, 5.2272, 5.4795, 0.1814, 0.3467, 0.5184, 0.696, 0.8795, 1.0685, 1.2629, 1.4624, 1.6668, 1.876, 2.0897, 2.3079, 2.5302, 2.7567, 2.987, 3.2212, 3.4591, 3.7004, 3.9452, 4.1933, 4.4446, 4.6989, 4.9563, 5.2165, 5.4795, 0.1202, 0.2837, 0.4538, 0.6303, 0.8128, 1.0012, 1.1953, 1.3949, 1.5998, 1.8099, 2.0249, 2.2448, 2.4693, 2.6983, 2.9318, 3.1694, 3.4112, 3.657, 3.9066, 4.16, 4.417, 4.6776, 4.9416, 5.2089, 5.4795, 0.0598, 0.2232, 0.3932, 0.5697, 0.7525, 0.9413, 1.136, 1.3365, 1.5425, 1.754, 1.9706, 2.1924, 2.4191, 2.6506, 2.8867, 3.1275, 3.3726, 3.6221, 3.8757, 4.1334, 4.3951, 4.6606, 4.93, 5.203, 5.4795, 0.0, 0.1638, 0.3344, 0.5115, 0.695, 0.8848, 1.0806, 1.2823, 1.4897, 1.7028, 1.9212, 2.145, 2.3739, 2.6078, 2.8466, 3.0903, 3.3385, 3.5914, 3.8486, 4.1102, 4.376, 4.646, 4.9199, 5.1978, 5.4795]
vv = [0.4829, 0.5196, 0.5541, 0.5866, 0.6173, 0.6463, 0.6738, 0.6998, 0.7246, 0.7481, 0.7706, 0.7919, 0.8123, 0.8318, 0.8504, 0.8683, 0.8854, 0.9017, 0.9175, 0.9326, 0.9471, 0.9611, 0.9745, 0.9875, 1.0, 0.4229, 0.4512, 0.4782, 0.5041, 0.5288, 0.5525, 0.5752, 0.597, 0.618, 0.6381, 0.6575, 0.6761, 0.6941, 0.7114, 0.7282, 0.7443, 0.7599, 0.775, 0.7895, 0.8036, 0.8173, 0.8305, 0.8433, 0.8557, 0.8678, 0.4044, 0.4259, 0.4467, 0.4668, 0.4862, 0.505, 0.5231, 0.5407, 0.5578, 0.5743, 0.5903, 0.6059, 0.621, 0.6356, 0.6498, 0.6637, 0.6771, 0.6902, 0.703, 0.7154, 0.7274, 0.7392, 0.7507, 0.7618, 0.7727, 0.3883, 0.4056, 0.4225, 0.4388, 0.4548, 0.4703, 0.4854, 0.5001, 0.5144, 0.5283, 0.5419, 0.5552, 0.5681, 0.5808, 0.5931, 0.6051, 0.6169, 0.6284, 0.6396, 0.6506, 0.6613, 0.6718, 0.6821, 0.6921, 0.7019, 0.3725, 0.3871, 0.4014, 0.4153, 0.4289, 0.4422, 0.4551, 0.4678, 0.4802, 0.4924, 0.5042, 0.5159, 0.5272, 0.5384, 0.5493, 0.56, 0.5704, 0.5807, 0.5907, 0.6006, 0.6102, 0.6197, 0.629, 0.6381, 0.6471, 0.3569, 0.3697, 0.3821, 0.3943, 0.4063, 0.418, 0.4295, 0.4407, 0.4518, 0.4626, 0.4732, 0.4836, 0.4938, 0.5038, 0.5137, 0.5233, 0.5328, 0.5421, 0.5513, 0.5603, 0.5691, 0.5778, 0.5863, 0.5947, 0.6029, 0.3415, 0.3529, 0.3641, 0.375, 0.3858, 0.3964, 0.4068, 0.417, 0.427, 0.4368, 0.4465, 0.456, 0.4654, 0.4745, 0.4836, 0.4925, 0.5012, 0.5098, 0.5182, 0.5265, 0.5347, 0.5428, 0.5507, 0.5585, 0.5662]
N = 500
data_points = (xx, yy)
grid_points = (np.linspace(min(xx), max(xx), N), np.linspace(min(yy), max(yy), N))
vi = griddata(data_points, vv,
              (grid_points[0][None, :], grid_points[1][:, None]), method='cubic')
plt.plot(xx, yy, '.k')
plt.contour(grid_points[0], grid_points[1], vi)
But I get ugly, jagged contours:
However, if I rescale, for example, the y axis like this, yy = [v/50. for v in yy], I get a smooth plot:
How can I get smooth contours while keeping the original axis scales?
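One workaround worth trying (a minimal sketch, assuming the goal is only to condition the interpolation, not to change the displayed axes) is to normalize both coordinates before calling griddata and then draw the contours against the original, un-normalized grid:
import numpy as np
import matplotlib.pyplot as plt
from scipy.interpolate import griddata

# normalize x and y to comparable ranges before interpolating
xa, ya = np.array(xx), np.array(yy)
x_scale = xa.max() - xa.min()
y_scale = ya.max() - ya.min()
xs, ys = xa / x_scale, ya / y_scale

N = 500
gx = np.linspace(xs.min(), xs.max(), N)
gy = np.linspace(ys.min(), ys.max(), N)
vi = griddata((xs, ys), vv, (gx[None, :], gy[:, None]), method='cubic')

# plot against the original (un-normalized) coordinates so the axes keep their scale
plt.plot(xx, yy, '.k')
plt.contour(gx * x_scale, gy * y_scale, vi)
plt.show()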

Related

Find local maxima in data from dataframe

Is there a way to find the local maxima from data I get from CSV file and put its value on the plot?
The x and y values in the pandas dataframe are the x and y lists shown in the code below.
and I'm trying to get something like this.
P.S. Note that numeric values were added with the plt.text function just to exemplify what I want.
x = [1598.78, 1596.85, 1594.92, 1592.99, 1591.07, 1589.14, 1587.21, 1585.28, 1583.35, 1581.42, 1579.49, 1577.57, 1575.64, 1573.71, 1571.78, 1569.85, 1567.92, 1565.99, 1564.07, 1562.14, 1560.21, 1558.28, 1556.35, 1554.42, 1552.49, 1550.57, 1548.64, 1546.71, 1544.78, 1542.85, 1540.92, 1538.99, 1537.07, 1535.14, 1533.21, 1531.28, 1529.35, 1527.42, 1525.49, 1523.57, 1521.64, 1519.71, 1517.78, 1515.85, 1513.92, 1511.99, 1510.07, 1508.14, 1506.21, 1504.28, 1502.35, 1500.42, 1498.49, 1496.57, 1494.64, 1492.71, 1490.78, 1488.85, 1486.92, 1484.99, 1483.07, 1481.14, 1479.21, 1477.28, 1475.35, 1473.42, 1471.49, 1469.57, 1467.64, 1465.71, 1463.78, 1461.85, 1459.92, 1457.99, 1456.07, 1454.14, 1452.21, 1450.28, 1448.35, 1446.42, 1444.49, 1442.57, 1440.64, 1438.71, 1436.78, 1434.85, 1432.92, 1430.99, 1429.07, 1427.14, 1425.21, 1423.28, 1421.35, 1419.42, 1417.49, 1415.57, 1413.64, 1411.71, 1409.78, 1407.85, 1405.92, 1403.99, 1402.07, 1400.14]
y = [0.640, 0.624, 0.609, 0.594, 0.581, 0.569, 0.558, 0.547, 0.537, 0.530, 0.523, 0.516, 0.508, 0.502, 0.497, 0.491, 0.487, 0.484, 0.481, 0.480, 0.479, 0.482, 0.490, 0.503, 0.520, 0.542, 0.566, 0.586, 0.600, 0.606, 0.593, 0.569, 0.557, 0.548, 0.538, 0.531, 0.527, 0.524, 0.522, 0.522, 0.523, 0.525, 0.526, 0.527, 0.530, 0.534, 0.536, 0.539, 0.547, 0.553, 0.557, 0.563, 0.573, 0.599, 0.654, 0.738, 0.852, 0.891, 0.810, 0.744, 0.711, 0.694, 0.686, 0.683, 0.683, 0.690, 0.700, 0.706, 0.713, 0.723, 0.731, 0.732, 0.737, 0.756, 0.779, 0.786, 0.790, 0.794, 0.802, 0.815, 0.827, 0.832, 0.831, 0.826, 0.823, 0.828, 0.834, 0.834, 0.832, 0.832, 0.831, 0.825, 0.816, 0.804, 0.798, 0.794, 0.786, 0.775, 0.764, 0.752, 0.739, 0.722, 0.708, 0.697]
import numpy as np
import matplotlib.pyplot as plt

# The slope of a line is a measure of its steepness. Mathematically, slope is calculated
# as "rise over run" (change in y divided by change in x); only its sign is needed here.
slope = [np.sign((y[i] - y[i-1]) / (x[i] - x[i-1])) for i in range(1, len(y))]

x_prev = slope[0]
optima_dic = {'minima': [], 'maxima': []}
for i in range(1, len(slope)):
    if slope[i] * x_prev == -1:  # slope changed sign
        if x_prev == 1:  # slope changed from 1 to -1
            optima_dic['maxima'].append(i)
        else:  # slope changed from -1 to 1
            optima_dic['minima'].append(i)
        x_prev = -x_prev

plt.rcParams["figure.figsize"] = (20, 10)
for ix, (x_, y_) in enumerate(zip(x, y)):
    plt.plot(x_, y_, 'o--', color='grey')
    if ix in optima_dic['minima']:
        plt.text(x_, y_, s=x_, fontsize=10)
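For comparison, scipy.signal.find_peaks can locate the local maxima (and, applied to -y, the local minima) without tracking slope signs by hand; a minimal sketch, assuming the x and y lists above:
import numpy as np
import matplotlib.pyplot as plt
from scipy.signal import find_peaks

y_arr = np.array(y)
maxima, _ = find_peaks(y_arr)    # indices of local maxima
minima, _ = find_peaks(-y_arr)   # indices of local minima

plt.plot(x, y, 'o--', color='grey')
for i in maxima:
    plt.text(x[i], y[i], s=f'{x[i]:.2f}', fontsize=10)
plt.show()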

How can I smooth the graph values or extract only the main signal?

When I try to run the code below, I get this graph.
My code:
from numpy import nan
import json
import os
import numpy as np
import subprocess
import math
import matplotlib.pyplot as plt
from statistics import mean, stdev
def smooth(t):
    new_t = []
    for i, x in enumerate(t):
        neighbourhood = t[max(i-2, 0): i+3]
        m = mean(neighbourhood)
        s = stdev(neighbourhood, xbar=m)
        if abs(x - m) > s:
            x = (t[i - 1 + (i == 0)*2] + t[i + 1 - (i+1 == len(t))*2]) / 2
        new_t.append(x)
    return new_t
def outLiersFN(*U):
    outliers = []  # after preprocessing list
    # preprocessing Fc = | 2*LF1 prev by 1 - LF2 prev by 2 |
    c0 = -2  # (previous by 2)  # from original
    c1 = -1  # (previous)       # from original
    c2 = 0   # (current)        # from original
    c3 = 1   # (next)           # from original
    preP = U[0]  # original list
    if c2 == 0:
        outliers.append(preP[0])
        c1 += 1
        c2 += 1
        c0 += 1
        c3 += 1
    oldlen = len(preP)
    M_RangeOfMotion = 90
    while oldlen > c2:
        if c3 == oldlen:
            outliers.insert(c2, preP[c2])  # preP[c2] >> last element in old list
            break
        if (preP[c2] > M_RangeOfMotion and preP[c2] < (preP[c1] + preP[c3]) / 2) or (preP[c2] < M_RangeOfMotion and preP[c2] > (preP[c1] + preP[c3]) / 2):  # Check Paper 3.3.1
            Equ = (preP[c1] + preP[c3]) / 2  # preprocessing: insert the neighbour average for the current frame
            formatted_float = "{:.2f}".format(Equ)  # keep 2 decimals only
            equu = float(formatted_float)  # from string back to float
            outliers.insert(c2, equu)  # insert the preprocessed value into the list
            c1 += 1
            c2 += 1
            c0 += 1
            c3 += 1
        else:
            Equ = preP[c2]  # preprocessing: keep the same element (do nothing)
            formatted_float = "{:.2f}".format(Equ)  # keep 2 decimals only
            equu = float(formatted_float)  # from string back to float
            outliers.insert(c2, equu)  # insert the preprocessed value into the list
            c1 += 1
            c2 += 1
            c0 += 1
            c3 += 1
    return outliers

def remove_nan(values):
    newlist = [x for x in values if not math.isnan(x)]
    return newlist
the_angel = [176.04, 173.82, 170.09, 165.3, 171.8, 178.3, 178.77, 179.24, 179.93, 180.0, 173.39, 166.78, 166.03, 165.28, 165.72, 166.17, 166.71, 167.26, 168.04, 167.22, 166.68, 166.13, 161.53, 165.81, 170.1, 170.05, 170.5, 173.01, 176.02, 174.53, 160.09, 146.33, 146.38, 146.71, 150.33, 153.95, 154.32, 154.69, 134.52, 114.34, 115.6, 116.86, 134.99, 153.12, 152.28, 151.43, 151.36, 152.32, 158.9, 166.52, 177.74, 178.61, 179.47, 167.44, 155.4, 161.54, 167.68, 163.96, 160.24, 137.45, 114.66, 117.78, 120.89, 139.95, 139.62, 125.51, 111.79, 112.07, 112.74, 110.22, 107.7, 107.3, 106.52, 105.73, 103.07, 101.35, 102.5, 104.59, 104.6, 104.49, 104.38, 102.81, 101.25, 100.62, 100.25, 100.15, 100.32, 99.84, 99.36, 100.04, 100.31, 99.14, 98.3, 97.92, 97.41, 96.9, 96.39, 95.88, 95.9, 95.9, 96.02, 96.14, 96.39, 95.2, 94.56, 94.02, 93.88, 93.8, 93.77, 93.88, 94.04, 93.77, 93.65, 93.53, 94.2, 94.88, 92.59, 90.29, 27.01, 32.9, 38.78, 50.19, 61.59, 61.95, 62.31, 97.46, 97.38, 97.04, 96.46, 96.02, 96.1, 96.33, 95.61, 89.47, 89.34, 89.22, 89.48, 89.75, 90.02, 90.28, 88.16, 88.22, 88.29, 88.17, 88.17, 94.98, 94.84, 94.69, 94.94, 94.74, 94.54, 94.69, 94.71, 94.64, 94.58, 94.19, 94.52, 94.85, 87.7, 87.54, 87.38, 95.71, 96.57, 97.11, 97.05, 96.56, 96.07, 95.76, 95.56, 95.35, 95.28, 95.74, 96.2, 96.32, 96.33, 96.2, 96.14, 96.07, 96.07, 96.12, 96.17, 96.28, 96.31, 96.33, 96.16, 96.05, 95.94, 95.33, 88.96, 95.0, 95.78, 88.19, 88.19, 88.19, 87.92, 87.93, 88.03, 87.94, 87.86, 87.85, 87.89, 88.08, 88.01, 87.88, 88.02, 88.15, 88.15, 88.66, 88.73, 88.81, 88.41, 88.55, 88.68, 88.69, 88.02, 87.35, 95.19, 95.39, 95.38, 95.37, 95.27, 95.17, 95.33, 95.32, 95.31, 95.37, 95.42, 95.34, 95.44, 95.53, 95.47, 95.41, 95.13, 94.15, 94.78, 97.64, 97.1, 96.87, 97.03, 96.76, 35.44, 23.63, 23.27, 24.71, 26.16, 96.36, 113.13, 129.9, 96.82, 63.74, 34.25, 33.42, 32.6, 30.69, 31.06, 31.43, 97.14, 97.51, 97.23, 98.54, 100.13, 100.95, 28.82, 33.81, 66.81, 99.82, 102.63, 101.9, 101.44, 102.19, 103.22, 103.67, 104.13, 104.07, 104.73, 105.46, 103.74, 102.02, 103.32, 102.59, 29.54, 28.08, 28.76, 29.79, 30.82, 113.51, 129.34, 145.16, 143.18, 148.29, 153.67, 166.14, 161.16, 151.64, 149.27, 146.9, 151.67, 153.02, 149.28, 145.53, 149.1, 152.67, 158.78, 164.89, 164.84, 164.8, 162.11, 159.42, 156.73, 156.28, 155.83, 156.4, 161.0, 165.59, 164.44, 159.73, 155.76, 156.97, 158.92, 159.15, 159.39, 159.99, 160.44, 160.88, 163.89, 166.9, 167.71, 167.11, 167.0, 167.44, 168.38, 153.16, 137.94, 137.65, 152.09, 169.49, 171.36, 173.22, 174.01, 174.0, 174.2, 174.41, 157.74, 141.09, 149.32, 157.57, 156.4, 148.4, 140.78, 141.06, 141.73, 143.05, 143.91, 156.59, 169.29, 172.17, 175.05, 175.29, 175.27, 175.15, 175.02, 174.81, 174.59, 174.76, 174.94, 175.18, 175.41, 175.23, 174.51, 174.64, 174.77, 174.56, 173.25, 172.38, 174.17, 176.4, 177.27, 177.29, 177.33, 178.64, 179.98, 179.99, 176.0, 172.88, 173.77, 173.8, 173.97, 174.72, 175.24, 176.89, 179.07, 179.27, 178.78, 178.29, 175.61, 174.21, 172.8, 173.05, 173.41, 173.77, 174.65, 175.52, 175.58, 176.15, 176.71, 159.12, 141.54, 141.12, 155.62, 170.53, 165.54, 160.71, 158.22, 156.35, 156.82, 158.55, 160.27, 161.33, 162.39, 162.37, 159.48, 156.59, 156.77, 158.05, 159.32, 158.49, 157.66, 157.7, 157.74, 158.44, 159.14, 150.13, 143.06, 136.0, 125.7, 115.41, 111.19, 106.97, 107.1, 107.24, 107.45, 107.67, 113.34, 119.01, 144.87, 170.73, 174.31, 177.89, 174.78, 171.67, 163.26, 134.58, 105.9, 102.98, 100.77, 101.05, 101.39, 101.73, 99.79, 98.71, 97.64, 97.8, 97.89, 96.67, 95.45, 94.33, 93.38, 92.44, 48.53, 91.4, 91.35, 91.34, 91.33, 
90.92, 90.51, 88.63, 87.0, 86.74, 86.48, 96.79, 96.09, 95.46, 95.39, 94.32, 93.25, 93.31, 93.37, 93.11, 92.57, 93.41, 94.25, 96.48, 92.71, 88.94, 90.07, 90.43, 78.06, 77.69, 77.32, 90.1, 89.15, 89.14, 88.85, 88.38, 87.63, 121.2, 120.66, 86.89, 86.42, 85.69, 84.86, 84.86, 85.34, 85.82, 86.07, 86.32, 85.82, 85.32, 86.23, 86.69, 87.15, 87.04, 86.87, 86.58, 86.0, 85.41, 85.41, 85.53, 85.66, 85.7, 85.72, 85.75, 85.92, 86.09, 85.77, 85.45, 84.94, 85.55, 86.16, 86.21, 86.1, 85.77, 85.27, 84.56, 84.99, 85.38, 85.42, 85.98, 86.54, 86.5, 86.45, 86.56, 86.63, 86.35, 86.08, 85.82, 85.51, 85.21, 84.6, 84.84, 84.97, 85.1, 86.12, 86.88, 86.8, 86.46, 86.47, 87.23, 87.8, 88.0, 88.08, 88.16, 87.72, 87.63, 87.37, 86.42, 86.48, 87.24, 87.97, 88.09, 88.19, 88.32, 88.44, 87.82, 87.2, 86.03, 85.78, 91.5, 93.0, 88.2, 88.52, 88.42, 87.28, 85.73, 85.62, 85.5, 85.5, 87.06, 87.6, 88.1, 88.31, 88.53, 88.77, 89.14, 89.52, 89.46, 89.4, 90.28, 89.74, 91.28, 92.17, 92.16, 92.15, 93.08, 94.0, 94.66, 95.32, 94.13, 93.7, 93.32, 93.69, 94.58, 95.47, 97.25, 99.03, 99.63, 99.67, 99.71, 100.33, 101.58, 103.36, 103.49, 103.41, 106.31, 109.34, 109.28, 109.21, 107.76, 106.31, 105.43, 104.94, 104.44, 111.19, 117.93, 115.59, 113.24, 116.15, 119.06, 125.43, 140.72, 156.0, 161.7, 143.52, 135.33, 127.13, 127.68, 148.68, 169.68, 172.2, 174.72, 174.75, 174.66, 158.57, 142.63, 145.13, 153.29, 161.45, 163.34, 165.24, 162.25, 159.89, 159.07, 156.39, 155.21, 156.04, 159.29, 160.07, 160.85, 163.45, 162.93, 161.71, 160.06, 158.4, 144.74, 132.64, 134.57, 150.22, 165.86, 172.95, 174.12, 175.3, 175.5, 176.31, 177.71, 179.72, 168.13, 156.55, 146.24, 155.75, 176.0, 175.99, 175.98, 176.0, 176.02, 176.25, 175.13, 174.26, 173.38, 173.37, 173.46, 176.34, 174.55, 172.77, 168.45, 166.35, 166.47, 168.81, 167.43, 166.79, 167.35, 168.65, 168.51, 168.37, 168.88, 169.74, 171.19, 171.33, 169.91, 168.49, 167.11, 166.83, 167.01, 168.68, 170.34, 170.43, 172.15, 173.86, 177.62, 177.61, 175.34, 173.06, 176.47, 179.87, 179.9, 177.67, 175.67, 175.39, 175.36, 177.03, 176.0, 174.98, 174.96, 174.94, 175.76, 176.57, 169.05, 162.99, 164.97, 168.74, 172.51, 167.38, 165.08, 163.03, 163.81, 164.83, 164.81, 164.8, 165.88, 165.36, 159.61, 153.86, 153.57, 153.61, 153.65, 154.62, 155.58, 157.97, 156.35, 155.66, 154.98, 156.11, 157.24, 159.25, 159.6, 160.43, 161.26, 164.71, 168.17, 147.46, 126.92, 106.38, 105.23, 104.4, 105.37, 106.65, 109.21, 107.44, 104.65, 101.86, 102.35, 102.84, 102.79, 102.19, 101.59, 100.98, 100.38, 98.72, 97.73, 97.32, 96.9, 95.11, 93.97, 94.12, 94.12, 93.1, 92.08, 89.29, 90.35, 90.35, 90.35, 90.35, 86.95, 86.37, 86.06, 85.74, 94.56, 93.16, 92.46, 91.76, 88.55, 85.33, 87.52, 92.18, 93.68, 95.18, 94.4, 92.17, 89.94, 89.4, 89.37, 99.44, 100.98, 102.52, 103.18, 88.96, 88.23, 87.5, 85.2, 85.19, 86.87, 121.42, 155.96, 155.97, 155.97, 86.2, 86.5, 86.8, 87.22, 87.36, 87.34, 87.03, 87.04, 87.05, 86.36, 85.68, 85.71, 85.84, 85.93, 86.01, 86.04, 86.08, 85.92, 86.05, 86.18, 86.17, 86.19, 86.23, 86.22, 86.09, 85.92, 85.66, 85.69, 85.69, 85.31, 84.91, 84.93, 84.95, 84.93, 84.91, 84.9, 84.9, 84.9, 84.9, 85.38, 85.52, 85.66, 85.66, 85.4, 85.14, 85.47, 85.8, 85.72, 85.64, 86.09, 85.84, 85.27, 85.47, 85.66, 85.59, 85.52, 85.38, 85.39, 85.28, 85.17, 85.39, 85.7, 85.98, 86.26, 86.61, 92.97, 93.15, 86.58, 86.58, 86.53, 86.47, 98.55, 99.41, 100.16, 100.9, 89.19, 90.28, 91.38, 91.39, 91.4, 91.44, 92.05, 131.05, 170.63, 170.13, 162.43, 125.64, 88.85, 88.85, 99.08, 100.38, 101.69, 100.74, 99.79, 96.33, 93.31, 93.73, 94.87, 96.01, 96.93, 97.85, 98.97, 97.85, 98.14, 99.37, 102.01, 
103.8, 105.58, 108.52, 108.12, 107.72, 106.75, 106.82, 109.08, 112.37, 112.52, 112.66, 112.97, 114.12, 115.64, 117.1, 118.57, 126.13, 133.69, 149.27, 163.96, 166.62, 169.27, 164.94, 160.61, 149.35, 141.18, 143.41, 143.57, 149.26, 157.49, 159.94, 151.93, 147.47, 145.97, 145.56, 145.15, 143.85, 142.54, 142.18, 142.43, 143.12, 144.41, 144.38, 151.99, 159.59, 174.81, 174.94, 175.84, 176.87, 162.41, 152.94, 151.59, 155.24, 155.22, 155.19, 155.04]
p0 = outLiersFN(smooth(remove_nan(the_angel)))
the_angel = p0
plt.plot(the_angel) #list(filter(fun, L1))
plt.show()
print((the_angel))
How can I smooth the values in the_angel to get a graph like this (red line)?
I mean ignoring all the unnecessary, noisy values and keeping only the main line instead.
You can edit my code or suggest a new filter or algorithm.
pandas has a rolling() method for dataframes that you can use to calculate the mean over a window of values, e.g. the 70 closest ones:
import pandas as pd
import matplotlib.pyplot as plt
WINDOW_SIZE = 70
the_angel = [176.04, 173.82, 170.09, 165.3, 171.8, # ...
]
df = pd.DataFrame({'the angel': the_angel})
df[f'mean of {WINDOW_SIZE}'] = df['the angel'].rolling(
    window=WINDOW_SIZE, center=True).mean()
df.plot(color=['blue', 'red']);
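If the goal is mainly to suppress the isolated spikes while keeping the overall shape, a median filter is another option; a minimal sketch, assuming the same the_angel list as above (the kernel_size of 71 is only an illustrative choice):
import matplotlib.pyplot as plt
from scipy.signal import medfilt

filtered = medfilt(the_angel, kernel_size=71)   # kernel_size must be odd

plt.plot(the_angel, color='blue', label='raw')
plt.plot(filtered, color='red', label='median filtered')
plt.legend()
plt.show()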

How to visualize high-dimension vectors as points in 2D plane?

For example, there are three vectors as below.
[ 0.0377, 0.1808, 0.0807, -0.0703, 0.2427, -0.1957, -0.0712, -0.2137,
-0.0754, -0.1200, 0.1919, 0.0373, 0.0536, 0.0887, -0.1916, -0.1268,
-0.1910, -0.1411, -0.1282, 0.0274, -0.0781, 0.0138, -0.0654, 0.0491,
0.0398, 0.1696, 0.0365, 0.2266, 0.1241, 0.0176, 0.0881, 0.2993,
-0.1425, -0.2535, 0.1801, -0.1188, 0.1251, 0.1840, 0.1112, 0.3172,
0.0844, -0.1142, 0.0662, 0.0910, 0.0416, 0.2104, 0.0781, -0.0348,
-0.1488, 0.0129],
[-0.1302, 0.1581, -0.0897, 0.1024, -0.1133, 0.1076, 0.1595, -0.1047,
0.0760, 0.1092, 0.0062, -0.1567, -0.1448, -0.0548, -0.1275, -0.0689,
-0.1293, 0.1024, 0.1615, 0.0869, 0.2906, -0.2056, 0.0442, -0.0595,
-0.1448, 0.0167, -0.1259, -0.0989, 0.0651, -0.0424, 0.0795, -0.1546,
0.1330, -0.2284, 0.1672, 0.1847, 0.0841, 0.1771, -0.0101, -0.0681,
0.1497, 0.1226, 0.1146, -0.2090, 0.3275, 0.0981, -0.3295, 0.0590,
0.1130, -0.0650],
[-0.1745, -0.1940, -0.1529, -0.0964, 0.2657, -0.0979, 0.1510, -0.1248,
-0.1541, 0.1782, -0.1769, -0.2335, 0.2011, 0.1906, -0.1918, 0.1896,
-0.2183, -0.1543, 0.1816, 0.1684, -0.1318, 0.2285, 0.1784, 0.2260,
-0.2331, 0.0523, 0.1882, 0.1764, -0.1686, 0.2292]
How can I plot them as three points in the same 2D plane, like the picture below? Thanks!
I use PCA from sklearn; maybe this code helps you:
import matplotlib.pyplot as plt
import numpy as np
from sklearn.decomposition import PCA
usa = [ 0.0377, 0.1808, 0.0807, -0.0703, 0.2427, -0.1957, -0.0712, -0.2137,
-0.0754, -0.1200, 0.1919, 0.0373, 0.0536, 0.0887, -0.1916, -0.1268,
-0.1910, -0.1411, -0.1282, 0.0274, -0.0781, 0.0138, -0.0654, 0.0491,
0.0398, 0.1696, 0.0365, 0.2266, 0.1241, 0.0176, 0.0881, 0.2993,
-0.1425, -0.2535, 0.1801, -0.1188, 0.1251, 0.1840, 0.1112, 0.3172,
0.0844, -0.1142, 0.0662, 0.0910, 0.0416, 0.2104, 0.0781, -0.0348,
-0.1488, 0.0129]
obama = [-0.1302, 0.1581, -0.0897, 0.1024, -0.1133, 0.1076, 0.1595, -0.1047,
0.0760, 0.1092, 0.0062, -0.1567, -0.1448, -0.0548, -0.1275, -0.0689,
-0.1293, 0.1024, 0.1615, 0.0869, 0.2906, -0.2056, 0.0442, -0.0595,
-0.1448, 0.0167, -0.1259, -0.0989, 0.0651, -0.0424, 0.0795, -0.1546,
0.1330, -0.2284, 0.1672, 0.1847, 0.0841, 0.1771, -0.0101, -0.0681,
0.1497, 0.1226, 0.1146, -0.2090, 0.3275, 0.0981, -0.3295, 0.0590,
0.1130, -0.0650]
nationality = [-0.1745, -0.1940, -0.1529, -0.0964, 0.2657, -0.0979, 0.1510, -0.1248,
-0.1541, 0.1782, -0.1769, -0.2335, 0.2011, 0.1906, -0.1918, 0.1896,
-0.2183, -0.1543, 0.1816, 0.1684, -0.1318, 0.2285, 0.1784, 0.2260,
-0.2331, 0.0523, 0.1882, 0.1764, -0.1686, 0.2292]
pca = PCA(n_components=1)
X = np.array(usa).reshape(2,len(usa)//2)
X = pca.fit_transform(X)
Y = np.array(obama).reshape(2,len(obama)//2)
Y = pca.fit_transform(Y)
Z = np.array(nationality).reshape(2,len(nationality)//2)
Z = pca.fit_transform(Z)
x_coordinates = [X[0][0], Y[0][0], Z[0][0]]
y_coordinates = [X[1][0], Y[1][0], Z[1][0]]
colors = ['r','g','b']
annotations=["U.S.A","Obama","Nationality"]
plt.figure(figsize=(8,6))
plt.scatter(x_coordinates, y_coordinates, marker=",", color=colors,s=300)
for i, label in enumerate(annotations):
    plt.annotate(label, (x_coordinates[i], y_coordinates[i]))
plt.show()
output:
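Note that this answer fits a separate PCA to each reshaped vector, so the three points do not live in one common projected space. If the vectors all had the same length, a more conventional approach would be to stack them into one matrix and fit a single 2-component PCA; a minimal sketch, assuming three hypothetical equal-length vectors v1, v2 and v3:
import numpy as np
import matplotlib.pyplot as plt
from sklearn.decomposition import PCA

# v1, v2, v3 are assumed to be lists of the same length
X = np.vstack([v1, v2, v3])                      # shape (3, n_features)
coords = PCA(n_components=2).fit_transform(X)    # shape (3, 2)

plt.scatter(coords[:, 0], coords[:, 1], s=300)
for label, (px, py) in zip(["U.S.A", "Obama", "Nationality"], coords):
    plt.annotate(label, (px, py))
plt.show()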

hbar bokeh 1.3.4 with categorical data

I am trying to build a categorical plot with hbar in Bokeh, though it seems a bit odd that it does not follow the same concept as vbar. I have tried a few variations and I still have not been able to plot the data; I only get an empty canvas. If someone could help me out, it would be much appreciated.
I am using Bokeh 1.3.4 on my system and in a webapp I am building in Flask, so it has to be this version or below (it feels a bit demanding, but those are the software requirements).
I have done it with pandas_bokeh, which makes it very simple, but I am adding interactivity to the plots to let the viewer play around; pandas_bokeh does the job, but you end up not learning Bokeh properly.
webapp draft so far
rX = df3.index.values
xL = ['Dublin New', 'Ireland New','Dublin Existing','Ireland Existing']
labelDict = {'2010': xL, '2011': xL, '2012': xL, '2013': xL, '2014': xL,'2015': xL,'2016': xL,'2017': xL, '2018': xL}
sourceT = ColumnDataSource(data=dict(x=df3.index.values,
                                     y=df3['Dublin New'],
                                     y1=df3['Ireland New'],
                                     y2=df3['Dublin Existing'],
                                     y3=df3['Ireland Existing']))
pT = figure(y_range=FactorRange(*labelDict), plot_height=350, plot_width=550, title='Properties Transactions in Ireland', tools='pan, wheel_zoom, box_zoom, reset')
pT.hbar(y=dodge('x', -0.5, range=pT.y_range), height=0.3, right='y', fill_color="#FDE724", source=sourceT)
#pT.hbar(y=dodge('x', -0.25, range=pT.y_range), height=0.3, right='y1', fill_color='#35B778', source=sourceT)
show(pT)
Here is the plot I would like to reproduce using bokeh instead of pandas_bokeh.
Thanks in advance for the help.
The value for the range parameter of the dodge should be the actual range:
range=pT.y_range # GOOD
You are passing the range end property, which is a number:
range=pT.y_range.end # BAD
EDIT: Without a complete, minimal reproducer it is not possible to fix your code directly. The best that can be offered is a complete working example demonstrating that hbar works exactly analogously to vbar, which will hopefully be useful for comparison so you can figure out where your full code strays:
from bokeh.io import show
from bokeh.models import ColumnDataSource
from bokeh.plotting import figure
from bokeh.transform import dodge
fruits = ['Apples', 'Pears', 'Nectarines', 'Plums', 'Grapes', 'Strawberries']
years = ['2015', '2016', '2017']
data = {'fruits' : fruits,
        '2015'   : [2, 1, 4, 3, 2, 4],
        '2016'   : [5, 3, 3, 2, 4, 6],
        '2017'   : [3, 2, 4, 4, 5, 3]}
source = ColumnDataSource(data=data)
p = figure(y_range=fruits, x_range=(0, 10), plot_width=250, title="Fruit Counts by Year",
           toolbar_location=None, tools="")
p.hbar(y=dodge('fruits', -0.25, range=p.y_range), right='2015', height=0.2, source=source,
       color="#c9d9d3")
p.hbar(y=dodge('fruits', 0.0, range=p.y_range), right='2016', height=0.2, source=source,
       color="#718dbf")
p.hbar(y=dodge('fruits', 0.25, range=p.y_range), right='2017', height=0.2, source=source,
       color="#e84d60")
p.y_range.range_padding = 0.1
p.ygrid.grid_line_color = None
show(p)
@bigreddot Thanks a lot. I followed your approach and it works smoothly. I will try to reverse the axes, as I would rather have the years on the y axis.
Anyhow, I really appreciate your help.
varT = ['Dublin New', 'Ireland New', 'Dublin Existing', 'Ireland Existing']
yearsT = df3.index.values.tolist()
dataT = {'var': varT,
         '2010': df3.iloc[0].values, '2011': df3.iloc[1].values,
         '2012': df3.iloc[2].values, '2013': df3.iloc[3].values,
         '2014': df3.iloc[4].values, '2015': df3.iloc[5].values,
         '2016': df3.iloc[6].values, '2017': df3.iloc[7].values,
         '2018': df3.iloc[8].values,
         }
sourceTs = ColumnDataSource(data=dataT)
pT1 = figure(y_range=varT, x_range=(0, df3.values.max()), plot_width=450, title='Properties Transactions in Ireland', tools='pan, wheel_zoom, box_zoom, reset')
pT1.hbar(y=dodge('var', -0.4, range=pT1.y_range), right='2010', height=0.1, source=sourceTs, color='#440154', legend=value('2010'))
pT1.hbar(y=dodge('var', -0.3, range=pT1.y_range), right='2011', height=0.1, source=sourceTs, color='#46317E', legend=value('2011'))
pT1.hbar(y=dodge('var', -0.2, range=pT1.y_range), right='2012', height=0.1, source=sourceTs, color='#365A8C', legend=value('2012'))
pT1.hbar(y=dodge('var', -0.1, range=pT1.y_range), right='2013', height=0.1, source=sourceTs, color='#277E8E', legend=value('2013'))
pT1.hbar(y=dodge('var', 0, range=pT1.y_range), right='2014', height=0.1, source=sourceTs, color='#1EA087', legend=value('2014'))
pT1.hbar(y=dodge('var', 0.1, range=pT1.y_range), right='2015', height=0.1, source=sourceTs, color='#49C16D', legend=value('2015'))
pT1.hbar(y=dodge('var', 0.2, range=pT1.y_range), right='2016', height=0.1, source=sourceTs, color='#9DD93A', legend=value('2016'))
pT1.hbar(y=dodge('var', 0.3, range=pT1.y_range), right='2017', height=0.1, source=sourceTs, color='#FDE724', legend=value('2017'))
pT1.hbar(y=dodge('var', 0.4, range=pT1.y_range), right='2018', height=0.1, source=sourceTs, color='#AADB32', legend=value('2018'))
pT1.legend.location='bottom_right'
#pT1.y_range.range_padding = 0.1
pT1.grid.grid_line_color = None
tick_labels_pt = {'10000':'10K','20000':'20K','30000':'30K','40000':'40K','50000':'50K'}
pT1.xaxis.major_label_overrides = tick_labels_pt
pT1.legend.background_fill_alpha=None
pT1.legend.border_line_alpha=0
pT1.legend.label_text_font_size = "11px"
pT1.legend.click_policy="hide"
pT1.title.text_font_size = '15px'
pT1.axis.major_label_text_font_style = 'bold'
#pT1.xaxis.major_label_text_font_style = 'bold'
pT1.toolbar.autohide = True
show(pT1)
Here is the outcome:
With the axes reversed:
varpti = ['Dublin New', 'Ireland New', 'Dublin Existing', 'Ireland Existing']
# the values of the y axis have to be strings
yearspti = '2010', '2011', '2012', '2013', '2014', '2015', '2016', '2017', '2018'  # df3.index.values.tolist()
datapti = {'years': yearspti,
           'Dublin New': df3['Dublin New'].values,
           'Ireland New': df3['Ireland New'].values,
           'Dublin Existing': df3['Dublin Existing'].values,
           'Ireland Existing': df3['Ireland Existing'].values
           }
sourcepti = ColumnDataSource(data=datapti)
pti = figure(y_range=yearspti, x_range=(0, df3.values.max()), plot_height=500, plot_width=450, title='Properties Transactions in Ireland', tools='pan, wheel_zoom, box_zoom, reset')
pti.hbar(y=dodge('years', -0.2, range=pti.y_range), right='Dublin New', height=0.1, source=sourcepti, color='#440154', legend=value('Dublin New'))
pti.hbar(y=dodge('years', 0, range=pti.y_range), right='Ireland New', height=0.1, source=sourcepti, color='#30678D', legend=value('Ireland New'))
pti.hbar(y=dodge('years', 0.2, range=pti.y_range), right='Dublin Existing', height=0.1, source=sourcepti, color='#35B778', legend=value('Dublin Existing'))
pti.hbar(y=dodge('years', 0.4, range=pti.y_range), right='Ireland Existing', height=0.1, source=sourcepti, color='#FDE724', legend=value('Ireland Existing'))

Syntax for interp2d or RectBivariateSpline

I have a data set of points, logR, logT, and logX, where X is a function of R and T. It's only a data set; I have no defined function for X. The data is listed in a table, where logR corresponds to columns and logT corresponds to rows. I am attempting to use an interpolation function to evaluate this grid at two inputs of logR and logT. I found my situation most related to this post:
How to pass arrays into Scipy Interpolate RectBivariateSpline?
But I could not arrange things so my function could be evaluated at my inputs. Here are my attempts:
import numpy as np
from scipy.interpolate import RectBivariateSpline, interp2d
op_r = np.array([1e-8, 3.1622e-8, 1e-7, 3.1622e-7, 1e-6, 3.1622e-6, 1e-5,
3.1622e-5, 1e-4, 3.1622e-4, 1e-3, 3.1622e-3, 1e-2, 3.1622e-2, 0.1, .31622, 1,
3.1622, 10])
op_T = np.array([17782.794, 19952.623, 22387.211, 25118.864, 28183.829,
31622.777, 35481.339, 39810.717, 44668.359, 50118.723, 56234.133, 63095.734,
79432.823, 89125.094])
log_op_val = np.array([[-0.598, -0.593, -0.583, -0.568, -0.539, -0.477,
-0.353, -0.142, 0.168, 0.558, 0.990, 1.443, 1.915, 2.407, 2.866, 3.239, 3.517,
3.725, 3.896], [-0.597, -0.592, -0.580, -0.561, -0.532, -0.474, -0.362,
-0.165, 0.138, 0.539, 1.001, 1.476, 1.942, 2.426, 2.912, 3.352, 3.702, 3.968,
4.175], [-0.588, -0.588, -0.578, -0.555, -0.520, -0.462, -0.357, -0.171,
0.124, 0.529, 1.009, 1.507, 2.001, 2.487, 2.979, 3.453, 3.856, 4.176, 4.422],
[-0.545, -0.559, -0.563, -0.546, -0.506, -0.442, -0.338, -0.159, 0.132, 0.538,
1.015, 1.525, 2.051, 2.565, 3.072, 3.563, 3.996, 4.356, 4.634], [-0.520,
-0.521, -0.519, -0.509, -0.475, -0.409, -0.301, -0.122, 0.167, 0.571, 1.052,
1.570, 2.106, 2.642, 3.176, 3.684, 4.136, 4.517, 4.822], [-0.518, -0.514,
-0.504, -0.478, -0.425, -0.344, -0.232, -0.056, 0.226, 0.629, 1.111, 1.631,
2.169, 2.719, 3.276, 3.804, 4.275, 4.672, 4.990], [-0.517, -0.513, -0.504,
-0.479, -0.417, -0.297, -0.129, 0.074, 0.353, 0.734, 1.202, 1.715, 2.250,
2.800, 3.364, 3.907, 4.394, 4.802, 5.127], [-0.518, -0.514, -0.505, -0.484,
-0.429, -0.311, -0.104, 0.185, 0.521, 0.894, 1.329, 1.818, 2.341, 2.883,
3.441, 3.986, 4.481, 4.894, 5.218], [-0.517, -0.514, -0.507, -0.490, -0.443,
-0.337, -0.142, 0.169, 0.588, 1.039, 1.480, 1.936, 2.431, 2.955, 3.496, 4.031,
4.521, 4.934, 5.253], [-0.516, -0.513, -0.507, -0.492, -0.453, -0.361, -0.184,
0.103, 0.510, 1.009, 1.531, 2.022, 2.502, 3.002, 3.519, 4.035, 4.513, 4.920,
5.235], [-0.515, -0.511, -0.506, -0.493, -0.460, -0.381, -0.225, 0.036, 0.409,
0.877, 1.415, 1.973, 2.502, 3.005, 3.505, 4.002, 4.468, 4.868, 5.183],
[-0.515, -0.511, -0.503, -0.490, -0.462, -0.394, -0.257, -0.022, 0.321, 0.759,
1.269, 1.827, 2.403, 2.949, 3.458, 3.948, 4.405, 4.802, 5.113], [-0.516,
-0.512, -0.502, -0.487, -0.460, -0.400, -0.279, -0.066, 0.254, 0.672, 1.164,
1.701, 2.278, 2.851, 3.388, 3.889, 4.347, 4.741, 5.047], [-0.517, -0.512,
-0.503, -0.485, -0.454, -0.397, -0.287, -0.092, 0.211, 0.620, 1.101, 1.628,
2.190, 2.762, 3.322, 3.841, 4.305, 4.695, 4.989], [-0.516, -0.512, -0.503,
-0.484, -0.449, -0.388, -0.283, -0.099, 0.192, 0.596, 1.071, 1.596, 2.148,
2.714, 3.281, 3.811, 4.280, 4.661, 4.937]])
T_1a = 22100.
R_a = rho_ta /(((T_1a)/(1e6))**3)
gri_chi_a = RectBivariateSpline(op_r, op_T, op_val)
chi_a = RectBivariateSpline(R_a, T_1a)
print chi_a
And this is the error I get:
Traceback (most recent call last):
  File "model.py", line 279, in <module>
    gri_chi_a = RectBivariateSpline(op_r, op_T, op_val)
  File "/System/Library/Frameworks/Python.framework/Versions/2.7/Extras/lib/python/scipy/interpolate/fitpack2.py", line 882, in __init__
    raise TypeError('x dimension of z must have same number of '
TypeError: x dimension of z must have same number of elements as x
I get the same error if I use the interp2d function. Any help would be appreciated.
Copy-n-pasting your arrays I get:
In [391]: op_r.shape
Out[391]: (19,)
In [393]: op_T.shape
Out[393]: (14,)
In [395]: log_op_val.shape
Out[395]: (15, 19)
15 does not equal 14 or 19!
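Once the table and the axes agree, the call itself is straightforward; a minimal sketch, assuming log_op_val has been trimmed or corrected so that its shape is (len(op_T), len(op_r)), i.e. one row per temperature and one column per R value:
from scipy.interpolate import RectBivariateSpline

# x must index the rows of z and y its columns, so op_T comes first here
spline = RectBivariateSpline(op_T, op_r, log_op_val)

# evaluate the fitted surface at a single (T, R) point
chi_a = spline(T_1a, R_a)
print(chi_a)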
