memory leak in keras while training a GAN - python

I am trying to train a GAN using Keras; the problem is that I keep filling up the RAM...
This is the code I use for training:
import gc

cont = 0
while cont < 20:
    cont += 1
    img_to_train_discr = image_generator(8)
    # it returns a tuple (image, 0/1)
    discr.train_on_batch(img_to_train_discr[0], img_to_train_discr[1])
    img_to_train_gan = image_generator_for_gan(8)
    gan.train_on_batch(img_to_train_gan[0], img_to_train_gan[1])
    found_objects = gc.get_objects()
Both .fit and .train_on_batch show an increase in memory use over the epochs.
I included gc.get_objects() because I wanted to look into which elements were not being deleted.
I iterated through the list found_objects and found the probable cause of the problem:
it was keeping the input values...
With .fit, gc.get_objects() showed that some tensors were being retained.
The following was found while using .fit on gan:
tf.Tensor(
[[[[ 0.8039216 0.8039216 0.8039216 ]
[ 0.77254903 0.77254903 0.77254903]
[ 0.7647059 0.7647059 0.7647059 ]
...
[ 0.9843137 0.9843137 0.9843137 ]
[ 0.9843137 0.9843137 0.9843137 ]
[ 0.9843137 0.9843137 0.9843137 ]]
[[ 0.14509805 0.14509805 0.14509805]
[-0.00392157 -0.00392157 -0.00392157]
[-0.19215687 -0.19215687 -0.19215687]
...
[ 0.9843137 0.9843137 0.9843137 ]
[ 0.9843137 0.9843137 0.9843137 ]
[ 0.9843137 0.9843137 0.9843137 ]]
[[-0.37254903 -0.37254903 -0.37254903]
[-0.34901962 -0.34901962 -0.34901962]
[-0.29411766 -0.29411766 -0.29411766]
...
[ 0.9843137 0.9843137 0.9843137 ]
[ 0.9843137 0.9843137 0.9843137 ]
[ 0.9843137 0.9843137 0.9843137 ]]
...
[[-0.99215686 -0.99215686 -0.99215686]
[-1. -1. -1. ]
[-0.9843137 -0.9843137 -0.9843137 ]
...
[-0.7019608 -0.7019608 -0.7019608 ]
[-0.81960785 -0.81960785 -0.81960785]
[-0.40392157 -0.40392157 -0.40392157]]
[[-0.8352941 -0.8352941 -0.8352941 ]
[-0.9843137 -0.9843137 -0.9843137 ]
[-0.9529412 -0.9529412 -0.9529412 ]
...
[-0.5921569 -0.5921569 -0.5921569 ]
[-0.77254903 -0.77254903 -0.77254903]
[-0.42745098 -0.42745098 -0.42745098]]
[[-0.654902 -0.654902 -0.654902 ]
[-0.90588236 -0.90588236 -0.90588236]
[-0.8666667 -0.8666667 -0.8666667 ]
...
[-0.77254903 -0.77254903 -0.77254903]
[-0.8509804 -0.8509804 -0.8509804 ]
[-0.5529412 -0.5529412 -0.5529412 ]]]
[[[ 0.5764706 0.5764706 0.5764706 ]
[ 0.5921569 0.5921569 0.5921569 ]
[ 0.60784316 0.60784316 0.60784316]
...
[ 0.5372549 0.5372549 0.5372549 ]
[ 0.5058824 0.5058824 0.5058824 ]
[ 0.49803922 0.49803922 0.49803922]]
[[ 0.58431375 0.58431375 0.58431375]
[ 0.6 0.6 0.6 ]
[ 0.6156863 0.6156863 0.6156863 ]
...
[ 0.5294118 0.5294118 0.5294118 ]
[ 0.5058824 0.5058824 0.5058824 ]
[ 0.49803922 0.49803922 0.49803922]]
[[ 0.6 0.6 0.6 ]
[ 0.60784316 0.60784316 0.60784316]
[ 0.6156863 0.6156863 0.6156863 ]
...
[ 0.5294118 0.5294118 0.5294118 ]
[ 0.5294118 0.5294118 0.5294118 ]
[ 0.5137255 0.5137255 0.5137255 ]]
...
[[-0.8901961 -0.8901961 -0.8901961 ]
[-0.8352941 -0.8352941 -0.8352941 ]
[-0.6784314 -0.6784314 -0.6784314 ]
...
[-0.99215686 -0.99215686 -0.99215686]
[-1. -1. -1. ]
[-1. -1. -1. ]]
[[-0.9137255 -0.9137255 -0.9137255 ]
[-0.8901961 -0.8901961 -0.8901961 ]
[-0.56078434 -0.56078434 -0.56078434]
...
[-0.99215686 -0.99215686 -0.99215686]
[-1. -1. -1. ]
[-1. -1. -1. ]]
[[-0.77254903 -0.77254903 -0.77254903]
[-0.75686276 -0.75686276 -0.75686276]
[-0.7411765 -0.7411765 -0.7411765 ]
...
[-1. -1. -1. ]
[-1. -1. -1. ]
[-1. -1. -1. ]]]
[[[-0.94509804 -0.94509804 -0.94509804]
[-0.88235295 -0.88235295 -0.88235295]
[-0.8117647 -0.8117647 -0.8117647 ]
...
[-0.9372549 -0.9372549 -0.9372549 ]
[-0.8745098 -0.8745098 -0.8745098 ]
[-0.9372549 -0.9372549 -0.9372549 ]]
[[-0.9607843 -0.9607843 -0.9607843 ]
[-0.94509804 -0.94509804 -0.94509804]
[-0.7647059 -0.7647059 -0.7647059 ]
...
[-0.9529412 -0.9529412 -0.9529412 ]
[-0.8980392 -0.8980392 -0.8980392 ]
[-0.9372549 -0.9372549 -0.9372549 ]]
[[-0.9372549 -0.9372549 -0.9372549 ]
[-0.9607843 -0.9607843 -0.9607843 ]
[-0.7411765 -0.7411765 -0.7411765 ]
...
[-0.9607843 -0.9607843 -0.9607843 ]
[-0.92156863 -0.92156863 -0.92156863]
[-0.9137255 -0.9137255 -0.9137255 ]]
...
[[ 0.10588235 0.10588235 0.10588235]
[ 0.10588235 0.10588235 0.10588235]
[-0.01176471 -0.01176471 -0.01176471]
...
[-0.19215687 -0.19215687 -0.19215687]
[-0.23921569 -0.23921569 -0.23921569]
[-0.19215687 -0.19215687 -0.19215687]]
[[ 0.09019608 0.09019608 0.09019608]
[ 0.11372549 0.11372549 0.11372549]
[ 0.13725491 0.13725491 0.13725491]
...
[ 0.01176471 0.01176471 0.01176471]
[-0.05882353 -0.05882353 -0.05882353]
[-0.07450981 -0.07450981 -0.07450981]]
[[-0.08235294 -0.08235294 -0.08235294]
[-0.15294118 -0.15294118 -0.15294118]
[-0.09803922 -0.09803922 -0.09803922]
...
[-0.15294118 -0.15294118 -0.15294118]
[-0.01176471 -0.01176471 -0.01176471]
[-0.03529412 -0.03529412 -0.03529412]]]
...
[[[-0.54509807 -0.54509807 -0.54509807]
[-0.54509807 -0.54509807 -0.54509807]
[-0.4117647 -0.4117647 -0.4117647 ]
...
[-0.3647059 -0.3647059 -0.3647059 ]
[ 0.37254903 0.37254903 0.37254903]
[ 0.38039216 0.38039216 0.38039216]]
[[-0.38039216 -0.38039216 -0.38039216]
[-0.14509805 -0.14509805 -0.14509805]
[-0.11372549 -0.11372549 -0.11372549]
...
[-0.3882353 -0.3882353 -0.3882353 ]
[-0.21568628 -0.21568628 -0.21568628]
[ 0.16862746 0.16862746 0.16862746]]
[[-0.06666667 -0.06666667 -0.06666667]
[ 0.06666667 0.06666667 0.06666667]
[-0.28627452 -0.28627452 -0.28627452]
...
[ 0.38039216 0.38039216 0.38039216]
[-0.44313726 -0.44313726 -0.44313726]
[ 0.21568628 0.21568628 0.21568628]]
...
[[ 0.21568628 0.21568628 0.21568628]
[ 0.06666667 0.06666667 0.06666667]
[-0.04313726 -0.04313726 -0.04313726]
...
[-0.60784316 -0.60784316 -0.60784316]
[-0.6156863 -0.6156863 -0.6156863 ]
[-0.5686275 -0.5686275 -0.5686275 ]]
[[ 0.31764707 0.31764707 0.31764707]
[ 0.10588235 0.10588235 0.10588235]
[-0.2784314 -0.2784314 -0.2784314 ]
...
[-0.42745098 -0.42745098 -0.42745098]
[-0.4509804 -0.4509804 -0.4509804 ]
[-0.54509807 -0.54509807 -0.54509807]]
[[ 0.12941177 0.12941177 0.12941177]
[-0.08235294 -0.08235294 -0.08235294]
[-0.04313726 -0.04313726 -0.04313726]
...
[-0.79607844 -0.79607844 -0.79607844]
[-0.5686275 -0.5686275 -0.5686275 ]
[-0.2 -0.2 -0.2 ]]]
[[[-0.9529412 -0.9529412 -0.9529412 ]
[-0.79607844 -0.79607844 -0.79607844]
[-0.6156863 -0.6156863 -0.6156863 ]
...
[-0.44313726 -0.44313726 -0.44313726]
[-0.79607844 -0.79607844 -0.79607844]
[-0.73333335 -0.73333335 -0.73333335]]
[[-1. -1. -1. ]
[-0.90588236 -0.90588236 -0.90588236]
[-0.6313726 -0.6313726 -0.6313726 ]
...
[-0.3019608 -0.3019608 -0.3019608 ]
[-0.8352941 -0.8352941 -0.8352941 ]
[-0.7647059 -0.7647059 -0.7647059 ]]
[[-1. -1. -1. ]
[-0.99215686 -0.99215686 -0.99215686]
[-0.8039216 -0.8039216 -0.8039216 ]
...
[-0.29411766 -0.29411766 -0.29411766]
[-0.8117647 -0.8117647 -0.8117647 ]
[-0.6862745 -0.6862745 -0.6862745 ]]
...
[[-0.90588236 -0.90588236 -0.90588236]
[-0.81960785 -0.81960785 -0.81960785]
[-0.8117647 -0.8117647 -0.8117647 ]
...
[-0.7647059 -0.7647059 -0.7647059 ]
[-0.88235295 -0.88235295 -0.88235295]
[-0.9137255 -0.9137255 -0.9137255 ]]
[[-1. -1. -1. ]
[-0.9764706 -0.9764706 -0.9764706 ]
[-0.9529412 -0.9529412 -0.9529412 ]
...
[-0.8117647 -0.8117647 -0.8117647 ]
[-0.8352941 -0.8352941 -0.8352941 ]
[-0.8509804 -0.8509804 -0.8509804 ]]
[[-0.6862745 -0.6862745 -0.6862745 ]
[-0.62352943 -0.62352943 -0.62352943]
[-0.7411765 -0.7411765 -0.7411765 ]
...
[-0.8117647 -0.8117647 -0.8117647 ]
[-0.77254903 -0.77254903 -0.77254903]
[-0.84313726 -0.84313726 -0.84313726]]]
[[[-0.69411767 -0.69411767 -0.69411767]
[-0.6784314 -0.6784314 -0.6784314 ]
[-0.6627451 -0.6627451 -0.6627451 ]
...
[-0.8509804 -0.8509804 -0.8509804 ]
[-0.8509804 -0.8509804 -0.8509804 ]
[-0.8509804 -0.8509804 -0.8509804 ]]
[[-0.70980394 -0.70980394 -0.70980394]
[-0.69411767 -0.69411767 -0.69411767]
[-0.67058825 -0.67058825 -0.67058825]
...
[-0.8509804 -0.8509804 -0.8509804 ]
[-0.8509804 -0.8509804 -0.8509804 ]
[-0.8509804 -0.8509804 -0.8509804 ]]
[[-0.7176471 -0.7176471 -0.7176471 ]
[-0.69411767 -0.69411767 -0.69411767]
[-0.6784314 -0.6784314 -0.6784314 ]
...
[-0.8509804 -0.8509804 -0.8509804 ]
[-0.8509804 -0.8509804 -0.8509804 ]
[-0.8509804 -0.8509804 -0.8509804 ]]
...
[[-0.6313726 -0.6313726 -0.6313726 ]
[-0.62352943 -0.62352943 -0.62352943]
[-0.62352943 -0.62352943 -0.62352943]
...
[-0.8509804 -0.8509804 -0.8509804 ]
[-0.8666667 -0.8666667 -0.8666667 ]
[-0.8745098 -0.8745098 -0.8745098 ]]
[[-0.6156863 -0.6156863 -0.6156863 ]
[-0.6156863 -0.6156863 -0.6156863 ]
[-0.60784316 -0.60784316 -0.60784316]
...
[-0.8509804 -0.8509804 -0.8509804 ]
[-0.8666667 -0.8666667 -0.8666667 ]
[-0.8745098 -0.8745098 -0.8745098 ]]
[[-0.6156863 -0.6156863 -0.6156863 ]
[-0.6156863 -0.6156863 -0.6156863 ]
[-0.60784316 -0.60784316 -0.60784316]
...
[-0.8509804 -0.8509804 -0.8509804 ]
[-0.8666667 -0.8666667 -0.8666667 ]
[-0.8666667 -0.8666667 -0.8666667 ]]]], shape=(16, 256, 256, 3), dtype=float32)
692453
tf.Tensor(
[[1.]
[1.]
[1.]
[1.]
[1.]
[1.]
[1.]
[1.]
[1.]
[1.]
[1.]
[1.]
[1.]
[1.]
[1.]
[1.]], shape=(16, 1), dtype=float32)
And this is with training only discr:
[<tf.Tensor: shape=(32, 256, 256, 3), dtype=float32, numpy=
array([[[[ 6.94478676e-03, -2.90532247e-03, 7.25293392e-03],
[ 1.20958146e-02, -1.07863108e-02, 1.04020014e-02],
[ 1.69709120e-02, -2.54366547e-02, 1.98477823e-02],
...,
[-4.30019619e-03, -8.35454836e-03, -2.21172324e-03],
[-4.14159754e-03, -1.14777510e-03, -1.21566129e-03],
[ 1.36303401e-03, 6.04543777e-04, -1.35964795e-03]],
[[ 2.50564199e-02, -6.16334006e-03, 1.92856099e-02],
[ 3.54985110e-02, -1.79717932e-02, 2.98348404e-02],
[ 2.62675621e-02, -1.90307051e-02, 2.65689045e-02],
...,
[-1.57777814e-03, -6.14548009e-03, 5.52629726e-03],
[ 3.56815499e-03, -6.90740068e-03, -7.03096506e-04],
[ 9.26138775e-04, -1.85872870e-03, 3.02374363e-04]],
[[ 2.74749734e-02, -1.49438502e-02, 2.80325040e-02],
[ 5.10839783e-02, -1.75167620e-02, 2.70463582e-02],
[ 3.75709981e-02, -2.34040022e-02, 2.50053518e-02],
...,
[ 8.94943625e-03, -1.73010174e-02, 1.82440877e-02],
[ 4.39342530e-03, -1.31681236e-02, 8.13111849e-03],
[ 4.34517069e-03, -4.70215734e-03, -1.63908151e-03]],
...,
[[ 6.95652468e-03, -3.63357402e-02, 4.07949500e-02],
[ 4.13575359e-02, -4.91991192e-02, 3.21018584e-02],
[ 4.74223010e-02, -7.47634992e-02, 2.35863868e-02],
...,
[ 8.26232806e-02, -6.68739378e-02, -6.99709053e-04],
[ 7.23878071e-02, -5.69532141e-02, -4.85424437e-02],
[ 2.66422518e-02, -3.07060555e-02, -5.80600202e-02]],
[[ 4.50124545e-03, -3.43432538e-02, 3.71103324e-02],
[ 4.32977863e-02, -4.92802262e-02, 3.27052958e-02],
[ 4.84924354e-02, -6.66223019e-02, 2.72663124e-02],
...,
[ 7.71504492e-02, -7.50505701e-02, 2.73561082e-03],
[ 8.03824887e-02, -6.13293871e-02, -3.52067165e-02],
[ 2.08804533e-02, -2.86836233e-02, -5.02964184e-02]],
[[-3.90984351e-03, -2.32026614e-02, 2.67444160e-02],
[ 1.65205617e-02, -3.42688598e-02, 1.98613424e-02],
[ 2.70076040e-02, -5.75522073e-02, 1.99076571e-02],
...,
[ 5.09059504e-02, -5.42278290e-02, 1.30892009e-03],
[ 6.47045597e-02, -3.80333811e-02, -2.18609013e-02],
[ 3.41063663e-02, -1.05063524e-02, -3.07822768e-02]]],
[[[ 1.00000000e+00, 1.00000000e+00, 9.92156863e-01],
[ 1.00000000e+00, 9.92156863e-01, 9.84313726e-01],
[ 9.92156863e-01, 9.76470590e-01, 9.76470590e-01],
...,
[ 9.76470590e-01, 1.00000000e+00, 9.84313726e-01],
[ 9.92156863e-01, 9.84313726e-01, 1.00000000e+00],
[ 1.00000000e+00, 9.84313726e-01, 1.00000000e+00]],
[[ 9.92156863e-01, 1.00000000e+00, 9.84313726e-01],
[ 1.00000000e+00, 1.00000000e+00, 9.92156863e-01],
[ 9.92156863e-01, 9.92156863e-01, 9.92156863e-01],
...,
[ 9.84313726e-01, 1.00000000e+00, 9.92156863e-01],
[ 1.00000000e+00, 9.92156863e-01, 1.00000000e+00],
[ 1.00000000e+00, 9.84313726e-01, 1.00000000e+00]],
[[ 9.84313726e-01, 1.00000000e+00, 1.00000000e+00],
[ 9.29411769e-01, 9.45098042e-01, 9.45098042e-01],
[ 9.68627453e-01, 9.84313726e-01, 9.84313726e-01],
...,
[ 9.76470590e-01, 1.00000000e+00, 9.84313726e-01],
[ 1.00000000e+00, 9.84313726e-01, 1.00000000e+00],
[ 1.00000000e+00, 9.92156863e-01, 1.00000000e+00]],
...,
[[ 1.00000000e+00, 9.92156863e-01, 1.00000000e+00],
[ 1.00000000e+00, 9.92156863e-01, 1.00000000e+00],
[ 1.00000000e+00, 1.00000000e+00, 1.00000000e+00],
...,
[ 9.68627453e-01, 9.84313726e-01, 9.84313726e-01],
[ 1.00000000e+00, 1.00000000e+00, 1.00000000e+00],
[ 1.00000000e+00, 1.00000000e+00, 1.00000000e+00]],
[[ 9.84313726e-01, 1.00000000e+00, 1.00000000e+00],
[ 9.76470590e-01, 9.92156863e-01, 9.92156863e-01],
[ 9.84313726e-01, 1.00000000e+00, 1.00000000e+00],
...,
[ 9.76470590e-01, 9.92156863e-01, 9.92156863e-01],
[ 1.00000000e+00, 9.92156863e-01, 1.00000000e+00],
[ 1.00000000e+00, 9.92156863e-01, 1.00000000e+00]],
[[ 9.68627453e-01, 1.00000000e+00, 1.00000000e+00],
[ 9.60784316e-01, 1.00000000e+00, 9.92156863e-01],
[ 9.84313726e-01, 1.00000000e+00, 1.00000000e+00],
...,
[ 9.60784316e-01, 1.00000000e+00, 9.92156863e-01],
[ 1.00000000e+00, 9.92156863e-01, 1.00000000e+00],
[ 1.00000000e+00, 9.92156863e-01, 1.00000000e+00]]],
[[[-9.76470590e-01, -1.00000000e+00, -1.05882354e-01],
[-9.76470590e-01, -1.00000000e+00, -9.01960805e-02],
[-9.60784316e-01, -9.92156863e-01, -5.88235296e-02],
...,
[-7.45098069e-02, -4.74509805e-01, -2.07843140e-01],
[ 1.05882354e-01, -3.56862754e-01, -1.37254909e-01],
[ 2.54901975e-01, -2.54901975e-01, -5.88235296e-02]],
[[-9.76470590e-01, -1.00000000e+00, -7.45098069e-02],
[-9.84313726e-01, -1.00000000e+00, -6.66666701e-02],
[-9.84313726e-01, -9.92156863e-01, -5.09803928e-02],
...,
[-1.05882354e-01, -4.98039216e-01, -2.31372550e-01],
[ 7.45098069e-02, -3.80392164e-01, -1.60784319e-01],
[ 2.07843140e-01, -2.78431386e-01, -9.01960805e-02]],
[[-1.00000000e+00, -1.00000000e+00, -6.66666701e-02],
[-1.00000000e+00, -1.00000000e+00, -5.88235296e-02],
[-1.00000000e+00, -9.92156863e-01, -3.52941193e-02],
...,
[-2.00000003e-01, -5.52941203e-01, -2.94117659e-01],
[ 3.92156886e-03, -4.19607848e-01, -1.92156866e-01],
[ 1.52941182e-01, -3.01960796e-01, -1.05882354e-01]],
...,
[[-1.00000000e+00, -5.52941203e-01, -4.66666669e-01],
[-8.27450991e-01, -2.31372550e-01, -1.84313729e-01],
[-8.50980401e-01, -8.23529437e-02, -1.29411772e-01],
...,
[-8.50980401e-01, -7.25490212e-01, -4.98039216e-01],
[-7.17647076e-01, -5.52941203e-01, -3.33333343e-01],
[-1.00000000e+00, -8.27450991e-01, -6.07843161e-01]],
[[-1.00000000e+00, -5.13725519e-01, -4.11764711e-01],
[-8.35294127e-01, -2.47058824e-01, -1.84313729e-01],
[-9.05882359e-01, -1.68627456e-01, -2.07843140e-01],
...,
[-1.00000000e+00, -1.00000000e+00, -7.56862760e-01],
[-8.43137264e-01, -6.70588255e-01, -4.50980395e-01],
[-8.03921580e-01, -5.60784340e-01, -3.64705890e-01]],
[[-1.00000000e+00, -5.29411793e-01, -4.27450985e-01],
[-9.21568632e-01, -3.09803933e-01, -2.54901975e-01],
[-9.52941179e-01, -2.47058824e-01, -2.86274523e-01],
...,
[-1.00000000e+00, -9.68627453e-01, -6.94117665e-01],
[-1.00000000e+00, -9.29411769e-01, -7.09803939e-01],
[-9.29411769e-01, -6.62745118e-01, -4.74509805e-01]]],
...,
[[[-1.05062379e-02, -1.98420249e-02, 1.05182398e-02],
[-3.95061001e-02, -2.57582217e-02, 1.40950643e-02],
[-2.30170805e-02, -2.37071346e-02, -4.61883796e-03],
...,
[-2.45160554e-02, -9.46635101e-03, -6.07647886e-03],
[-3.07144760e-03, 2.74786772e-03, -6.80177147e-03],
[ 5.86585980e-03, 2.40193726e-03, 3.39358579e-04]],
[[ 3.22993868e-03, -1.12008387e-02, 3.77045646e-02],
[-9.38666333e-03, -3.21227647e-02, 2.93544959e-02],
[-1.12627428e-02, -1.63189527e-02, 4.86864848e-03],
...,
[-2.86157615e-02, -8.67746118e-03, -9.11490759e-04],
[-1.50391981e-02, -5.08068223e-03, -9.21393745e-03],
[ 6.01480622e-03, -8.89253570e-04, 5.72130177e-03]],
[[ 2.37215031e-02, 1.73019955e-03, 3.52669656e-02],
[ 2.20054798e-02, 3.41841788e-03, 2.78164726e-02],
[ 2.26932168e-02, 2.25211773e-02, -7.15107657e-03],
...,
[-9.92084946e-03, -7.83571042e-03, 5.36113139e-03],
[-3.63909150e-03, -2.15192046e-02, 1.81183417e-03],
[ 9.87425633e-03, -1.63576566e-02, 9.68800485e-03]],
...,
[[ 9.26712807e-03, -3.34203020e-02, 3.94128822e-02],
[ 4.19912934e-02, -4.55853753e-02, 3.37843001e-02],
[ 4.08300571e-02, -6.73395097e-02, 2.53548753e-02],
...,
[ 8.61984789e-02, -7.02210069e-02, -4.39706072e-03],
[ 6.94279298e-02, -5.77976443e-02, -4.75803465e-02],
[ 2.45513227e-02, -3.38402092e-02, -5.75863346e-02]],
[[ 6.77845301e-03, -3.54054347e-02, 3.67174037e-02],
[ 4.35878709e-02, -4.94687334e-02, 3.45391147e-02],
[ 4.71395329e-02, -7.13703632e-02, 2.63372287e-02],
...,
[ 8.29759017e-02, -7.53538832e-02, 1.60004944e-04],
[ 8.16767067e-02, -6.00483567e-02, -3.75034474e-02],
[ 1.97965931e-02, -3.06959040e-02, -5.22228405e-02]],
[[-2.94655445e-03, -1.86929759e-02, 2.33796220e-02],
[ 1.59196425e-02, -3.28605361e-02, 1.64255649e-02],
[ 2.53022909e-02, -4.75350842e-02, 1.15010655e-02],
...,
[ 5.44254147e-02, -5.55038191e-02, -1.54604076e-03],
[ 6.76389188e-02, -3.61473970e-02, -2.54233293e-02],
[ 3.42441052e-02, -9.63416602e-03, -3.30452174e-02]]],
[[[ 2.54648067e-02, -1.19450670e-02, 3.30261998e-02],
[ 4.23403606e-02, -4.13185284e-02, 3.81897315e-02],
[ 4.00563851e-02, -6.79321066e-02, 4.91125546e-02],
...,
[ 4.02044021e-02, -5.85264936e-02, 5.48310988e-02],
[ 2.70577967e-02, -4.31953967e-02, 3.57147492e-02],
[ 6.32039411e-03, -2.48100758e-02, -7.03164516e-03]],
[[ 3.70886363e-02, -2.01733522e-02, 6.05700798e-02],
[ 7.77267516e-02, -5.13126105e-02, 6.01464622e-02],
[ 8.56612101e-02, -8.36809576e-02, 7.61673301e-02],
...,
[ 8.45839083e-02, -4.61416878e-02, 6.01974353e-02],
[ 5.06575927e-02, -2.32018791e-02, 2.58594193e-02],
[ 1.53260147e-02, -1.76541489e-02, -2.82484554e-02]],
[[ 2.97332872e-02, -2.54155342e-02, 7.12449625e-02],
[ 9.13045332e-02, -6.03631884e-02, 7.43178874e-02],
[ 9.26255956e-02, -9.32793990e-02, 7.50018954e-02],
...,
[ 1.09376043e-01, -5.31297959e-02, 4.94755656e-02],
[ 7.15198442e-02, -3.02166399e-02, 1.11023467e-02],
[ 1.66346878e-02, -3.10882907e-02, -3.92567255e-02]],
...,
[[ 1.50115313e-02, -5.51447719e-02, 6.36151060e-02],
[ 7.06077367e-02, -7.18016624e-02, 5.44297658e-02],
[ 6.90411255e-02, -1.04166776e-01, 3.75158228e-02],
...,
[ 9.69356075e-02, -7.91200697e-02, -8.72911653e-04],
[ 8.63938630e-02, -6.63577765e-02, -5.73743023e-02],
[ 3.16326991e-02, -3.84405665e-02, -6.70467839e-02]],
[[ 1.09571004e-02, -5.76814674e-02, 5.85661493e-02],
[ 7.11029768e-02, -7.61615336e-02, 5.38719222e-02],
[ 7.62823075e-02, -1.09212406e-01, 3.92470434e-02],
...,
[ 9.16330442e-02, -8.96104947e-02, 4.14223457e-03],
[ 9.69443470e-02, -7.11727366e-02, -4.28451747e-02],
[ 2.50954758e-02, -3.50896828e-02, -6.06248528e-02]],
[[-4.41576634e-03, -3.03974133e-02, 3.74333374e-02],
[ 2.65656877e-02, -5.15482500e-02, 2.55387109e-02],
[ 4.18888927e-02, -7.42964670e-02, 1.65963285e-02],
...,
[ 5.96264340e-02, -6.26873225e-02, 4.92919178e-04],
[ 7.69837126e-02, -4.49479558e-02, -2.73446627e-02],
[ 4.12025116e-02, -1.18885487e-02, -3.75647955e-02]]],
[[[ 2.21067071e-02, -1.11255171e-02, 2.79338863e-02],
[ 3.44552584e-02, -3.55523229e-02, 2.96750609e-02],
[ 2.81813368e-02, -5.43026328e-02, 3.58022302e-02],
...,
[ 1.64286569e-02, -2.56849099e-02, 1.86677016e-02],
[ 8.29113834e-03, -2.09341552e-02, 1.19914617e-02],
[ 9.81146120e-04, -9.67385620e-03, -4.16056439e-03]],
[[ 3.28787938e-02, -1.61599461e-02, 5.14922813e-02],
[ 6.91715330e-02, -4.29286025e-02, 4.98180874e-02],
[ 6.63172528e-02, -6.69079795e-02, 5.72132170e-02],
...,
[ 3.04000657e-02, -2.18527243e-02, 1.85649637e-02],
[ 1.70135573e-02, -1.09965997e-02, 8.97353794e-03],
[ 4.53805597e-03, -5.36913285e-03, -1.20095760e-02]],
[[ 2.78143492e-02, -1.96416155e-02, 6.09582961e-02],
[ 7.77031854e-02, -4.75680716e-02, 6.22547753e-02],
[ 7.32004791e-02, -7.40943998e-02, 5.90784885e-02],
...,
[ 3.82998213e-02, -2.72356309e-02, 1.36684459e-02],
[ 2.71414351e-02, -1.12283370e-02, 6.70646504e-03],
[ 9.00707394e-03, -1.34321274e-02, -1.47624528e-02]],
...,
[[ 1.46386446e-02, -5.67819588e-02, 6.60154596e-02],
[ 7.21150413e-02, -7.36338943e-02, 5.58988042e-02],
[ 7.18126446e-02, -1.06876150e-01, 3.95014845e-02],
...,
[ 8.86510685e-02, -7.16847479e-02, -5.57619939e-03],
[ 7.46077448e-02, -6.13973439e-02, -5.14714606e-02],
[ 2.69034542e-02, -3.52577232e-02, -6.09280579e-02]],
[[ 1.07796416e-02, -5.97048812e-02, 6.08111545e-02],
[ 7.26786703e-02, -7.89055601e-02, 5.58755845e-02],
[ 7.88586289e-02, -1.12460330e-01, 4.03497480e-02],
...,
[ 8.51700082e-02, -8.18825141e-02, 6.48484449e-04],
[ 8.65846053e-02, -6.51570857e-02, -3.90671641e-02],
[ 2.17908174e-02, -3.32889743e-02, -5.32799624e-02]],
[[-5.24961576e-03, -3.11195180e-02, 3.85811515e-02],
[ 2.61344314e-02, -5.40103428e-02, 2.65447777e-02],
[ 4.28958423e-02, -7.67953098e-02, 1.69624444e-02],
...,
[ 5.64608611e-02, -5.83225712e-02, -9.28662426e-04],
[ 7.05104247e-02, -3.99363190e-02, -2.56322399e-02],
[ 3.73998210e-02, -1.12367878e-02, -3.46269831e-02]]]],
dtype=float32)>, <tf.Tensor: shape=(32,), dtype=int64, numpy=array([0, 1, 1, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 0, 1, 1, 1, 1, 0,
0, 1, 0, 1, 0, 1, 0, 0, 0, 0])>]
The same thing happens if I use .fit instead of train_on_batch, but .fit is much faster because I am using a generator.
Moreover, I am confident that the memory leak is inside fit/train_on_batch, since the inputs I pass are NumPy arrays, not tensors.
It appears that the memory does not increase linearly but in spikes.
I am using a pretrained resnet_v2.ResNet50V2 in discr.
PS: I am not 100% sure of what is happening with train_on_batch, because while printing the list of elements returned by gc.get_objects() it runs into difficulties and the RAM starts filling until it crashes.

There is a known issue where a memory leak appears in TF 2.x Keras when calling the network repeatedly in a loop.
I have come across several suggestions online:
Call tf.keras.backend.clear_session() and possibly gc.collect() every now and then in the loop (via this question).
Wrap your train_on_batch or model call in a function with the @tf.function decorator (this worked for me).
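For illustration, here is a minimal sketch of the first suggestion applied to the training loop from the question (discr, gan, image_generator and image_generator_for_gan are the asker's objects and are not defined here; clearing every 10 iterations is an arbitrary choice):
import gc
import tensorflow as tf

cont = 0
while cont < 20:
    cont += 1
    img_to_train_discr = image_generator(8)
    discr.train_on_batch(img_to_train_discr[0], img_to_train_discr[1])
    img_to_train_gan = image_generator_for_gan(8)
    gan.train_on_batch(img_to_train_gan[0], img_to_train_gan[1])
    # Periodically drop Keras' accumulated global state and force a
    # garbage-collection pass, as suggested above.
    if cont % 10 == 0:
        tf.keras.backend.clear_session()
        gc.collect()
The idea behind the second suggestion is that a function decorated with @tf.function is traced once and reused, instead of accumulating new graph state on every Python-level call.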

Related

pytorch tensor of tensors to a tensor

When I print a torch tensor, I get the below output. How could I get that tensor without [] for the inner elements?
I printed the type of the first element and it returns <class 'torch.Tensor'>, so this tensor seems to be a tensor of tensors... How could I convert it to a tensor of numbers?
tensor([[-5.6117e-01],
[ 3.5726e-01],
[-2.5853e-01],
[-4.8641e-01],
[-1.0581e-01],
[-1.8322e-01],
[-1.2732e+00],
[-5.9760e-02],
[ 1.2819e-01],
[ 6.3894e-02],
[-9.1817e-01],
[-1.6539e-01],
[-1.1471e+00],
[ 1.9666e-01],
[-6.3297e-01],
[-4.0876e-01],
[-2.4590e-02],
[ 2.7065e-01],
[ 3.5308e-01],
[-4.6348e-01],
[-4.1755e-01],
[-1.1554e-01],
[-4.2062e-01],
[ 1.4067e-01],
[-2.9788e-01],
[-7.4582e-02],
[-5.3751e-01],
[ 1.1344e-01],
[-2.6100e-01],
[ 2.6951e-02],
[-5.0437e-02],
[-1.9163e-01],
[-3.3893e-02],
[-5.9640e-01],
[-1.1574e-01],
[ 1.4613e-01],
[ 1.2263e-01],
[-1.5566e-01],
[ 1.4740e-01],
[-9.9924e-01],
[ 2.0878e-01],
[-2.0074e-01],
[ 7.8383e-02],
[ 7.4679e-02],
[-5.8065e-01],
[ 6.7777e-01],
[ 5.9879e-01],
[ 6.6301e-01],
[-4.7051e-01],
[-2.5468e-01],
[-2.7382e-01],
[ 1.7585e-01],
[ 3.6151e-01],
[-9.2532e-01],
[-1.6999e-01],
[ 8.4971e-02],
[-6.6083e-01],
[-3.1204e-02],
[ 6.3712e-01],
[-5.8580e-02],
[-7.7901e-04],
[-4.6792e-01],
[ 1.0796e-01],
[ 7.8766e-01],
[ 1.6809e-01],
[-7.0058e-01],
[-2.9299e-01],
[-8.2735e-02],
[ 2.0875e-01],
[-2.9426e-01],
[-7.6748e-02],
[-1.5762e-01],
[-5.7432e-01],
[-5.2042e-01],
[-1.5152e-01],
[ 1.4119e+00],
[-1.5752e-01],
[-3.0565e-01],
[-5.1378e-01],
[-5.8924e-01],
[-1.0163e+00],
[-2.2021e-01],
[ 2.9112e-02],
[ 1.8521e-01],
[ 6.2814e-01],
[-6.8793e-01],
[ 2.1395e-02],
[ 5.7168e-01],
[ 9.0977e-01],
[ 3.8899e-01],
[ 3.0209e-01],
[ 2.4655e-01],
[-1.1688e-01],
[-5.9835e-02],
[ 3.6426e-02],
[-5.2782e-01],
[ 1.4604e+00],
[ 2.9685e-01],
[-2.4077e-01],
[ 1.0163e+00],
[ 6.9770e-01],
[-2.6183e-01],
[ 3.6770e-01],
[ 3.6535e-03],
[ 4.2364e-01],
[-5.4703e-01],
[ 8.9173e-02],
[-3.9032e-01],
[-5.9740e-01],
[ 3.7479e-02],
[ 3.0257e-01],
[ 8.2539e-02],
[-6.0559e-01],
[-4.3660e-01],
[-7.0624e-01],
[-5.0503e-01],
[-4.0929e-01],
[-2.3300e-01],
[ 2.0298e-01],
[-6.3697e-01],
[-1.2584e-01],
[ 5.6092e-02],
[ 5.0150e-02],
[-1.5358e-01],
[ 2.9248e-02],
[ 1.1180e-01],
[-1.5535e-01],
[ 1.1964e-01],
[-6.5698e-01],
[ 4.1923e-01],
[ 7.4044e-02],
[ 2.4536e-02],
[ 3.2647e-01],
[-7.7464e-01],
[ 3.9898e-01],
[-2.5777e-01],
[ 8.5569e-02],
[-4.0305e-01],
[ 5.4463e-01],
[-3.4124e-01],
[-4.0789e-01],
[ 4.2093e-01],
[-3.8487e-01],
[-4.0491e-01],
[-2.1539e-01],
[-1.7979e-02],
[ 3.2492e-01],
[-2.0894e-01],
[ 2.5629e-01],
[ 9.6046e-01]], device='cuda:0', grad_fn=<AddmmBackward0>)
That tensor has a singleton dimension (i.e. it's of shape [N, 1]). Just squeeze that dimension or pick the 0th element:
In [1]: import torch
In [2]: a = torch.zeros([10,1])
In [3]: a
Out[3]:
tensor([[0.],
[0.],
[0.],
[0.],
[0.],
[0.],
[0.],
[0.],
[0.],
[0.]])
In [4]: a[:,0]
Out[4]: tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0.])
In [5]: a.squeeze(1)
Out[5]: tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0.])
If I understand your question correctly, you can use the flatten method.
Input:
tvalue=torch.tensor([[-5.6117e-01],
[ 3.5726e-01],
[-2.5853e-01],
[-4.8641e-01],
[-1.0581e-01],
[-1.8322e-01],
[-1.2732e+00],
[-5.9760e-02],
[ 1.2819e-01],
[ 6.3894e-02],
[-9.1817e-01],
[-1.6539e-01],
[-1.1471e+00],
[ 1.9666e-01],
[-6.3297e-01],
[-4.0876e-01],
[-2.4590e-02],
[ 2.7065e-01],
[ 3.5308e-01],
[-4.6348e-01],
[-4.1755e-01],
[-1.1554e-01],
[-4.2062e-01],
[ 1.4067e-01],
[-2.9788e-01],
[-7.4582e-02],
[-5.3751e-01],
[ 1.1344e-01],
[-2.6100e-01],
[ 2.6951e-02],
[-5.0437e-02],
[-1.9163e-01],
[-3.3893e-02],
[-5.9640e-01],
[-1.1574e-01],
[ 1.4613e-01],
[ 1.2263e-01],
[-1.5566e-01],
[ 1.4740e-01],
[-9.9924e-01],
[ 2.0878e-01],
[-2.0074e-01],
[ 7.8383e-02],
[ 7.4679e-02],
[-5.8065e-01],
[ 6.7777e-01],
[ 5.9879e-01],
[ 6.6301e-01],
[-4.7051e-01],
[-2.5468e-01],
[-2.7382e-01],
[ 1.7585e-01],
[ 3.6151e-01],
[-9.2532e-01],
[-1.6999e-01],
[ 8.4971e-02],
[-6.6083e-01],
[-3.1204e-02],
[ 6.3712e-01],
[-5.8580e-02],
[-7.7901e-04],
[-4.6792e-01],
[ 1.0796e-01],
[ 7.8766e-01],
[ 1.6809e-01],
[-7.0058e-01],
[-2.9299e-01],
[-8.2735e-02],
[ 2.0875e-01],
[-2.9426e-01],
[-7.6748e-02],
[-1.5762e-01],
[-5.7432e-01],
[-5.2042e-01],
[-1.5152e-01],
[ 1.4119e+00],
[-1.5752e-01],
[-3.0565e-01],
[-5.1378e-01],
[-5.8924e-01],
[-1.0163e+00],
[-2.2021e-01],
[ 2.9112e-02],
[ 1.8521e-01],
[ 6.2814e-01],
[-6.8793e-01],
[ 2.1395e-02],
[ 5.7168e-01],
[ 9.0977e-01],
[ 3.8899e-01],
[ 3.0209e-01],
[ 2.4655e-01],
[-1.1688e-01],
[-5.9835e-02],
[ 3.6426e-02],
[-5.2782e-01],
[ 1.4604e+00],
[ 2.9685e-01],
[-2.4077e-01],
[ 1.0163e+00],
[ 6.9770e-01],
[-2.6183e-01],
[ 3.6770e-01],
[ 3.6535e-03],
[ 4.2364e-01],
[-5.4703e-01],
[ 8.9173e-02],
[-3.9032e-01],
[-5.9740e-01],
[ 3.7479e-02],
[ 3.0257e-01],
[ 8.2539e-02],
[-6.0559e-01],
[-4.3660e-01],
[-7.0624e-01],
[-5.0503e-01],
[-4.0929e-01],
[-2.3300e-01],
[ 2.0298e-01],
[-6.3697e-01],
[-1.2584e-01],
[ 5.6092e-02],
[ 5.0150e-02],
[-1.5358e-01],
[ 2.9248e-02],
[ 1.1180e-01],
[-1.5535e-01],
[ 1.1964e-01],
[-6.5698e-01],
[ 4.1923e-01],
[ 7.4044e-02],
[ 2.4536e-02],
[ 3.2647e-01],
[-7.7464e-01],
[ 3.9898e-01],
[-2.5777e-01],
[ 8.5569e-02],
[-4.0305e-01],
[ 5.4463e-01],
[-3.4124e-01],
[-4.0789e-01],
[ 4.2093e-01],
[-3.8487e-01],
[-4.0491e-01],
[-2.1539e-01],
[-1.7979e-02],
[ 3.2492e-01],
[-2.0894e-01],
[ 2.5629e-01],
[ 9.6046e-01]])
Output:
tvalue.flatten()
tensor([-5.6117e-01, 3.5726e-01, -2.5853e-01, -4.8641e-01, -1.0581e-01,
-1.8322e-01, -1.2732e+00, -5.9760e-02, 1.2819e-01, 6.3894e-02,
-9.1817e-01, -1.6539e-01, -1.1471e+00, 1.9666e-01, -6.3297e-01,
-4.0876e-01, -2.4590e-02, 2.7065e-01, 3.5308e-01, -4.6348e-01,
-4.1755e-01, -1.1554e-01, -4.2062e-01, 1.4067e-01, -2.9788e-01,
-7.4582e-02, -5.3751e-01, 1.1344e-01, -2.6100e-01, 2.6951e-02,
-5.0437e-02, -1.9163e-01, -3.3893e-02, -5.9640e-01, -1.1574e-01,
1.4613e-01, 1.2263e-01, -1.5566e-01, 1.4740e-01, -9.9924e-01,
2.0878e-01, -2.0074e-01, 7.8383e-02, 7.4679e-02, -5.8065e-01,
6.7777e-01, 5.9879e-01, 6.6301e-01, -4.7051e-01, -2.5468e-01,
-2.7382e-01, 1.7585e-01, 3.6151e-01, -9.2532e-01, -1.6999e-01,
8.4971e-02, -6.6083e-01, -3.1204e-02, 6.3712e-01, -5.8580e-02,
-7.7901e-04, -4.6792e-01, 1.0796e-01, 7.8766e-01, 1.6809e-01,
-7.0058e-01, -2.9299e-01, -8.2735e-02, 2.0875e-01, -2.9426e-01,
-7.6748e-02, -1.5762e-01, -5.7432e-01, -5.2042e-01, -1.5152e-01,
1.4119e+00, -1.5752e-01, -3.0565e-01, -5.1378e-01, -5.8924e-01,
-1.0163e+00, -2.2021e-01, 2.9112e-02, 1.8521e-01, 6.2814e-01,
-6.8793e-01, 2.1395e-02, 5.7168e-01, 9.0977e-01, 3.8899e-01,
3.0209e-01, 2.4655e-01, -1.1688e-01, -5.9835e-02, 3.6426e-02,
-5.2782e-01, 1.4604e+00, 2.9685e-01, -2.4077e-01, 1.0163e+00,
6.9770e-01, -2.6183e-01, 3.6770e-01, 3.6535e-03, 4.2364e-01,
-5.4703e-01, 8.9173e-02, -3.9032e-01, -5.9740e-01, 3.7479e-02,
3.0257e-01, 8.2539e-02, -6.0559e-01, -4.3660e-01, -7.0624e-01,
-5.0503e-01, -4.0929e-01, -2.3300e-01, 2.0298e-01, -6.3697e-01,
-1.2584e-01, 5.6092e-02, 5.0150e-02, -1.5358e-01, 2.9248e-02,
1.1180e-01, -1.5535e-01, 1.1964e-01, -6.5698e-01, 4.1923e-01,
7.4044e-02, 2.4536e-02, 3.2647e-01, -7.7464e-01, 3.9898e-01,
-2.5777e-01, 8.5569e-02, -4.0305e-01, 5.4463e-01, -3.4124e-01,
-4.0789e-01, 4.2093e-01, -3.8487e-01, -4.0491e-01, -2.1539e-01,
-1.7979e-02, 3.2492e-01, -2.0894e-01, 2.5629e-01, 9.6046e-01])

How to use all the elements of the array using for loop?

Actually, I need to put the returned values of the function (global_displacement(X)) into another running loop.
Can someone please tell me how to obtain the required output, and what mistake I have been making?
Every time it gives me only the first value ([ 0, 0, X[0], X[1]]) or
the last value ([ X[20], X[21], X[53], X[54]]) in the output,
because of the wrong indentation of "return j" in the code below.
import numpy as np
X = [ 0.19515612, 0.36477665, 0.244737, 0.42873321, 0.16864666, 0.08636661, 0.05376605, -0.57201897, -0.00935055, -1.24923862, 0., -1.53111525, 0.00935055, -1.24923862, -0.05376605, -0.57201897, -0.1686466,
0.08636661, -0.244737, 0.42873321, -0.19515612, 0.36477665, 0.02279911, 0. , 0.3563355 , 0.01379104, 0. , 0.42289958, -0.00747999, 0. , 0.0825908, -0.02949519 , 0. , -0.57435396,
-0.04074819, 0. , -1.25069528 ,-0.02972642, 0. , -1.53227704, -0. , 0. , -1.25069528 , 0.02972642 , 0. , -0.57435396 , 0.04074819 , 0. , 0.0825908, 0.02949519, 0. ,
0.42289958, 0.00747999 , 0. , 0.3563355 , -0.01379104, -0.02279911]
def global_displacement(X):
    global_displacements = np.array( [[ 0, 0, X[0], X[1]], [ X[0], X[1], X[2], X[3]], [ X[2], X[3],X[4], X[5]], [ X[4],X[5],X[6], X[7]],[ X[6],X[7],X[8],X[9]], [ X[8],X[9],X[10], X[11] ], [ X[10], X[11],X[12], X[13]], [ X[12], X[13],X[14], X[15]],[ X[14], X[15],X[16], X[17]],[ X[16], X[17],X[18], X[19]], [ X[18], X[19],X[20], X[21]],[ X[20], X[21], 0, 0],
                                       [ X[0], X[1], X[23], X[24]], [ X[2], X[3], X[26],X[27]], [ X[4], X[5], X[29],X[30]], [ X[6], X[7], X[32],X[33]], [ X[8],X[9],X[35], X[36]], [ X[10], X[11], X[38], X[39]], [ X[12], X[13], X[41], X[42]] ,[ X[14], X[15], X[44], X[45]],[ X[16], X[17], X[47], X[48]],[ X[18], X[19], X[50], X[51]], [ X[20], X[21], X[53], X[54]] ] )
    for i in global_displacements:
        j = i.reshape(4, 1)
    return j
print(global_displacement(X))
This is the expected output, and I need to put these values into another loop by calling this function.
[[0. ]
[0. ]
[0.19515612]
[0.36477665]]
[[0.19515612]
[0.36477665]
[0.244737 ]
[0.42873321]]
[[0.244737 ]
[0.42873321]
[0.16864666]
[0.08636661]]
[[ 0.16864666]
[ 0.08636661]
[ 0.05376605]
[-0.57201897]]
[[ 0.05376605]
[-0.57201897]
[-0.00935055]
[-1.24923862]]
[[-0.00935055]
[-1.24923862]
[ 0. ]
[-1.53111525]]
[[ 0. ]
[-1.53111525]
[ 0.00935055]
[-1.24923862]]
[[ 0.00935055]
[-1.24923862]
[-0.05376605]
[-0.57201897]]
[[-0.05376605]
[-0.57201897]
[-0.1686466 ]
[ 0.08636661]]
[[-0.1686466 ]
[ 0.08636661]
[-0.244737 ]
[ 0.42873321]]
[[-0.244737 ]
[ 0.42873321]
[-0.19515612]
[ 0.36477665]]
[[-0.19515612]
[ 0.36477665]
[ 0. ]
[ 0. ]]
[[0.19515612]
[0.36477665]
[0. ]
[0.3563355 ]]
[[0.244737 ]
[0.42873321]
[0. ]
[0.42289958]]
[[0.16864666]
[0.08636661]
[0. ]
[0.0825908 ]]
[[ 0.05376605]
[-0.57201897]
[ 0. ]
[-0.57435396]]
[[-0.00935055]
[-1.24923862]
[ 0. ]
[-1.25069528]]
[[ 0. ]
[-1.53111525]
[ 0. ]
[-1.53227704]]
[[ 0.00935055]
[-1.24923862]
[ 0. ]
[-1.25069528]]
[[-0.05376605]
[-0.57201897]
[ 0. ]
[-0.57435396]]
[[-0.1686466 ]
[ 0.08636661]
[ 0. ]
[ 0.0825908 ]]
[[-0.244737 ]
[ 0.42873321]
[ 0. ]
[ 0.42289958]]
[[-0.19515612]
[ 0.36477665]
[ 0. ]
[ 0.3563355 ]]
Your function already converts everything into the right format, except that the inner values should each be wrapped in their own list. For this you can use numpy.newaxis, which adds a new dimension to your array (there is a good post about its functionality).
import numpy as np
def global_displacement(X):
    global_displacements = np.array( [[ 0, 0, X[0], X[1]], [ X[0], X[1], X[2], X[3]], [ X[2], X[3],X[4], X[5]], [ X[4],X[5],X[6], X[7]],[ X[6],X[7],X[8],X[9]], [ X[8],X[9],X[10], X[11] ], [ X[10], X[11],X[12], X[13]], [ X[12], X[13],X[14], X[15]],[ X[14], X[15],X[16], X[17]],[ X[16], X[17],X[18], X[19]], [ X[18], X[19],X[20], X[21]],[ X[20], X[21], 0, 0],
                                       [ X[0], X[1], X[23], X[24]], [ X[2], X[3], X[26],X[27]], [ X[4], X[5], X[29],X[30]], [ X[6], X[7], X[32],X[33]], [ X[8],X[9],X[35], X[36]], [ X[10], X[11], X[38], X[39]], [ X[12], X[13], X[41], X[42]] ,[ X[14], X[15], X[44], X[45]],[ X[16], X[17], X[47], X[48]],[ X[18], X[19], X[50], X[51]], [ X[20], X[21], X[53], X[54]] ] )
    new_structure = global_displacements[:, :, np.newaxis]
    return new_structure
X = [ 0.19515612, 0.36477665, 0.244737, 0.42873321, 0.16864666, 0.08636661, 0.05376605, -0.57201897, -0.00935055, -1.24923862, 0., -1.53111525, 0.00935055, -1.24923862, -0.05376605, -0.57201897, -0.1686466,
0.08636661, -0.244737, 0.42873321, -0.19515612, 0.36477665, 0.02279911, 0. , 0.3563355 , 0.01379104, 0. , 0.42289958, -0.00747999, 0. , 0.0825908, -0.02949519 , 0. , -0.57435396,
-0.04074819, 0. , -1.25069528 ,-0.02972642, 0. , -1.53227704, -0. , 0. , -1.25069528 , 0.02972642 , 0. , -0.57435396 , 0.04074819 , 0. , 0.0825908, 0.02949519, 0. ,
0.42289958, 0.00747999 , 0. , 0.3563355 , -0.01379104, -0.02279911]
result = global_displacement(X)
print(result)
Output:
[[[ 0. ]
[ 0. ]
[ 0.19515612]
[ 0.36477665]]
[[ 0.19515612]
[ 0.36477665]
[ 0.244737 ]
[ 0.42873321]]
[[ 0.244737 ]
[ 0.42873321]
[ 0.16864666]
[ 0.08636661]]
[[ 0.16864666]
[ 0.08636661]
[ 0.05376605]
[-0.57201897]]
[[ 0.05376605]
[-0.57201897]
[-0.00935055]
[-1.24923862]]
[[-0.00935055]
[-1.24923862]
[ 0. ]
[-1.53111525]]
[[ 0. ]
[-1.53111525]
[ 0.00935055]
[-1.24923862]]
[[ 0.00935055]
[-1.24923862]
[-0.05376605]
[-0.57201897]]
[[-0.05376605]
[-0.57201897]
[-0.1686466 ]
[ 0.08636661]]
[[-0.1686466 ]
[ 0.08636661]
[-0.244737 ]
[ 0.42873321]]
[[-0.244737 ]
[ 0.42873321]
[-0.19515612]
[ 0.36477665]]
[[-0.19515612]
[ 0.36477665]
[ 0. ]
[ 0. ]]
[[ 0.19515612]
[ 0.36477665]
[ 0. ]
[ 0.3563355 ]]
[[ 0.244737 ]
[ 0.42873321]
[ 0. ]
[ 0.42289958]]
[[ 0.16864666]
[ 0.08636661]
[ 0. ]
[ 0.0825908 ]]
[[ 0.05376605]
[-0.57201897]
[ 0. ]
[-0.57435396]]
[[-0.00935055]
[-1.24923862]
[ 0. ]
[-1.25069528]]
[[ 0. ]
[-1.53111525]
[ 0. ]
[-1.53227704]]
[[ 0.00935055]
[-1.24923862]
[ 0. ]
[-1.25069528]]
[[-0.05376605]
[-0.57201897]
[ 0. ]
[-0.57435396]]
[[-0.1686466 ]
[ 0.08636661]
[ 0. ]
[ 0.0825908 ]]
[[-0.244737 ]
[ 0.42873321]
[ 0. ]
[ 0.42289958]]
[[-0.19515612]
[ 0.36477665]
[ 0. ]
[ 0.3563355 ]]]
First off, you don't need .reshape to transform a 1D array of N elements into a 2D array that's N by 1. You can just add a dimension to the array.
Second, you generally don't want to write loops to handle a Numpy array. You want to use Numpy tools to process everything at once. Just think about the problem in the full number of dimensions: you want to transform a 2D array that's M by N, into a 3D one that's M by N by 1. That's... still just adding a dimension to the array.
So:
global_displacements = np.array(...)
return global_displacements[..., np.newaxis]
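As a small usage sketch of my own (not part of the original answer), the asker's "other loop" can then consume the result one (4, 1) block at a time; the array below is a shortened stand-in for global_displacements:
import numpy as np

# Two rows standing in for the 23-row global_displacements from the question.
global_displacements = np.array([[0.0, 0.0, 0.19515612, 0.36477665],
                                 [0.19515612, 0.36477665, 0.244737, 0.42873321]])

# Shape (M, 4) -> (M, 4, 1): each row becomes a 4x1 column vector.
blocks = global_displacements[..., np.newaxis]

for block in blocks:
    print(block.shape)   # (4, 1)
    # ... use block in the other loop here ...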

Mongo geoJSON corrupted polygon

I'm searching in MongoDB using a GeoJSON polygon without holes. I get an error when accessing the cursor. The error is:
OperationFailure: Secondary loops not contained by first exterior loop - secondary loops must be holes.
Code:
import pymongo
import json

def loadjson(filename):
    with open(filename) as f:
        mydict = json.load(f)
    return mydict

client = pymongo.MongoClient('mongodb://' \
                             + 'username' \
                             + ':' \
                             + "password" + \
                             '@localhost:num')
gjOr = loadjson('polygon.json')
database = client.database
collection = database["name"]
filParam = {"field": {"$geoWithin": {"$geometry": gjOr["geometry"]}}}
exParam = {"parameter": 1}
cursorItineraries = collection.find(filParam, exParam).batch_size(2)
for i in cursorItineraries:
    print(i)
Error:
OperationFailure: Secondary loops not contained by first exterior loop - secondary loops must be holes: [ [ 4.423298831017474, 45.92246510136637 ], [ 4.423272328729544, 45.9224124914541 ], [ 4.423296014293201, 45.92245885344253 ], [ 4.423298831017474, 45.92246510136637 ] ] first loop: [ [ 45.80122758057101, 4.376352681950241 ], [ 45.8044838524037, 4.376220111724892 ], [ 45.80502003601278, 4.384299007716068 ], [ 45.80303745929081, 4.388437890276315 ], [ 45.81747689026152, 4.380885175476993 ], [ 45.82029916312589, 4.38306756294014 ], [ 45.82485210019716, 4.37663747923372 ], [ 45.82534211272147, 4.381172591846549 ], [ 45.83402693020424, 4.384295932265717 ], [ 45.83750180635732, 4.391204268525621 ], [ 45.83984258047744, 4.387611887543129 ], [ 45.84600085935993, 4.383682442715087 ], [ 45.85031045846961, 4.391211993814656 ], [ 45.85985548506667, 4.396744466054351 ], [ 45.8651960481546, 4.395075656820409 ], [ 45.86464844812487, 4.390221929905438 ], [ 45.86619589736199, 4.361702458243602 ], [ 45.87693527428829, 4.343076054047009 ], [ 45.88006074780913, 4.340914771281566 ], [ 45.8901897554254, 4.337545959836689 ], [ 45.89739332872952, 4.326829637196063 ], [ 45.90391638987086, 4.32382549599472 ], [ 45.905667965693, 4.327990843251258 ], [ 45.91579311238129, 4.346817863136088 ], [ 45.91668891008069, 4.342257953265195 ], [ 45.9216251460626, 4.338982596932619 ], [ 45.92997139286699, 4.346628121156797 ], [ 45.93185900371628, 4.337155914633498 ], [ 45.94115045303277, 4.322661854063225 ], [ 45.9423274157955, 4.313001397538913 ], [ 45.95343445337902, 4.294525894838753 ], [ 45.95835570732176, 4.301188120177739 ], [ 45.9732026997347, 4.289431726797376 ], [ 45.97782302017173, 4.276356808225782 ], [ 45.97999822296752, 4.273060858987787 ], [ 45.98120074142516, 4.259590688670001 ], [ 45.98675497867428, 4.248658637365085 ], [ 45.99308572400004, 4.247947359178051 ], [ 45.99614759409149, 4.246662259181909 ], [ 45.99563177014321, 4.251765125404147 ], [ 45.99854856923263, 4.265285854935993 ], [ 45.99274541001228, 4.270233183990548 ], [ 45.99239729542708, 4.275045816032846 ], [ 45.99568744605374, 4.289609476470726 ], [ 45.98892048061472, 4.300698529315564 ], [ 45.99439301256719, 4.304534538221632 ], [ 46.0051569133273, 4.31261301303475 ], [ 46.00804632488756, 4.310942716001039 ], [ 46.01128401825687, 4.308859856852684 ], [ 46.02162453052691, 4.306458712603317 ], [ 46.0215031603917, 4.305677962950914 ], [ 46.02080232740203, 4.301223173083229 ], [ 46.01839338755868, 4.288344635543249 ], [ 46.0218025518577, 4.28062738017141 ], [ 46.02393104466132, 4.276598681060639 ], [ 46.03355588764234, 4.270436612377799 ], [ 46.03608569278646, 4.261172108339184 ], [ 46.03914954925174, 4.261574439856753 ], [ 46.04921372290215, 4.251694718008523 ], [ 46.05182172309517, 4.254034942879366 ], [ 46.05394712145236, 4.25770790411945 ], [ 46.05200674707666, 4.266837389513737 ], [ 46.05371908926419, 4.285713698620556 ], [ 46.06348629342862, 4.297946350027559 ], [ 46.06607892681861, 4.300652169094047 ], [ 46.07849083960166, 4.302039193299913 ], [ 46.07922531473847, 4.305599848572079 ], [ 46.08175515713837, 4.311015193331216 ], [ 46.08505691781212, 4.310620920782156 ], [ 46.09488700704484, 4.311938135622067 ], [ 46.10036561585792, 4.306608118283756 ], [ 46.11003344270973, 4.30990269743367 ], [ 46.11227779249233, 4.313417891784874 ], [ 46.11685288921163, 4.320328061893218 ], [ 46.13001463483668, 4.322725351722958 ], [ 46.13326362593953, 4.330293102924566 ], [ 46.13554160597837, 4.333348440257132 ], [ 46.13830957901551, 4.357233936672898 ], [ 
46.13849351079732, 4.362195204890961 ], [ 46.1498366233474, 4.377795652104825 ], [ 46.14969360157965, 4.382423398963222 ], [ 46.14767758414561, 4.386051143732711 ], [ 46.13709181954217, 4.408851926159123 ], [ 46.13594976925808, 4.417961239075054 ], [ 46.15142016044436, 4.424267792344496 ], [ 46.15560425283834, 4.431770589521594 ], [ 46.16791483778646, 4.439193639737003 ], [ 46.16998904853596, 4.435772060175736 ], [ 46.18010788550735, 4.414915433192237 ], [ 46.18156481929947, 4.411150008217347 ], [ 46.18227866102438, 4.406966209179808 ], [ 46.18456671088968, 4.403509174410789 ], [ 46.18925424643544, 4.396783755671023 ], [ 46.19570191957066, 4.39747939068123 ], [ 46.20316836045577, 4.423571836902974 ], [ 46.20706171749928, 4.422359640985007 ], [ 46.21010077361071, 4.420471659265854 ], [ 46.21965844746768, 4.407356367902624 ], [ 46.21982737938801, 4.388683312501862 ], [ 46.22571105646608, 4.386829907900418 ], [ 46.23755075963317, 4.39209447339758 ], [ 46.24354546160409, 4.388785758384324 ], [ 46.24619830411654, 4.386173183144986 ], [ 46.24794900440396, 4.38889827660288 ], [ 46.2643415894465, 4.393582057210332 ], [ 46.26655827183746, 4.397449686073675 ], [ 46.27187809126278, 4.397064201352286 ], [ 46.27457798360896, 4.396974174839016 ], [ 46.28435539290889, 4.399371429518632 ], [ 46.29277972056357, 4.407707294944299 ], [ 46.29609636649197, 4.406422030172297 ], [ 46.29537513021594, 4.411216732267508 ], [ 46.29633635905063, 4.424994132856644 ], [ 46.30286120593534, 4.427650449880751 ], [ 46.29631496657498, 4.438330372797787 ], [ 46.29309133554136, 4.440062626728649 ], [ 46.29647617285935, 4.453855067913997 ], [ 46.29405157990985, 4.46279605745037 ], [ 46.28443417433566, 4.47602647504644 ], [ 46.28709961733546, 4.484382424854218 ], [ 46.28803138168911, 4.489069361733742 ], [ 46.27010728719618, 4.50209226866315 ], [ 46.26716933623024, 4.504596974106646 ], [ 46.2719961499778, 4.542944499553459 ], [ 46.27395480446275, 4.547049893978835 ], [ 46.2832619806662, 4.548685286189365 ], [ 46.29129615541792, 4.556044471210963 ], [ 46.29402942721709, 4.55844617134213 ], [ 46.29306430926767, 4.570871194910193 ], [ 46.29001185910924, 4.572758405594865 ], [ 46.27707566137231, 4.573323169325609 ], [ 46.27025349602457, 4.58326888257721 ], [ 46.26839674289919, 4.587119484978682 ], [ 46.26643179980633, 4.615050419767011 ], [ 46.2654086056003, 4.617672483965549 ], [ 45.80592709186426, 4.865072663474678 ], [ 45.80434184846744, 4.864499321786514 ], [ 45.80433950695036, 4.864498347340356 ], [ 45.80545594864911, 4.848595888972264 ], [ 45.76435319429793, 4.801430760920382 ], [ 45.71917418711324, 4.781774751613478 ], [ 45.68363470756887, 4.80870542554455 ], [ 45.80122758057101, 4.376352681950241 ] ], full error: {'ok': 0.0, 'errmsg': 'Secondary loops not contained by first exterior loop - secondary loops must be holes: [ [ 4.423298831017474, 45.92246510136637 ], [ 4.423272328729544, 45.9224124914541 ], [ 4.423296014293201, 45.92245885344253 ], [ 4.423298831017474, 45.92246510136637 ] ] first loop: [ [ 45.80122758057101, 4.376352681950241 ], [ 45.8044838524037, 4.376220111724892 ], [ 45.80502003601278, 4.384299007716068 ], [ 45.80303745929081, 4.388437890276315 ], [ 45.81747689026152, 4.380885175476993 ], [ 45.82029916312589, 4.38306756294014 ], [ 45.82485210019716, 4.37663747923372 ], [ 45.82534211272147, 4.381172591846549 ], [ 45.83402693020424, 4.384295932265717 ], [ 45.83750180635732, 4.391204268525621 ], [ 45.83984258047744, 4.387611887543129 ], [ 45.84600085935993, 4.383682442715087 ], [ 45.85031045846961, 
4.391211993814656 ], [ 45.85985548506667, 4.396744466054351 ], [ 45.8651960481546, 4.395075656820409 ], [ 45.86464844812487, 4.390221929905438 ], [ 45.86619589736199, 4.361702458243602 ], [ 45.87693527428829, 4.343076054047009 ], [ 45.88006074780913, 4.340914771281566 ], [ 45.8901897554254, 4.337545959836689 ], [ 45.89739332872952, 4.326829637196063 ], [ 45.90391638987086, 4.32382549599472 ], [ 45.905667965693, 4.327990843251258 ], [ 45.91579311238129, 4.346817863136088 ], [ 45.91668891008069, 4.342257953265195 ], [ 45.9216251460626, 4.338982596932619 ], [ 45.92997139286699, 4.346628121156797 ], [ 45.93185900371628, 4.337155914633498 ], [ 45.94115045303277, 4.322661854063225 ], [ 45.9423274157955, 4.313001397538913 ], [ 45.95343445337902, 4.294525894838753 ], [ 45.95835570732176, 4.301188120177739 ], [ 45.9732026997347, 4.289431726797376 ], [ 45.97782302017173, 4.276356808225782 ], [ 45.97999822296752, 4.273060858987787 ], [ 45.98120074142516, 4.259590688670001 ], [ 45.98675497867428, 4.248658637365085 ], [ 45.99308572400004, 4.247947359178051 ], [ 45.99614759409149, 4.246662259181909 ], [ 45.99563177014321, 4.251765125404147 ], [ 45.99854856923263, 4.265285854935993 ], [ 45.99274541001228, 4.270233183990548 ], [ 45.99239729542708, 4.275045816032846 ], [ 45.99568744605374, 4.289609476470726 ], [ 45.98892048061472, 4.300698529315564 ], [ 45.99439301256719, 4.304534538221632 ], [ 46.0051569133273, 4.31261301303475 ], [ 46.00804632488756, 4.310942716001039 ], [ 46.01128401825687, 4.308859856852684 ], [ 46.02162453052691, 4.306458712603317 ], [ 46.0215031603917, 4.305677962950914 ], [ 46.02080232740203, 4.301223173083229 ], [ 46.01839338755868, 4.288344635543249 ], [ 46.0218025518577, 4.28062738017141 ], [ 46.02393104466132, 4.276598681060639 ], [ 46.03355588764234, 4.270436612377799 ], [ 46.03608569278646, 4.261172108339184 ], [ 46.03914954925174, 4.261574439856753 ], [ 46.04921372290215, 4.251694718008523 ], [ 46.05182172309517, 4.254034942879366 ], [ 46.05394712145236, 4.25770790411945 ], [ 46.05200674707666, 4.266837389513737 ], [ 46.05371908926419, 4.285713698620556 ], [ 46.06348629342862, 4.297946350027559 ], [ 46.06607892681861, 4.300652169094047 ], [ 46.07849083960166, 4.302039193299913 ], [ 46.07922531473847, 4.305599848572079 ], [ 46.08175515713837, 4.311015193331216 ], [ 46.08505691781212, 4.310620920782156 ], [ 46.09488700704484, 4.311938135622067 ], [ 46.10036561585792, 4.306608118283756 ], [ 46.11003344270973, 4.30990269743367 ], [ 46.11227779249233, 4.313417891784874 ], [ 46.11685288921163, 4.320328061893218 ], [ 46.13001463483668, 4.322725351722958 ], [ 46.13326362593953, 4.330293102924566 ], [ 46.13554160597837, 4.333348440257132 ], [ 46.13830957901551, 4.357233936672898 ], [ 46.13849351079732, 4.362195204890961 ], [ 46.1498366233474, 4.377795652104825 ], [ 46.14969360157965, 4.382423398963222 ], [ 46.14767758414561, 4.386051143732711 ], [ 46.13709181954217, 4.408851926159123 ], [ 46.13594976925808, 4.417961239075054 ], [ 46.15142016044436, 4.424267792344496 ], [ 46.15560425283834, 4.431770589521594 ], [ 46.16791483778646, 4.439193639737003 ], [ 46.16998904853596, 4.435772060175736 ], [ 46.18010788550735, 4.414915433192237 ], [ 46.18156481929947, 4.411150008217347 ], [ 46.18227866102438, 4.406966209179808 ], [ 46.18456671088968, 4.403509174410789 ], [ 46.18925424643544, 4.396783755671023 ], [ 46.19570191957066, 4.39747939068123 ], [ 46.20316836045577, 4.423571836902974 ], [ 46.20706171749928, 4.422359640985007 ], [ 46.21010077361071, 4.420471659265854 ], [ 46.21965844746768, 
4.407356367902624 ], [ 46.21982737938801, 4.388683312501862 ], [ 46.22571105646608, 4.386829907900418 ], [ 46.23755075963317, 4.39209447339758 ], [ 46.24354546160409, 4.388785758384324 ], [ 46.24619830411654, 4.386173183144986 ], [ 46.24794900440396, 4.38889827660288 ], [ 46.2643415894465, 4.393582057210332 ], [ 46.26655827183746, 4.397449686073675 ], [ 46.27187809126278, 4.397064201352286 ], [ 46.27457798360896, 4.396974174839016 ], [ 46.28435539290889, 4.399371429518632 ], [ 46.29277972056357, 4.407707294944299 ], [ 46.29609636649197, 4.406422030172297 ], [ 46.29537513021594, 4.411216732267508 ], [ 46.29633635905063, 4.424994132856644 ], [ 46.30286120593534, 4.427650449880751 ], [ 46.29631496657498, 4.438330372797787 ], [ 46.29309133554136, 4.440062626728649 ], [ 46.29647617285935, 4.453855067913997 ], [ 46.29405157990985, 4.46279605745037 ], [ 46.28443417433566, 4.47602647504644 ], [ 46.28709961733546, 4.484382424854218 ], [ 46.28803138168911, 4.489069361733742 ], [ 46.27010728719618, 4.50209226866315 ], [ 46.26716933623024, 4.504596974106646 ], [ 46.2719961499778, 4.542944499553459 ], [ 46.27395480446275, 4.547049893978835 ], [ 46.2832619806662, 4.548685286189365 ], [ 46.29129615541792, 4.556044471210963 ], [ 46.29402942721709, 4.55844617134213 ], [ 46.29306430926767, 4.570871194910193 ], [ 46.29001185910924, 4.572758405594865 ], [ 46.27707566137231, 4.573323169325609 ], [ 46.27025349602457, 4.58326888257721 ], [ 46.26839674289919, 4.587119484978682 ], [ 46.26643179980633, 4.615050419767011 ], [ 46.2654086056003, 4.617672483965549 ], [ 45.80592709186426, 4.865072663474678 ], [ 45.80434184846744, 4.864499321786514 ], [ 45.80433950695036, 4.864498347340356 ], [ 45.80545594864911, 4.848595888972264 ], [ 45.76435319429793, 4.801430760920382 ], [ 45.71917418711324, 4.781774751613478 ], [ 45.68363470756887, 4.80870542554455 ], [ 45.80122758057101, 4.376352681950241 ] ]', 'code': 2, 'codeName': 'BadValue', 'operationTime': Timestamp(1611827959, 1), '$clusterTime': {'clusterTime': Timestamp(1611827959, 1), 'signature': {'hash': b'\x8d\xb6\x9b7\x0b\xe7!\x11\xe6\xc1\x13\x06\xf4\xe0\xca\xfe\xcbN\x0cy', 'keyId': 6872375198939611144}}}
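(Not part of the original post, but as a hypothetical diagnostic sketch: before querying, the rings of polygon.json can be printed to see which loop MongoDB treats as the exterior and which as holes, since the error complains that the secondary loop is not contained in the first one. This assumes the file holds a Feature whose geometry is a Polygon, as the query code above suggests.)
import json

with open('polygon.json') as f:
    gj = json.load(f)

# A GeoJSON Polygon stores a list of linear rings: the first is the exterior,
# any further rings must be holes inside it.
for n, ring in enumerate(gj["geometry"]["coordinates"]):
    print(f"ring {n}: {len(ring)} points, closed={ring[0] == ring[-1]}, "
          f"first point={ring[0]}")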

How do I write an accumulator to squash an array around a midpoint?

Say I've got an array of arrays as follows:
array([[ 7108.4, -600. ],
[ 7101.3, -500. ],
[ 7094.3, -400. ],
[ 7087.2, -300. ],
[ 7080.1, -200. ],
[ 7073.1, -100. ],
[ 7058.3, 100. ],
[ 7051.3, 200. ],
[ 7044.2, 300. ],
[ 7037.1, 400. ],
[ 7030.1, 500. ],
[ 7023. , 600. ]])
This is a list of prices and quantities, with the positive numbers being buys and the negative numbers being sells.
If the current market price is 7050, I want to 'squash' my prices, as follows:
array([[ 7108.4, -600. ],
[ 7101.3, -500. ],
[ 7094.3, -400. ],
[ 7087.2, -300. ],
[ 7080.1, -200. ],
[ 7073.1, -100. ],
[ 7050. , 300. ], # Price & quantity changed here
[ 7044.2, 300. ],
[ 7037.1, 400. ],
[ 7030.1, 500. ],
[ 7023. , 600. ]])
and do the same thing in the other direction, so if the market price were 7085:
array([[ 7108.4, -600. ],
[ 7101.3, -500. ],
[ 7094.3, -400. ],
[ 7087.2, -300. ],
[ 7085. , -300. ], # Price & quantity change here
[ 7058.3, 100. ],
[ 7051.3, 200. ],
[ 7044.2, 300. ],
[ 7037.1, 400. ],
[ 7030.1, 500. ],
[ 7023. , 600. ]])
What's the cleanest way to write such a thing? Ideally, I'd like to write the squashing and the quantity grouping as two separate steps.
This is the kind of operation that is made easier by pandas.
>>> import numpy as np
>>> import pandas as pd
>>>
>>> t = np.array([[ 7108.4, -600. ],
... [ 7101.3, -500. ],
... [ 7094.3, -400. ],
... [ 7087.2, -300. ],
... [ 7080.1, -200. ],
... [ 7073.1, -100. ],
... [ 7058.3, 100. ],
... [ 7051.3, 200. ],
... [ 7044.2, 300. ],
... [ 7037.1, 400. ],
... [ 7030.1, 500. ],
... [ 7023. , 600. ]])
>>>
>>> df = pd.DataFrame(t, columns=['price', 'quantity'])
>>> (df.price > 7050) & (df.quantity > 0)
0 False
1 False
2 False
3 False
4 False
5 False
6 True
7 True
8 False
9 False
10 False
11 False
dtype: bool
>>> df[(df.price > 7050) & (df.quantity > 0)]
price quantity
6 7058.3 100.0
7 7051.3 200.0
>>> df[(df.price > 7050) & (df.quantity > 0)].quantity.sum()
300.0
>>> df[(df.price < 7085) & (df.quantity < 0)]
price quantity
4 7080.1 -200.0
5 7073.1 -100.0
>>> df[(df.price < 7085) & (df.quantity < 0)].quantity.sum()
-300.0
You could still stick with numpy if you want:
>>> import numpy as np
>>> import pandas as pd
>>>
>>> t = np.array([[ 7108.4, -600. ],
... [ 7101.3, -500. ],
... [ 7094.3, -400. ],
... [ 7087.2, -300. ],
... [ 7080.1, -200. ],
... [ 7073.1, -100. ],
... [ 7058.3, 100. ],
... [ 7051.3, 200. ],
... [ 7044.2, 300. ],
... [ 7037.1, 400. ],
... [ 7030.1, 500. ],
... [ 7023. , 600. ]])
>>>
>>>
>>> t[:,0]
array([ 7108.4, 7101.3, 7094.3, 7087.2, 7080.1, 7073.1, 7058.3,
7051.3, 7044.2, 7037.1, 7030.1, 7023. ])
>>> t[:,0] > 7050
array([ True, True, True, True, True, True, True, True, False,
False, False, False], dtype=bool)
>>> (t[:,0] > 7050) & (t[:,1] > 0)
array([False, False, False, False, False, False, True, True, False,
False, False, False], dtype=bool)
>>> t[(t[:,0] > 7050) & (t[:,1] > 0)]
array([[ 7058.3, 100. ],
[ 7051.3, 200. ]])
>>> t[(t[:,0] < 7085) & (t[:,1] < 0)]
array([[ 7080.1, -200. ],
[ 7073.1, -100. ]])
>>> t[(t[:,0] < 7085) & (t[:,1] < 0)][:,1]
array([-200., -100.])
>>> t[(t[:,0] < 7085) & (t[:,1] < 0)][:,1].sum()
-300.0
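Building on those masks, one possible way (my sketch, not part of the original answer) to splice the summed quantity back in and produce the "squashed" array from the question:
import numpy as np

t = np.array([[7108.4, -600.], [7101.3, -500.], [7094.3, -400.],
              [7087.2, -300.], [7080.1, -200.], [7073.1, -100.],
              [7058.3,  100.], [7051.3,  200.], [7044.2,  300.],
              [7037.1,  400.], [7030.1,  500.], [7023. ,  600.]])

def squash(t, market_price):
    # Rows that have "crossed" the market price: buys priced above it,
    # or, if there are none, sells priced below it.
    mask = (t[:, 0] > market_price) & (t[:, 1] > 0)
    if not mask.any():
        mask = (t[:, 0] < market_price) & (t[:, 1] < 0)
    idx = np.flatnonzero(mask)   # assumed non-empty and contiguous (the book is sorted)
    merged = np.array([[market_price, t[mask, 1].sum()]])
    return np.vstack([t[:idx[0]], merged, t[idx[-1] + 1:]])

print(squash(t, 7050))   # merged row [7050., 300.]
print(squash(t, 7085))   # merged row [7085., -300.]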
Eric's answer is great, but as per your comment:
"Is it possible to modify the original array, so that the output is the full list of orders with changes inside?"
Here is my approach without numpy:
array=[[ 7108.4, -600. ],
[ 7101.3, -500. ],
[ 7094.3, -400. ],
[ 7087.2, -300. ],
[ 7080.1, -200. ],
[ 7073.1, -100. ],
[ 7058.3, 100. ],
[ 7051.3, 200. ],
[ 7044.2, 300. ],
[ 7037.1, 400. ],
[ 7030.1, 500. ],
[ 7023. , 600. ]]
price = 0
positive_value = 7050
updated_list = []
track = []
for index, item in enumerate(array):
    if item[0] > positive_value:
        if item[1] > 0:
            price += item[1]
            track.append(index)
        else:
            updated_list.append(item)
    else:
        updated_list.append(item)
updated_list.insert(track[0], [positive_value, price])
print(updated_list)
output:
[[7108.4, -600.0], [7101.3, -500.0], [7094.3, -400.0], [7087.2, -300.0], [7080.1, -200.0], [7073.1, -100.0], [7050, 300.0], [7044.2, 300.0], [7037.1, 400.0], [7030.1, 500.0], [7023.0, 600.0]]
For the second one:
array=[[ 7108.4, -600. ],
[ 7101.3, -500. ],
[ 7094.3, -400. ],
[ 7087.2, -300. ],
[ 7080.1, -200. ],
[ 7073.1, -100. ],
[ 7058.3, 100. ],
[ 7051.3, 200. ],
[ 7044.2, 300. ],
[ 7037.1, 400. ],
[ 7030.1, 500. ],
[ 7023. , 600. ]]
price = 0
positive_value = 7085
updated_list = []
track = []
for index, item in enumerate(array):
    if item[0] < positive_value:
        if item[1] < 0:
            price += item[1]
            track.append(index)
        else:
            updated_list.append(item)
    else:
        updated_list.append(item)
updated_list.insert(track[0], [positive_value, price])
print(updated_list)
output:
[[7108.4, -600.0], [7101.3, -500.0], [7094.3, -400.0], [7087.2, -300.0], [7085, -300.0], [7058.3, 100.0], [7051.3, 200.0], [7044.2, 300.0], [7037.1, 400.0], [7030.1, 500.0], [7023.0, 600.0]]

Maximizing interpolated matrix

Say I need to max_(a', m') f(a, m, e, m', a'), and I have approximated f with a grid V1. This is a numpy matrix with shape (nA, nM, nE, nM, nA) (attached in the end).
I want to first interpolate and then do the maximization. The following is my current code (I paste code to recreate Grid in the end):
# takes grid indices (first three dimensions) idx and interpolates on V
def interpolateV(idx, V, Grid):
    from scipy.interpolate import interp2d
    f = interp2d(Grid.mGrid, Grid.aGrid, V[idx])
    return f
# (somewhere else:)
s2 = (Grid.nM, Grid.nA, Grid.nE)
v1Max = np.empty(s2)
v1ArgMaxA = np.empty(s2)
v1ArgMaxM = np.empty(s2)
from scipy import optimize
for idx in np.ndindex(V1[..., 0,0].shape):
    V1i = interpolateV(idx, V1, Grid)
    x, f, d = optimize.fmin_l_bfgs_b(lambda x: -V1i(x[0], x[1]), np.array([1, 1]), bounds=[(Grid.aMin, Grid.aMax), (Grid.mMin, Grid.mMax)], approx_grad=True)
    v1Max[idx] = f
    v1ArgMaxA[idx], v1ArgMaxM[idx] = x
# let's compare with standard grid-wise optimization (without interpolation):
temp = V1.max(axis=-1)
# maximize over m
v1Max = temp.max(axis=-1)
# now max over a, given optimal m
v1ArgMaxAGrid = temp.argmax(axis=-1)
So far, so good. However, the values from the interpolated maximization are way off:
In[51]: v1ArgMaxAGrid[:,:,0]
Out[51]:
array([[0, 0, 0, 0, 2],
[0, 0, 0, 0, 2],
[0, 0, 0, 2, 2],
[0, 0, 0, 2, 3],
[0, 0, 0, 2, 3]], dtype=int64)
In[54]: Grid.aGrid[v1ArgMaxAGrid[:,:,0]]
Out[54]:
array([[ 0. , 0. , 0. , 0. , 3.5 ],
[ 0. , 0. , 0. , 0. , 3.5 ],
[ 0. , 0. , 0. , 3.5 , 3.5 ],
[ 0. , 0. , 0. , 3.5 , 5.25],
[ 0. , 0. , 0. , 3.5 , 5.25]])
In[52]: v1ArgMaxA[:,:,0]
Out[52]:
array([[ 0. , 0.75 , 2.25 , 7. , 7. ],
[ 0. , 1.5 , 4.247, 7. , 7. ],
[ 0.75 , 2.25 , 7. , 7. , 7. ],
[ 1.5 , 1.5 , 7. , 7. , 7. ],
[ 2.25 , 4.939, 7. , 7. , 7. ]])
What is happening here; why are the values so far off? Am I making a mistake?
The following is a copy-paste to recreate Grid and V1:
class Grids(object):
    nE = 2
    nA = 5
    nM = 5
    M = 3
    A = 7
    mMin = 0
    mMax = M
    aMin = 0
    aMax = A
    def __init__(self):
        self.reset()
    def reset(self):
        self.mGrid = np.linspace(self.mMin, self.mMax, self.nM)
        self.aGrid = np.linspace(self.aMin, self.aMax, self.nA)
        self.eGrid = np.array([0.318, 3.149])
        self.transitionE = np.array([[1., 0.],
                                     [0., 1.]])
import numpy as np
Grid = Grids()
V1 = np.array([[[[[ 1.19 , 0.975, -0.371, -2.848, -6.456],
[ -1.463, -4.313, -8.294, -13.407, -19.65 ],
[ -9.888, -15.377, -21.997, -29.748, -38.63 ],
[-24.574, -32.701, -41.958, -52.347, -63.866],
[-45.562, -56.325, -68.218, -81.242, -95.397]],
[[ 64.724, 64.672, 64.567, 64.358, 54.127],
[ 64.247, 63.964, 53.759, 52.687, 50.487],
[ 53.526, 52.078, 49.501, 45.799, 40.969],
[ 48.389, 44.307, 39.105, 32.769, 25.314],
[ 37.062, 30.347, 22.52 , 13.553, 3.47 ]]],
[[[ 12.624, 12.704, 12.591, 2.602, 1.618],
[ 2.237, 2.011, 0.655, -1.832, -5.45 ],
[ -0.064, -2.928, -6.923, -12.049, -18.306],
[ -8.624, -14.126, -20.759, -28.522, -37.416],
[-23.488, -31.625, -40.894, -51.293, -62.822]],
[[ 65.686, 65.695, 65.679, 65.631, 65.537],
[ 65.401, 65.342, 65.23 , 65.014, 54.778],
[ 65.174, 64.881, 54.667, 53.59 , 51.385],
[ 54.43 , 52.973, 50.396, 46.685, 41.855],
[ 49.228, 45.138, 39.936, 33.594, 26.136]]],
[[[ 13.681, 13.872, 14.024, 14.117, 14.093],
[ 13.671, 13.74 , 13.617, 3.617, 2.624],
[ 3.636, 3.397, 2.027, -0.474, -4.106],
[ 1.2 , -1.677, -5.684, -10.823, -17.092],
[ -7.538, -13.051, -19.694, -27.468, -36.373]],
[[ 66.553, 66.597, 66.623, 66.631, 66.614],
[ 66.362, 66.364, 66.342, 66.287, 66.188],
[ 66.327, 66.259, 66.138, 65.917, 55.676],
[ 66.077, 65.776, 55.562, 54.476, 52.271],
[ 55.269, 53.804, 51.227, 47.51 , 42.677]]],
[[[ 14.6 , 14.839, 15.054, 15.242, 15.394],
[ 14.728, 14.909, 15.05 , 15.133, 15.098],
[ 15.07 , 15.126, 14.989, 4.975, 3.968],
[ 4.9 , 4.648, 3.265, 0.752, -2.892],
[ 2.286, -0.601, -4.62 , -9.769, -16.048]],
[[ 67.36 , 67.427, 67.481, 67.52 , 67.543],
[ 67.229, 67.266, 67.286, 67.287, 67.265],
[ 67.288, 67.281, 67.25 , 67.19 , 67.085],
[ 67.231, 67.154, 67.033, 66.803, 56.562],
[ 66.917, 66.607, 56.393, 55.301, 53.093]]],
[[[ 15.442, 15.71 , 15.96 , 16.191, 16.4 ],
[ 15.647, 15.875, 16.08 , 16.257, 16.399],
[ 16.128, 16.294, 16.422, 16.491, 16.443],
[ 16.334, 16.377, 16.227, 6.201, 5.182],
[ 5.986, 5.723, 4.33 , 1.806, -1.849]],
[[ 68.123, 68.207, 68.28 , 68.342, 68.391],
[ 68.036, 68.096, 68.143, 68.176, 68.194],
[ 68.155, 68.183, 68.195, 68.19 , 68.163],
[ 68.192, 68.176, 68.145, 68.076, 67.971],
[ 68.07 , 67.984, 67.864, 67.628, 57.384]]]],
[[[[ 11.877, 1.81 , 1.59 , 0.238, -2.246],
[ 0.873, -0.853, -3.709, -7.696, -12.814],
[ -4.928, -9.292, -14.787, -21.413, -29.17 ],
[-16.988, -23.99 , -32.123, -41.386, -51.78 ],
[-35.352, -44.989, -55.758, -67.657, -80.686]],
[[ 65.151, 65.131, 65.075, 64.966, 64.753],
[ 64.779, 64.647, 64.36 , 54.151, 53.076],
[ 54.24 , 53.917, 52.465, 49.888, 46.183],
[ 51.728, 48.771, 44.694, 39.483, 33.153],
[ 43.026, 37.436, 30.734, 22.892, 13.934]]],
[[[ 13.101, 13.245, 13.318, 13.2 , 3.204],
[ 12.924, 2.847, 2.616, 1.253, -1.24 ],
[ 2.272, 0.533, -2.337, -6.338, -11.47 ],
[ -3.664, -8.041, -13.548, -20.187, -27.956],
[-15.902, -22.915, -31.058, -40.332, -50.737]],
[[ 66.066, 66.092, 66.098, 66.078, 66.025],
[ 65.827, 65.8 , 65.738, 65.622, 65.403],
[ 65.706, 65.564, 65.268, 55.054, 53.974],
[ 55.144, 54.812, 53.36 , 50.774, 47.069],
[ 52.567, 49.602, 45.525, 40.308, 33.975]]],
[[[ 14.087, 14.302, 14.487, 14.632, 14.72 ],
[ 14.148, 14.281, 14.344, 14.216, 4.21 ],
[ 14.323, 4.232, 3.987, 2.611, 0.104],
[ 3.536, 1.784, -1.099, -5.112, -10.256],
[ -2.578, -6.965, -12.484, -19.133, -26.912]],
[[ 66.905, 66.96 , 66.999, 67.022, 67.025],
[ 66.742, 66.762, 66.76 , 66.734, 66.676],
[ 66.754, 66.718, 66.646, 66.525, 66.301],
[ 66.609, 66.459, 66.163, 55.94 , 54.86 ],
[ 55.983, 55.643, 54.191, 51.599, 47.891]]],
[[[ 14.969, 15.221, 15.454, 15.663, 15.844],
[ 15.134, 15.338, 15.513, 15.648, 15.725],
[ 15.548, 15.667, 15.716, 15.574, 5.554],
[ 15.587, 5.483, 5.226, 3.837, 1.318],
[ 4.622, 2.859, -0.034, -4.058, -9.213]],
[[ 67.691, 67.766, 67.829, 67.879, 67.915],
[ 67.581, 67.629, 67.662, 67.678, 67.676],
[ 67.669, 67.679, 67.669, 67.637, 67.574],
[ 67.658, 67.612, 67.541, 67.411, 67.187],
[ 67.449, 67.29 , 66.994, 56.765, 55.682]]],
[[[ 15.786, 16.063, 16.324, 16.569, 16.794],
[ 16.016, 16.258, 16.48 , 16.679, 16.85 ],
[ 16.533, 16.724, 16.884, 17.006, 17.07 ],
[ 16.811, 16.918, 16.954, 16.8 , 6.768],
[ 16.673, 6.559, 6.29 , 4.891, 2.362]],
[[ 68.439, 68.529, 68.61 , 68.679, 68.737],
[ 68.368, 68.436, 68.492, 68.535, 68.566],
[ 68.507, 68.546, 68.571, 68.581, 68.574],
[ 68.572, 68.573, 68.563, 68.523, 68.46 ],
[ 68.497, 68.443, 68.372, 68.236, 68.009]]]],
[[[[ 12.453, 12.498, 2.425, 2.198, 0.84 ],
[ 2.083, 1.483, -0.248, -3.111, -7.104],
[ -1.092, -4.331, -8.701, -14.202, -20.834],
[-10.528, -16.405, -23.412, -31.551, -40.82 ],
[-26.266, -34.779, -44.422, -55.196, -67.101]],
[[ 65.555, 65.558, 65.534, 65.474, 65.36 ],
[ 65.252, 65.179, 65.043, 64.752, 54.54 ],
[ 64.974, 54.631, 54.304, 52.852, 50.272],
[ 53.942, 52.11 , 49.158, 45.072, 39.867],
[ 47.865, 43.4 , 37.823, 31.106, 23.273]]],
[[[ 13.541, 13.722, 13.859, 13.927, 13.802],
[ 13.5 , 13.534, 3.451, 3.214, 1.846],
[ 3.482, 2.868, 1.123, -1.753, -5.76 ],
[ 0.172, -3.08 , -7.463, -12.976, -19.62 ],
[ -9.442, -15.329, -22.348, -30.497, -39.776]],
[[ 66.433, 66.473, 66.495, 66.496, 66.472],
[ 66.231, 66.227, 66.196, 66.13 , 66.011],
[ 66.178, 66.096, 65.952, 65.655, 55.438],
[ 65.877, 55.526, 55.199, 53.738, 51.158],
[ 54.781, 52.941, 49.989, 45.897, 40.689]]],
[[[ 14.475, 14.708, 14.917, 15.095, 15.235],
[ 14.588, 14.759, 14.886, 14.943, 14.808],
[ 14.899, 14.92 , 4.823, 4.572, 3.19 ],
[ 4.746, 4.119, 2.362, -0.527, -4.546],
[ 1.258, -2.005, -6.398, -11.922, -18.577]],
[[ 67.247, 67.312, 67.362, 67.398, 67.417],
[ 67.109, 67.142, 67.158, 67.152, 67.123],
[ 67.158, 67.145, 67.105, 67.033, 66.909],
[ 67.082, 66.991, 66.847, 66.541, 56.324],
[ 66.717, 56.357, 56.03 , 54.563, 51.98 ]]],
[[[ 15.325, 15.59 , 15.836, 16.062, 16.265],
[ 15.522, 15.744, 15.943, 16.111, 16.24 ],
[ 15.987, 16.144, 16.257, 16.301, 16.152],
[ 16.163, 16.171, 6.061, 5.798, 4.404],
[ 5.832, 5.195, 3.426, 0.527, -3.502]],
[[ 68.016, 68.098, 68.169, 68.228, 68.274],
[ 67.924, 67.981, 68.025, 68.054, 68.067],
[ 68.036, 68.059, 68.066, 68.056, 68.021],
[ 68.061, 68.039, 68. , 67.919, 67.794],
[ 67.921, 67.822, 67.677, 67.366, 57.146]]],
[[[ 16.121, 16.406, 16.678, 16.933, 17.171],
[ 16.372, 16.626, 16.862, 17.078, 17.271],
[ 16.921, 17.13 , 17.314, 17.469, 17.585],
[ 17.251, 17.395, 17.496, 17.527, 17.366],
[ 17.249, 17.246, 7.126, 6.852, 5.447]],
[[ 68.749, 68.846, 68.932, 69.008, 69.074],
[ 68.692, 68.768, 68.832, 68.884, 68.925],
[ 68.85 , 68.898, 68.934, 68.957, 68.965],
[ 68.939, 68.954, 68.961, 68.941, 68.906],
[ 68.901, 68.87 , 68.831, 68.744, 68.617]]]],
[[[[ 12.947, 13.074, 13.112, 3.034, 2.8 ],
[ 12.693, 2.693, 2.087, 0.35 , -2.518],
[ 1.618, -0.496, -3.741, -8.117, -13.624],
[ -5.192, -9.944, -15.827, -22.84 , -30.984],
[-18.306, -25.693, -34.212, -43.861, -54.64 ]],
[[ 65.941, 65.962, 65.961, 65.932, 65.868],
[ 65.688, 65.652, 65.575, 65.435, 65.141],
[ 65.537, 65.364, 55.018, 54.691, 53.236],
[ 55.031, 54.324, 52.497, 49.536, 45.456],
[ 51.579, 48.239, 43.787, 38.195, 31.487]]],
[[[ 13.954, 14.162, 14.337, 14.468, 14.529],
[ 13.994, 14.11 , 14.139, 4.049, 3.806],
[ 14.093, 4.079, 3.459, 1.708, -1.174],
[ 2.882, 0.755, -2.502, -6.891, -12.41 ],
[ -4.106, -8.869, -14.762, -21.786, -29.941]],
[[ 66.789, 66.84 , 66.876, 66.893, 66.891],
[ 66.617, 66.631, 66.623, 66.588, 66.519],
[ 66.614, 66.569, 66.484, 66.339, 66.039],
[ 66.441, 66.259, 55.913, 55.577, 54.122],
[ 55.87 , 55.155, 53.328, 50.361, 46.278]]],
[[[ 14.847, 15.096, 15.323, 15.525, 15.698],
[ 15.001, 15.198, 15.363, 15.484, 15.535],
[ 15.394, 15.496, 15.51 , 5.407, 5.15 ],
[ 15.356, 5.33 , 4.697, 2.934, 0.04 ],
[ 3.968, 1.831, -1.438, -5.837, -11.366]],
[[ 67.582, 67.654, 67.714, 67.761, 67.792],
[ 67.465, 67.509, 67.538, 67.549, 67.542],
[ 67.543, 67.548, 67.532, 67.492, 67.417],
[ 67.518, 67.463, 67.379, 67.224, 66.925],
[ 67.28 , 67.09 , 56.744, 56.402, 54.944]]],
[[[ 15.672, 15.946, 16.204, 16.444, 16.665],
[ 15.894, 16.132, 16.349, 16.541, 16.703],
[ 16.4 , 16.584, 16.734, 16.842, 16.879],
[ 16.657, 16.746, 16.749, 6.633, 6.364],
[ 16.443, 6.405, 5.762, 3.988, 1.083]],
[[ 68.334, 68.423, 68.501, 68.568, 68.623],
[ 68.258, 68.324, 68.377, 68.417, 68.443],
[ 68.391, 68.426, 68.447, 68.453, 68.439],
[ 68.447, 68.443, 68.427, 68.378, 68.302],
[ 68.357, 68.294, 68.209, 68.049, 67.747]]],
[[[ 16.448, 16.742, 17.021, 17.286, 17.535],
[ 16.719, 16.983, 17.23 , 17.46 , 17.67 ],
[ 17.294, 17.517, 17.72 , 17.899, 18.048],
[ 17.664, 17.835, 17.973, 18.068, 18.093],
[ 17.744, 17.822, 17.813, 7.687, 7.408]],
[[ 69.055, 69.156, 69.248, 69.331, 69.403],
[ 69.01 , 69.092, 69.163, 69.224, 69.273],
[ 69.184, 69.241, 69.286, 69.32 , 69.341],
[ 69.295, 69.321, 69.341, 69.339, 69.325],
[ 69.286, 69.274, 69.257, 69.203, 69.125]]]],
[[[[ 13.398, 13.568, 13.688, 13.721, 3.636],
[ 13.32 , 13.303, 3.298, 2.685, 0.942],
[ 3.204, 2.215, 0.095, -3.156, -7.538],
[ -0.982, -4.609, -9.366, -15.255, -22.274],
[-11.47 , -17.733, -25.126, -33.65 , -43.305]],
[[ 66.312, 66.348, 66.364, 66.359, 66.327],
[ 66.099, 66.088, 66.048, 65.967, 65.825],
[ 66.025, 65.928, 65.752, 55.405, 55.075],
[ 65.656, 55.413, 54.711, 52.875, 49.92 ],
[ 54.168, 51.953, 48.626, 44.159, 38.576]]],
[[[ 14.347, 14.575, 14.776, 14.945, 15.07 ],
[ 14.445, 14.605, 14.714, 14.737, 4.642],
[ 14.72 , 14.689, 4.669, 4.043, 2.286],
[ 4.468, 3.466, 1.333, -1.93 , -6.324],
[ 0.104, -3.533, -8.302, -14.201, -21.23 ]],
[[ 67.134, 67.195, 67.243, 67.274, 67.288],
[ 66.988, 67.017, 67.027, 67.015, 66.978],
[ 67.025, 67.005, 66.956, 66.871, 66.722],
[ 66.929, 66.822, 66.646, 56.291, 55.961],
[ 66.496, 56.244, 55.542, 53.7 , 50.742]]],
[[[ 15.208, 15.468, 15.71 , 15.931, 16.128],
[ 15.394, 15.611, 15.802, 15.961, 16.076],
[ 15.844, 15.99 , 16.086, 16.095, 5.986],
[ 15.983, 15.94 , 5.908, 5.269, 3.5 ],
[ 5.554, 4.541, 2.398, -0.876, -5.281]],
[[ 67.909, 67.988, 68.057, 68.113, 68.155],
[ 67.81 , 67.865, 67.905, 67.93 , 67.939],
[ 67.915, 67.934, 67.936, 67.919, 67.875],
[ 67.929, 67.9 , 67.851, 67.756, 67.608],
[ 67.768, 67.653, 67.477, 57.116, 56.783]]],
[[[ 16.01 , 16.293, 16.561, 16.813, 17.047],
[ 16.255, 16.505, 16.736, 16.947, 17.133],
[ 16.794, 16.997, 17.174, 17.319, 17.42 ],
[ 17.108, 17.241, 17.324, 17.321, 7.2 ],
[ 17.07 , 17.015, 6.972, 6.323, 4.544]],
[[ 68.647, 68.741, 68.825, 68.899, 68.962],
[ 68.585, 68.658, 68.719, 68.769, 68.806],
[ 68.737, 68.782, 68.814, 68.834, 68.836],
[ 68.818, 68.829, 68.83 , 68.804, 68.761],
[ 68.768, 68.731, 68.682, 68.581, 68.431]]],
[[[ 16.769, 17.069, 17.356, 17.63 , 17.888],
[ 17.057, 17.329, 17.587, 17.829, 18.053],
[ 17.654, 17.89 , 18.108, 18.305, 18.477],
[ 18.057, 18.248, 18.412, 18.545, 18.634],
[ 18.194, 18.316, 18.389, 18.375, 8.243]],
[[ 69.355, 69.461, 69.559, 69.647, 69.725],
[ 69.323, 69.41 , 69.488, 69.555, 69.613],
[ 69.511, 69.575, 69.628, 69.672, 69.704],
[ 69.64 , 69.677, 69.708, 69.719, 69.722],
[ 69.658, 69.66 , 69.661, 69.63 , 69.584]]]]])
