Python - Fit gaussian to noisy data with lmfit

I'm trying to fit a gaussian to this data
x = [4170.177259096838, 4170.377258006199, 4170.577256915561, 4170.777255824922, 4170.977254734283, 4171.177253643645, 4171.377252553006, 4171.577251462368, 4171.777250371729, 4171.977249281091, 4172.177248190453, 4172.377247099814, 4172.577246009175, 4172.777244918537, 4172.977243827898, 4173.17724273726, 4173.377241646621, 4173.577240555983, 4173.777239465344, 4173.977238374706, 4174.177237284067, 4174.377236193429, 4174.57723510279, 4174.777234012152, 4174.977232921513, 4175.177231830875, 4175.377230740236, 4175.577229649598, 4175.777228558959, 4175.977227468321, 4176.177226377682, 4176.377225287044, 4176.577224196405, 4176.777223105767, 4176.977222015128, 4177.17722092449, 4177.377219833851, 4177.577218743213, 4177.777217652574, 4177.977216561936, 4178.177215471297, 4178.377214380659, 4178.57721329002, 4178.777212199382, 4178.977211108743, 4179.177210018105, 4179.377208927466, 4179.577207836828, 4179.777206746189, 4179.977205655551, 4180.177204564912, 4180.377203474274, 4180.577202383635, 4180.777201292997, 4180.977200202357, 4181.17719911172, 4181.377198021081, 4181.577196930443, 4181.777195839804, 4181.977194749166, 4182.177193658527, 4182.377192567888, 4182.5771914772495, 4182.777190386612, 4182.9771892959725, 4183.177188205335, 4183.377187114696, 4183.577186024058, 4183.777184933419, 4183.9771838427805, 4184.177182752143, 4184.3771816615035, 4184.5771805708655, 4184.777179480228, 4184.977178389589, 4185.1771772989505, 4185.3771762083115, 4185.5771751176735, 4185.777174027035, 4185.977172936397, 4186.1771718457585, 4186.3771707551205, 4186.5771696644815, 4186.777168573843, 4186.977167483204, 4187.177166392566, 4187.377165301927, 4187.577164211289, 4187.77716312065, 4187.977162030013, 4188.177160939374, 4188.377159848735, 4188.577158758096, 4188.777157667458, 4188.977156576819, 4189.177155486181, 4189.377154395542, 4189.577153304904, 4189.777152214265, 4189.977151123627, 4190.177150032989, 4190.37714894235, 4190.577147851711, 4190.777146761073, 4190.977145670434, 4191.177144579796, 4191.377143489157, 4191.577142398519, 4191.77714130788, 4191.977140217242, 4192.177139126603, 4192.377138035965, 4192.577136945326, 4192.777135854688, 4192.977134764049, 4193.177133673411, 4193.377132582772, 4193.577131492134, 4193.777130401495, 4193.977129310857, 4194.177128220218, 4194.377127129579, 4194.577126038941, 4194.777124948303, 4194.977123857664, 4195.177122767026, 4195.377121676387, 4195.577120585749, 4195.77711949511, 4195.977118404472, 4196.177117313833, 4196.377116223195, 4196.577115132556, 4196.777114041918, 4196.977112951279, 4197.177111860641, 4197.377110770002, 4197.577109679364, 4197.777108588725, 4197.977107498087, 4198.177106407448, 4198.37710531681, 4198.577104226171, 4198.777103135533, 4198.977102044893, 4199.177100954256, 4199.377099863617, 4199.577098772979, 4199.77709768234, 4199.977096591702, 4200.177095501063, 4200.377094410424, 4200.5770933197855, 4200.777092229148, 4200.9770911385085, 4201.177090047871, 4201.377088957232, 4201.577087866594, 4201.7770867759555, 4201.9770856853165, 4202.177084594679, 4202.377083504041, 4202.5770824134015, 4202.777081322764, 4202.977080232125, 4203.1770791414865, 4203.377078050848, 4203.5770769602095, 4203.777075869571, 4203.9770747789335, 4204.1770736882945, 4204.3770725976565, 4204.5770715070175, 4204.777070416379, 4204.97706932574, 4205.177068235102, 4205.377067144463, 4205.577066053825, 4205.777064963186, 4205.977063872549, 4206.17706278191, 4206.377061691271, 4206.577060600632, 4206.777059509994, 4206.977058419355, 4207.177057328717, 
4207.377056238078, 4207.57705514744, 4207.777054056801, 4207.977052966163, 4208.177051875525, 4208.377050784886, 4208.577049694247, 4208.777048603609, 4208.977047512971, 4209.177046422332, 4209.377045331693, 4209.577044241055, 4209.777043150416, 4209.977042059778, 4210.177040969139, 4210.377039878501, 4210.577038787862, 4210.777037697224, 4210.977036606585, 4211.177035515947, 4211.377034425308, 4211.57703333467, 4211.777032244031, 4211.977031153393, 4212.177030062754, 4212.377028972116, 4212.577027881477, 4212.777026790839, 4212.9770257002, 4213.177024609562, 4213.377023518923, 4213.577022428285, 4213.777021337646, 4213.977020247008, 4214.177019156369, 4214.377018065731, 4214.577016975092, 4214.777015884454, 4214.977014793814, 4215.177013703177, 4215.377012612538, 4215.5770115219, 4215.777010431261, 4215.977009340623, 4216.177008249984, 4216.377007159345, 4216.577006068707, 4216.777004978069, 4216.977003887429, 4217.177002796792, 4217.377001706153, 4217.577000615515, 4217.776999524876, 4217.976998434238, 4218.176997343599, 4218.37699625296, 4218.5769951623215, 4218.776994071684, 4218.9769929810445, 4219.176991890407, 4219.376990799769, 4219.5769897091295, 4219.7769886184915, 4219.9769875278525, 4220.176986437215, 4220.376985346577, 4220.5769842559375, 4220.7769831653, 4220.9769820746615, 4221.1769809840225, 4221.376979893384, 4221.5769788027455, 4221.776977712107, 4221.9769766214695, 4222.17697553083, 4222.3769744401925, 4222.576973349554, 4222.776972258915, 4222.976971168276, 4223.176970077638, 4223.376968986999, 4223.576967896361, 4223.776966805722, 4223.976965715085, 4224.176964624445, 4224.376963533807, 4224.576962443168, 4224.77696135253, 4224.976960261891, 4225.176959171253, 4225.376958080614, 4225.576956989976, 4225.776955899337, 4225.976954808699, 4226.17695371806, 4226.376952627422, 4226.576951536783, 4226.776950446145, 4226.976949355506, 4227.176948264868, 4227.376947174229, 4227.576946083591, 4227.776944992952, 4227.976943902314, 4228.176942811675, 4228.376941721037, 4228.576940630398, 4228.776939539759, 4228.976938449121, 4229.176937358483, 4229.376936267844, 4229.576935177205, 4229.776934086567, 4229.976932995929]
y = [1.0063203573226929, 0.9789621233940125, 0.9998905658721924, 0.9947001934051514, 1.023498773574829, 1.0001505613327026, 0.9659610986709596, 1.0141736268997192, 0.9910064339637756, 0.961456060409546, 0.9808377623558044, 0.9717124700546264, 1.0020164251327517, 0.9276596307754515, 1.0044682025909424, 0.9898168444633484, 1.0139398574829102, 1.016809344291687, 0.9985541105270386, 1.0404949188232422, 1.0104306936264038, 1.0101377964019775, 1.0228283405303955, 1.014385461807251, 0.9949180483818054, 0.9398794174194336, 1.0047662258148191, 1.0185784101486206, 0.9942153096199036, 1.0496678352355957, 0.929694890975952, 1.0259612798690796, 1.0174839496612549, 0.9557819366455078, 1.009858012199402, 1.0258405208587646, 1.0318727493286133, 0.9781686067581176, 0.9566296339035034, 0.9626089930534364, 1.040783166885376, 0.9469046592712402, 0.9732370972633362, 1.0082777738571167, 1.0438332557678225, 1.067220687866211, 1.0809389352798462, 1.0122139453887942, 0.995375156402588, 1.025692343711853, 1.0900095701217651, 1.0033329725265503, 0.9947514533996582, 0.9366152882575988, 1.0340673923492432, 1.0574461221694946, 0.9984419345855712, 0.9406535029411316, 1.0367794036865234, 1.0252420902252195, 0.9390246868133544, 1.057265043258667, 1.0652446746826172, 1.0001699924468994, 1.0561981201171875, 0.9452269077301024, 1.0119216442108154, 1.000349760055542, 0.9879921674728394, 0.9834288954734802, 0.976799249649048, 0.9408118724822998, 1.0574927330017092, 1.0466219186782837, 0.97526878118515, 0.9811903238296508, 0.9985196590423584, 0.9862677454948424, 0.964194357395172, 1.0116554498672483, 0.9122620820999146, 0.9972245693206788, 0.9447768926620485, 1.0320085287094116, 1.0034307241439822, 0.965615689754486, 1.0228805541992188, 0.9555847048759459, 1.00389301776886, 0.9856386780738832, 0.9894683361053468, 1.0711736679077148, 0.990192711353302, 1.016653060913086, 1.0263935327529907, 0.9454292058944702, 0.9236765503883362, 0.9511216878890992, 0.9773555994033812, 0.9222095608711244, 0.9599731564521791, 1.0067923069000244, 1.0022263526916504, 0.9766445159912108, 1.0026237964630127, 1.010635256767273, 0.9901092052459716, 0.9869268536567688, 1.0354781150817869, 0.9797658920288086, 0.9543874263763428, 0.9747632145881652, 0.9942164421081544, 1.008299469947815, 0.9546594023704528, 1.0318409204483032, 1.0383642911911009, 1.0332415103912354, 1.0234425067901611, 1.0186198949813845, 1.0179851055145264, 1.0760197639465332, 0.9456835985183716, 1.0079874992370603, 0.9838529229164124, 0.8951097726821899, 0.9530791640281676, 0.9732348322868348, 0.9659185409545898, 1.0089071989059448, 0.963958203792572, 1.0035384893417358, 0.9776629805564879, 0.964256465435028, 0.9468261599540709, 1.0145124197006226, 1.0375784635543823, 0.992344319820404, 0.9584225416183472, 1.0427420139312744, 0.9997742176055908, 0.9584409594535828, 1.0051720142364502, 0.9606672525405884, 0.9797580242156982, 0.9900978207588196, 0.943138301372528, 0.9368865489959716, 0.9272330403327942, 0.9655094146728516, 0.9074565172195436, 0.97406405210495, 0.8742623329162598, 0.9219859838485718, 0.9126378297805786, 0.8354664444923401, 0.9138413667678832, 0.9268960952758788, 0.8841327428817749, 0.9733222126960754, 0.8825243711471558, 0.9243521094322203, 0.9403685927391052, 0.8782523870468141, 0.9003781080245972, 0.8850597143173218, 0.9231640696525574, 0.931676983833313, 0.8601804971694946, 0.8312444686889648, 0.9361259937286376, 0.9289224147796632, 0.8919285535812378, 0.8838070034980774, 0.9187015891075134, 0.9484543204307556, 0.8572731018066406, 0.8458079099655151, 0.92625629901886, 
0.9748064875602722, 0.9674397706985474, 0.9326313138008118, 0.9933922290802002, 1.0025516748428345, 0.9956294894218444, 0.8995802998542786, 0.9598655700683594, 1.0185420513153076, 0.9935647249221802, 0.9689980745315552, 0.9919951558113098, 1.0028616189956665, 1.0252325534820557, 1.0221387147903442, 1.009528875350952, 1.0272767543792725, 0.9865442514419556, 0.9821861386299132, 0.95982563495636, 0.9557262063026428, 0.9864148497581482, 1.0166704654693604, 1.0599093437194824, 1.0000406503677368, 0.9622656106948853, 1.0044697523117063, 1.0404677391052246, 1.0023702383041382, 0.9803014993667604, 1.0197279453277588, 0.9902933835983276, 0.998839259147644, 0.966608464717865, 1.0340296030044556, 0.9632315635681152, 0.9758646488189696, 0.9757773876190186, 0.9818265438079834, 1.0110433101654053, 1.0131133794784546, 1.0256367921829224, 1.0690158605575562, 0.9764784574508668, 0.9947471022605896, 0.9979920387268066, 0.9850373864173888, 0.9165602922439576, 0.9634824395179749, 1.052489995956421, 0.9370544552803041, 1.0348092317581177, 1.0473220348358154, 0.9566289782524108, 0.9579214453697203, 0.972671627998352, 0.9536439180374146, 0.9755330085754396, 0.9753606915473938, 0.9924075603485109, 0.9893715381622314, 0.9780346751213074, 1.0207450389862058, 0.9914312362670898, 0.9940584301948548, 1.0417673587799072, 0.977041721343994, 1.0113568305969238, 1.030456304550171, 1.0540854930877686, 0.9963837265968324, 1.002269268035889, 0.9528346061706544, 0.9132148027420044, 1.0386162996292114, 0.9384365677833556, 1.0175614356994631, 1.0362330675125122, 0.9502999186515808, 1.0015273094177246, 0.987025022506714, 0.9869014024734496, 0.9577396512031556, 0.9633736610412598, 1.0747206211090088, 1.1858476400375366, 0.9917531609535216, 1.0963184833526611, 0.9528627991676332, 0.9999563694000244, 1.0115929841995241, 1.0094747543334959, 0.9977090358734132, 0.9800350666046144, 1.0336441993713381, 1.0021690130233765, 0.9629588127136229, 0.9191407561302184, 0.9930744767189026, 1.0318671464920044, 0.975939691066742, 0.9548277258872986, 1.0113637447357178, 0.9920935630798341, 0.9777255654335022, 0.9780721664428712, 0.9507009387016296, 0.9387223720550536, 1.0220414400100708, 1.019809007644653, 0.9822806715965272, 1.0380866527557373, 1.0477066040039062, 1.0222935676574707, 1.0258997678756714, 1.027082443237305, 1.0487046241760254, 0.9292799830436708, 0.999277114868164, 1.044923186302185, 1.0261610746383667]
e = [3.865531107294373e-05, 3.866014958475717e-05, 3.866496626869776e-05, 3.8669764762744314e-05, 3.867453415296041e-05, 3.8679270801367245e-05, 3.8683978345943615e-05, 3.868864223477431e-05, 3.8693269743816934e-05, 3.8697849959135056e-05, 3.870237924274989e-05, 3.8706857594661415e-05, 3.871127773891203e-05, 3.871564331348054e-05, 3.871994340443053e-05, 3.872417437378317e-05, 3.8728336221538484e-05, 3.8732425309717655e-05, 3.8736438000341884e-05, 3.874037065543234e-05, 3.8744219637010247e-05, 3.874798130709678e-05, 3.8751652027713135e-05, 3.875523543683812e-05, 3.8758716982556514e-05, 3.876210394082591e-05, 3.8765389035688706e-05, 3.8768568629166105e-05, 3.87716390832793e-05, 3.877460039802827e-05, 3.877745257341303e-05, 3.878018469549716e-05, 3.8782800402259454e-05, 3.878529605572112e-05, 3.8787664379924536e-05, 3.878991265082732e-05, 3.8792029954493046e-05, 3.8794016290921725e-05, 3.879586802213453e-05, 3.8797588786110275e-05, 3.879916766891256e-05, 3.8800608308520175e-05, 3.88019070669543e-05, 3.880306030623615e-05, 3.880407166434452e-05, 3.8804930227342986e-05, 3.8805643271189176e-05, 3.880619988194667e-05, 3.880660733557306e-05, 3.8806854718131945e-05, 3.8806945667602115e-05, 3.88068801839836e-05, 3.880665099131875e-05, 3.8806265365565196e-05, 3.880571239278652e-05, 3.880499571096152e-05, 3.880410804413259e-05, 3.880305666825734e-05, 3.8801834307378165e-05, 3.8800444599473856e-05, 3.87988802685868e-05, 3.879714495269582e-05, 3.8795235013822094e-05, 3.879315045196563e-05, 3.879089126712642e-05, 3.8788453821325675e-05, 3.8785838114563376e-05, 3.878304414683953e-05, 3.8780071918154135e-05, 3.877691779052839e-05, 3.877357812598348e-05, 3.877006747643463e-05, 3.8766367651987814e-05, 3.876248956657946e-05, 3.875842594425194e-05, 3.8754180422984064e-05, 3.8749749364797026e-05, 3.874513640766963e-05, 3.8740334275644266e-05, 3.873535024467856e-05, 3.8730184314772493e-05, 3.872482920996845e-05, 3.871929220622405e-05, 3.871356602758169e-05, 3.8707657949998975e-05, 3.8701564335497096e-05, 3.8695285184076056e-05, 3.868882413371466e-05, 3.86821739084553e-05, 3.867534178425558e-05, 3.86683241231367e-05, 3.8661124563077465e-05, 3.8653739466099075e-05, 3.8646172470180325e-05, 3.863842357532121e-05, 3.863049278152175e-05, 3.862238008878194e-05, 3.861408913508057e-05, 3.860561628243886e-05, 3.85969651688356e-05, 3.8588135794270784e-05, 3.8579128158744425e-05, 3.856993862427771e-05, 3.856058174278587e-05, 3.855104296235368e-05, 3.854133319691755e-05, 3.853144880849868e-05, 3.852139343507588e-05, 3.851116707664913e-05, 3.8500766095239676e-05, 3.8490205042762675e-05, 3.847947300528176e-05, 3.846857362077572e-05, 3.8457506889244535e-05, 3.844628372462465e-05, 3.843489321297966e-05, 3.8423342630267136e-05, 3.841163197648712e-05, 3.8399768527597196e-05, 3.8387741369660944e-05, 3.8375561416614794e-05, 3.836322866845876e-05, 3.835074676317163e-05, 3.8338112062774605e-05, 3.832533184322529e-05, 3.831240246654488e-05, 3.829932757071219e-05, 3.828611079370603e-05, 3.827275213552639e-05, 3.825925523415208e-05, 3.8245623727561906e-05, 3.8231850339798264e-05, 3.821794962277636e-05, 3.820391066255979e-05, 3.818974801106378e-05, 3.817545439233072e-05, 3.816103708231821e-05, 3.814649244304746e-05, 3.8131831388454884e-05, 3.811704664258287e-05, 3.810214911936782e-05, 3.8087135180830956e-05, 3.807200482697226e-05, 3.805676897172816e-05, 3.804142033914104e-05, 3.802596984314732e-05, 3.801041020778939e-05, 3.799475598498248e-05, 3.7978999898768955e-05, 3.7963145587127656e-05, 3.794720396399498e-05, 3.793116411543451e-05, 
3.791503331740387e-05, 3.789882612181827e-05, 3.78825279767625e-05, 3.786614615819417e-05, 3.784968066611327e-05, 3.783314969041385e-05, 3.781653504120186e-05, 3.7799851270392544e-05, 3.7783102015964694e-05, 3.776627636398189e-05, 3.774939614231698e-05, 3.773245043703355e-05, 3.77154428861104e-05, 3.769838440348394e-05, 3.7681271351175376e-05, 3.7664103729184724e-05, 3.764688881346956e-05, 3.762963024200872e-05, 3.7612328014802194e-05, 3.759498213184997e-05, 3.7577603507088504e-05, 3.756018850253895e-05, 3.754274075618014e-05, 3.752526026801206e-05, 3.7507754313992336e-05, 3.749022653209977e-05, 3.747267328435555e-05, 3.7455109122674905e-05, 3.7437519495142624e-05, 3.741991895367392e-05, 3.740230749826878e-05, 3.7384688766906045e-05, 3.736707003554329e-05, 3.7349444028222933e-05, 3.733181438292377e-05, 3.731419565156102e-05, 3.729656964424066e-05, 3.727895818883553e-05, 3.726136128534563e-05, 3.724377893377096e-05, 3.72262074961327e-05, 3.7208654248388484e-05, 3.719112282851711e-05, 3.717361323651858e-05, 3.71561327483505e-05, 3.713868500199169e-05, 3.712127363542095e-05, 3.710389137268066e-05, 3.708654185174965e-05, 3.7069235986564315e-05, 3.7051977415103465e-05, 3.70347588614095e-05, 3.7017580325482406e-05, 3.7000467273173854e-05, 3.698339060065337e-05, 3.6966375773772604e-05, 3.694941915455274e-05, 3.6932531656930216e-05, 3.69156914530322e-05, 3.68989203707315e-05, 3.688222204800695e-05, 3.686558557092212e-05, 3.684902549139224e-05, 3.68325381714385e-05, 3.681613088701852e-05, 3.679980000015348e-05, 3.67835491488222e-05, 3.6767385608982295e-05, 3.675130210467614e-05, 3.6735313187818974e-05, 3.6719411582453176e-05, 3.670359728857875e-05, 3.668788122013211e-05, 3.6672267015092075e-05, 3.6656743759522215e-05, 3.6641329643316574e-05, 3.6626006476581103e-05, 3.661079972516745e-05, 3.65956875612028e-05, 3.658069908851757e-05, 3.656581247923896e-05, 3.655103500932455e-05, 3.653637395473197e-05, 3.652183659141883e-05, 3.650740836746991e-05, 3.649310383480042e-05, 3.647892663138919e-05, 3.6464858567342155e-05, 3.645092147053219e-05, 3.6437118978938095e-05, 3.642343290266581e-05, 3.64098850695882e-05, 3.6396453651832423e-05, 3.638317502918653e-05, 3.637001282186248e-05, 3.635699613369071e-05, 3.6344117688713595e-05, 3.633138112490997e-05, 3.631877552834339e-05, 3.6306315450929105e-05, 3.62940008926671e-05, 3.628181730164215e-05, 3.626977922976948e-05, 3.6257904866943136e-05, 3.6246172385290265e-05, 3.623457087087445e-05, 3.622312215156853e-05, 3.6211833503330126e-05, 3.620068309828639e-05, 3.6189692764310166e-05, 3.617885158746503e-05, 3.616817775764503e-05, 3.615764217101969e-05, 3.6147263017483056e-05, 3.613704757299274e-05, 3.6126984923612326e-05, 3.611708234529942e-05, 3.6107321648159996e-05, 3.609772466006689e-05, 3.60882913810201e-05, 3.6079025448998436e-05, 3.606990867410786e-05, 3.606095197028481e-05, 3.605215169955045e-05, 3.6043515137862414e-05, 3.603503864724189e-05, 3.6026725865667686e-05, 3.6018584069097415e-05, 3.601058415370062e-05, 3.600275158532895e-05, 3.599507545004599e-05, 3.598758848966099e-05, 3.598022158257663e-05, 3.597304748836905e-05, 3.5966018913313746e-05, 3.595916132326238e-05, 3.59524528903421e-05, 3.594591180444695e-05, 3.593953078961931e-05, 3.5933309845859185e-05, 3.592724533518776e-05, 3.5921337257605046e-05, 3.591560744098388e-05, 3.591001950553619e-05, 3.590458072721958e-05, 3.5899327485822134e-05, 3.589421248761937e-05, 3.588925756048411e-05]
I have tried the examples given in
Python gaussian fit on simulated gaussian noisy data, and Fitting (a gaussian) with Scipy vs. ROOT et al without luck.
I'd like to do this with lmfit because it has several advantages. This attempt follows the lmfit documentation; here is the code and the resulting plot
from numpy import sqrt, pi, exp
from lmfit import Model
import matplotlib.pyplot as plt
def gaussian(x, amp, cen, wid):
    "1-d gaussian: gaussian(x, amp, cen, wid)"
    return (amp/(sqrt(2*pi)*wid)) * exp(-(x-cen)**2 /(2*wid**2))
gmodel = Model(gaussian)
result = gmodel.fit(y, x=x, amp=-0.5, cen=4200, wid=2)
plt.plot(x, y,'ro', ms=6)
plt.plot(x, result.init_fit, 'g--', lw=2)
plt.plot(x, result.best_fit, 'b-', lw=2)
The green curve is the fit with the initial parameters and the blue curve should be the best fit, but as you can see I get a Gaussian shifted away from my points and a straight line.
Also, the third array (e) holds the errors on the y values. How can I take these errors into account when fitting the data with lmfit?

The easiest way to do this is probably to make use of the built-in models and combine a GaussianModel with a ConstantModel. You can include the errors in the fit through the weights keyword, as described here.
You'll probably want to do something like this:
import numpy as np
from lmfit import Model
from lmfit.models import GaussianModel, ConstantModel
import matplotlib.pyplot as plt
xval = np.array(x)
yval = np.array(y)
err = np.array(e)
peak = GaussianModel()
offset = ConstantModel()
model = peak + offset
pars = offset.make_params(c=np.median(y))
pars += peak.guess(yval, x=xval, amplitude=-0.5)
result = model.fit(yval, pars, x=xval, weights=1/err)
print(result.fit_report())
plt.plot(xval, yval, 'ro', ms=6)
plt.plot(xval, result.best_fit, 'b--')
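Once the fit has run, it can be useful to read the fitted parameters and their uncertainties back out of the result. A minimal sketch, assuming the fit above succeeded (the parameter names amplitude, center, sigma come from GaussianModel and c from ConstantModel):
# best-fit value and 1-sigma uncertainty for every parameter
for name, par in result.params.items():
    print(name, par.value, par.stderr)
# the fitted line centre on its own
print('center =', result.params['center'].value, '+/-', result.params['center'].stderr)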

Related

Plot won't display in python

I'm using the following bit of code to plot two arrays of the same length -
import matplotlib.pyplot as plt
from scipy import stats
from scipy.stats import linregress
G_mag_values = [11.436, 11.341, 11.822, 11.646, 11.924, 12.057, 11.884, 11.805, 12.347, 12.662, 12.362, 12.555, 12.794, 12.819, 12.945, 12.733, 12.789, 12.878, 12.963, 13.094, 13.031, 12.962, 13.018, 12.906, 13.016, 13.088, 13.04, 13.035, 13.094, 13.032, 13.216, 13.062, 13.083, 13.126, 13.101, 13.089, 13.073, 13.182, 13.116, 13.145, 13.235, 13.161, 13.154, 13.383, 13.315, 13.429, 13.461, 13.287, 13.494, 13.459, 13.478, 13.534, 13.536, 13.536, 13.483, 13.544, 13.564, 13.544, 13.608, 13.655, 13.665, 13.668, 13.697, 13.649, 13.742, 13.756, 13.671, 13.701, 13.788, 13.723, 13.697, 13.713, 13.708, 13.765, 13.847, 13.992, 13.706, 13.79, 13.783, 13.844, 13.945, 13.928, 13.936, 13.956, 13.898, 14.059, 13.945, 14.039, 13.999, 14.087, 14.05, 14.083, 14.136, 14.124, 14.189, 14.149, 14.182, 14.281, 14.177, 14.297, 14.268, 14.454, 14.295]
G_cal_values = [-8.553610547563503, -8.085853602272588, -7.98491731861732, -7.852060056526794, -7.550944423333883, -7.569289687795749, -7.547088847268468, -7.544445036682168, -7.480698829329534, -7.184407435874912, -7.382606680295108, -7.2231275160942054, -7.093385973539046, -7.0473097125206685, -6.775012624594927, -6.814667514017907, -6.719898703328146, -6.741699011193633, -6.483121454948265, -6.320533066162438, -6.216044707275117, -6.037365656714626, -6.058593802250578, -6.0203190345840865, -6.036176430437363, -5.817887798572345, -5.838439347527171, -5.864922270102037, -5.755152671040021, -5.7709095683554725, -5.729226240967218, -5.606533007538604, -5.5817719334376905, -5.578993138005095, -5.62616747769538, -5.648413591916503, -5.611676700504294, -5.557722166623976, -5.5584623064502825, -5.425878164810264, -5.582204334985258, -5.529395790688368, -5.560750195967957, -5.433224654816512, -5.4751198268734385, -5.4592032005417215, -5.514591770369543, -5.580278698184566, -5.520695348050357, -5.501615700174841, -5.578645415877418, -5.692203332547151, -5.569497861450115, -5.335209902666812, -5.470963349023013, -5.44265375533589, -5.538541653702721, -5.355732832969871, -5.318164588926453, -5.376154615199398, -5.372133774215322, -5.361689907587619, -5.37608154921679, -5.412657572197508, -5.454613589602333, -5.339384591430104, -5.367511403407703, -5.258069473329993, -5.347580031901464, -4.9905279263992, -5.445096880253789, -5.192885553786512, -5.2983352094538505, -5.3930571447307365, -5.057910469318639, -5.32585105504838, -5.238649399637653, -5.122431894813153, -5.084559296025157, -5.139042420486851, -4.9919273140342915, -5.103619454431522, -5.017946144298159, -4.98136832081144, -5.084355565584671, -5.048634391386494, -4.887073481359435, -5.074683293771264, -5.050703776716202, -5.104772289705188, -4.9597601680524415, -4.971489935988787, -4.895283369236485, -4.9859511256778974, -4.840717539517584, -4.815665714699117, -4.937635861879118, -4.887219819695687, -4.813729758415283, -4.82667464608015, -4.865176481168528, -4.885105289124561, -4.887072278243732]
fig, ax = plt.subplots()
plt.scatter(G_mag_values,G_cal_values)
ax.minorticks_on()
ax.grid(which='major', linestyle='-', linewidth='0.5')
ax.grid(which='minor', linestyle='-', linewidth='0.5')
fig.set_size_inches(10,7)
best_fit_Y_G = []
slope_G, intercept_G, r_value_G, p_value_G, std_err_G = stats.linregress(G_mag_values,G_cal_values)
for value_G in G_mag_values:
    best_fit_Y_G.append(intercept_G + slope_G*value_G)
plt.plot(G_mag_values, best_fit_Y_G, 'r', label = 'Best fit')
plt.title('M67 Calibration graph for G filter')
plt.xlabel('Real magnitude')
plt.ylabel('Measured magnitude')
plt.show()
curve_G = np.polyfit(G_mag_values,G_cal_values,1)
print('G filter polyfit line: slope {}; intercept = {}'.format(curve_G[0],curve_G[1]))
print('G filter linregress: slope {}; intercept = {}'.format(slope_G,intercept_G))
When I run this, it prints the slope and intercept values from best_fit_Y_G and curve_G, but it doesn't display the plot at all. Where am I going wrong?
I copied and pasted your code and ran it.
curve_G = np.polyfit(G_mag_values,G_cal_values,1)
That line gave me an error. Then I imported numpy as np and the problem was solved.
[output figure]
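For completeness, the fix is just the missing import at the top of the script; a minimal sketch:
import numpy as np                 # this import was missing
import matplotlib.pyplot as plt
from scipy import stats

# ... rest of the original script unchanged ...
curve_G = np.polyfit(G_mag_values, G_cal_values, 1)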

Normalize data for colormap

I am coloring a geopandas shape using two data arrays.
Here's my first array:
newI =
array([ -467, -415, -414, -1001, -246, -147, -523, -327, -583,
-541, -290, -415, -453, -505, -791, -812, -672, -558,
-559, -1055, -327, -703, -419, -499, -273, -574, -802,
-450, -743, -221, -1282, -704, -352, -734, -430, -353,
-515, -1121, -664, -586, -171, -881, -402, -1024, -543,
-527, -384, -775, -931, -1380, -1662, -1069, -952, -435,
-1051, -921, -1211, -794, -547, -313, -511, -993, -430,
-262, -255, -675, -793, -1053, -702, -967, -1016, -230,
-405, -869, -689, -935, -190, -1473, -883, -1233, -240,
-607, -339, -1130, -909, -836, -667, -457, -847, -538,
-606, -457, -800, -322, -1339, -691, -627, -689, -365,
-600, -289, -810, -577, -187, -375, -574, -426, -662,
-695, -1003, -40, -1012, -279, -966, -587, -641, -753,
-461, -563, -604, -1013, -625, -506, -416, -1385, -459,
-760, -347, -308, -555, -325, -1588, -566, -533, -843,
-501, -448, -1022, -654, -602, -1201, -814, -754, -361,
-325, -1141, -725, -256, -601, -379, -496, -1099, -1101,
-598, -442, -773, -295, -1292, -558, -1234, -868, -1135,
-251, -1398, -453, -563, -1306, -693, -560, -512, -935,
-1168, -482, -431, -1199, -1249, -1199, -413, -1018, -194,
-375, -932, -1028, -436, -955, -463, -1303, -676, -554,
-601, -875, -661, -791, -443, -89, -879, -606, -577,
-475, -802, -734, -660, -684, -174, -902, -1241, -1320,
-575, -855, -222, -890, -701, -1082, -531, -693, -1008,
-1357, -433, -379, -192, -343, -477, -230, -938, -675,
-798, -259, -398, -778, -484, -817, -453, -564, -536,
-1599, -968, -547, -845, -1592, -256, -1139, -229, -926,
-474, -392, -990, -295, -558, -465, -497, -395, -468,
-310, -507, -1205, -705, -739, -609, -809, -610, -421,
-1057, -2023, -1105, -618, -466, -1291, -616, -620, -571,
-904, -383, -544, -688, -461, -769, -990, -664, -405,
-419, -852, -435, -298, -782, -758, -371, -813, -421,
-594, -259, -284, -215, -452, -430, -936, -994, -981,
-502, -510, -671, -721, -829, -523, -288, -653, -493,
-983, -1205, -722])
And here's my second array:
array([-2407, -1992, -3400, -4826, -1544, -820, -3120, -1469, -2869,
-3622, -1738, -2122, -2773, -2939, -3558, -3575, -3082, -2494,
-3591, -5022, -1619, -2608, -3371, -3054, -1596, -2538, -3566,
-2035, -3490, -522, -5362, -3055, -1517, -4107, -2039, -2497,
-2302, -5513, -3876, -4303, -831, -4457, -2027, -5083, -2716,
-2284, -1288, -3781, -4707, -6903, -8592, -5763, -4644, -1999,
-4894, -3190, -6263, -3484, -3090, -1899, -2640, -3940, -2919,
-629, -2018, -4228, -4075, -5249, -2794, -4061, -4089, -1500,
-2434, -3867, -3359, -4070, -1472, -7334, -4367, -5422, -1563,
-3092, -1803, -4664, -4096, -3875, -3061, -1181, -4098, -2850,
-4356, -2239, -3102, -1498, -6458, -3495, -2863, -3568, -1752,
-3422, -1768, -3675, -2061, -919, -1452, -2512, -1924, -3668,
-3931, -4348, -284, -6232, -1065, -4261, -2739, -3392, -3962,
-2369, -2508, -3156, -4759, -3012, -3345, -2566, -7910, -2215,
-3581, -1357, -2155, -2643, -1420, -7449, -3023, -2982, -4913,
-2835, -1748, -4679, -2950, -2951, -5515, -4195, -3568, -1746,
-1437, -5429, -3246, -1556, -2635, -1534, -3553, -4451, -5655,
-2616, -2724, -4445, -1642, -6640, -2869, -5211, -5014, -4909,
-1103, -5658, -2096, -2427, -5719, -3152, -2717, -2544, -4226,
-4813, -2319, -2261, -4844, -5383, -5057, -2981, -5448, -1526,
-1749, -3550, -3736, -1893, -5812, -2686, -5923, -3145, -3569,
-2523, -4586, -2931, -4104, -2301, -666, -4402, -3201, -3171,
-2598, -4279, -3765, -3024, -3085, -468, -3732, -5899, -6464,
-3993, -4583, -1126, -4193, -4214, -3902, -2132, -3712, -4879,
-6907, -1524, -1987, -1444, -2086, -3229, -1316, -4331, -3150,
-4449, -1700, -1486, -3650, -2478, -4166, -2618, -3308, -2458,
-7441, -4452, -2438, -4722, -6949, -1712, -4727, -792, -4193,
-1610, -1951, -3965, -1410, -2958, -2167, -2050, -2035, -2152,
-2236, -3235, -5999, -4024, -3111, -3196, -3881, -2647, -2579,
-6387, -9912, -4677, -2983, -1913, -7547, -3166, -2990, -2183,
-3401, -2080, -3056, -2225, -2546, -4421, -3867, -2975, -1552,
-2090, -3871, -1768, -2032, -3564, -3273, -1579, -4338, -1371,
-3600, -1253, -2083, -1439, -2281, -2045, -4406, -4380, -4129,
-2520, -2529, -2108, -3081, -3561, -2601, -843, -3069, -1852,
-5888, -5730, -3386])
The code to plot those arrays is shown below.
area_gpd = gpd.read_file("....shp")
area_gpd['population'] = newI
plt.rcParams.update({'font.size':32})
west,south,east,north = area.unary_union.bounds
fig,ax = plt.subplots(figsize=(40,40))
cmap = LinearSegmentedColormap.from_list('mycmap', [ 'green','white'])
melbourne_gpd.plot(ax=ax, column='population',legend=False,cmap=cmap,zorder=3)
sm = plt.cm.ScalarMappable(cmap=cmap,
                           norm=plt.Normalize(vmin=-9912, vmax=-284))
It keeps normalizing the data, so the differences in intensity hardly show.
Is there a function to normalize this data the way I want?
I want the map to be darker where the value is larger. Can anyone give me some recommendations?
Thanks so much.
I found a nice solution for this in another answer on Stack Overflow:
import numpy as np   # numpy provides interp and masked_array (older code used scipy's sp.interp / sp.ma re-exports)
import matplotlib as mpl
import matplotlib.pyplot as plt

class MidpointNormalize(mpl.colors.Normalize):
    def __init__(self, vmin, vmax, midpoint=0, clip=False):
        self.midpoint = midpoint
        mpl.colors.Normalize.__init__(self, vmin, vmax, clip)

    def __call__(self, value, clip=None):
        # map [vmin, midpoint, vmax] onto [normalized_min, 0.5, normalized_max]
        normalized_min = max(0, 1 / 2 * (1 - abs((self.midpoint - self.vmin) / (self.midpoint - self.vmax))))
        normalized_max = min(1, 1 / 2 * (1 + abs((self.vmax - self.midpoint) / (self.midpoint - self.vmin))))
        normalized_mid = 0.5
        x, y = [self.vmin, self.midpoint, self.vmax], [normalized_min, normalized_mid, normalized_max]
        return np.ma.masked_array(np.interp(value, x, y))

vals = np.array([[-5., 0], [5, 10]])
vmin = -1225
vmax = 669
norm = MidpointNormalize(vmin=vmin, vmax=vmax, midpoint=0)
It will nicely do the job for your colormap.
Here is the link to where I found the solution: Colorplot that distinguishes between positive and negative values
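To see what the normalization does before wiring it into geopandas, here is a minimal sketch with plain matplotlib (imshow accepts the same norm= keyword; whether your geopandas version forwards norm to .plot() is an assumption you would have to check):
import numpy as np
import matplotlib.pyplot as plt

norm = MidpointNormalize(vmin=-1225, vmax=669, midpoint=0)
demo = np.array([[-5., 0.], [5., 10.]])     # the small test array from the snippet above
plt.imshow(demo, cmap='RdBu_r', norm=norm)  # values below and above the midpoint land on opposite halves of the colormap
plt.colorbar()
plt.show()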

Why curve_fit of scipy gives multiple regression lines on gene expression data?

"""
I'm trying to fit an exponential deacy function to a large gene expression dataset. I've spent lots of hours on stackoverflow and I found something that I'd like to try
Curve fit an exponential decay function in Python using given data points
But I'm getting multiple regression lines and I'm not sure why and how to fix that. Also I really would appreciate if you could suggest a better option or function.
Thank you in advance for helping me out.
"""
smallRNA = [1.4847814317391115,
1.233300788320302,
1.3482469618574953,
1.370572550987182,
1.2942438021815492,
1.8362649665550308,
1.9478952510177636,
1.7449833728367992,
1.133732460554052,
1.3172194208200945,
2.2366329568831316,
1.2651420206147634,
2.0615652798644457,
1.5188246709945887,
1.1466238867527543,
1.3408780599606556,
1.4771209497656745,
1.651149617228606,
1.6860351720116376,
1.2506453147790253,
1.4870430365838434,
1.524647241933158,
1.4547220043517506,
1.7128458331422909,
1.2111590790051767,
1.4833400674312536,
1.5011399997244916,
1.3913217014556971,
1.4161650806831194,
1.6373102731988107,
2.3951290051934726,
1.6848115128561434,
2.9188929470100784,
1.9035779757468991,
1.7306658643903157,
1.698255623968752,
1.2847578416257817,
2.2638689793456934,
1.5930676648572581,
1.679254296180541,
1.2719964106792359,
1.2037024877731124,
1.7079879931344195,
1.6544933181202117,
1.0485948565930532,
1.9978932787991883,
2.1584455179465056,
1.5855391880033107,
1.4131624973162846,
1.0491493504262577,
1.5440379870152185,
0.9634572526320548,
1.334006585966365,
1.7079879931344195,
1.3190420464570736,
1.7181179328838307,
1.6561191123457741,
1.6147186951451824,
1.6720869623098138,
1.6979813431306043,
1.3513988445861689,
1.09654186866835,
2.315102280947673,
1.651724518310352,
1.557770505403525,
1.6749162715346937,
2.830459562006103,
1.6607070287389956,
1.8426125062701317,
1.9938047370848269,
1.512871040315023,
1.4719534246949146,
1.9462952066313157,
1.3476999540804735,
1.552935620670088,
1.6963623896975115,
1.6051862355080382,
2.1400574327564597,
1.6455025236112542,
1.6099862265092482,
1.260871347310878,
1.4181056734587187,
1.602878345784571,
1.2470179140349023,
1.6386413699560323,
1.6153749530357946,
0.5972531564093516,
1.3845760323803993,
1.8707082507565052,
1.9446057842893594,
2.5911426074059265,
1.6216778595798695,
0.8267211148215126,
1.280072164005035,
2.1448277971531833,
1.5136542988762927,
1.3597890830112078,
1.1988223702026333,
1.485048887858052,
1.5852724352684031,
1.9208129532406653,
1.5292685023520693,
2.2402533306161008,
1.5809553088645034,
1.0324008985174453,
1.8372713254640423,
1.4196121857885755,
0.8993753827747732,
1.5813286847624104,
0.5972531564093516,
1.4514089682216447,
1.9534275512344357,
1.1590345756261624,
1.804573660514313,
0.9634572526320548,
1.188872137425364,
0.8796857765384195,
1.0875640778916444,
1.034092960428115,
1.6288585084680207,
2.3897534428254215,
1.75047276418574,
1.9774667460114315,
1.8513017159377254,
1.7340966607967765,
1.645240704520543,
1.5108664977177522,
1.4648163848908131,
2.1410823211778403,
1.7687196791762685,
1.6052523491007507,
2.675390064982142,
1.5070320893546723,
2.2705554362094054,
1.5778785887497413,
1.2719078004338622,
1.8033673563678885,
1.6084038766685629,
1.5577609032739919,
2.626402622679313,
1.368055381284621,
1.5096502028846615,
1.4691415136668815,
1.8571664694047927,
1.4357218027785223,
1.9861933433596186,
1.3559989421473164,
1.6960705653999977,
1.5797554562676404,
2.115884188384194,
1.6867706454133078,
1.526968779225678,
1.4058990171935448,
1.4992705571074072,
1.8738790555045006,
2.234660547275298,
1.849839119767255,
1.6956736498899025,
1.3843844598715376,
1.5522559968891674,
1.5989945747221277,
1.5668369355838003,
1.5051787003315549,
2.1660347974066982,
1.5701051075918644,
1.4417027546447603,
1.678262947477572,
1.8586497010487721,
1.7331317397634975,
2.184363648350726,
1.302107256488202,
1.7992548616068385,
1.7233369992526086,
2.013788196548857,
1.1118655144253595,
1.704118347478623,
1.098330571815151,
1.847087848386142,
1.1941744714226987,
2.223319853488265,
1.4970096850509844,
1.4482165159623221,
2.1289427671857952,
2.0193939201711064,
1.3704162307853769,
1.7787672437633182,
2.8434461654396217,
1.542418719275772,
1.3168345086654032,
1.6394693078981621,
1.7693003851568454,
1.8268796411139967,
1.7639346193000356,
1.3493655386282142,
1.7773817319879697,
0.9922918299081754,
1.2368583317807773,
1.636321610695747,
1.1519565606344542,
1.3065764144587242,
1.7440698169796038,
2.061929554443146,
1.3491116903528981,
1.6063853580404708,
2.10098612376305,
1.198169879301653,
1.4518215531821759,
0.7675283643313485,
0.9611096795514968,
2.124215900612652,
2.559775672893104,
1.7229719764638614,
1.6462651881380896,
1.843684956282479,
1.6311941249724367,
1.8157031826262484,
1.6158347871076075,
1.7408216031833423,
1.9934887015956644,
1.4499191247016987,
1.4534251582590787,
1.8941771885645704,
1.8228838742044629,
1.65265052648444,
1.5987070126638212,
1.7492374674300186,
1.5240334282452728,
1.451787927798003,
2.1244081460368367,
1.3664968183514556,
1.2587077236907418,
0.9634572526320548,
1.2931813736665314,
1.3748856243767726,
1.5491163587347603,
1.4115648465514437,
1.3978420086866388,
1.2332977966159,
1.344852878031716,
1.162517954982421,
1.1517456660514334,
1.716341874802147,
1.1958484823358815,
1.2544892396556067,
1.3063709067507911,
1.3413680728167385,
1.972490647444594,
2.2086387144887407,
1.109882319947273,
1.6147924084093643,
1.6682388199482967,
1.6916983477048313,
1.749454060523717,
1.5310321270003897,
1.5317066167115303,
1.8090237778630935,
1.7305465679927772,
1.3671638088116929,
1.1722510807146163,
1.326669744758649,
1.8474648005518322,
1.495649602488043,
1.7548840396870868,
1.7917594692280552,
1.5090723851424706,
1.3211543639244325,
1.4517146391746436,
1.639557958338597,
1.7943435003173827,
2.4502027948363927,
2.4079079798034706,
1.6700158510497638,
1.8695920379297737,
2.9903512612966665,
1.610484318675355,
1.4769844744372003,
1.2625974419262989,
1.4318932648446774,
2.045787584289522,
1.3535950298155,
1.584629122645107,
1.3492740003805157,
2.187673458149201,
1.4480468601975305,
1.3304008082847716,
2.3867159840087835,
1.4811867967797938,
1.6704033645129963,
1.377201264878756,
1.2299643405653156,
1.717234054622453,
1.2811699579251061,
2.300995121671421,
2.4856644882243546,
1.7744060018328025,
1.2932915858480336,
1.5513201167191744,
1.5011091737147157,
1.8078913880852043,
1.3006275650643129]
geneExpr = [1.3769050211772507,
3.7562462827448257,
2.6719763224779958,
1.1088030594011895,
3.146026602007864,
5.1314962447314505,
2.6605132571854435,
2.286485479964256,
1.9650113466570955,
1.2787138204750903,
2.7636641799765105,
4.5285096091180135,
2.055471332190772,
1.9978097053593409,
2.087677493805377,
0.6863704250688618,
2.0974613358210905,
2.2132254700071208,
2.48323841200138,
1.5039881301377562,
2.065007107942714,
2.049230428240303,
3.1503271044572427,
2.3342037488522647,
1.2879035090495468,
2.4580229092932604,
1.326194618426605,
1.467041043454956,
2.069229739358799,
0.9202774621803934,
1.913175975101608,
1.5289070141065204,
3.9252170136010194,
0.4793125314239097,
0.9274139930893066,
3.0955995956056253,
0.9350589706109779,
1.3667921114846069,
1.2438646855071578,
1.4746053869947549,
1.3828684829828457,
1.1102630744716409,
1.2884669631143617,
1.2019684569402267,
1.768584417233937,
3.3488851200048906,
1.3085841085041314,
1.949837840980232,
1.5458642815467911,
2.8734656865803703,
1.0097062308572082,
2.26417092115662,
0.5494502953108996,
2.852342629706449,
1.1610537731711952,
1.8628414163276428,
2.055877681980133,
2.031529704965473,
1.706727039705061,
5.562953440306615,
4.516805355870563,
1.7852720556314563,
2.1005987032703493,
2.0298194651259567,
4.067765793497416,
1.7752345258038185,
1.9350866537015463,
1.0961439126058516,
1.602554255421435,
2.291438448294199,
1.340880292313372,
2.408586242306745,
2.100697732173863,
0.77438404464467,
2.8981547413635966,
3.1998902982962987,
2.6960014653459727,
1.92074196528459,
1.8060268874721686,
0.7341812462712731,
4.040338021942181,
1.0206096517096142,
1.3877909962108417,
1.6341703208718996,
3.0749360871652254,
1.9089116792175367,
1.9474345491984089,
1.268331974681008,
0.8795809981676224,
1.9144190091281208,
0.1270802397390998,
4.24833595711353,
4.3669910031275565,
1.0416935096825009,
1.1940615718143244,
0.8558026237665811,
1.812046098646976,
1.311673507873615,
3.0093397849673607,
2.3649421461780626,
3.1504408119984464,
1.3703907267761202,
2.035010457899985,
1.97571648922404,
1.508716929296002,
0.9259976145453455,
5.087583944061218,
2.163327839574992,
5.164145580225468,
1.882381183129369,
2.1537323048551884,
1.4641679356399222,
1.2401260403911023,
1.8947336676344437,
1.87430000038156,
1.9324192128382656,
2.3637097605868256,
1.8153183897580145,
2.7034472776356075,
1.2548972291582945,
1.5302245388784623,
1.1170554826855297,
1.5474674616317405,
1.4778135582987868,
1.5177027250537067,
1.3100328840517919,
1.2022761604083556,
2.061201470946425,
1.4032400860384922,
1.6048491382684908,
1.213512354892811,
1.84219352998238,
0.7936446864683406,
2.2700301363565782,
3.003996024096597,
3.660411491521197,
2.195575305005744,
1.1541602033596847,
0.7881284979664975,
2.6849886985602893,
0.7206015440349863,
2.7252722206622386,
1.6010500579439904,
1.3923463784019967,
1.8243640168449449,
2.7880079433128633,
1.0659521525480418,
0.8876191860288731,
1.2036185828499717,
1.5163663935683656,
1.8751375205086147,
1.6593808016539568,
1.3450629536711138,
1.743046254509446,
2.383365679891565,
1.7202874287730594,
1.2505241744841202,
1.0648829013065944,
1.676612144448407,
2.5646208023284705,
2.9383803354532816,
0.6636096277744107,
1.980524826088373,
1.88716841838094,
1.4573506929257631,
2.8157783254949216,
2.3132066108617453,
3.5849957091245392,
1.3104222176649503,
5.005869186797619,
1.656583830703468,
1.4758563116669652,
3.1590106444683967,
1.5082266816326058,
1.3998383532758802,
1.84885674137052,
2.09998977154838,
3.78907586873952,
0.4662883394312476,
2.5388739765778756,
0.7611096254627837,
0.9830374176395882,
2.7322374977290544,
0.9792087840489858,
3.675105498156061,
0.8825732342313036,
3.1960685882274653,
5.259124190596204,
1.204476090855313,
1.3351614013975992,
1.3109417110191364,
2.100469817786245,
2.0151382362984953,
1.361158569885664,
1.0179610833986736,
0.8346346836719615,
1.9797759350043793,
3.2861656450393952,
2.334863099051601,
1.7086361720456942,
3.3297249235714377,
2.7703363951836004,
1.0897853295849105,
1.6964377929291818,
2.6302524565571352,
2.3217444361354165,
0.9778636256720471,
1.1641320302129619,
3.669759881539616,
1.8989776903183184,
1.8451729468670635,
2.284817884485841,
1.4322665246452273,
1.8028063251468571,
6.424313801576621,
1.3658464843728737,
3.6036740034305152,
2.4287401275948666,
1.9307836296113483,
2.3056947098759233,
1.7764027636098791,
1.1657155836935535,
3.0758095175369498,
2.1186188694505184,
1.0635003471332336,
0.6507112000963948,
1.7708163474637342,
2.2882337233824046,
1.3158185947256855,
1.8132723096072454,
1.6137762247932652,
1.108713242644792,
2.563727794452838,
2.6229335504139892,
1.63571914984831,
1.8453736442669983,
2.989346077449386,
0.6276060667250072,
1.0609944423522246,
2.15349991293981,
2.3320143470152193,
1.5118060325992106,
2.835346822718986,
1.5022536697497582,
0.8256556265785171,
2.7321833634584944,
2.526064328111412,
1.466756407403721,
1.7215191254561708,
1.0388635252424339,
1.715325294157353,
1.2076875587847855,
1.2362371642474348,
0.33394812674227087,
4.675284054378683,
2.389760294726976,
4.792628954075339,
2.1024724402368418,
0.8976139347165087,
0.6952074669183057,
2.709776326779585,
1.1764754694078634,
1.0073671761913532,
1.1187112550934273,
1.2764159114560854,
2.608374090493331,
2.27112424293213,
1.4332125797844384,
3.5283190803536235,
2.334990667936763,
2.285800451959592,
1.1186112799702084,
2.0399703968427336,
1.4570913440289304,
0.7014837257279548,
1.7852613553987886,
1.959023435165816,
1.5829877972968962,
1.4426312620833972,
1.496764100430386,
4.933175199545022,
4.060964038202633,
3.0709086261971392,
2.490130727772556,
3.964732991745056,
1.950304860608104,
3.60356269649804,
3.0678005050984707,
1.5710675762000597,
1.7144908152637672,
0.687838463709859,
4.599006382272776,
2.2675849365369922,
1.3590435271624708,
1.148793683412492,
2.2624825972759024,
0.9497618693732038,
1.6619770001458867,
1.53102207591445,
5.821619725812528]
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from scipy.optimize import curve_fit

df_sample = pd.DataFrame(list(zip(smallRNA, geneExpr)), columns=['smallRNA', 'geneExpr'])
This is the function
def func(t, a, tau, c):
    return a * np.exp(-t / tau) + c
Applying the function to my data
t = np.array(df_sample['smallRNA'])
y = np.array(df_sample['geneExpr'])
t_norm = (t - t[0])/(t[-1] - t[0]) # normalized
c_0 = y[-1]
tau_0 = 1
a_0 = (y[0] - y[-1])
popt, pcov = curve_fit(func, t_norm, y, p0=(a_0, tau_0, c_0))
a, tau, c = popt
y_fit = func(t_norm, a, tau, c)
plt.plot(t, y, 'b.')
plt.plot(t, y_fit, 'r-')
plt.show()
You just need to sort your data: matplotlib connects points in the order they are given, so with an unsorted x array the fitted curve is drawn back and forth across the plot and looks like multiple regression lines.
t = np.array(df_sample['smallRNA'])
y = np.array(df_sample['geneExpr'])
t_norm = (t - t[0])/(t[-1] - t[0]) # normalized
c_0 = y[-1]
tau_0 = 1
a_0 = (y[0] - y[-1])
popt, pcov = curve_fit(func, t_norm, y, p0=(a_0, tau_0, c_0))
a, tau, c = popt
y_fit = func(t_norm, a, tau, c)
plt.plot(t_norm, y, 'b.')
idx = np.argsort(t_norm) # sort the data
plt.plot(t_norm[idx], y_fit[idx], 'r-') # plot sorted fit
plt.show()
Although it does not seem like there is any structure there...
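An equivalent approach, not from the original answer but assuming standard pandas, is to sort the DataFrame once before fitting, so everything downstream is already in x order:
df_sorted = df_sample.sort_values('smallRNA').reset_index(drop=True)
t = df_sorted['smallRNA'].to_numpy()
y = df_sorted['geneExpr'].to_numpy()
t_norm = (t - t.min()) / (t.max() - t.min())          # after sorting, first/last are min/max
popt, pcov = curve_fit(func, t_norm, y, p0=(y[0] - y[-1], 1, y[-1]))
plt.plot(t_norm, y, 'b.')
plt.plot(t_norm, func(t_norm, *popt), 'r-')           # already in order, no argsort needed
plt.show()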

How to plot two or more overlapping 3-D Gaussian surfaces in the same graph in Python?

How can I plot two or more overlapping Gaussian surfaces in the same graph, as below?
This is the code I have written, but the first surface is being covered by the second one. They overlap, but I want them to be displayed transparently.
Result obtained: https://i.stack.imgur.com/5LSsW.png
Code: https://pastebin.com/embed_iframe/ms8cngXm
import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D

def loaddata(filename, label):
    file = open(filename, 'r')
    text = file.read()
    text = text.split('\n')
    file.close()
    dataset = list()
    for line in text:
        if len(line) > 0:
            value = line.split()
            dataset.append([float(value[0]), float(value[1]), label])
    return dataset

def multivariate_gaussian(pos, mu, Sigma):
    n = mu.shape[0]
    Sigma_det = np.linalg.det(Sigma)
    Sigma_inv = np.linalg.inv(Sigma)
    N = np.sqrt((2*np.pi)**n * Sigma_det)
    fac = np.einsum('...k,kl,...l->...', pos-mu, Sigma_inv, pos-mu)
    return np.exp(-fac / 2) / N
# this is just the colormap data, ignore it
_viridis_data = [[0.267004, 0.004874, 0.329415],
[0.268510, 0.009605, 0.335427],
[0.269944, 0.014625, 0.341379],
[0.271305, 0.019942, 0.347269],
[0.272594, 0.025563, 0.353093],
[0.273809, 0.031497, 0.358853],
[0.274952, 0.037752, 0.364543],
[0.276022, 0.044167, 0.370164],
[0.277018, 0.050344, 0.375715],
[0.277941, 0.056324, 0.381191],
[0.278791, 0.062145, 0.386592],
[0.279566, 0.067836, 0.391917],
[0.280267, 0.073417, 0.397163],
[0.280894, 0.078907, 0.402329],
[0.281446, 0.084320, 0.407414],
[0.281924, 0.089666, 0.412415],
[0.282327, 0.094955, 0.417331],
[0.282656, 0.100196, 0.422160],
[0.282910, 0.105393, 0.426902],
[0.283091, 0.110553, 0.431554],
[0.283197, 0.115680, 0.436115],
[0.283229, 0.120777, 0.440584],
[0.283187, 0.125848, 0.444960],
[0.283072, 0.130895, 0.449241],
[0.282884, 0.135920, 0.453427],
[0.282623, 0.140926, 0.457517],
[0.282290, 0.145912, 0.461510],
[0.281887, 0.150881, 0.465405],
[0.281412, 0.155834, 0.469201],
[0.280868, 0.160771, 0.472899],
[0.280255, 0.165693, 0.476498],
[0.279574, 0.170599, 0.479997],
[0.278826, 0.175490, 0.483397],
[0.278012, 0.180367, 0.486697],
[0.277134, 0.185228, 0.489898],
[0.276194, 0.190074, 0.493001],
[0.275191, 0.194905, 0.496005],
[0.274128, 0.199721, 0.498911],
[0.273006, 0.204520, 0.501721],
[0.271828, 0.209303, 0.504434],
[0.270595, 0.214069, 0.507052],
[0.269308, 0.218818, 0.509577],
[0.267968, 0.223549, 0.512008],
[0.266580, 0.228262, 0.514349],
[0.265145, 0.232956, 0.516599],
[0.263663, 0.237631, 0.518762],
[0.262138, 0.242286, 0.520837],
[0.260571, 0.246922, 0.522828],
[0.258965, 0.251537, 0.524736],
[0.257322, 0.256130, 0.526563],
[0.255645, 0.260703, 0.528312],
[0.253935, 0.265254, 0.529983],
[0.252194, 0.269783, 0.531579],
[0.250425, 0.274290, 0.533103],
[0.248629, 0.278775, 0.534556],
[0.246811, 0.283237, 0.535941],
[0.244972, 0.287675, 0.537260],
[0.243113, 0.292092, 0.538516],
[0.241237, 0.296485, 0.539709],
[0.239346, 0.300855, 0.540844],
[0.237441, 0.305202, 0.541921],
[0.235526, 0.309527, 0.542944],
[0.233603, 0.313828, 0.543914],
[0.231674, 0.318106, 0.544834],
[0.229739, 0.322361, 0.545706],
[0.227802, 0.326594, 0.546532],
[0.225863, 0.330805, 0.547314],
[0.223925, 0.334994, 0.548053],
[0.221989, 0.339161, 0.548752],
[0.220057, 0.343307, 0.549413],
[0.218130, 0.347432, 0.550038],
[0.216210, 0.351535, 0.550627],
[0.214298, 0.355619, 0.551184],
[0.212395, 0.359683, 0.551710],
[0.210503, 0.363727, 0.552206],
[0.208623, 0.367752, 0.552675],
[0.206756, 0.371758, 0.553117],
[0.204903, 0.375746, 0.553533],
[0.203063, 0.379716, 0.553925],
[0.201239, 0.383670, 0.554294],
[0.199430, 0.387607, 0.554642],
[0.197636, 0.391528, 0.554969],
[0.195860, 0.395433, 0.555276],
[0.194100, 0.399323, 0.555565],
[0.192357, 0.403199, 0.555836],
[0.190631, 0.407061, 0.556089],
[0.188923, 0.410910, 0.556326],
[0.187231, 0.414746, 0.556547],
[0.185556, 0.418570, 0.556753],
[0.183898, 0.422383, 0.556944],
[0.182256, 0.426184, 0.557120],
[0.180629, 0.429975, 0.557282],
[0.179019, 0.433756, 0.557430],
[0.177423, 0.437527, 0.557565],
[0.175841, 0.441290, 0.557685],
[0.174274, 0.445044, 0.557792],
[0.172719, 0.448791, 0.557885],
[0.171176, 0.452530, 0.557965],
[0.169646, 0.456262, 0.558030],
[0.168126, 0.459988, 0.558082],
[0.166617, 0.463708, 0.558119],
[0.165117, 0.467423, 0.558141],
[0.163625, 0.471133, 0.558148],
[0.162142, 0.474838, 0.558140],
[0.160665, 0.478540, 0.558115],
[0.159194, 0.482237, 0.558073],
[0.157729, 0.485932, 0.558013],
[0.156270, 0.489624, 0.557936],
[0.154815, 0.493313, 0.557840],
[0.153364, 0.497000, 0.557724],
[0.151918, 0.500685, 0.557587],
[0.150476, 0.504369, 0.557430],
[0.149039, 0.508051, 0.557250],
[0.147607, 0.511733, 0.557049],
[0.146180, 0.515413, 0.556823],
[0.144759, 0.519093, 0.556572],
[0.143343, 0.522773, 0.556295],
[0.141935, 0.526453, 0.555991],
[0.140536, 0.530132, 0.555659],
[0.139147, 0.533812, 0.555298],
[0.137770, 0.537492, 0.554906],
[0.136408, 0.541173, 0.554483],
[0.135066, 0.544853, 0.554029],
[0.133743, 0.548535, 0.553541],
[0.132444, 0.552216, 0.553018],
[0.131172, 0.555899, 0.552459],
[0.129933, 0.559582, 0.551864],
[0.128729, 0.563265, 0.551229],
[0.127568, 0.566949, 0.550556],
[0.126453, 0.570633, 0.549841],
[0.125394, 0.574318, 0.549086],
[0.124395, 0.578002, 0.548287],
[0.123463, 0.581687, 0.547445],
[0.122606, 0.585371, 0.546557],
[0.121831, 0.589055, 0.545623],
[0.121148, 0.592739, 0.544641],
[0.120565, 0.596422, 0.543611],
[0.120092, 0.600104, 0.542530],
[0.119738, 0.603785, 0.541400],
[0.119512, 0.607464, 0.540218],
[0.119423, 0.611141, 0.538982],
[0.119483, 0.614817, 0.537692],
[0.119699, 0.618490, 0.536347],
[0.120081, 0.622161, 0.534946],
[0.120638, 0.625828, 0.533488],
[0.121380, 0.629492, 0.531973],
[0.122312, 0.633153, 0.530398],
[0.123444, 0.636809, 0.528763],
[0.124780, 0.640461, 0.527068],
[0.126326, 0.644107, 0.525311],
[0.128087, 0.647749, 0.523491],
[0.130067, 0.651384, 0.521608],
[0.132268, 0.655014, 0.519661],
[0.134692, 0.658636, 0.517649],
[0.137339, 0.662252, 0.515571],
[0.140210, 0.665859, 0.513427],
[0.143303, 0.669459, 0.511215],
[0.146616, 0.673050, 0.508936],
[0.150148, 0.676631, 0.506589],
[0.153894, 0.680203, 0.504172],
[0.157851, 0.683765, 0.501686],
[0.162016, 0.687316, 0.499129],
[0.166383, 0.690856, 0.496502],
[0.170948, 0.694384, 0.493803],
[0.175707, 0.697900, 0.491033],
[0.180653, 0.701402, 0.488189],
[0.185783, 0.704891, 0.485273],
[0.191090, 0.708366, 0.482284],
[0.196571, 0.711827, 0.479221],
[0.202219, 0.715272, 0.476084],
[0.208030, 0.718701, 0.472873],
[0.214000, 0.722114, 0.469588],
[0.220124, 0.725509, 0.466226],
[0.226397, 0.728888, 0.462789],
[0.232815, 0.732247, 0.459277],
[0.239374, 0.735588, 0.455688],
[0.246070, 0.738910, 0.452024],
[0.252899, 0.742211, 0.448284],
[0.259857, 0.745492, 0.444467],
[0.266941, 0.748751, 0.440573],
[0.274149, 0.751988, 0.436601],
[0.281477, 0.755203, 0.432552],
[0.288921, 0.758394, 0.428426],
[0.296479, 0.761561, 0.424223],
[0.304148, 0.764704, 0.419943],
[0.311925, 0.767822, 0.415586],
[0.319809, 0.770914, 0.411152],
[0.327796, 0.773980, 0.406640],
[0.335885, 0.777018, 0.402049],
[0.344074, 0.780029, 0.397381],
[0.352360, 0.783011, 0.392636],
[0.360741, 0.785964, 0.387814],
[0.369214, 0.788888, 0.382914],
[0.377779, 0.791781, 0.377939],
[0.386433, 0.794644, 0.372886],
[0.395174, 0.797475, 0.367757],
[0.404001, 0.800275, 0.362552],
[0.412913, 0.803041, 0.357269],
[0.421908, 0.805774, 0.351910],
[0.430983, 0.808473, 0.346476],
[0.440137, 0.811138, 0.340967],
[0.449368, 0.813768, 0.335384],
[0.458674, 0.816363, 0.329727],
[0.468053, 0.818921, 0.323998],
[0.477504, 0.821444, 0.318195],
[0.487026, 0.823929, 0.312321],
[0.496615, 0.826376, 0.306377],
[0.506271, 0.828786, 0.300362],
[0.515992, 0.831158, 0.294279],
[0.525776, 0.833491, 0.288127],
[0.535621, 0.835785, 0.281908],
[0.545524, 0.838039, 0.275626],
[0.555484, 0.840254, 0.269281],
[0.565498, 0.842430, 0.262877],
[0.575563, 0.844566, 0.256415],
[0.585678, 0.846661, 0.249897],
[0.595839, 0.848717, 0.243329],
[0.606045, 0.850733, 0.236712],
[0.616293, 0.852709, 0.230052],
[0.626579, 0.854645, 0.223353],
[0.636902, 0.856542, 0.216620],
[0.647257, 0.858400, 0.209861],
[0.657642, 0.860219, 0.203082],
[0.668054, 0.861999, 0.196293],
[0.678489, 0.863742, 0.189503],
[0.688944, 0.865448, 0.182725],
[0.699415, 0.867117, 0.175971],
[0.709898, 0.868751, 0.169257],
[0.720391, 0.870350, 0.162603],
[0.730889, 0.871916, 0.156029],
[0.741388, 0.873449, 0.149561],
[0.751884, 0.874951, 0.143228],
[0.762373, 0.876424, 0.137064],
[0.772852, 0.877868, 0.131109],
[0.783315, 0.879285, 0.125405],
[0.793760, 0.880678, 0.120005],
[0.804182, 0.882046, 0.114965],
[0.814576, 0.883393, 0.110347],
[0.824940, 0.884720, 0.106217],
[0.835270, 0.886029, 0.102646],
[0.845561, 0.887322, 0.099702],
[0.855810, 0.888601, 0.097452],
[0.866013, 0.889868, 0.095953],
[0.876168, 0.891125, 0.095250],
[0.886271, 0.892374, 0.095374],
[0.896320, 0.893616, 0.096335],
[0.906311, 0.894855, 0.098125],
[0.916242, 0.896091, 0.100717],
[0.926106, 0.897330, 0.104071],
[0.935904, 0.898570, 0.108131],
[0.945636, 0.899815, 0.112838],
[0.955300, 0.901065, 0.118128],
[0.964894, 0.902323, 0.123941],
[0.974417, 0.903590, 0.130215],
[0.983868, 0.904867, 0.136897],
[0.993248, 0.906157, 0.143936]]
from matplotlib.colors import ListedColormap
viridis = ListedColormap(_viridis_data, name='viridis')
plt.register_cmap(name='viridis', cmap=viridis)
plt.set_cmap(viridis)
filename=r"C:/Users/santhoskumar/Desktop/random/pattern/class1_rw.txt"
label=0
dataset1= loaddata(filename,label)
print('Loaded data file {0} with {1} rows'.format(filename, len(dataset1)))
filename = r"C:/Users/santhoskumar/Desktop/random/pattern/class2_rw.txt"
label=1
dataset2 = loaddata(filename,label)
print('Loaded data file {0} with {1} rows'.format(filename, len(dataset2)))
filename = r'C:/Users/santhoskumar/Desktop/random/pattern/class3_rw.txt'
label=2
dataset3 = loaddata(filename,label)
print('Loaded data file {0} with {1} rows'.format(filename, len(dataset3)))
N = 600
X = np.linspace(200, 800, N)
Y = np.linspace(300, 1200, N)
X, Y = np.meshgrid(X, Y)
dataset=np.array(dataset1)
x,y,label=dataset.T
dat=x,y
dat=np.array(dat)
cov=np.cov(dat)
mu=np.mean(dat,axis=1)
print(mu)
# Pack X and Y into a single 3-dimensional array
pos = np.empty(X.shape + (2,))
pos[:, :, 0] = X
pos[:, :, 1] = Y
# The distribution on the variables X, Y packed into pos.
Z = multivariate_gaussian(pos, mu, cov)
minn=1e-15
for i in range(len(Z)):
    for j in range(len(Z[i])):
        Z[i][j] *= 1e4
fig = plt.figure()
ax = fig.gca(projection='3d')
ax1=fig.gca(projection='3d')
ax.plot_surface(X, Y, Z,rstride=30,cstride=30, linewidth=1,antialiased=True,cmap=viridis)
cset = ax.contourf(X, Y, Z,zdir='z',offset=-0.4,cmap=viridis)
ax.set_zlim(-0.4,0.40)
ax.set_zticks(np.linspace(0,0.40,5))
ax.view_init(27, -21)
dataset=np.array(dataset2)
x,y,label=dataset.T
dat=x,y
dat=np.array(dat)
cov=np.cov(dat)
mu=np.mean(dat,axis=1)
print(mu)
# Pack X and Y into a single 3-dimensional array
pos = np.empty(X.shape + (2,))
pos[:, :, 0] = X
pos[:, :, 1] = Y
# The distribution on the variables X, Y packed into pos.
Z = multivariate_gaussian(pos, mu, cov)
minn=1e-15
for i in range(len(Z)):
    for j in range(len(Z[i])):
        Z[i][j] *= 1e4
ax.plot_surface(X, Y, Z,rstride=20,cstride=20, linewidth=1,antialiased=True,color='red',cmap=viridis)
cset1 = ax.contourf(X, Y, Z,zdir='z',offset=-0.4,cmap=viridis)
ax.set_zlim(-0.4,0.40)
ax.set_zticks(np.linspace(0,0.40,5))
ax.view_init(27, -21)
plt.subplots_adjust(hspace=0.5)
plt.show()
If I understand your question correctly, you just have to call the plotting method multiple times, for example:
fig = plt.figure()
ax = fig.gca(projection='3d')
ax.plot_surface(x1, y1, z1,cmap='viridis',linewidth=0)
ax.plot_surface(x2, y2, z2,cmap='viridis',linewidth=0)
ax.set_xlabel('X axis')
ax.set_ylabel('Y axis')
ax.set_zlabel('Z axis')
plt.show()
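Since the question specifically asks for the surfaces to show through each other, it can also help to pass matplotlib's standard alpha keyword to each plot_surface call. A small sketch with the same placeholder arrays as above (the 0.5 value is just an illustrative choice):
fig = plt.figure()
ax = fig.add_subplot(projection='3d')
ax.plot_surface(x1, y1, z1, cmap='viridis', linewidth=0, alpha=0.5)  # semi-transparent
ax.plot_surface(x2, y2, z2, cmap='viridis', linewidth=0, alpha=0.5)  # second surface shows through
plt.show()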

guess function when using built-in defined models in lmfit

I am having a problem with the guess function of lmfit. I am trying to fit some experimental data and I want to use different built-in models of lmfit, but I cannot get the built-in models to run; it only works if I define the function directly.
The following code does not work, but if I comment out the guess call it works.
P.S. I would prefer the x data to be the DataFrame index, because I will put this in a loop that reuses the same first column for every data file, so each new second column of data can be added as a new column of the DataFrame.
# -*- coding: utf-8 -*-
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from lmfit import Model
from lmfit.models import GaussianModel
minvalue = 3.25
maxvalue = 3.45
rawdata = pd.read_csv('datafile.txt', delim_whitespace = True, names=['XX','YY'])
#Section the data
data = rawdata[(rawdata['XX']>minvalue) & (rawdata['XX'] < maxvalue)]
#Create a DataFrame with the data
dataDataframe = pd.DataFrame()
dataDataframe[0] = data['YY']
dataDataframe = dataDataframe.set_index(data['XX'])
# Gaussian curve
def gaussian(x, amp, cen, wid):
    "1-d gaussian: gaussian(x, amp, cen, wid)"
    return (amp/(np.sqrt(2*np.pi)*wid)) * np.exp(-(x-cen)**2 /(2*wid**2))
result_gaussian = Model(gaussian).fit(dataDataframe[0], x=dataDataframe.index.values, amp=5, cen=5, wid=1)
mod = GaussianModel()
pars = mod.guess(dataDataframe[0], x = np.float32(dataDataframe.index.values))
out = mod.fit(dataDataframe[0], pars , x = np.float32(dataDataframe.index.values))
plt.plot(dataDataframe.index.values, dataDataframe[0],'bo')
plt.plot(dataDataframe.index.values, result_gaussian.best_fit, 'r-', label = 'Gaussian')
plt.plot(dataDataframe.index.values, out.best_fit, 'b-', label = 'Gaussian2')
plt.legend()
plt.show()
Error message I get if I uncomment the built-in model lines:
File "/Users/johndoe/anaconda2/lib/python2.7/site-packages/lmfit/models.py", line 52, in guess_from_peak
cen = x[imaxy]
IndexError: only integers, slices (`:`), ellipsis (`...`), numpy.newaxis (`None`) and integer or boolean arrays are valid indices
I have tried running guess_from_peak from models.py on its own and did not have a problem; it returned an integer.
Raw data:
1.1661899e+000 7.3414581e+002
1.1730889e+000 7.4060590e+002
1.1799880e+000 7.3778076e+002
1.1868871e+000 7.2950366e+002
1.1937861e+000 7.0154932e+002
1.2006853e+000 7.0399518e+002
1.2075844e+000 7.3814081e+002
1.2144834e+000 7.5750049e+002
1.2213825e+000 7.6613043e+002
1.2282816e+000 7.4348322e+002
1.2351807e+000 7.2836584e+002
1.2420797e+000 7.0964618e+002
1.2489789e+000 7.1938611e+002
1.2558780e+000 7.0620062e+002
1.2627770e+000 7.2354883e+002
1.2696761e+000 7.1347961e+002
1.2765752e+000 7.1027679e+002
1.2834742e+000 7.4422925e+002
1.2903733e+000 7.5596112e+002
1.2972724e+000 7.2770599e+002
1.3041714e+000 7.2000342e+002
1.3110706e+000 7.4451556e+002
1.3179697e+000 7.4411346e+002
1.3248687e+000 6.9408307e+002
1.3317678e+000 6.8662170e+002
1.3386669e+000 7.0951758e+002
1.3455659e+000 6.7616663e+002
1.3524650e+000 6.7230786e+002
1.3593642e+000 7.1053870e+002
1.3662632e+000 7.2593860e+002
1.3731623e+000 7.1484381e+002
1.3800614e+000 7.3073920e+002
1.3869605e+000 7.2766406e+002
1.3938595e+000 7.1958862e+002
1.4007586e+000 7.0147577e+002
1.4076577e+000 6.9747528e+002
1.4145567e+000 6.9634515e+002
1.4214559e+000 6.6082648e+002
1.4283550e+000 6.4877466e+002
1.4352540e+000 6.6942896e+002
1.4421531e+000 6.8172211e+002
1.4490522e+000 6.5540350e+002
1.4559512e+000 6.4846545e+002
1.4628503e+000 6.6383038e+002
1.4697495e+000 6.4449670e+002
1.4766484e+000 6.3950043e+002
1.4835476e+000 6.4479529e+002
1.4904467e+000 6.4849249e+002
1.4973457e+000 6.4100800e+002
1.5042448e+000 6.6731049e+002
1.5111439e+000 6.8118671e+002
1.5180429e+000 6.5618878e+002
1.5249420e+000 6.3446680e+002
1.5318412e+000 6.3301892e+002
1.5387402e+000 6.5466571e+002
1.5456393e+000 6.5982983e+002
1.5525384e+000 6.3588879e+002
1.5594375e+000 6.1257922e+002
1.5663365e+000 6.2805811e+002
1.5732356e+000 6.1877094e+002
1.5801347e+000 6.0427368e+002
1.5870337e+000 6.3391718e+002
1.5939329e+000 6.4173145e+002
1.6008320e+000 6.2423242e+002
1.6077310e+000 6.0993829e+002
1.6146301e+000 6.0605164e+002
1.6215292e+000 6.2812646e+002
1.6284282e+000 6.4028595e+002
1.6353273e+000 6.2281421e+002
1.6422265e+000 6.0742285e+002
1.6491255e+000 5.9783905e+002
1.6560246e+000 5.8637256e+002
1.6629237e+000 6.0021320e+002
1.6698227e+000 6.1169287e+002
1.6767218e+000 6.1003906e+002
1.6836209e+000 5.9548285e+002
1.6905199e+000 5.8961163e+002
1.6974190e+000 5.9599597e+002
1.7043182e+000 5.9016595e+002
1.7112173e+000 5.7669794e+002
1.7181163e+000 5.6394800e+002
1.7250154e+000 5.5043781e+002
1.7319145e+000 5.6813892e+002
1.7388135e+000 5.8987500e+002
1.7457126e+000 5.9018683e+002
1.7526118e+000 5.8595575e+002
1.7595108e+000 5.8304041e+002
1.7664099e+000 5.9360785e+002
1.7733090e+000 5.9706018e+002
1.7802080e+000 5.7838733e+002
1.7871071e+000 5.7011194e+002
1.7940062e+000 5.8080725e+002
1.8009052e+000 5.7853046e+002
1.8078043e+000 5.7998969e+002
1.8147035e+000 5.4928967e+002
1.8216025e+000 5.2888440e+002
1.8285016e+000 5.4854303e+002
1.8354007e+000 5.5585767e+002
1.8422997e+000 5.5588806e+002
1.8491988e+000 5.5359229e+002
1.8560979e+000 5.5033203e+002
1.8629971e+000 5.2563916e+002
1.8698961e+000 5.3607788e+002
1.8767952e+000 5.7113812e+002
1.8836943e+000 5.5775525e+002
1.8905933e+000 5.2081384e+002
1.8974924e+000 5.1039877e+002
1.9043915e+000 5.3863855e+002
1.9112905e+000 5.6284332e+002
1.9181896e+000 5.5691626e+002
1.9250888e+000 5.3292615e+002
1.9319878e+000 5.4550836e+002
1.9388869e+000 5.6732916e+002
1.9457860e+000 5.4372571e+002
1.9526850e+000 5.1244263e+002
1.9595841e+000 5.1212933e+002
1.9664832e+000 5.1553162e+002
1.9733822e+000 5.2064484e+002
1.9802814e+000 5.3102246e+002
1.9871805e+000 5.2069739e+002
1.9940795e+000 5.0833780e+002
2.0009787e+000 5.1853204e+002
2.0078776e+000 5.2843738e+002
2.0147767e+000 5.2046942e+002
2.0216758e+000 5.4993433e+002
2.0285749e+000 5.4103894e+002
2.0354741e+000 5.0149301e+002
2.0423732e+000 5.0521149e+002
2.0492721e+000 5.2875800e+002
2.0561712e+000 5.1962280e+002
2.0630703e+000 4.9481357e+002
2.0699694e+000 4.9459094e+002
2.0768685e+000 4.9837778e+002
2.0837677e+000 4.9990302e+002
2.0906668e+000 4.9616635e+002
2.0975657e+000 4.9398682e+002
2.1044648e+000 4.9411301e+002
2.1113639e+000 5.0085464e+002
2.1182630e+000 5.1741498e+002
2.1251621e+000 5.1049081e+002
2.1320612e+000 4.9854333e+002
2.1389601e+000 4.9250342e+002
2.1458592e+000 4.8195938e+002
2.1527584e+000 4.9623288e+002
2.1596575e+000 5.0226831e+002
2.1665566e+000 5.1108215e+002
2.1734557e+000 5.0001602e+002
2.1803546e+000 4.8078720e+002
2.1872537e+000 4.9371985e+002
2.1941528e+000 4.9578796e+002
2.2010520e+000 5.0061276e+002
2.2079511e+000 4.9850949e+002
2.2148502e+000 4.9680969e+002
2.2217491e+000 5.0683179e+002
2.2286482e+000 5.0175012e+002
2.2355473e+000 4.8996030e+002
2.2424464e+000 4.8759747e+002
2.2493455e+000 4.7695905e+002
2.2562447e+000 4.7682187e+002
2.2631438e+000 4.8609653e+002
2.2700427e+000 4.8575693e+002
2.2769418e+000 4.9476901e+002
2.2838409e+000 4.8241449e+002
2.2907400e+000 4.7581494e+002
2.2976391e+000 5.0079959e+002
2.3045382e+000 5.0975296e+002
2.3114371e+000 4.9256650e+002
2.3183362e+000 4.8954599e+002
2.3252354e+000 4.9478619e+002
2.3321345e+000 5.1234747e+002
2.3390336e+000 5.4276178e+002
2.3459327e+000 5.4188184e+002
2.3528316e+000 5.4555566e+002
2.3597307e+000 5.4856274e+002
2.3666298e+000 5.2246918e+002
2.3735290e+000 4.9281882e+002
2.3804281e+000 4.8422125e+002
2.3873272e+000 5.0562274e+002
2.3942261e+000 5.0024243e+002
2.4011252e+000 4.8827591e+002
2.4080243e+000 4.8137762e+002
2.4149234e+000 4.7244000e+002
2.4218225e+000 4.7699164e+002
2.4287217e+000 4.7515668e+002
2.4356208e+000 4.6413528e+002
2.4425197e+000 4.6328885e+002
2.4494188e+000 4.6013199e+002
2.4563179e+000 4.6177853e+002
2.4632170e+000 4.5766202e+002
2.4701161e+000 4.4741263e+002
2.4770153e+000 4.4859024e+002
2.4839141e+000 4.6913116e+002
2.4908133e+000 5.0019971e+002
2.4977124e+000 4.8486560e+002
2.5046115e+000 4.6070554e+002
2.5115106e+000 4.3163672e+002
2.5184097e+000 4.4147137e+002
2.5253086e+000 4.3510056e+002
2.5322077e+000 4.4211298e+002
2.5391068e+000 4.6599957e+002
2.5460060e+000 4.5878577e+002
2.5529051e+000 4.4981293e+002
2.5598042e+000 4.6061084e+002
2.5667033e+000 4.6963638e+002
2.5736022e+000 4.7663760e+002
2.5805013e+000 4.6380307e+002
2.5874004e+000 4.5866577e+002
2.5942996e+000 4.5507098e+002
2.6011987e+000 4.4790939e+002
2.6080978e+000 4.6447559e+002
2.6149967e+000 4.5061194e+002
2.6218958e+000 4.2355850e+002
2.6287949e+000 4.2002722e+002
2.6356940e+000 4.2429697e+002
2.6425931e+000 4.2280334e+002
2.6494923e+000 4.3304733e+002
2.6563911e+000 4.5999661e+002
2.6632903e+000 4.7144125e+002
2.6701894e+000 4.6819211e+002
2.6770885e+000 4.6265125e+002
2.6839876e+000 4.6332251e+002
2.6908867e+000 4.5123907e+002
2.6977856e+000 4.6259286e+002
2.7046847e+000 4.6975299e+002
2.7115839e+000 4.4647833e+002
2.7184830e+000 4.4722562e+002
2.7253821e+000 4.6617062e+002
2.7322812e+000 4.6656949e+002
2.7391803e+000 4.4081876e+002
2.7460792e+000 4.5200452e+002
2.7529783e+000 4.5094382e+002
2.7598774e+000 4.4421115e+002
2.7667766e+000 4.5470145e+002
2.7736757e+000 4.5202261e+002
2.7805748e+000 4.4788058e+002
2.7874737e+000 4.3493640e+002
2.7943728e+000 4.4102286e+002
2.8012719e+000 4.3156961e+002
2.8081710e+000 4.2983533e+002
2.8150702e+000 4.4627554e+002
2.8219693e+000 4.4581104e+002
2.8288682e+000 4.2150226e+002
2.8357673e+000 4.1737479e+002
2.8426664e+000 4.5602731e+002
2.8495655e+000 4.6227423e+002
2.8564646e+000 4.5953806e+002
2.8633637e+000 4.5829834e+002
2.8702629e+000 4.5450616e+002
2.8771617e+000 4.5531360e+002
2.8840609e+000 4.4464761e+002
2.8909600e+000 4.6128970e+002
2.8978591e+000 4.4664514e+002
2.9047582e+000 4.4719708e+002
2.9116573e+000 4.4492749e+002
2.9185562e+000 4.4260013e+002
2.9254553e+000 4.5593594e+002
2.9323545e+000 4.6237164e+002
2.9392536e+000 4.7034845e+002
2.9461527e+000 4.7368185e+002
2.9530518e+000 4.7302234e+002
2.9599507e+000 4.7327332e+002
2.9668498e+000 4.4960791e+002
2.9737489e+000 4.4319986e+002
2.9806480e+000 4.5416092e+002
2.9875472e+000 4.6674429e+002
2.9944463e+000 4.6089871e+002
3.0013452e+000 4.6334650e+002
3.0082443e+000 4.6833719e+002
3.0151434e+000 4.8842966e+002
3.0220425e+000 4.8455182e+002
3.0289416e+000 4.6504678e+002
3.0358407e+000 4.6673508e+002
3.0427399e+000 4.6887064e+002
3.0496387e+000 4.6799823e+002
3.0565379e+000 4.5299500e+002
3.0634370e+000 4.5381485e+002
3.0703361e+000 4.5956931e+002
3.0772352e+000 4.6477676e+002
3.0841343e+000 4.6114374e+002
3.0910332e+000 4.6816293e+002
3.0979323e+000 4.6245181e+002
3.1048315e+000 4.6533044e+002
3.1117306e+000 4.7819165e+002
3.1186297e+000 4.9699246e+002
3.1255288e+000 4.8907956e+002
3.1324277e+000 4.9116394e+002
3.1393268e+000 5.0308936e+002
3.1462259e+000 5.0668982e+002
3.1531250e+000 5.0537222e+002
3.1600242e+000 4.9574966e+002
3.1669233e+000 4.9894128e+002
3.1738222e+000 4.9885315e+002
3.1807213e+000 5.1417163e+002
3.1876204e+000 5.2202740e+002
3.1945195e+000 5.2219598e+002
3.2014186e+000 5.4433679e+002
3.2083178e+000 5.6957477e+002
3.2152169e+000 5.9891089e+002
3.2221158e+000 6.0682019e+002
3.2290149e+000 6.0779541e+002
3.2359140e+000 6.1212280e+002
3.2428131e+000 6.5589185e+002
3.2497122e+000 7.1807507e+002
3.2566113e+000 7.5950916e+002
3.2635102e+000 8.1842242e+002
3.2704093e+000 9.1277783e+002
3.2773085e+000 1.0486207e+003
3.2842076e+000 1.3214080e+003
3.2911067e+000 1.7085295e+003
3.2980058e+000 2.4946370e+003
3.3049047e+000 4.1229609e+003
3.3118038e+000 7.1944038e+003
3.3187029e+000 1.1714122e+004
3.3256021e+000 1.5338923e+004
3.3325012e+000 1.5092694e+004
3.3394003e+000 1.1227008e+004
3.3462994e+000 6.9070176e+003
3.3531983e+000 4.0318586e+003
3.3600974e+000 2.5069387e+003
3.3669965e+000 1.7313556e+003
3.3738956e+000 1.3203175e+003
3.3807948e+000 1.0810967e+003
3.3876939e+000 9.2702356e+002
3.3945928e+000 8.2453217e+002
3.4014919e+000 7.5468195e+002
3.4083910e+000 7.1011224e+002
3.4152901e+000 6.7312701e+002
3.4221892e+000 6.2927734e+002
3.4290884e+000 6.0679126e+002
3.4359872e+000 5.8445929e+002
3.4428864e+000 5.5084033e+002
3.4497855e+000 5.2990625e+002
3.4566846e+000 5.3244171e+002
3.4635837e+000 5.3299860e+002
3.4704828e+000 5.2270801e+002
3.4773817e+000 5.0838147e+002
3.4842808e+000 4.9768036e+002
3.4911799e+000 4.9974271e+002
3.4980791e+000 5.1852539e+002
3.5049782e+000 5.2486890e+002
3.5118773e+000 5.3554919e+002
3.5187764e+000 5.4363098e+002
3.5256753e+000 5.2134320e+002
3.5325744e+000 4.9386557e+002
3.5394735e+000 4.7175720e+002
3.5463727e+000 4.6334061e+002
3.5532718e+000 4.4633063e+002
3.5601709e+000 4.4021204e+002
3.5670698e+000 4.4216010e+002
3.5739689e+000 4.3208749e+002
3.5808680e+000 4.3210999e+002
3.5877671e+000 4.3717999e+002
3.5946662e+000 4.3084845e+002
3.6015654e+000 4.1379028e+002
3.6084642e+000 4.1567856e+002
3.6153634e+000 4.2414615e+002
3.6222625e+000 4.2964746e+002
3.6291616e+000 4.1986203e+002
3.6360607e+000 4.0300714e+002
3.6429598e+000 4.1156561e+002
3.6498590e+000 4.1897156e+002
3.6567578e+000 4.1506668e+002
3.6636569e+000 4.2337305e+002
3.6705561e+000 4.2956845e+002
3.6774552e+000 4.1608209e+002
3.6843543e+000 4.1159943e+002
3.6912534e+000 4.0408707e+002
3.6981523e+000 3.8742813e+002
3.7050514e+000 3.8193686e+002
3.7119505e+000 3.8675006e+002
3.7188497e+000 3.8995547e+002
3.7257488e+000 3.9189124e+002
3.7326479e+000 3.9534134e+002
3.7395468e+000 4.0249893e+002
3.7464459e+000 4.0382443e+002
3.7533450e+000 3.9881796e+002
3.7602441e+000 4.0283856e+002
3.7671432e+000 4.0544543e+002
3.7740424e+000 3.9527063e+002
3.7809412e+000 3.9659631e+002
3.7878404e+000 4.0054132e+002
3.7947395e+000 3.9123737e+002
3.8016386e+000 3.8058502e+002
3.8085377e+000 3.7388980e+002
3.8154368e+000 3.7337103e+002
3.8223360e+000 3.6008588e+002
3.8292348e+000 3.5135416e+002
3.8361340e+000 3.5958188e+002
3.8430331e+000 3.5756583e+002
3.8499322e+000 3.5956232e+002
3.8568313e+000 3.7803802e+002
3.8637304e+000 3.9012396e+002
3.8706293e+000 3.8674255e+002
3.8775284e+000 3.7771600e+002
3.8844275e+000 3.7648160e+002
3.8913267e+000 3.7692780e+002
3.8982258e+000 3.6927103e+002
3.9051249e+000 3.7007745e+002
3.9120238e+000 3.7482629e+002
3.9189229e+000 3.7230219e+002
3.9258220e+000 3.6110025e+002
3.9327211e+000 3.6490872e+002
3.9396203e+000 3.7283734e+002
3.9465194e+000 3.7933209e+002
3.9534183e+000 3.6968182e+002
3.9603174e+000 3.5532330e+002
3.9672165e+000 3.5889478e+002
3.9741156e+000 3.6407483e+002
3.9810147e+000 3.6295535e+002
3.9879138e+000 3.6387720e+002
3.9948130e+000 3.6416183e+002
4.0017118e+000 3.6089911e+002
4.0086112e+000 3.6826599e+002
4.0155101e+000 3.7570581e+002
4.0224090e+000 3.6361679e+002
4.0293083e+000 3.6003177e+002
4.0362072e+000 3.7528265e+002
4.0431066e+000 3.7368362e+002
4.0500054e+000 3.8174683e+002
4.0569048e+000 4.0386084e+002
4.0638037e+000 4.2738324e+002
4.0707026e+000 4.4587668e+002
4.0776019e+000 4.5433987e+002
4.0845008e+000 4.4404083e+002
4.0914001e+000 4.2589066e+002
4.0982990e+000 3.9662262e+002
4.1051979e+000 3.7311325e+002
4.1120973e+000 3.5790594e+002
4.1189961e+000 3.4554794e+002
4.1258955e+000 3.5435367e+002
4.1327944e+000 3.7766489e+002
4.1396937e+000 3.7425708e+002
4.1465926e+000 3.5805182e+002
4.1534915e+000 3.5078519e+002
4.1603909e+000 3.5888739e+002
4.1672897e+000 3.7242688e+002
4.1741891e+000 3.7792575e+002
4.1810880e+000 3.7338031e+002
4.1879873e+000 3.6538324e+002
4.1948862e+000 3.5872525e+002
4.2017851e+000 3.4688391e+002
4.2086844e+000 3.4881918e+002
4.2155833e+000 3.4818274e+002
4.2224827e+000 3.4055273e+002
4.2293816e+000 3.3977536e+002
4.2362804e+000 3.3322891e+002
4.2431798e+000 3.3594962e+002
4.2500787e+000 3.4658536e+002
4.2569780e+000 3.4479083e+002
4.2638769e+000 3.4267456e+002
4.2707763e+000 3.4828876e+002
4.2776752e+000 3.4845041e+002
4.2845740e+000 3.3986469e+002
4.2914734e+000 3.3093433e+002
4.2983723e+000 3.3255331e+002
4.3052716e+000 3.4089511e+002
4.3121705e+000 3.4742932e+002
4.3190699e+000 3.3570422e+002
4.3259687e+000 3.2636673e+002
4.3328676e+000 3.3228806e+002
4.3397670e+000 3.5141977e+002
4.3466659e+000 3.5683167e+002
4.3535652e+000 3.4719943e+002
4.3604641e+000 3.4054718e+002
4.3673630e+000 3.2842471e+002
4.3742623e+000 3.2503146e+002
4.3811612e+000 3.3431540e+002
4.3880606e+000 3.3462808e+002
4.3949594e+000 3.3529224e+002
4.4018588e+000 3.3313510e+002
4.4087577e+000 3.4015598e+002
4.4156566e+000 3.3703552e+002
4.4225559e+000 3.3024448e+002
4.4294548e+000 3.2974786e+002
As I suggested in the comment above, coercing the pandas Series into an ndarray will fix the problem:
mod = GaussianModel()
ydata = np.array(dataDataframe[0])
xdata = np.array(dataDataframe.index.values)
pars = mod.guess(ydata, x=xdata)
out = mod.fit(ydata, pars, x=xdata)
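For context (this is my reading of the traceback, not something the lmfit docs state): when guess receives a pandas Series whose index is the float XX values, the peak position it computes can come back as a float index label rather than an integer position on older pandas versions, and using that float to index the plain x array raises exactly the IndexError above. A minimal illustration of that failure mode, with made-up values:
import numpy as np
import pandas as pd

# y as a Series with a float index (like dataDataframe[0]), x as a plain ndarray
y = pd.Series([500.0, 15000.0, 600.0], index=[3.30, 3.33, 3.36])
x = np.array([3.30, 3.33, 3.36])

peak_pos = np.asarray(y).argmax()   # plain ndarray -> integer position 1
print(x[peak_pos])                  # 3.33, as intended

# On some older pandas versions, y.argmax() returned the *label* (3.33) instead,
# and x[3.33] fails with "only integers, slices ... are valid indices".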
This example works for me:
#!/usr/bin/env python
from lmfit.models import LorentzianModel
import matplotlib.pyplot as plt
import pandas as pd
dframe = pd.read_csv('peak.csv')
model = LorentzianModel()
params = model.guess(dframe['y'], x=dframe['x'])
result = model.fit(dframe['y'], params, x=dframe['x'])
print(result.fit_report())
result.plot_fit()
plt.show()
with peak.csv containing
x,y
0.000000, 0.021654
0.200000, 0.385367
0.400000, 0.193304
0.600000, 0.103481
0.800000, 0.404041
1.000000, 0.212585
1.200000, 0.253212
1.400000, -0.037306
1.600000, 0.271415
1.800000, 0.025614
2.000000, 0.066419
2.200000, -0.034347
2.400000, 0.153702
2.600000, 0.161341
2.800000, -0.097676
3.000000, -0.061880
3.200000, 0.085341
3.400000, 0.083674
3.600000, 0.190944
3.800000, 0.222168
4.000000, 0.214417
4.200000, 0.341221
4.400000, 0.634501
4.600000, 0.302566
4.800000, 0.101096
5.000000, -0.106441
5.200000, 0.567396
5.400000, 0.531899
5.600000, 0.459800
5.800000, 0.646655
6.000000, 0.662228
6.200000, 0.820844
6.400000, 0.947696
6.600000, 1.541353
6.800000, 1.763981
7.000000, 1.846081
7.200000, 2.986333
7.400000, 3.182907
7.600000, 3.786487
7.800000, 4.822287
8.000000, 5.739122
8.200000, 6.744448
8.400000, 7.295213
8.600000, 8.737766
8.800000, 9.693782
9.000000, 9.894218
9.200000, 10.193956
9.400000, 10.091519
9.600000, 9.652392
9.800000, 8.670938
10.000000, 8.004205
10.200000, 6.773599
10.400000, 6.076502
10.600000, 5.127315
10.800000, 4.303762
11.000000, 3.426006
11.200000, 2.416431
11.400000, 2.311363
11.600000, 1.748020
11.800000, 1.135594
12.000000, 0.888514
12.200000, 1.030794
12.400000, 0.543024
12.600000, 0.767751
12.800000, 0.657551
13.000000, 0.495730
13.200000, 0.447520
13.400000, 0.173839
13.600000, 0.256758
13.800000, 0.596106
14.000000, 0.065328
14.200000, 0.197267
14.400000, 0.260038
14.600000, 0.460880
14.800000, 0.335248
15.000000, 0.295977
15.200000, -0.010228
15.400000, 0.138670
15.600000, 0.192113
15.800000, 0.304371
16.000000, 0.442517
16.200000, 0.164944
16.400000, 0.001907
16.600000, 0.207504
16.800000, 0.012640
17.000000, 0.090878
17.200000, -0.222967
17.400000, 0.391717
17.600000, 0.180295
17.800000, 0.206875
18.000000, 0.240595
18.200000, -0.037437
18.400000, 0.139918
18.600000, 0.012560
18.800000, -0.053009
19.000000, 0.226069
19.200000, 0.076879
19.400000, 0.078599
19.600000, 0.016125
19.800000, -0.071217
20.000000, -0.091474
