Trackpy tp.batch() gives generator already executing error - python

I am trying to track some particles in a video using trackpy.
I'm following the walkthrough from the website:
http://soft-matter.github.io/trackpy/v0.4.2/tutorial/walkthrough.html
After processing a few frames (usually around 14, sometimes 0), it gives me a ValueError that says: "generator already executing".
I cannot figure out how to solve this issue; I hope someone can.
Python: 3.9.4
Trackpy: 0.5.0
The full error:
ValueError Traceback (most recent call last)
<ipython-input-8-ff6dcf7a7595> in <module>
----> 1 f = tp.batch(frames[100:300], masksize, minmass=minmass, invert=True);
~\.conda\envs\trackpyenv\lib\site-packages\trackpy\feature.py in batch(frames, diameter, output, meta, processes, after_locate, **kwargs)
556 all_features = []
557 for i, features in enumerate(map_func(curried_locate, frames)):
--> 558 image = frames[i]
559 if hasattr(image, 'frame_no') and image.frame_no is not None:
560 frame_no = image.frame_no
~\.conda\envs\trackpyenv\lib\site-packages\slicerator\__init__.py in __getitem__(self, key)
234 if not (isinstance(key, slice) or
235 isinstance(key, collections.Iterable)):
--> 236 return self._get(self._map_index(key))
237 else:
238 rel_indices, new_length = key_to_indices(key, len(self))
~\.conda\envs\trackpyenv\lib\site-packages\slicerator\__init__.py in _get(self, key)
205
206 def _get(self, key):
--> 207 return self._ancestor[key]
208
209 def _map_index(self, key):
~\.conda\envs\trackpyenv\lib\site-packages\slicerator\__init__.py in __getitem__(self, i)
478 indices, new_length = key_to_indices(i, len(self))
479 if new_length is None:
--> 480 return self._get(indices)
481 else:
482 return Slicerator(self, indices, new_length, self._propagate_attrs)
~\.conda\envs\trackpyenv\lib\site-packages\slicerator\__init__.py in _get(self, key)
459 # We need to copy here: else any _proc_func that acts inplace would
460 # change the ancestor value.
--> 461 return self._proc_func(*(copy(a[key]) for a in self._ancestors))
462
463 def __repr__(self):
~\.conda\envs\trackpyenv\lib\site-packages\slicerator\__init__.py in <genexpr>(.0)
459 # We need to copy here: else any _proc_func that acts inplace would
460 # change the ancestor value.
--> 461 return self._proc_func(*(copy(a[key]) for a in self._ancestors))
462
463 def __repr__(self):
~\.conda\envs\trackpyenv\lib\site-packages\slicerator\__init__.py in __getitem__(self, i)
186 indices, new_length = key_to_indices(i, len(self))
187 if new_length is None:
--> 188 return self._get(indices)
189 else:
190 return cls(self, indices, new_length, propagate_attrs)
~\.conda\envs\trackpyenv\lib\site-packages\pims\base_frames.py in __getitem__(self, key)
96 """__getitem__ is handled by Slicerator. In all pims readers, the data
97 returning function is get_frame."""
---> 98 return self.get_frame(key)
99
100 def __iter__(self):
~\.conda\envs\trackpyenv\lib\site-packages\pims\base_frames.py in get_frame(self, i)
590 coords.update(**{k: v for k, v in zip(self.iter_axes, iter_coords)})
591
--> 592 result = self._get_frame_wrapped(**coords)
593 if hasattr(result, 'metadata'):
594 metadata = result.metadata
~\.conda\envs\trackpyenv\lib\site-packages\pims\imageio_reader.py in get_frame_2D(self, **coords)
100 def get_frame_2D(self, **coords):
101 i = coords['t'] if 't' in coords else 0
--> 102 frame = self.reader.get_data(i)
103 return Frame(frame, frame_no=i, metadata=frame.meta)
104
~\.conda\envs\trackpyenv\lib\site-packages\imageio\core\format.py in get_data(self, index, **kwargs)
344 self._BaseReaderWriter_last_index = index
345 try:
--> 346 im, meta = self._get_data(index, **kwargs)
347 except StopIteration:
348 raise IndexError(index)
~\.conda\envs\trackpyenv\lib\site-packages\imageio\plugins\ffmpeg.py in _get_data(self, index)
379 else:
380 if (index < self._pos) or (index > self._pos + 100):
--> 381 self._initialize(index)
382 else:
383 self._skip_frames(index - self._pos - 1)
~\.conda\envs\trackpyenv\lib\site-packages\imageio\plugins\ffmpeg.py in _initialize(self, index)
393 # Close the current generator, and thereby terminate its subprocess
394 if self._read_gen is not None:
--> 395 self._read_gen.close()
396
397 iargs = []
ValueError: generator already executing

I stumbled on the same (or a similar) issue.
The root cause seems to be that batch uses more than one process by default, while the underlying frame reader (the imageio ffmpeg generator in the traceback above) isn't safe to use from several workers at once.
A workaround is to disable multiprocessing by calling batch with processes=1, e.g.:
f = tp.batch(frames[100:300], masksize, minmass=minmass, invert=True, processes=1)
See trackpy.batch for reference.
I call it a workaround because the code then executes serially, one frame at a time. Then again, better than not executing at all...
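For completeness, a minimal, self-contained sketch of the serial call; the video path and the masksize/minmass values below are placeholders, not taken from the question:
import pims
import trackpy as tp

frames = pims.open('video.avi')  # hypothetical input video
masksize = 11                    # example feature diameter (must be odd)
minmass = 100                    # example minimum integrated brightness

# processes=1 disables the worker pool, so frames are read and located one at a time
f = tp.batch(frames[100:300], masksize, minmass=minmass, invert=True, processes=1)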

Related

Attribute Error with: "import umap.umap_ as UMAP"

I'm using JupyterLab with numba = 0.55.1 and umap-learn = 0.5.2. Does it matter that umap has "pypi" as its channel and numba doesn't? Both are in anaconda3. I've already tried several solutions shown here.
So, with the following code:
import umap.umap_ as UMAP
I get the following errors:
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
~\AppData\Roaming\Python\Python38\site-packages\numba\core\errors.py in new_error_context(fmt_, *args, **kwargs)
~\AppData\Roaming\Python\Python38\site-packages\numba\core\lowering.py in lower_block(self, block)
234 """
--> 235 Create CPython wrapper(s) around this function (or generator).
236 """
~\AppData\Roaming\Python\Python38\site-packages\numba\core\lowering.py in lower_inst(self, inst)
379
--> 380 elif isinstance(inst, ir.SetItem):
381 signature = self.fndesc.calltypes[inst]
~\AppData\Roaming\Python\Python38\site-packages\numba\core\lowering.py in lower_assign(self, ty, inst)
581
--> 582 def cast_result(res):
583 return self.context.cast(self.builder, res,
~\AppData\Roaming\Python\Python38\site-packages\numba\core\lowering.py in incref(self, typ, val)
~\AppData\Roaming\Python\Python38\site-packages\numba\core\runtime\context.py in incref(self, builder, typ, value)
217 """
--> 218 self._call_incref_decref(builder, typ, value, "NRT_incref")
219
~\AppData\Roaming\Python\Python38\site-packages\numba\core\runtime\context.py in _call_incref_decref(self, builder, typ, value, funcname)
206 mod = builder.module
--> 207 fn = mod.get_or_insert_function(incref_decref_ty, name=funcname)
208 # XXX "nonnull" causes a crash in test_dyn_array: can this
AttributeError: 'Module' object has no attribute 'get_or_insert_function'
During handling of the above exception, another exception occurred:
LoweringError Traceback (most recent call last)
~\AppData\Local\Temp/ipykernel_12580/4057111716.py in <module>
----> 1 import umap.umap_ as UMAP
~\anaconda3\lib\site-packages\umap\umap_.py in <module>
30 import umap.distances as dist
31
---> 32 import umap.sparse as sparse
33
34 from umap.utils import (
~\anaconda3\lib\site-packages\umap\sparse.py in <module>
10 import numpy as np
11
---> 12 from umap.utils import norm
13
14 locale.setlocale(locale.LC_NUMERIC, "C")
~\anaconda3\lib\site-packages\umap\utils.py in <module>
39
40 #numba.njit("i4(i8[:])")
---> 41 def tau_rand_int(state):
42 """A fast (pseudo)-random number generator.
43
~\AppData\Roaming\Python\Python38\site-packages\numba\core\decorators.py in wrapper(func)
224
225 return wrapper
--> 226
227
228 def generated_jit(function=None, target='cpu', cache=False,
~\AppData\Roaming\Python\Python38\site-packages\numba\core\dispatcher.py in compile(self, sig)
977 else:
978 return dict((sig, self.overloads[sig].metadata) for sig in self.signatures)
--> 979
980 def get_function_type(self):
981 """Return unique function type of dispatcher when possible, otherwise
~\AppData\Roaming\Python\Python38\site-packages\numba\core\dispatcher.py in compile(self, args, return_type)
139
140 def _get_implementation(self, args, kws):
--> 141 impl = self.py_func(*args, **kws)
142 # Check the generating function and implementation signatures are
143 # compatible, otherwise compiling would fail later.
~\AppData\Roaming\Python\Python38\site-packages\numba\core\dispatcher.py in _compile_cached(self, args, return_type)
153 pyparam.kind != implparam.kind or
154 (implparam.default is not implparam.empty and
--> 155 implparam.default != pyparam.default)):
156 ok = False
157 if not ok:
~\AppData\Roaming\Python\Python38\site-packages\numba\core\dispatcher.py in _compile_core(self, args, return_type)
166 '_CompileStats', ('cache_path', 'cache_hits', 'cache_misses'))
167
--> 168
169 class _CompilingCounter(object):
170 """
~\AppData\Roaming\Python\Python38\site-packages\numba\core\compiler.py in compile_extra(typingctx, targetctx, func, args, return_type, flags, locals, library, pipeline_class)
~\AppData\Roaming\Python\Python38\site-packages\numba\core\compiler.py in compile_extra(self, func)
426 """The default compiler
427 """
--> 428
429 def define_pipelines(self):
430 # this maintains the objmode fallback behaviour
~\AppData\Roaming\Python\Python38\site-packages\numba\core\compiler.py in _compile_bytecode(self)
490 pm.add_pass(AnnotateTypes, "annotate types")
491
--> 492 # strip phis
493 pm.add_pass(PreLowerStripPhis, "remove phis nodes")
494
~\AppData\Roaming\Python\Python38\site-packages\numba\core\compiler.py in _compile_core(self)
469 return pm
470
--> 471 #staticmethod
472 def define_nopython_lowering_pipeline(state, name='nopython_lowering'):
473 pm = PassManager(name)
~\AppData\Roaming\Python\Python38\site-packages\numba\core\compiler.py in _compile_core(self)
460 pm.passes.extend(untyped_passes.passes)
461
--> 462 typed_passes = dpb.define_typed_pipeline(state)
463 pm.passes.extend(typed_passes.passes)
464
~\AppData\Roaming\Python\Python38\site-packages\numba\core\compiler_machinery.py in run(self, state)
341 def dependency_analysis(self):
342 """
--> 343 Computes dependency analysis
344 """
345 deps = dict()
~\AppData\Roaming\Python\Python38\site-packages\numba\core\compiler_machinery.py in run(self, state)
332 raise BaseException("Legacy pass in use")
333 except _EarlyPipelineCompletion as e:
--> 334 raise e
335 except Exception as e:
336 msg = "Failed in %s mode pipeline (step: %s)" % \
~\AppData\Roaming\Python\Python38\site-packages\numba\core\compiler_lock.py in _acquire_compile_lock(*args, **kwargs)
33 def _acquire_compile_lock(*args, **kwargs):
34 with self:
---> 35 return func(*args, **kwargs)
36 return _acquire_compile_lock
37
~\AppData\Roaming\Python\Python38\site-packages\numba\core\compiler_machinery.py in _runPass(self, index, pss, internal_state)
287 mutated |= check(pss.run_initialization, internal_state)
288 with SimpleTimer() as pass_time:
--> 289 mutated |= check(pss.run_pass, internal_state)
290 with SimpleTimer() as finalize_time:
291 mutated |= check(pss.run_finalizer, internal_state)
~\AppData\Roaming\Python\Python38\site-packages\numba\core\compiler_machinery.py in check(func, compiler_state)
260
261 def check(func, compiler_state):
--> 262 mangled = func(compiler_state)
263 if mangled not in (True, False):
264 msg = ("CompilerPass implementations should return True/False. "
~\AppData\Roaming\Python\Python38\site-packages\numba\core\typed_passes.py in run_pass(self, state)
394 else:
395 if isinstance(restype,
--> 396 (types.Optional, types.Generator)):
397 pass
398 else:
~\AppData\Roaming\Python\Python38\site-packages\numba\core\lowering.py in lower(self)
136 self.lower_normal_function(self.fndesc)
137 else:
--> 138 self.genlower = self.GeneratorLower(self)
139 self.gentype = self.genlower.gentype
140
~\AppData\Roaming\Python\Python38\site-packages\numba\core\lowering.py in lower_normal_function(self, fndesc)
190 entry_block_tail = self.lower_function_body()
191
--> 192 # Close tail of entry block
193 self.builder.position_at_end(entry_block_tail)
194 self.builder.branch(self.blkmap[self.firstblk])
~\AppData\Roaming\Python\Python38\site-packages\numba\core\lowering.py in lower_function_body(self)
219
220 def lower_block(self, block):
--> 221 """
222 Lower the given block.
223 """
~\AppData\Roaming\Python\Python38\site-packages\numba\core\lowering.py in lower_block(self, block)
233 def create_cpython_wrapper(self, release_gil=False):
234 """
--> 235 Create CPython wrapper(s) around this function (or generator).
236 """
237 if self.genlower:
~\anaconda3\lib\contextlib.py in __exit__(self, type, value, traceback)
129 value = type()
130 try:
--> 131 self.gen.throw(type, value, traceback)
132 except StopIteration as exc:
133 # Suppress StopIteration *unless* it's the same exception that
~\AppData\Roaming\Python\Python38\site-packages\numba\core\errors.py in new_error_context(fmt_, *args, **kwargs)
LoweringError: Failed in nopython mode pipeline (step: native lowering)
'Module' object has no attribute 'get_or_insert_function'
File "..\..\..\anaconda3\lib\site-packages\umap\utils.py", line 53:
def tau_rand_int(state):
<source elided>
"""
state[0] = (((state[0] & 4294967294) << 12) & 0xFFFFFFFF) ^ (
^
During: lowering "state = arg(0, name=state)" at C:\Users\User\anaconda3\lib\site-packages\umap\utils.py (53)

Cannot plot datetime64[ns] on hvplot axis ("'pandas_datetime_types' is not defined" error)

I am simply trying to plot some values against datetime64[ns] timestamps with holoviews.
That is,
x-axis = nx1 datetime64[ns] values
y-axis = nx1 data.
Here is a screenshot of what I have: [screenshot of my dataframe]
<class 'pandas._libs.tslibs.timestamps.Timestamp'>
and my overall code:
import hvplot.pandas
import pandas as pd
## Code omitted at the start to extract data and create a dictionary to convert to a data frame
# create dictionary
temp_dict = dict(sampling_time=time_y_value_is_taken, y_axis_values=y_values)
df = pd.DataFrame.from_dict(temp_dict)
df.sampling_time = df.sampling_time.astype('datetime64[ns]')
df = df.set_index('sampling_time')
# The following line throws the error
df.hvplot.line()
I keep getting the error: 'pandas_datetime_types' is not defined. I have also tried importing datetime as datetime, but it does not work.
EDIT: Here is the traceback:
---------------------------------------------------------------------------
NameError Traceback (most recent call last)
~\miniconda3\envs\mpess_visual\lib\site-packages\IPython\core\formatters.py in __call__(self, obj, include, exclude)
968
969 if method is not None:
--> 970 return method(include=include, exclude=exclude)
971 return None
972 else:
~\miniconda3\envs\mpess_visual\lib\site-packages\holoviews\core\dimension.py in _repr_mimebundle_(self, include, exclude)
1315 combined and returned.
1316 """
-> 1317 return Store.render(self)
1318
1319
~\miniconda3\envs\mpess_visual\lib\site-packages\holoviews\core\options.py in render(cls, obj)
1403 data, metadata = {}, {}
1404 for hook in hooks:
-> 1405 ret = hook(obj)
1406 if ret is None:
1407 continue
~\miniconda3\envs\mpess_visual\lib\site-packages\holoviews\ipython\display_hooks.py in pprint_display(obj)
280 if not ip.display_formatter.formatters['text/plain'].pprint:
281 return None
--> 282 return display(obj, raw_output=True)
283
284
~\miniconda3\envs\mpess_visual\lib\site-packages\holoviews\ipython\display_hooks.py in display(obj, raw_output, **kwargs)
250 elif isinstance(obj, (CompositeOverlay, ViewableElement)):
251 with option_state(obj):
--> 252 output = element_display(obj)
253 elif isinstance(obj, (Layout, NdLayout, AdjointLayout)):
254 with option_state(obj):
~\miniconda3\envs\mpess_visual\lib\site-packages\holoviews\ipython\display_hooks.py in wrapped(element)
144 try:
145 max_frames = OutputSettings.options['max_frames']
--> 146 mimebundle = fn(element, max_frames=max_frames)
147 if mimebundle is None:
148 return {}, {}
~\miniconda3\envs\mpess_visual\lib\site-packages\holoviews\ipython\display_hooks.py in element_display(element, max_frames)
190 return None
191
--> 192 return render(element)
193
194
~\miniconda3\envs\mpess_visual\lib\site-packages\holoviews\ipython\display_hooks.py in render(obj, **kwargs)
66 renderer = renderer.instance(fig='png')
67
---> 68 return renderer.components(obj, **kwargs)
69
70
~\miniconda3\envs\mpess_visual\lib\site-packages\holoviews\plotting\renderer.py in components(self, obj, fmt, comm, **kwargs)
408 doc = Document()
409 with config.set(embed=embed):
--> 410 model = plot.layout._render_model(doc, comm)
411 if embed:
412 return render_model(model, comm)
~\miniconda3\envs\mpess_visual\lib\site-packages\panel\viewable.py in _render_model(self, doc, comm)
453 if comm is None:
454 comm = state._comm_manager.get_server_comm()
--> 455 model = self.get_root(doc, comm)
456
457 if config.embed:
~\miniconda3\envs\mpess_visual\lib\site-packages\panel\viewable.py in get_root(self, doc, comm, preprocess)
510 """
511 doc = init_doc(doc)
--> 512 root = self._get_model(doc, comm=comm)
513 if preprocess:
514 self._preprocess(root)
~\miniconda3\envs\mpess_visual\lib\site-packages\panel\layout\base.py in _get_model(self, doc, root, parent, comm)
120 if root is None:
121 root = model
--> 122 objects = self._get_objects(model, [], doc, root, comm)
123 props = dict(self._init_params(), objects=objects)
124 model.update(**self._process_param_change(props))
~\miniconda3\envs\mpess_visual\lib\site-packages\panel\layout\base.py in _get_objects(self, model, old_objects, doc, root, comm)
110 else:
111 try:
--> 112 child = pane._get_model(doc, root, model, comm)
113 except RerenderError:
114 return self._get_objects(model, current_objects[:i], doc, root, comm)
~\miniconda3\envs\mpess_visual\lib\site-packages\panel\pane\holoviews.py in _get_model(self, doc, root, parent, comm)
237 plot = self.object
238 else:
--> 239 plot = self._render(doc, comm, root)
240
241 plot.pane = self
~\miniconda3\envs\mpess_visual\lib\site-packages\panel\pane\holoviews.py in _render(self, doc, comm, root)
304 kwargs['comm'] = comm
305
--> 306 return renderer.get_plot(self.object, **kwargs)
307
308 def _cleanup(self, root):
~\miniconda3\envs\mpess_visual\lib\site-packages\holoviews\plotting\bokeh\renderer.py in get_plot(self_or_cls, obj, doc, renderer, **kwargs)
71 combining the bokeh model with another plot.
72 """
---> 73 plot = super(BokehRenderer, self_or_cls).get_plot(obj, doc, renderer, **kwargs)
74 if plot.document is None:
75 plot.document = Document() if self_or_cls.notebook_context else curdoc()
~\miniconda3\envs\mpess_visual\lib\site-packages\holoviews\plotting\renderer.py in get_plot(self_or_cls, obj, doc, renderer, comm, **kwargs)
241 init_key = tuple(v if d is None else d for v, d in
242 zip(plot.keys[0], defaults))
--> 243 plot.update(init_key)
244 else:
245 plot = obj
~\miniconda3\envs\mpess_visual\lib\site-packages\holoviews\plotting\plot.py in update(self, key)
980 def update(self, key):
981 if len(self) == 1 and ((key == 0) or (key == self.keys[0])) and not self.drawn:
--> 982 return self.initialize_plot()
983 item = self.__getitem__(key)
984 self.traverse(lambda x: setattr(x, '_updated', True))
~\miniconda3\envs\mpess_visual\lib\site-packages\holoviews\plotting\bokeh\element.py in initialize_plot(self, ranges, plot, plots, source)
1388 element = self.hmap.last
1389 key = util.wrap_tuple(self.hmap.last_key)
-> 1390 ranges = self.compute_ranges(self.hmap, key, ranges)
1391 self.current_ranges = ranges
1392 self.current_frame = element
~\miniconda3\envs\mpess_visual\lib\site-packages\holoviews\plotting\plot.py in compute_ranges(self, obj, key, ranges)
636 if (not (axiswise and not isinstance(obj, HoloMap)) or
637 (not framewise and isinstance(obj, HoloMap))):
--> 638 self._compute_group_range(group, elements, ranges, framewise,
639 axiswise, robust, self.top_level,
640 prev_frame)
~\miniconda3\envs\mpess_visual\lib\site-packages\holoviews\plotting\plot.py in _compute_group_range(cls, group, elements, ranges, framewise, axiswise, robust, top_level, prev_frame)
853 continue
854 matching &= (
--> 855 len({'date' if isinstance(v, util.datetime_types) else 'number'
856 for rng in rs for v in rng if util.isfinite(v)}) < 2
857 )
~\miniconda3\envs\mpess_visual\lib\site-packages\holoviews\plotting\plot.py in <setcomp>(.0)
854 matching &= (
855 len({'date' if isinstance(v, util.datetime_types) else 'number'
--> 856 for rng in rs for v in rng if util.isfinite(v)}) < 2
857 )
858 if matching:
~\miniconda3\envs\mpess_visual\lib\site-packages\holoviews\core\util.py in isfinite(val)
902 return finite
903 elif isinstance(val, datetime_types+timedelta_types):
--> 904 return not isnat(val)
905 elif isinstance(val, (basestring, bytes)):
906 return True
~\miniconda3\envs\mpess_visual\lib\site-packages\holoviews\core\util.py in isnat(val)
866 elif pd and val is pd.NaT:
867 return True
--> 868 elif pd and isinstance(val, pandas_datetime_types+pandas_timedelta_types):
869 return pd.isna(val)
870 else:
NameError: name 'pandas_datetime_types' is not defined
Any suggestions? Thank you
Although I couldn't find any official doc to support my statement, it's a compatibility issue (HoloViews 1.14.4 was released before Pandas 1.3.0).
Looking at [GitHub]: holoviz/holoviews - (v1.14.4) holoviews/holoviews/core/util.py (starting at line #83), there are some conditional imports. One of them is ABCIndexClass.
[GitHub]: pandas-dev/pandas - (v1.3.0) pandas/pandas/core/dtypes/dtypes.py, on the other hand, no longer provides it (as opposed to, say, its v1.2.5 counterpart), yielding a (silent) exception and the behavior you're experiencing.
Ways to go:
Upgrade HoloViews to v1.14.5, which no longer has this problem (or at least has a Pandas 1.3.0 conditional as well - fixed by [GitHub]: holoviz/holoviews - Add support for pandas>=1.3)
You could also downgrade Pandas to (e.g.) v1.2.5, although this is not the recommended way to go
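A quick way to confirm which combination you have installed (assuming both packages import in the affected environment):
import pandas as pd
import holoviews as hv

# The fix above applies when pandas >= 1.3.0 is paired with holoviews <= 1.14.4
print("pandas:", pd.__version__)
print("holoviews:", hv.__version__)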

How to convert 2 column dataframe to dictionary without turning keys into a list

I will preface this by saying I'm a very amateur user, and though I've researched my problem extensively I have not found a solution. I assume the solution is simple, but we will see.
Simplified, I have a dataframe with column names A, B, C, D, etc., and I want to change those names to a, b, c, d, etc. The list of column names is long, so to achieve this I've imported a dataframe from an Excel file with 2 columns (I used Excel here because I want an easily reproducible method for the entire program I'm creating). The first column has A, B, C, D... and the second column has a, b, c, d.
I then took this dataframe, set the index to column 0, and transposed it. I then used .to_dict('list'), and the resulting dictionary looks almost correct except that the values are in lists: {'A': ['a'], 'B': ['b'], ...}. So when I try to execute df.rename(columns=dictionary) I get the unhashable type: 'list' error.
I know this is because my values are stored as lists; if the dictionary looked like {'A': 'a', 'B': 'b', ...} I'm betting it would work fine.
So basically, how do I turn my dataframe into a dictionary formatted like that, without the lists? Or is this not possible and should I approach it in a different way?
Thanks!
Here is my actual code:
INPUT
df_plate = pd.read_excel('plate.xlsx',index_col='sample')
df_plate_t = df_plate.T
dict_plate = df_plate_t.to_dict('list')
df_sorted2 = df_sorted.rename(columns=dict_plate)
df_sorted2
OUTPUT
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
~\AppData\Local\Continuum\anaconda3\lib\site-packages\IPython\core\formatters.py in __call__(self, obj)
700 type_pprinters=self.type_printers,
701 deferred_pprinters=self.deferred_printers)
--> 702 printer.pretty(obj)
703 printer.flush()
704 return stream.getvalue()
~\AppData\Local\Continuum\anaconda3\lib\site-packages\IPython\lib\pretty.py in pretty(self, obj)
400 if cls is not object \
401 and callable(cls.__dict__.get('__repr__')):
--> 402 return _repr_pprint(obj, self, cycle)
403
404 return _default_pprint(obj, self, cycle)
~\AppData\Local\Continuum\anaconda3\lib\site-packages\IPython\lib\pretty.py in _repr_pprint(obj, p, cycle)
695 """A pprint that just redirects to the normal repr function."""
696 # Find newlines and replace them with p.break_()
--> 697 output = repr(obj)
698 for idx,output_line in enumerate(output.splitlines()):
699 if idx:
~\AppData\Local\Continuum\anaconda3\lib\site-packages\pandas\core\base.py in __repr__(self)
76 Yields Bytestring in Py2, Unicode String in py3.
77 """
---> 78 return str(self)
79
80
~\AppData\Local\Continuum\anaconda3\lib\site-packages\pandas\core\base.py in __str__(self)
55
56 if compat.PY3:
---> 57 return self.__unicode__()
58 return self.__bytes__()
59
~\AppData\Local\Continuum\anaconda3\lib\site-packages\pandas\core\frame.py in __unicode__(self)
632 width = None
633 self.to_string(buf=buf, max_rows=max_rows, max_cols=max_cols,
--> 634 line_width=width, show_dimensions=show_dimensions)
635
636 return buf.getvalue()
~\AppData\Local\Continuum\anaconda3\lib\site-packages\pandas\core\frame.py in to_string(self, buf, columns, col_space, header, index, na_rep, formatters, float_format, sparsify, index_names, justify, max_rows, max_cols, show_dimensions, decimal, line_width)
719 decimal=decimal,
720 line_width=line_width)
--> 721 formatter.to_string()
722
723 if buf is None:
~\AppData\Local\Continuum\anaconda3\lib\site-packages\pandas\io\formats\format.py in to_string(self)
596 else:
597
--> 598 strcols = self._to_str_columns()
599 if self.line_width is None: # no need to wrap around just print
600 # the whole frame
~\AppData\Local\Continuum\anaconda3\lib\site-packages\pandas\io\formats\format.py in _to_str_columns(self)
527 str_columns = [[label] for label in self.header]
528 else:
--> 529 str_columns = self._get_formatted_column_labels(frame)
530
531 stringified = []
~\AppData\Local\Continuum\anaconda3\lib\site-packages\pandas\io\formats\format.py in _get_formatted_column_labels(self, frame)
770 need_leadsp[x] else x]
771 for i, (col, x) in enumerate(zip(columns,
--> 772 fmt_columns))]
773
774 if self.show_row_idx_names:
~\AppData\Local\Continuum\anaconda3\lib\site-packages\pandas\io\formats\format.py in <listcomp>(.0)
769 str_columns = [[' ' + x if not self._get_formatter(i) and
770 need_leadsp[x] else x]
--> 771 for i, (col, x) in enumerate(zip(columns,
772 fmt_columns))]
773
~\AppData\Local\Continuum\anaconda3\lib\site-packages\pandas\io\formats\format.py in _get_formatter(self, i)
363 if is_integer(i) and i not in self.columns:
364 i = self.columns[i]
--> 365 return self.formatters.get(i, None)
366
367
TypeError: unhashable type: 'list'
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
~\AppData\Local\Continuum\anaconda3\lib\site-packages\IPython\core\formatters.py in __call__(self, obj)
343 method = get_real_method(obj, self.print_method)
344 if method is not None:
--> 345 return method()
346 return None
347 else:
~\AppData\Local\Continuum\anaconda3\lib\site-packages\pandas\core\frame.py in _repr_html_(self)
672
673 return self.to_html(max_rows=max_rows, max_cols=max_cols,
--> 674 show_dimensions=show_dimensions, notebook=True)
675 else:
676 return None
~\AppData\Local\Continuum\anaconda3\lib\site-packages\pandas\core\frame.py in to_html(self, buf, columns, col_space, header, index, na_rep, formatters, float_format, sparsify, index_names, justify, max_rows, max_cols, show_dimensions, decimal, bold_rows, classes, escape, notebook, border, table_id, render_links)
2263 render_links=render_links)
2264 # TODO: a generic formatter wld b in DataFrameFormatter
-> 2265 formatter.to_html(classes=classes, notebook=notebook, border=border)
2266
2267 if buf is None:
~\AppData\Local\Continuum\anaconda3\lib\site-packages\pandas\io\formats\format.py in to_html(self, classes, notebook, border)
727 from pandas.io.formats.html import HTMLFormatter, NotebookFormatter
728 Klass = NotebookFormatter if notebook else HTMLFormatter
--> 729 html = Klass(self, classes=classes, border=border).render()
730 if hasattr(self.buf, 'write'):
731 buffer_put_lines(self.buf, html)
~\AppData\Local\Continuum\anaconda3\lib\site-packages\pandas\io\formats\html.py in render(self)
527 self.write('<div>')
528 self.write_style()
--> 529 super(NotebookFormatter, self).render()
530 self.write('</div>')
531 return self.elements
~\AppData\Local\Continuum\anaconda3\lib\site-packages\pandas\io\formats\html.py in render(self)
144
145 def render(self):
--> 146 self._write_table()
147
148 if self.should_show_dimensions:
~\AppData\Local\Continuum\anaconda3\lib\site-packages\pandas\io\formats\html.py in _write_table(self, indent)
180 self._write_header(indent + self.indent_delta)
181
--> 182 self._write_body(indent + self.indent_delta)
183
184 self.write('</table>', indent)
~\AppData\Local\Continuum\anaconda3\lib\site-packages\pandas\io\formats\html.py in _write_body(self, indent)
323 def _write_body(self, indent):
324 self.write('<tbody>', indent)
--> 325 fmt_values = {i: self.fmt._format_col(i) for i in range(self.ncols)}
326
327 # write values
~\AppData\Local\Continuum\anaconda3\lib\site-packages\pandas\io\formats\html.py in <dictcomp>(.0)
323 def _write_body(self, indent):
324 self.write('<tbody>', indent)
--> 325 fmt_values = {i: self.fmt._format_col(i) for i in range(self.ncols)}
326
327 # write values
~\AppData\Local\Continuum\anaconda3\lib\site-packages\pandas\io\formats\format.py in _format_col(self, i)
702 def _format_col(self, i):
703 frame = self.tr_frame
--> 704 formatter = self._get_formatter(i)
705 values_to_format = frame.iloc[:, i]._formatting_values()
706 return format_array(values_to_format, formatter,
~\AppData\Local\Continuum\anaconda3\lib\site-packages\pandas\io\formats\format.py in _get_formatter(self, i)
363 if is_integer(i) and i not in self.columns:
364 i = self.columns[i]
--> 365 return self.formatters.get(i, None)
366
367
TypeError: unhashable type: 'list'
Yup, it was an easy solution. If you want to do this and don't know how (probably not many of you out there...), use a Series rather than a DataFrame, with keys=index and values=column.
dict_plate = pd.Series(df_plate['condition'].values, index=df_plate['sample']).to_dict()
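For illustration, a self-contained sketch of that approach; the column names and values here are made up, not the asker's actual data:
import pandas as pd

# Hypothetical mapping table: 'sample' holds the old column names, 'condition' the new ones
df_plate = pd.DataFrame({'sample': ['A', 'B', 'C'], 'condition': ['a', 'b', 'c']})

# Series(values, index).to_dict() yields scalar values: {'A': 'a', 'B': 'b', 'C': 'c'}
dict_plate = pd.Series(df_plate['condition'].values, index=df_plate['sample']).to_dict()

# Hypothetical dataframe whose columns should be renamed
df_sorted = pd.DataFrame([[1, 2, 3]], columns=['A', 'B', 'C'])
df_sorted2 = df_sorted.rename(columns=dict_plate)
print(df_sorted2.columns.tolist())  # ['a', 'b', 'c']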

"Error while extracting" from tensorflow datasets

I want to train a tensorflow image segmentation model on COCO, and thought I would leverage the dataset builder already included. The download seems to complete, but it crashes while extracting the zip files.
Running with TF 2.0.0 on a Jupyter Notebook under a conda environment. Computer is 64-bit Windows 10. The Oxford Pet III dataset used in the official image segmentation tutorial works fine.
Below is the error message (my local user name replaced with %user%).
---------------------------------------------------------------------------
OutOfRangeError Traceback (most recent call last)
~\.conda\envs\tf-tutorial\lib\site-packages\tensorflow_datasets\core\download\extractor.py in _sync_extract(self, from_path, method, to_path)
88 try:
---> 89 for path, handle in iter_archive(from_path, method):
90 path = tf.compat.as_text(path)
~\.conda\envs\tf-tutorial\lib\site-packages\tensorflow_datasets\core\download\extractor.py in iter_zip(arch_f)
176 with _open_or_pass(arch_f) as fobj:
--> 177 z = zipfile.ZipFile(fobj)
178 for member in z.infolist():
~\.conda\envs\tf-tutorial\lib\zipfile.py in __init__(self, file, mode, compression, allowZip64)
1130 if mode == 'r':
-> 1131 self._RealGetContents()
1132 elif mode in ('w', 'x'):
~\.conda\envs\tf-tutorial\lib\zipfile.py in _RealGetContents(self)
1193 try:
-> 1194 endrec = _EndRecData(fp)
1195 except OSError:
~\.conda\envs\tf-tutorial\lib\zipfile.py in _EndRecData(fpin)
263 # Determine file size
--> 264 fpin.seek(0, 2)
265 filesize = fpin.tell()
~\.conda\envs\tf-tutorial\lib\site-packages\tensorflow_core\python\util\deprecation.py in new_func(*args, **kwargs)
506 instructions)
--> 507 return func(*args, **kwargs)
508
~\.conda\envs\tf-tutorial\lib\site-packages\tensorflow_core\python\lib\io\file_io.py in seek(self, offset, whence, position)
166 elif whence == 2:
--> 167 offset += self.size()
168 else:
~\.conda\envs\tf-tutorial\lib\site-packages\tensorflow_core\python\lib\io\file_io.py in size(self)
101 """Returns the size of the file."""
--> 102 return stat(self.__name).length
103
~\.conda\envs\tf-tutorial\lib\site-packages\tensorflow_core\python\lib\io\file_io.py in stat(filename)
726 """
--> 727 return stat_v2(filename)
728
~\.conda\envs\tf-tutorial\lib\site-packages\tensorflow_core\python\lib\io\file_io.py in stat_v2(path)
743 file_statistics = pywrap_tensorflow.FileStatistics()
--> 744 pywrap_tensorflow.Stat(compat.as_bytes(path), file_statistics)
745 return file_statistics
OutOfRangeError: C:\Users\%user%\tensorflow_datasets\downloads\images.cocodataset.org_zips_train20147eQIfmQL3bpVDgkOrnAQklNLVUtCsFrDPwMAuYSzF3U.zip; Unknown error
During handling of the above exception, another exception occurred:
ExtractError Traceback (most recent call last)
<ipython-input-27-887fa0198611> in <module>
1 cocoBuilder = tfds.builder('coco')
2 info = cocoBuilder.info
----> 3 cocoBuilder.download_and_prepare()
~\.conda\envs\tf-tutorial\lib\site-packages\tensorflow_datasets\core\api_utils.py in disallow_positional_args_dec(fn, instance, args, kwargs)
50 _check_no_positional(fn, args, ismethod, allowed=allowed)
51 _check_required(fn, kwargs)
---> 52 return fn(*args, **kwargs)
53
54 return disallow_positional_args_dec(wrapped) # pylint: disable=no-value-for-parameter
~\.conda\envs\tf-tutorial\lib\site-packages\tensorflow_datasets\core\dataset_builder.py in download_and_prepare(self, download_dir, download_config)
285 self._download_and_prepare(
286 dl_manager=dl_manager,
--> 287 download_config=download_config)
288
289 # NOTE: If modifying the lines below to put additional information in
~\.conda\envs\tf-tutorial\lib\site-packages\tensorflow_datasets\core\dataset_builder.py in _download_and_prepare(self, dl_manager, download_config)
946 super(GeneratorBasedBuilder, self)._download_and_prepare(
947 dl_manager=dl_manager,
--> 948 max_examples_per_split=download_config.max_examples_per_split,
949 )
950
~\.conda\envs\tf-tutorial\lib\site-packages\tensorflow_datasets\core\dataset_builder.py in _download_and_prepare(self, dl_manager, **prepare_split_kwargs)
802 # Generating data for all splits
803 split_dict = splits_lib.SplitDict()
--> 804 for split_generator in self._split_generators(dl_manager):
805 if splits_lib.Split.ALL == split_generator.split_info.name:
806 raise ValueError(
~\.conda\envs\tf-tutorial\lib\site-packages\tensorflow_datasets\image\coco.py in _split_generators(self, dl_manager)
237 root_url = 'http://images.cocodataset.org/'
238 extracted_paths = dl_manager.download_and_extract({
--> 239 key: root_url + url for key, url in urls.items()
240 })
241
~\.conda\envs\tf-tutorial\lib\site-packages\tensorflow_datasets\core\download\download_manager.py in download_and_extract(self, url_or_urls)
357 with self._downloader.tqdm():
358 with self._extractor.tqdm():
--> 359 return _map_promise(self._download_extract, url_or_urls)
360
361 #property
~\.conda\envs\tf-tutorial\lib\site-packages\tensorflow_datasets\core\download\download_manager.py in _map_promise(map_fn, all_inputs)
393 """Map the function into each element and resolve the promise."""
394 all_promises = utils.map_nested(map_fn, all_inputs) # Apply the function
--> 395 res = utils.map_nested(_wait_on_promise, all_promises)
396 return res
~\.conda\envs\tf-tutorial\lib\site-packages\tensorflow_datasets\core\utils\py_utils.py in map_nested(function, data_struct, dict_only, map_tuple)
127 return {
128 k: map_nested(function, v, dict_only, map_tuple)
--> 129 for k, v in data_struct.items()
130 }
131 elif not dict_only:
~\.conda\envs\tf-tutorial\lib\site-packages\tensorflow_datasets\core\utils\py_utils.py in <dictcomp>(.0)
127 return {
128 k: map_nested(function, v, dict_only, map_tuple)
--> 129 for k, v in data_struct.items()
130 }
131 elif not dict_only:
~\.conda\envs\tf-tutorial\lib\site-packages\tensorflow_datasets\core\utils\py_utils.py in map_nested(function, data_struct, dict_only, map_tuple)
141 return tuple(mapped)
142 # Singleton
--> 143 return function(data_struct)
144
145
~\.conda\envs\tf-tutorial\lib\site-packages\tensorflow_datasets\core\download\download_manager.py in _wait_on_promise(p)
377
378 def _wait_on_promise(p):
--> 379 return p.get()
380
381 else:
~\.conda\envs\tf-tutorial\lib\site-packages\promise\promise.py in get(self, timeout)
508 target = self._target()
509 self._wait(timeout or DEFAULT_TIMEOUT)
--> 510 return self._target_settled_value(_raise=True)
511
512 def _target_settled_value(self, _raise=False):
~\.conda\envs\tf-tutorial\lib\site-packages\promise\promise.py in _target_settled_value(self, _raise)
512 def _target_settled_value(self, _raise=False):
513 # type: (bool) -> Any
--> 514 return self._target()._settled_value(_raise)
515
516 _value = _reason = _target_settled_value
~\.conda\envs\tf-tutorial\lib\site-packages\promise\promise.py in _settled_value(self, _raise)
222 if _raise:
223 raise_val = self._fulfillment_handler0
--> 224 reraise(type(raise_val), raise_val, self._traceback)
225 return self._fulfillment_handler0
226
~\.conda\envs\tf-tutorial\lib\site-packages\six.py in reraise(tp, value, tb)
694 if value.__traceback__ is not tb:
695 raise value.with_traceback(tb)
--> 696 raise value
697 finally:
698 value = None
~\.conda\envs\tf-tutorial\lib\site-packages\promise\promise.py in handle_future_result(future)
840 # type: (Any) -> None
841 try:
--> 842 resolve(future.result())
843 except Exception as e:
844 tb = exc_info()[2]
~\.conda\envs\tf-tutorial\lib\concurrent\futures\_base.py in result(self, timeout)
423 raise CancelledError()
424 elif self._state == FINISHED:
--> 425 return self.__get_result()
426
427 self._condition.wait(timeout)
~\.conda\envs\tf-tutorial\lib\concurrent\futures\_base.py in __get_result(self)
382 def __get_result(self):
383 if self._exception:
--> 384 raise self._exception
385 else:
386 return self._result
~\.conda\envs\tf-tutorial\lib\concurrent\futures\thread.py in run(self)
54
55 try:
---> 56 result = self.fn(*self.args, **self.kwargs)
57 except BaseException as exc:
58 self.future.set_exception(exc)
~\.conda\envs\tf-tutorial\lib\site-packages\tensorflow_datasets\core\download\extractor.py in _sync_extract(self, from_path, method, to_path)
92 except BaseException as err:
93 msg = 'Error while extracting %s to %s : %s' % (from_path, to_path, err)
---> 94 raise ExtractError(msg)
95 # `tf.io.gfile.Rename(overwrite=True)` doesn't work for non empty
96 # directories, so delete destination first, if it already exists.
ExtractError: Error while extracting C:\Users\%user%\tensorflow_datasets\downloads\images.cocodataset.org_zips_train20147eQIfmQL3bpVDgkOrnAQklNLVUtCsFrDPwMAuYSzF3U.zip to C:\Users\%user%\tensorflow_datasets\downloads\extracted\ZIP.images.cocodataset.org_zips_train20147eQIfmQL3bpVDgkOrnAQklNLVUtCsFrDPwMAuYSzF3U.zip : C:\Users\%user%\tensorflow_datasets\downloads\images.cocodataset.org_zips_train20147eQIfmQL3bpVDgkOrnAQklNLVUtCsFrDPwMAuYSzF3U.zip; Unknown error
The message seems cryptic to me. The folder to which it is trying to extract does not exist when the notebook is started - it is created by Tensorflow, and only at that command line. I obviously tried deleting it completely and running it again, to no effect.
The code that leads to the error is (everything runs fine until the last line):
from __future__ import absolute_import, division, print_function, unicode_literals
import tensorflow as tf
from tensorflow_examples.models.pix2pix import pix2pix
import tensorflow_datasets as tfds
from IPython.display import clear_output
import matplotlib.pyplot as plt
dataset, info = tfds.load('coco', with_info=True)
I also tried breaking the last command down into assigning the tfds.builder object and then running download_and_prepare, and again got the same error.
There is enough space on disk - after the download there are still 50+ GB available, while the dataset is supposed to be 37 GB in its largest version (2014).
I had a similar problem with Windows 10 & COCO 2017. My solution was simple: extract the ZIP file manually to the folder path given in the error message.
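If you'd rather script that manual step, here is a small sketch using the paths from the error message above (with %user% left as a placeholder for the actual user name):
import zipfile

# Both paths are copied from the error message; substitute your own user name for %user%
archive = r'C:\Users\%user%\tensorflow_datasets\downloads\images.cocodataset.org_zips_train20147eQIfmQL3bpVDgkOrnAQklNLVUtCsFrDPwMAuYSzF3U.zip'
target = r'C:\Users\%user%\tensorflow_datasets\downloads\extracted\ZIP.images.cocodataset.org_zips_train20147eQIfmQL3bpVDgkOrnAQklNLVUtCsFrDPwMAuYSzF3U.zip'

# Extract the archive into the folder tensorflow_datasets expects
with zipfile.ZipFile(archive) as zf:
    zf.extractall(target)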

Folium Choropleth + GeoJSON raises AttributeError: 'NoneType' object has no attribute 'get'

I found someone who met the same (or a similar) problem in another post (Folium Choropleth + GeoJSON raises AttributeError: 'NoneType'). I followed the suggestions there, but my problem still exists.
I have double checked that:
a) access to parameters is correct
b) all the available keys in the geoJSON are contained in the Pandas DataFrame for Choropleth.
Here's the related code block:
import json
import folium

# load geojson file into a dict
with open('chicago.geojson') as f:
    data = json.load(f)

found = 0
notfound = 0
# check if the features.properties.community data matches the dataframe data
for feature in data['features']:
    if feature['properties']['community'] in df_cname['COMMUNITY_AREA_NAME'].tolist():
        found = found + 1
    else:
        notfound = notfound + 1
# print out the check result (the output shows that every features.properties.community value in the geojson is found in the dataframe used for the choropleth)
print(found, ' records found. ', notfound, ' records not found.')

chicago_geo = 'chicago.geojson'
latitude = 41.88425
longitude = -87.63245
map_chicago = folium.Map(location=[latitude, longitude], zoom_start=11)
map_chicago.choropleth(
    geo_data=chicago_geo,
    data=df_cname,
    columns=['COMMUNITY_AREA_NAME', 'COUNT'],
    key_on='features.properties.community',
    fill_color='YlOrRd',
    fill_opacity=0.7,
    line_opacity=0.2,
    legend_name='Crime Data in Chicago'
)
map_chicago
and the output / error message:
77 records found. 0 records not found.
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
/opt/conda/envs/DSX-Python35/lib/python3.5/site-packages/IPython/core/formatters.py in __call__(self, obj)
334 method = get_real_method(obj, self.print_method)
335 if method is not None:
--> 336 return method()
337 return None
338 else:
/opt/conda/envs/DSX-Python35/lib/python3.5/site-packages/folium/map.py in _repr_html_(self, **kwargs)
249 self._parent = None
250 else:
--> 251 out = self._parent._repr_html_(**kwargs)
252 return out
253
/opt/conda/envs/DSX-Python35/lib/python3.5/site-packages/branca/element.py in _repr_html_(self, **kwargs)
326
327 """
--> 328 html = self.render(**kwargs)
329 html = "data:text/html;charset=utf-8;base64," + base64.b64encode(html.encode('utf8')).decode('utf8') # noqa
330
/opt/conda/envs/DSX-Python35/lib/python3.5/site-packages/branca/element.py in render(self, **kwargs)
319 """Renders the HTML representation of the element."""
320 for name, child in self._children.items():
--> 321 child.render(**kwargs)
322 return self._template.render(this=self, kwargs=kwargs)
323
/opt/conda/envs/DSX-Python35/lib/python3.5/site-packages/folium/map.py in render(self, **kwargs)
336 '</style>'), name='map_style')
337
--> 338 super(LegacyMap, self).render(**kwargs)
339
340
/opt/conda/envs/DSX-Python35/lib/python3.5/site-packages/branca/element.py in render(self, **kwargs)
631
632 for name, element in self._children.items():
--> 633 element.render(**kwargs)
/opt/conda/envs/DSX-Python35/lib/python3.5/site-packages/branca/element.py in render(self, **kwargs)
627 script = self._template.module.__dict__.get('script', None)
628 if script is not None:
--> 629 figure.script.add_child(Element(script(self, kwargs)),
630 name=self.get_name())
631
/opt/conda/envs/DSX-Python35/lib/python3.5/site-packages/jinja2/runtime.py in __call__(self, *args, **kwargs)
547 (self.name, len(self.arguments)))
548
--> 549 return self._invoke(arguments, autoescape)
550
551 def _invoke(self, arguments, autoescape):
/opt/conda/envs/DSX-Python35/lib/python3.5/site-packages/jinja2/runtime.py in _invoke(self, arguments, autoescape)
551 def _invoke(self, arguments, autoescape):
552 """This method is being swapped out by the async implementation."""
--> 553 rv = self._func(*arguments)
554 if autoescape:
555 rv = Markup(rv)
<template> in macro(l_1_this, l_1_kwargs)
/opt/conda/envs/DSX-Python35/lib/python3.5/site-packages/jinja2/runtime.py in call(_Context__self, _Context__obj, *args, **kwargs)
258 args = (__self.environment,) + args
259 try:
--> 260 return __obj(*args, **kwargs)
261 except StopIteration:
262 return __self.environment.undefined('value was undefined because '
/opt/conda/envs/DSX-Python35/lib/python3.5/site-packages/folium/features.py in style_data(self)
563
564 for feature in self.data['features']:
--> 565 feature.setdefault('properties', {}).setdefault('style', {}).update(self.style_function(feature)) # noqa
566 feature.setdefault('properties', {}).setdefault('highlight', {}).update(self.highlight_function(feature)) # noqa
567 return json.dumps(self.data, sort_keys=True)
/opt/conda/envs/DSX-Python35/lib/python3.5/site-packages/folium/folium.py in style_function(x)
303 'color': line_color,
304 'fillOpacity': fill_opacity,
--> 305 'fillColor': color_scale_fun(x)
306 }
307
/opt/conda/envs/DSX-Python35/lib/python3.5/site-packages/folium/folium.py in color_scale_fun(x)
290 def color_scale_fun(x):
291 return color_range[len(
--> 292 [u for u in color_domain if
293 get_by_key(x, key_on) in color_data and
294 u <= color_data[get_by_key(x, key_on)]])]
/opt/conda/envs/DSX-Python35/lib/python3.5/site-packages/folium/folium.py in <listcomp>(.0)
291 return color_range[len(
292 [u for u in color_domain if
--> 293 get_by_key(x, key_on) in color_data and
294 u <= color_data[get_by_key(x, key_on)]])]
295 else:
/opt/conda/envs/DSX-Python35/lib/python3.5/site-packages/folium/folium.py in get_by_key(obj, key)
286 return (obj.get(key, None) if len(key.split('.')) <= 1 else
287 get_by_key(obj.get(key.split('.')[0], None),
--> 288 '.'.join(key.split('.')[1:])))
289
290 def color_scale_fun(x):
/opt/conda/envs/DSX-Python35/lib/python3.5/site-packages/folium/folium.py in get_by_key(obj, key)
285 def get_by_key(obj, key):
286 return (obj.get(key, None) if len(key.split('.')) <= 1 else
--> 287 get_by_key(obj.get(key.split('.')[0], None),
288 '.'.join(key.split('.')[1:])))
289
AttributeError: 'NoneType' object has no attribute 'get'
Your suggestions are welcome!
