I'm using JupyterLab with numba 0.55.1 and umap-learn 0.5.2, both in anaconda3. Does it matter that umap-learn has "pypi" as its channel and numba doesn't? I've already tried several solutions suggested here.
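The traceback below shows numba being loaded from AppData\Roaming (the per-user pip location) while umap lives under anaconda3, so one quick check is to see which builds the kernel actually imports. A minimal diagnostic sketch (not a fix), which deliberately avoids importing umap itself because that import is what triggers the error:
import importlib.util
import numba, llvmlite

# versions and on-disk locations of the JIT stack this kernel actually imports
for mod in (numba, llvmlite):
    print(mod.__name__, mod.__version__, mod.__file__)

# locate umap without importing it (importing it is what fails)
spec = importlib.util.find_spec("umap")
print("umap ->", spec.origin if spec else "not found")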
So, with the following code:
import umap.umap_ as UMAP
I get the following errors:
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
~\AppData\Roaming\Python\Python38\site-packages\numba\core\errors.py in new_error_context(fmt_, *args, **kwargs)
~\AppData\Roaming\Python\Python38\site-packages\numba\core\lowering.py in lower_block(self, block)
234 """
--> 235 Create CPython wrapper(s) around this function (or generator).
236 """
~\AppData\Roaming\Python\Python38\site-packages\numba\core\lowering.py in lower_inst(self, inst)
379
--> 380 elif isinstance(inst, ir.SetItem):
381 signature = self.fndesc.calltypes[inst]
~\AppData\Roaming\Python\Python38\site-packages\numba\core\lowering.py in lower_assign(self, ty, inst)
581
--> 582 def cast_result(res):
583 return self.context.cast(self.builder, res,
~\AppData\Roaming\Python\Python38\site-packages\numba\core\lowering.py in incref(self, typ, val)
~\AppData\Roaming\Python\Python38\site-packages\numba\core\runtime\context.py in incref(self, builder, typ, value)
217 """
--> 218 self._call_incref_decref(builder, typ, value, "NRT_incref")
219
~\AppData\Roaming\Python\Python38\site-packages\numba\core\runtime\context.py in _call_incref_decref(self, builder, typ, value, funcname)
206 mod = builder.module
--> 207 fn = mod.get_or_insert_function(incref_decref_ty, name=funcname)
208 # XXX "nonnull" causes a crash in test_dyn_array: can this
AttributeError: 'Module' object has no attribute 'get_or_insert_function'
During handling of the above exception, another exception occurred:
LoweringError Traceback (most recent call last)
~\AppData\Local\Temp/ipykernel_12580/4057111716.py in <module>
----> 1 import umap.umap_ as UMAP
~\anaconda3\lib\site-packages\umap\umap_.py in <module>
30 import umap.distances as dist
31
---> 32 import umap.sparse as sparse
33
34 from umap.utils import (
~\anaconda3\lib\site-packages\umap\sparse.py in <module>
10 import numpy as np
11
---> 12 from umap.utils import norm
13
14 locale.setlocale(locale.LC_NUMERIC, "C")
~\anaconda3\lib\site-packages\umap\utils.py in <module>
39
40 #numba.njit("i4(i8[:])")
---> 41 def tau_rand_int(state):
42 """A fast (pseudo)-random number generator.
43
~\AppData\Roaming\Python\Python38\site-packages\numba\core\decorators.py in wrapper(func)
224
225 return wrapper
--> 226
227
228 def generated_jit(function=None, target='cpu', cache=False,
~\AppData\Roaming\Python\Python38\site-packages\numba\core\dispatcher.py in compile(self, sig)
977 else:
978 return dict((sig, self.overloads[sig].metadata) for sig in self.signatures)
--> 979
980 def get_function_type(self):
981 """Return unique function type of dispatcher when possible, otherwise
~\AppData\Roaming\Python\Python38\site-packages\numba\core\dispatcher.py in compile(self, args, return_type)
139
140 def _get_implementation(self, args, kws):
--> 141 impl = self.py_func(*args, **kws)
142 # Check the generating function and implementation signatures are
143 # compatible, otherwise compiling would fail later.
~\AppData\Roaming\Python\Python38\site-packages\numba\core\dispatcher.py in _compile_cached(self, args, return_type)
153 pyparam.kind != implparam.kind or
154 (implparam.default is not implparam.empty and
--> 155 implparam.default != pyparam.default)):
156 ok = False
157 if not ok:
~\AppData\Roaming\Python\Python38\site-packages\numba\core\dispatcher.py in _compile_core(self, args, return_type)
166 '_CompileStats', ('cache_path', 'cache_hits', 'cache_misses'))
167
--> 168
169 class _CompilingCounter(object):
170 """
~\AppData\Roaming\Python\Python38\site-packages\numba\core\compiler.py in compile_extra(typingctx, targetctx, func, args, return_type, flags, locals, library, pipeline_class)
~\AppData\Roaming\Python\Python38\site-packages\numba\core\compiler.py in compile_extra(self, func)
426 """The default compiler
427 """
--> 428
429 def define_pipelines(self):
430 # this maintains the objmode fallback behaviour
~\AppData\Roaming\Python\Python38\site-packages\numba\core\compiler.py in _compile_bytecode(self)
490 pm.add_pass(AnnotateTypes, "annotate types")
491
--> 492 # strip phis
493 pm.add_pass(PreLowerStripPhis, "remove phis nodes")
494
~\AppData\Roaming\Python\Python38\site-packages\numba\core\compiler.py in _compile_core(self)
469 return pm
470
--> 471 #staticmethod
472 def define_nopython_lowering_pipeline(state, name='nopython_lowering'):
473 pm = PassManager(name)
~\AppData\Roaming\Python\Python38\site-packages\numba\core\compiler.py in _compile_core(self)
460 pm.passes.extend(untyped_passes.passes)
461
--> 462 typed_passes = dpb.define_typed_pipeline(state)
463 pm.passes.extend(typed_passes.passes)
464
~\AppData\Roaming\Python\Python38\site-packages\numba\core\compiler_machinery.py in run(self, state)
341 def dependency_analysis(self):
342 """
--> 343 Computes dependency analysis
344 """
345 deps = dict()
~\AppData\Roaming\Python\Python38\site-packages\numba\core\compiler_machinery.py in run(self, state)
332 raise BaseException("Legacy pass in use")
333 except _EarlyPipelineCompletion as e:
--> 334 raise e
335 except Exception as e:
336 msg = "Failed in %s mode pipeline (step: %s)" % \
~\AppData\Roaming\Python\Python38\site-packages\numba\core\compiler_lock.py in _acquire_compile_lock(*args, **kwargs)
33 def _acquire_compile_lock(*args, **kwargs):
34 with self:
---> 35 return func(*args, **kwargs)
36 return _acquire_compile_lock
37
~\AppData\Roaming\Python\Python38\site-packages\numba\core\compiler_machinery.py in _runPass(self, index, pss, internal_state)
287 mutated |= check(pss.run_initialization, internal_state)
288 with SimpleTimer() as pass_time:
--> 289 mutated |= check(pss.run_pass, internal_state)
290 with SimpleTimer() as finalize_time:
291 mutated |= check(pss.run_finalizer, internal_state)
~\AppData\Roaming\Python\Python38\site-packages\numba\core\compiler_machinery.py in check(func, compiler_state)
260
261 def check(func, compiler_state):
--> 262 mangled = func(compiler_state)
263 if mangled not in (True, False):
264 msg = ("CompilerPass implementations should return True/False. "
~\AppData\Roaming\Python\Python38\site-packages\numba\core\typed_passes.py in run_pass(self, state)
394 else:
395 if isinstance(restype,
--> 396 (types.Optional, types.Generator)):
397 pass
398 else:
~\AppData\Roaming\Python\Python38\site-packages\numba\core\lowering.py in lower(self)
136 self.lower_normal_function(self.fndesc)
137 else:
--> 138 self.genlower = self.GeneratorLower(self)
139 self.gentype = self.genlower.gentype
140
~\AppData\Roaming\Python\Python38\site-packages\numba\core\lowering.py in lower_normal_function(self, fndesc)
190 entry_block_tail = self.lower_function_body()
191
--> 192 # Close tail of entry block
193 self.builder.position_at_end(entry_block_tail)
194 self.builder.branch(self.blkmap[self.firstblk])
~\AppData\Roaming\Python\Python38\site-packages\numba\core\lowering.py in lower_function_body(self)
219
220 def lower_block(self, block):
--> 221 """
222 Lower the given block.
223 """
~\AppData\Roaming\Python\Python38\site-packages\numba\core\lowering.py in lower_block(self, block)
233 def create_cpython_wrapper(self, release_gil=False):
234 """
--> 235 Create CPython wrapper(s) around this function (or generator).
236 """
237 if self.genlower:
~\anaconda3\lib\contextlib.py in __exit__(self, type, value, traceback)
129 value = type()
130 try:
--> 131 self.gen.throw(type, value, traceback)
132 except StopIteration as exc:
133 # Suppress StopIteration *unless* it's the same exception that
~\AppData\Roaming\Python\Python38\site-packages\numba\core\errors.py in new_error_context(fmt_, *args, **kwargs)
LoweringError: Failed in nopython mode pipeline (step: native lowering)
'Module' object has no attribute 'get_or_insert_function'
File "..\..\..\anaconda3\lib\site-packages\umap\utils.py", line 53:
def tau_rand_int(state):
<source elided>
"""
state[0] = (((state[0] & 4294967294) << 12) & 0xFFFFFFFF) ^ (
^
During: lowering "state = arg(0, name=state)" at C:\Users\User\anaconda3\lib\site-packages\umap\utils.py (53)
The following is the code I am trying to run. It used to work, but I made changes to some installations (I don't remember what, unfortunately - scipy or scikit-learn? My kmeans function also stopped working.)
from umap import UMAP
umap_2d_lv = UMAP(n_components=2, random_state=0).fit(lv_data, y=cluster_num)
proj_2d_lv = umap_2d_lv.embedding_
This is how I tried to fix the error, based on suggestions found online:
pip install umap-learn>=0.5.1 & pip install numba==0.53.0
I also tried this:
pip install umap-learn
and then
import umap.umap_ as UMAP
This is the error that comes out:
AttributeError Traceback (most recent call last)
~\AppData\Roaming\Python\Python38\site-packages\numba\core\errors.py in new_error_context(fmt_, *args, **kwargs)
~\AppData\Roaming\Python\Python38\site-packages\numba\core\lowering.py in lower_block(self, block)
234 """
--> 235 Create CPython wrapper(s) around this function (or generator).
236 """
~\AppData\Roaming\Python\Python38\site-packages\numba\core\lowering.py in lower_inst(self, inst)
379
--> 380 elif isinstance(inst, ir.SetItem):
381 signature = self.fndesc.calltypes[inst]
~\AppData\Roaming\Python\Python38\site-packages\numba\core\lowering.py in lower_assign(self, ty, inst)
581
--> 582 def cast_result(res):
583 return self.context.cast(self.builder, res,
~\AppData\Roaming\Python\Python38\site-packages\numba\core\lowering.py in incref(self, typ, val)
~\AppData\Roaming\Python\Python38\site-packages\numba\core\runtime\context.py in incref(self, builder, typ, value)
217 """
--> 218 self._call_incref_decref(builder, typ, value, "NRT_incref")
219
~\AppData\Roaming\Python\Python38\site-packages\numba\core\runtime\context.py in _call_incref_decref(self, builder, typ, value, funcname)
206 mod = builder.module
--> 207 fn = mod.get_or_insert_function(incref_decref_ty, name=funcname)
208 # XXX "nonnull" causes a crash in test_dyn_array: can this
AttributeError: 'Module' object has no attribute 'get_or_insert_function'
During handling of the above exception, another exception occurred:
LoweringError Traceback (most recent call last)
<timed exec> in <module>
~\anaconda3\lib\site-packages\umap\__init__.py in <module>
1 from warnings import warn, catch_warnings, simplefilter
----> 2 from .umap_ import UMAP
3
4 try:
5 with catch_warnings():
[... the remaining frames are identical to those in the first traceback above ...]
LoweringError: Failed in nopython mode pipeline (step: native lowering)
'Module' object has no attribute 'get_or_insert_function'
File "..\..\..\anaconda3\lib\site-packages\umap\utils.py", line 53:
def tau_rand_int(state):
<source elided>
"""
state[0] = (((state[0] & 4294967294) << 12) & 0xFFFFFFFF) ^ (
^
During: lowering "state = arg(0, name=state)" at C:\Users\User\anaconda3\lib\site-packages\umap\utils.py (53)
I am trying to create B-splines with the patsy package in an .ipynb notebook in JupyterLab:
import matplotlib.pyplot as plt
from patsy import dmatrix

_, axes = plt.subplots(2, figsize=(16, 16))   # as in the full cell shown in the traceback below
bs = dmatrix("bs(x, df=50, degree=1) - 1", {"x": x})   # x is my data array
axes[0].plot(x, bs)
axes[0].set_title("Basis functions")
plt.show()
This works fine the first time I run it. But when I try to rerun the cell, it fails with the following error:
-----------------------------------------------------
TypeError Traceback (most recent call last)
/opt/conda/lib/python3.8/site-packages/patsy/compat.py in call_and_wrap_exc(msg, origin, f, *args, **kwargs)
35 try:
---> 36 return f(*args, **kwargs)
37 except Exception as e:
/opt/conda/lib/python3.8/site-packages/patsy/eval.py in eval(self, expr, source_name, inner_namespace)
164 code = compile(expr, source_name, "eval", self.flags, False)
--> 165 return eval(code, {}, VarLookupDict([inner_namespace]
166 + self._namespaces))
<string> in <module>
TypeError: 'DesignMatrix' object is not callable
The above exception was the direct cause of the following exception:
PatsyError Traceback (most recent call last)
<ipython-input-6-6ed4ba95a384> in <module>
2
3 _, axes = plt.subplots(2, figsize=(16, 16))
----> 4 bs = dmatrix("bs(x, df=50, degree=1) - 1", {"x": x})
5 axes[0].plot(x, bs)
6 axes[0].set_title("Basis functions")
/opt/conda/lib/python3.8/site-packages/patsy/highlevel.py in dmatrix(formula_like, data, eval_env, NA_action, return_type)
288 """
289 eval_env = EvalEnvironment.capture(eval_env, reference=1)
--> 290 (lhs, rhs) = _do_highlevel_design(formula_like, data, eval_env,
291 NA_action, return_type)
292 if lhs.shape[1] != 0:
/opt/conda/lib/python3.8/site-packages/patsy/highlevel.py in _do_highlevel_design(formula_like, data, eval_env, NA_action, return_type)
162 def data_iter_maker():
163 return iter([data])
--> 164 design_infos = _try_incr_builders(formula_like, data_iter_maker, eval_env,
165 NA_action)
166 if design_infos is not None:
/opt/conda/lib/python3.8/site-packages/patsy/highlevel.py in _try_incr_builders(formula_like, data_iter_maker, eval_env, NA_action)
64 if isinstance(formula_like, ModelDesc):
65 assert isinstance(eval_env, EvalEnvironment)
---> 66 return design_matrix_builders([formula_like.lhs_termlist,
67 formula_like.rhs_termlist],
68 data_iter_maker,
/opt/conda/lib/python3.8/site-packages/patsy/build.py in design_matrix_builders(termlists, data_iter_maker, eval_env, NA_action)
691 # on some data to find out what type of data they return.
692 (num_column_counts,
--> 693 cat_levels_contrasts) = _examine_factor_types(all_factors,
694 factor_states,
695 data_iter_maker,
/opt/conda/lib/python3.8/site-packages/patsy/build.py in _examine_factor_types(factors, factor_states, data_iter_maker, NA_action)
441 for data in data_iter_maker():
442 for factor in list(examine_needed):
--> 443 value = factor.eval(factor_states[factor], data)
444 if factor in cat_sniffers or guess_categorical(value):
445 if factor not in cat_sniffers:
/opt/conda/lib/python3.8/site-packages/patsy/eval.py in eval(self, memorize_state, data)
562
563 def eval(self, memorize_state, data):
--> 564 return self._eval(memorize_state["eval_code"],
565 memorize_state,
566 data)
/opt/conda/lib/python3.8/site-packages/patsy/eval.py in _eval(self, code, memorize_state, data)
545 def _eval(self, code, memorize_state, data):
546 inner_namespace = VarLookupDict([data, memorize_state["transforms"]])
--> 547 return call_and_wrap_exc("Error evaluating factor",
548 self,
549 memorize_state["eval_env"].eval,
/opt/conda/lib/python3.8/site-packages/patsy/compat.py in call_and_wrap_exc(msg, origin, f, *args, **kwargs)
41 origin)
42 # Use 'exec' to hide this syntax from the Python 2 parser:
---> 43 exec("raise new_exc from e")
44 else:
45 # In python 2, we just let the original exception escape -- better
/opt/conda/lib/python3.8/site-packages/patsy/compat.py in <module>
PatsyError: Error evaluating factor: TypeError: 'DesignMatrix' object is not callable
bs(x, df=50, degree=1) - 1
^^^^^^^^^^^^^^^^^^^^^^
It turns out it was because I was overriding the variable bs, which shadowed the bs() function used inside the patsy formula string.
This is why eval is an antipattern, as usual...
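To make the clash concrete, here is a minimal sketch (the array x is illustrative): keeping the design matrix under any name other than bs leaves patsy's bs() spline builder reachable, while assigning it to bs shadows that function the next time the formula is evaluated.
import numpy as np
from patsy import dmatrix

x = np.linspace(0.0, 1.0, 200)   # illustrative data

basis = dmatrix("bs(x, df=50, degree=1) - 1", {"x": x})   # keeping the result out of the name bs...
basis = dmatrix("bs(x, df=50, degree=1) - 1", {"x": x})   # ...lets the cell be re-run freely

# assigning the result to bs instead shadows patsy's bs() spline builder,
# so the *next* evaluation of the formula raises
# PatsyError: ... 'DesignMatrix' object is not callable
bs = dmatrix("bs(x, df=50, degree=1) - 1", {"x": x})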
I am following the pandas_profiling documentation script, but this problem always arises.
My dataset is the Boston dataset from sklearn.
I get the report, but without the features of the HTML version:
profile2 = ProfileReport(data, title="Relatório DATASET -data-", html={'style': {'full_width': True}}, sort="None")
The image below refers to this code:
from pandas_profiling import ProfileReport
profile = ProfileReport(data, title='Pandas Profiling Report', explorative=True)
[screenshot]
My version of pandas_profiling:
[screenshot of the installed pandas_profiling version]
I don't have the problem described above if I use this code:
profile = ProfileReport(data)
UPDATE:
I uninstalled the previous version and got the new one (2.9.0), but this problem happens:
Summarize dataset: 75%
21/28 [00:07<00:02, 2.84it/s, Get scatter matrix]
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
C:\ProgramData\Anaconda3\lib\site-packages\IPython\core\formatters.py in __call__(self, obj)
343 method = get_real_method(obj, self.print_method)
344 if method is not None:
--> 345 return method()
346 return None
347 else:
C:\ProgramData\Anaconda3\lib\site-packages\pandas_profiling\profile_report.py in _repr_html_(self)
407 def _repr_html_(self):
408 """The ipython notebook widgets user interface gets called by the jupyter notebook."""
--> 409 self.to_notebook_iframe()
410
411 def __repr__(self):
C:\ProgramData\Anaconda3\lib\site-packages\pandas_profiling\profile_report.py in to_notebook_iframe(self)
387 with warnings.catch_warnings():
388 warnings.simplefilter("ignore")
--> 389 display(get_notebook_iframe(self))
390
391 def to_widgets(self):
C:\ProgramData\Anaconda3\lib\site-packages\pandas_profiling\report\presentation\flavours\widget\notebook.py in get_notebook_iframe(profile)
63 output = get_notebook_iframe_src(profile)
64 elif attribute == "srcdoc":
---> 65 output = get_notebook_iframe_srcdoc(profile)
66 else:
67 raise ValueError(
C:\ProgramData\Anaconda3\lib\site-packages\pandas_profiling\report\presentation\flavours\widget\notebook.py in get_notebook_iframe_srcdoc(profile)
21 width = config["notebook"]["iframe"]["width"].get(str)
22 height = config["notebook"]["iframe"]["height"].get(str)
---> 23 src = html.escape(profile.to_html())
24
25 iframe = f'<iframe width="{width}" height="{height}" srcdoc="{src}" frameborder="0" allowfullscreen></iframe>'
C:\ProgramData\Anaconda3\lib\site-packages\pandas_profiling\profile_report.py in to_html(self)
357
358 """
--> 359 return self.html
360
361 def to_json(self) -> str:
C:\ProgramData\Anaconda3\lib\site-packages\pandas_profiling\profile_report.py in html(self)
177 def html(self):
178 if self._html is None:
--> 179 self._html = self._render_html()
180 return self._html
181
C:\ProgramData\Anaconda3\lib\site-packages\pandas_profiling\profile_report.py in _render_html(self)
284 from pandas_profiling.report.presentation.flavours import HTMLReport
285
--> 286 report = self.report
287
288 disable_progress_bar = not config["progress_bar"].get(bool)
C:\ProgramData\Anaconda3\lib\site-packages\pandas_profiling\profile_report.py in report(self)
171 def report(self):
172 if self._report is None:
--> 173 self._report = get_report_structure(self.description_set)
174 return self._report
175
C:\ProgramData\Anaconda3\lib\site-packages\pandas_profiling\profile_report.py in description_set(self)
152 def description_set(self):
153 if self._description_set is None:
--> 154 self._description_set = describe_df(self.title, self.df, self._sample)
155 return self._description_set
156
C:\ProgramData\Anaconda3\lib\site-packages\pandas_profiling\model\describe.py in describe(title, df, sample)
100 # Scatter matrix
101 pbar.set_postfix_str("Get scatter matrix")
--> 102 scatter_matrix = get_scatter_matrix(df, variables)
103 pbar.update()
104
C:\ProgramData\Anaconda3\lib\site-packages\pandas_profiling\model\summary.py in get_scatter_matrix(df, variables)
696 for y in continuous_variables:
697 if x in continuous_variables:
--> 698 scatter_matrix[x][y] = scatter_pairwise(df[x], df[y], x, y)
699 else:
700 scatter_matrix = {}
C:\ProgramData\Anaconda3\lib\contextlib.py in inner(*args, **kwds)
71 #wraps(func)
72 def inner(*args, **kwds):
---> 73 with self._recreate_cm():
74 return func(*args, **kwds)
75 return inner
C:\ProgramData\Anaconda3\lib\contextlib.py in __enter__(self)
110 del self.args, self.kwds, self.func
111 try:
--> 112 return next(self.gen)
113 except StopIteration:
114 raise RuntimeError("generator didn't yield") from None
C:\ProgramData\Anaconda3\lib\site-packages\pandas_profiling\visualisation\context.py in manage_matplotlib_context()
77 register_matplotlib_converters()
78 matplotlib.rcParams.update(customRcParams)
---> 79 sns.set_style(style="white")
80 yield
81 finally:
AttributeError: module 'seaborn' has no attribute 'set_style'
The solution was to uninstall / reinstall Anaconda and pandas-profiling. Probably some version issue, as suggested by Paul H in the comments.
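Before (or instead of) a full reinstall, one hedged sanity check is to confirm which seaborn the notebook kernel actually imports, since an AttributeError on set_style usually points at a broken or shadowed seaborn install rather than at pandas-profiling itself:
import pandas_profiling
import seaborn

# versions and the on-disk location of the seaborn module this kernel resolves
print("pandas_profiling", pandas_profiling.__version__)
print("seaborn", seaborn.__version__, seaborn.__file__)
print(hasattr(seaborn, "set_style"))   # should be True for any normal seaborn release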
I want to train a TensorFlow image segmentation model on COCO, and thought I would leverage the dataset builder already included in tensorflow_datasets. The download seems to complete, but it crashes while extracting the zip files.
I'm running TF 2.0.0 in a Jupyter Notebook under a conda environment, on 64-bit Windows 10. The Oxford-IIIT Pet dataset used in the official image segmentation tutorial works fine.
Below is the error message (my local user name replaced with %user%).
---------------------------------------------------------------------------
OutOfRangeError Traceback (most recent call last)
~\.conda\envs\tf-tutorial\lib\site-packages\tensorflow_datasets\core\download\extractor.py in _sync_extract(self, from_path, method, to_path)
88 try:
---> 89 for path, handle in iter_archive(from_path, method):
90 path = tf.compat.as_text(path)
~\.conda\envs\tf-tutorial\lib\site-packages\tensorflow_datasets\core\download\extractor.py in iter_zip(arch_f)
176 with _open_or_pass(arch_f) as fobj:
--> 177 z = zipfile.ZipFile(fobj)
178 for member in z.infolist():
~\.conda\envs\tf-tutorial\lib\zipfile.py in __init__(self, file, mode, compression, allowZip64)
1130 if mode == 'r':
-> 1131 self._RealGetContents()
1132 elif mode in ('w', 'x'):
~\.conda\envs\tf-tutorial\lib\zipfile.py in _RealGetContents(self)
1193 try:
-> 1194 endrec = _EndRecData(fp)
1195 except OSError:
~\.conda\envs\tf-tutorial\lib\zipfile.py in _EndRecData(fpin)
263 # Determine file size
--> 264 fpin.seek(0, 2)
265 filesize = fpin.tell()
~\.conda\envs\tf-tutorial\lib\site-packages\tensorflow_core\python\util\deprecation.py in new_func(*args, **kwargs)
506 instructions)
--> 507 return func(*args, **kwargs)
508
~\.conda\envs\tf-tutorial\lib\site-packages\tensorflow_core\python\lib\io\file_io.py in seek(self, offset, whence, position)
166 elif whence == 2:
--> 167 offset += self.size()
168 else:
~\.conda\envs\tf-tutorial\lib\site-packages\tensorflow_core\python\lib\io\file_io.py in size(self)
101 """Returns the size of the file."""
--> 102 return stat(self.__name).length
103
~\.conda\envs\tf-tutorial\lib\site-packages\tensorflow_core\python\lib\io\file_io.py in stat(filename)
726 """
--> 727 return stat_v2(filename)
728
~\.conda\envs\tf-tutorial\lib\site-packages\tensorflow_core\python\lib\io\file_io.py in stat_v2(path)
743 file_statistics = pywrap_tensorflow.FileStatistics()
--> 744 pywrap_tensorflow.Stat(compat.as_bytes(path), file_statistics)
745 return file_statistics
OutOfRangeError: C:\Users\%user%\tensorflow_datasets\downloads\images.cocodataset.org_zips_train20147eQIfmQL3bpVDgkOrnAQklNLVUtCsFrDPwMAuYSzF3U.zip; Unknown error
During handling of the above exception, another exception occurred:
ExtractError Traceback (most recent call last)
<ipython-input-27-887fa0198611> in <module>
1 cocoBuilder = tfds.builder('coco')
2 info = cocoBuilder.info
----> 3 cocoBuilder.download_and_prepare()
~\.conda\envs\tf-tutorial\lib\site-packages\tensorflow_datasets\core\api_utils.py in disallow_positional_args_dec(fn, instance, args, kwargs)
50 _check_no_positional(fn, args, ismethod, allowed=allowed)
51 _check_required(fn, kwargs)
---> 52 return fn(*args, **kwargs)
53
54 return disallow_positional_args_dec(wrapped) # pylint: disable=no-value-for-parameter
~\.conda\envs\tf-tutorial\lib\site-packages\tensorflow_datasets\core\dataset_builder.py in download_and_prepare(self, download_dir, download_config)
285 self._download_and_prepare(
286 dl_manager=dl_manager,
--> 287 download_config=download_config)
288
289 # NOTE: If modifying the lines below to put additional information in
~\.conda\envs\tf-tutorial\lib\site-packages\tensorflow_datasets\core\dataset_builder.py in _download_and_prepare(self, dl_manager, download_config)
946 super(GeneratorBasedBuilder, self)._download_and_prepare(
947 dl_manager=dl_manager,
--> 948 max_examples_per_split=download_config.max_examples_per_split,
949 )
950
~\.conda\envs\tf-tutorial\lib\site-packages\tensorflow_datasets\core\dataset_builder.py in _download_and_prepare(self, dl_manager, **prepare_split_kwargs)
802 # Generating data for all splits
803 split_dict = splits_lib.SplitDict()
--> 804 for split_generator in self._split_generators(dl_manager):
805 if splits_lib.Split.ALL == split_generator.split_info.name:
806 raise ValueError(
~\.conda\envs\tf-tutorial\lib\site-packages\tensorflow_datasets\image\coco.py in _split_generators(self, dl_manager)
237 root_url = 'http://images.cocodataset.org/'
238 extracted_paths = dl_manager.download_and_extract({
--> 239 key: root_url + url for key, url in urls.items()
240 })
241
~\.conda\envs\tf-tutorial\lib\site-packages\tensorflow_datasets\core\download\download_manager.py in download_and_extract(self, url_or_urls)
357 with self._downloader.tqdm():
358 with self._extractor.tqdm():
--> 359 return _map_promise(self._download_extract, url_or_urls)
360
361 #property
~\.conda\envs\tf-tutorial\lib\site-packages\tensorflow_datasets\core\download\download_manager.py in _map_promise(map_fn, all_inputs)
393 """Map the function into each element and resolve the promise."""
394 all_promises = utils.map_nested(map_fn, all_inputs) # Apply the function
--> 395 res = utils.map_nested(_wait_on_promise, all_promises)
396 return res
~\.conda\envs\tf-tutorial\lib\site-packages\tensorflow_datasets\core\utils\py_utils.py in map_nested(function, data_struct, dict_only, map_tuple)
127 return {
128 k: map_nested(function, v, dict_only, map_tuple)
--> 129 for k, v in data_struct.items()
130 }
131 elif not dict_only:
~\.conda\envs\tf-tutorial\lib\site-packages\tensorflow_datasets\core\utils\py_utils.py in <dictcomp>(.0)
127 return {
128 k: map_nested(function, v, dict_only, map_tuple)
--> 129 for k, v in data_struct.items()
130 }
131 elif not dict_only:
~\.conda\envs\tf-tutorial\lib\site-packages\tensorflow_datasets\core\utils\py_utils.py in map_nested(function, data_struct, dict_only, map_tuple)
141 return tuple(mapped)
142 # Singleton
--> 143 return function(data_struct)
144
145
~\.conda\envs\tf-tutorial\lib\site-packages\tensorflow_datasets\core\download\download_manager.py in _wait_on_promise(p)
377
378 def _wait_on_promise(p):
--> 379 return p.get()
380
381 else:
~\.conda\envs\tf-tutorial\lib\site-packages\promise\promise.py in get(self, timeout)
508 target = self._target()
509 self._wait(timeout or DEFAULT_TIMEOUT)
--> 510 return self._target_settled_value(_raise=True)
511
512 def _target_settled_value(self, _raise=False):
~\.conda\envs\tf-tutorial\lib\site-packages\promise\promise.py in _target_settled_value(self, _raise)
512 def _target_settled_value(self, _raise=False):
513 # type: (bool) -> Any
--> 514 return self._target()._settled_value(_raise)
515
516 _value = _reason = _target_settled_value
~\.conda\envs\tf-tutorial\lib\site-packages\promise\promise.py in _settled_value(self, _raise)
222 if _raise:
223 raise_val = self._fulfillment_handler0
--> 224 reraise(type(raise_val), raise_val, self._traceback)
225 return self._fulfillment_handler0
226
~\.conda\envs\tf-tutorial\lib\site-packages\six.py in reraise(tp, value, tb)
694 if value.__traceback__ is not tb:
695 raise value.with_traceback(tb)
--> 696 raise value
697 finally:
698 value = None
~\.conda\envs\tf-tutorial\lib\site-packages\promise\promise.py in handle_future_result(future)
840 # type: (Any) -> None
841 try:
--> 842 resolve(future.result())
843 except Exception as e:
844 tb = exc_info()[2]
~\.conda\envs\tf-tutorial\lib\concurrent\futures\_base.py in result(self, timeout)
423 raise CancelledError()
424 elif self._state == FINISHED:
--> 425 return self.__get_result()
426
427 self._condition.wait(timeout)
~\.conda\envs\tf-tutorial\lib\concurrent\futures\_base.py in __get_result(self)
382 def __get_result(self):
383 if self._exception:
--> 384 raise self._exception
385 else:
386 return self._result
~\.conda\envs\tf-tutorial\lib\concurrent\futures\thread.py in run(self)
54
55 try:
---> 56 result = self.fn(*self.args, **self.kwargs)
57 except BaseException as exc:
58 self.future.set_exception(exc)
~\.conda\envs\tf-tutorial\lib\site-packages\tensorflow_datasets\core\download\extractor.py in _sync_extract(self, from_path, method, to_path)
92 except BaseException as err:
93 msg = 'Error while extracting %s to %s : %s' % (from_path, to_path, err)
---> 94 raise ExtractError(msg)
95 # `tf.io.gfile.Rename(overwrite=True)` doesn't work for non empty
96 # directories, so delete destination first, if it already exists.
ExtractError: Error while extracting C:\Users\%user%\tensorflow_datasets\downloads\images.cocodataset.org_zips_train20147eQIfmQL3bpVDgkOrnAQklNLVUtCsFrDPwMAuYSzF3U.zip to C:\Users\%user%\tensorflow_datasets\downloads\extracted\ZIP.images.cocodataset.org_zips_train20147eQIfmQL3bpVDgkOrnAQklNLVUtCsFrDPwMAuYSzF3U.zip : C:\Users\%user%\tensorflow_datasets\downloads\images.cocodataset.org_zips_train20147eQIfmQL3bpVDgkOrnAQklNLVUtCsFrDPwMAuYSzF3U.zip; Unknown error
The message seems cryptic to me. The folder it is trying to extract to does not exist when the notebook is started; it is created by TensorFlow, and only at that line. I did try deleting it completely and running everything again, to no effect.
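One sanity check that separates a corrupt download from a tf.io.gfile limitation is to open the archive with Python's own zipfile module (path copied from the error message above, with %user% standing in for the local user name):
import zipfile

# the same zip the ExtractError above points at
path = r"C:\Users\%user%\tensorflow_datasets\downloads\images.cocodataset.org_zips_train20147eQIfmQL3bpVDgkOrnAQklNLVUtCsFrDPwMAuYSzF3U.zip"

print(zipfile.is_zipfile(path))          # False would point at an incomplete or corrupt download
with zipfile.ZipFile(path) as zf:
    print(len(zf.infolist()), "members") # succeeds if the standard library can read the archive directly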
The code that leads to the error is (everything runs fine until the last line):
from __future__ import absolute_import, division, print_function, unicode_literals  # __future__ imports must come first
import tensorflow as tf
from tensorflow_examples.models.pix2pix import pix2pix
import tensorflow_datasets as tfds
from IPython.display import clear_output
import matplotlib.pyplot as plt

dataset, info = tfds.load('coco', with_info=True)
I also tried breaking the last command down into assigning the tfds.builder object and then running download_and_prepare(), and again got the same error.
There is enough space on disk: after the download there are still 50+ GB available, while the dataset is supposed to be 37 GB in its largest version (2014).
I had a similar problem with Windows 10 and COCO 2017. My solution was simple: extract the ZIP file manually to the folder path given in the error message.
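For reference, a rough sketch of that manual workaround using only the standard library; the source zip and the destination folder are the ones named in the ExtractError above (again with %user% as a placeholder for the local user name):
import os
import zipfile

src = r"C:\Users\%user%\tensorflow_datasets\downloads\images.cocodataset.org_zips_train20147eQIfmQL3bpVDgkOrnAQklNLVUtCsFrDPwMAuYSzF3U.zip"
dst = r"C:\Users\%user%\tensorflow_datasets\downloads\extracted\ZIP.images.cocodataset.org_zips_train20147eQIfmQL3bpVDgkOrnAQklNLVUtCsFrDPwMAuYSzF3U.zip"

# extract into the folder TFDS was trying to populate, per the error message
os.makedirs(dst, exist_ok=True)
with zipfile.ZipFile(src) as zf:
    zf.extractall(dst)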
I found someone who met the same problem in another post (Folium Choropleth + GeoJSON raises AttributeError: 'NoneType'). I followed the suggestions there, but my problem still exists.
I have double-checked that:
a) access to the parameters is correct
b) all the available keys in the GeoJSON are contained in the pandas DataFrame used for the choropleth.
Here's the related code block:
import json
import folium

# load the geojson file into a dict
with open('chicago.geojson') as f:
    data = json.load(f)

found = 0
notfound = 0
# check whether each features.properties.community value in the geojson matches the dataframe
for feature in data['features']:
    if feature['properties']['community'] in df_cname['COMMUNITY_AREA_NAME'].tolist():
        found = found + 1
    else:
        notfound = notfound + 1
# print the check result (the output shows that every community in the geojson is found in the dataframe used for the choropleth)
print(found, ' records found. ', notfound, ' records not found.')
chicago_geo='chicago.geojson'
latitude = 41.88425
longitude = -87.63245
map_chicago = folium.Map(location=[latitude, longitude], zoom_start=11)
map_chicago.choropleth(
geo_data=chicago_geo,
data=df_cname,
columns=['COMMUNITY_AREA_NAME', 'COUNT'],
key_on='features.properties.community',
fill_color='YlOrRd',
fill_opacity=0.7,
line_opacity=0.2,
legend_name='Crime Data in Chicago'
)
map_chicago
and the output / error message:
77 records found. 0 records not found.
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
/opt/conda/envs/DSX-Python35/lib/python3.5/site-packages/IPython/core/formatters.py in __call__(self, obj)
334 method = get_real_method(obj, self.print_method)
335 if method is not None:
--> 336 return method()
337 return None
338 else:
/opt/conda/envs/DSX-Python35/lib/python3.5/site-packages/folium/map.py in _repr_html_(self, **kwargs)
249 self._parent = None
250 else:
--> 251 out = self._parent._repr_html_(**kwargs)
252 return out
253
/opt/conda/envs/DSX-Python35/lib/python3.5/site-packages/branca/element.py in _repr_html_(self, **kwargs)
326
327 """
--> 328 html = self.render(**kwargs)
329 html = "data:text/html;charset=utf-8;base64," + base64.b64encode(html.encode('utf8')).decode('utf8') # noqa
330
/opt/conda/envs/DSX-Python35/lib/python3.5/site-packages/branca/element.py in render(self, **kwargs)
319 """Renders the HTML representation of the element."""
320 for name, child in self._children.items():
--> 321 child.render(**kwargs)
322 return self._template.render(this=self, kwargs=kwargs)
323
/opt/conda/envs/DSX-Python35/lib/python3.5/site-packages/folium/map.py in render(self, **kwargs)
336 '</style>'), name='map_style')
337
--> 338 super(LegacyMap, self).render(**kwargs)
339
340
/opt/conda/envs/DSX-Python35/lib/python3.5/site-packages/branca/element.py in render(self, **kwargs)
631
632 for name, element in self._children.items():
--> 633 element.render(**kwargs)
/opt/conda/envs/DSX-Python35/lib/python3.5/site-packages/branca/element.py in render(self, **kwargs)
627 script = self._template.module.__dict__.get('script', None)
628 if script is not None:
--> 629 figure.script.add_child(Element(script(self, kwargs)),
630 name=self.get_name())
631
/opt/conda/envs/DSX-Python35/lib/python3.5/site-packages/jinja2/runtime.py in __call__(self, *args, **kwargs)
547 (self.name, len(self.arguments)))
548
--> 549 return self._invoke(arguments, autoescape)
550
551 def _invoke(self, arguments, autoescape):
/opt/conda/envs/DSX-Python35/lib/python3.5/site-packages/jinja2/runtime.py in _invoke(self, arguments, autoescape)
551 def _invoke(self, arguments, autoescape):
552 """This method is being swapped out by the async implementation."""
--> 553 rv = self._func(*arguments)
554 if autoescape:
555 rv = Markup(rv)
<template> in macro(l_1_this, l_1_kwargs)
/opt/conda/envs/DSX-Python35/lib/python3.5/site-packages/jinja2/runtime.py in call(_Context__self, _Context__obj, *args, **kwargs)
258 args = (__self.environment,) + args
259 try:
--> 260 return __obj(*args, **kwargs)
261 except StopIteration:
262 return __self.environment.undefined('value was undefined because '
/opt/conda/envs/DSX-Python35/lib/python3.5/site-packages/folium/features.py in style_data(self)
563
564 for feature in self.data['features']:
--> 565 feature.setdefault('properties', {}).setdefault('style', {}).update(self.style_function(feature)) # noqa
566 feature.setdefault('properties', {}).setdefault('highlight', {}).update(self.highlight_function(feature)) # noqa
567 return json.dumps(self.data, sort_keys=True)
/opt/conda/envs/DSX-Python35/lib/python3.5/site-packages/folium/folium.py in style_function(x)
303 'color': line_color,
304 'fillOpacity': fill_opacity,
--> 305 'fillColor': color_scale_fun(x)
306 }
307
/opt/conda/envs/DSX-Python35/lib/python3.5/site-packages/folium/folium.py in color_scale_fun(x)
290 def color_scale_fun(x):
291 return color_range[len(
--> 292 [u for u in color_domain if
293 get_by_key(x, key_on) in color_data and
294 u <= color_data[get_by_key(x, key_on)]])]
/opt/conda/envs/DSX-Python35/lib/python3.5/site-packages/folium/folium.py in <listcomp>(.0)
291 return color_range[len(
292 [u for u in color_domain if
--> 293 get_by_key(x, key_on) in color_data and
294 u <= color_data[get_by_key(x, key_on)]])]
295 else:
/opt/conda/envs/DSX-Python35/lib/python3.5/site-packages/folium/folium.py in get_by_key(obj, key)
286 return (obj.get(key, None) if len(key.split('.')) <= 1 else
287 get_by_key(obj.get(key.split('.')[0], None),
--> 288 '.'.join(key.split('.')[1:])))
289
290 def color_scale_fun(x):
/opt/conda/envs/DSX-Python35/lib/python3.5/site-packages/folium/folium.py in get_by_key(obj, key)
285 def get_by_key(obj, key):
286 return (obj.get(key, None) if len(key.split('.')) <= 1 else
--> 287 get_by_key(obj.get(key.split('.')[0], None),
288 '.'.join(key.split('.')[1:])))
289
AttributeError: 'NoneType' object has no attribute 'get'
Your suggestions are welcome!