I have the below data frame and want to save it as a CSV file in Azure Data Lake. My data frame is called 'df'. I am using an Azure Synapse Notebook.
df.to_csv('abfss://jobsdata#strxxxuei.dfs.core.windows.net/Jobs_newdata/data.csv', sep=',', encoding='utf-8', index=False)
Getting the below error message when I tried to run the above code,
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
/tmp/ipykernel_6713/3472604753.py in <module>
----> 1 df.to_csv('abfss://jobsdata#strxxxuei.dfs.core.windows.net/Jobs_newdata/jobs.csv', sep=',', encoding='utf-8', index=False)
~/cluster-env/clonedenv/lib/python3.8/site-packages/pandas/core/generic.py in to_csv(self, path_or_buf, sep, na_rep, float_format, columns, header, index, index_label, mode, encoding, compression, quoting, quotechar, line_terminator, chunksize, date_format, doublequote, escapechar, decimal, errors, storage_options)
3385 )
3386
-> 3387 return DataFrameRenderer(formatter).to_csv(
3388 path_or_buf,
3389 line_terminator=line_terminator,
~/cluster-env/clonedenv/lib/python3.8/site-packages/pandas/io/formats/format.py in to_csv(self, path_or_buf, encoding, sep, columns, index_label, mode, compression, quoting, quotechar, line_terminator, chunksize, date_format, doublequote, escapechar, errors, storage_options)
1081 formatter=self.fmt,
1082 )
-> 1083 csv_formatter.save()
1084
1085 if created_buffer:
~/cluster-env/clonedenv/lib/python3.8/site-packages/pandas/io/formats/csvs.py in save(self)
226 """
227 # apply compression and byte/text conversion
--> 228 with get_handle(
229 self.filepath_or_buffer,
230 self.mode,
~/cluster-env/clonedenv/lib/python3.8/site-packages/pandas/io/common.py in get_handle(path_or_buf, mode, encoding, compression, memory_map, is_text, errors, storage_options)
556
557 # open URLs
--> 558 ioargs = _get_filepath_or_buffer(
559 path_or_buf,
560 encoding=encoding,
~/cluster-env/clonedenv/lib/python3.8/site-packages/pandas/io/common.py in _get_filepath_or_buffer(filepath_or_buffer, encoding, compression, mode, storage_options)
331
332 try:
--> 333 file_obj = fsspec.open(
334 filepath_or_buffer, mode=fsspec_mode, **(storage_options or {})
335 ).open()
~/cluster-env/clonedenv/lib/python3.8/site-packages/fsspec/core.py in open(urlpath, mode, compression, encoding, errors, protocol, newline, **kwargs)
427 ``OpenFile`` object.
428 """
--> 429 return open_files(
430 urlpath=[urlpath],
431 mode=mode,
~/cluster-env/clonedenv/lib/python3.8/site-packages/fsspec/core.py in open_files(urlpath, mode, compression, encoding, errors, name_function, num, protocol, newline, auto_mkdir, expand, **kwargs)
279 be used as a single context
280 """
--> 281 fs, fs_token, paths = get_fs_token_paths(
282 urlpath,
283 mode,
~/cluster-env/clonedenv/lib/python3.8/site-packages/fsspec/core.py in get_fs_token_paths(urlpath, mode, num, name_function, storage_options, protocol, expand)
597 "share the same protocol"
598 )
--> 599 cls = get_filesystem_class(protocol)
600 optionss = list(map(cls._get_kwargs_from_urls, urlpath))
601 paths = [cls._strip_protocol(u) for u in urlpath]
~/cluster-env/clonedenv/lib/python3.8/site-packages/fsspec/registry.py in get_filesystem_class(protocol)
209 bit = known_implementations[protocol]
210 try:
--> 211 register_implementation(protocol, _import_class(bit["class"]))
212 except ImportError as e:
213 raise ImportError(bit["err"]) from e
~/cluster-env/clonedenv/lib/python3.8/site-packages/fsspec/registry.py in _import_class(cls, minv)
232 else:
233 mod, name = cls.rsplit(".", 1)
--> 234 mod = importlib.import_module(mod)
235 return getattr(mod, name)
236
~/cluster-env/clonedenv/lib/python3.8/importlib/__init__.py in import_module(name, package)
125 break
126 level += 1
--> 127 return _bootstrap._gcd_import(name[level:], package, level)
128
129
~/cluster-env/clonedenv/lib/python3.8/importlib/_bootstrap.py in _gcd_import(name, package, level)
~/cluster-env/clonedenv/lib/python3.8/importlib/_bootstrap.py in _find_and_load(name, import_)
~/cluster-env/clonedenv/lib/python3.8/importlib/_bootstrap.py in _find_and_load_unlocked(name, import_)
~/cluster-env/clonedenv/lib/python3.8/importlib/_bootstrap.py in _load_unlocked(spec)
~/cluster-env/clonedenv/lib/python3.8/importlib/_bootstrap_external.py in exec_module(self, module)
~/cluster-env/clonedenv/lib/python3.8/importlib/_bootstrap.py in _call_with_frames_removed(f, *args, **kwds)
~/cluster-env/clonedenv/lib/python3.8/site-packages/fsspec_wrapper/__init__.py in <module>
----> 1 from .core import (
2 AzureBlobFileSystem
3 )
4
5 __all__ = [
~/cluster-env/clonedenv/lib/python3.8/site-packages/fsspec_wrapper/core.py in <module>
3 from .utils import logger as synapseml_pandas_logger
4 from .utils.common import SynapseCredential
----> 5 import adlfs
6 import time
7 import re
~/cluster-env/clonedenv/lib/python3.8/site-packages/adlfs/__init__.py in <module>
----> 1 from .spec import AzureDatalakeFileSystem
2 from .spec import AzureBlobFileSystem, AzureBlobFile
3 from ._version import get_versions
4
5 __all__ = ["AzureBlobFileSystem", "AzureBlobFile", "AzureDatalakeFileSystem"]
~/cluster-env/clonedenv/lib/python3.8/site-packages/adlfs/spec.py in <module>
16 ResourceExistsError,
17 )
---> 18 from azure.storage.blob._shared.base_client import create_configuration
19 from azure.datalake.store import AzureDLFileSystem, lib
20 from azure.datalake.store.core import AzureDLFile, AzureDLPath
~/cluster-env/clonedenv/lib/python3.8/site-packages/azure/storage/blob/__init__.py in <module>
8 from typing import Union, Iterable, AnyStr, IO, Any, Dict # pylint: disable=unused-import
9 from ._version import VERSION
---> 10 from ._blob_client import BlobClient
11 from ._container_client import ContainerClient
12 from ._blob_service_client import BlobServiceClient
~/cluster-env/clonedenv/lib/python3.8/site-packages/azure/storage/blob/_blob_client.py in <module>
24
25 from ._shared import encode_base64
---> 26 from ._shared.base_client import StorageAccountHostsMixin, parse_connection_str, parse_query, TransportWrapper
27 from ._shared.encryption import generate_blob_encryption_data
28 from ._shared.uploads import IterStreamer
~/cluster-env/clonedenv/lib/python3.8/site-packages/azure/storage/blob/_shared/base_client.py in <module>
38 from .constants import STORAGE_OAUTH_SCOPE, SERVICE_HOST_BASE, CONNECTION_TIMEOUT, READ_TIMEOUT
39 from .models import LocationMode
---> 40 from .authentication import SharedKeyCredentialPolicy
41 from .shared_access_signature import QueryStringConstants
42 from .request_handlers import serialize_batch_body, _get_batch_request_delimiter
~/cluster-env/clonedenv/lib/python3.8/site-packages/azure/storage/blob/_shared/authentication.py in <module>
20
21 try:
---> 22 from azure.core.pipeline.transport import AioHttpTransport
23 except ImportError:
24 AioHttpTransport = None
~/cluster-env/clonedenv/lib/python3.8/importlib/_bootstrap.py in _handle_fromlist(module, fromlist, import_, recursive)
~/cluster-env/clonedenv/lib/python3.8/site-packages/azure/core/pipeline/transport/__init__.py in __getattr__(name)
66 if name == 'AioHttpTransport':
67 try:
---> 68 from ._aiohttp import AioHttpTransport
69 return AioHttpTransport
70 except ImportError:
~/cluster-env/clonedenv/lib/python3.8/site-packages/azure/core/pipeline/transport/_aiohttp.py in <module>
33 import asyncio
34 import codecs
---> 35 import aiohttp
36 from multidict import CIMultiDict
37
~/cluster-env/clonedenv/lib/python3.8/site-packages/aiohttp/__init__.py in <module>
4
5 from . import hdrs as hdrs
----> 6 from .client import (
7 BaseConnector as BaseConnector,
8 ClientConnectionError as ClientConnectionError,
~/cluster-env/clonedenv/lib/python3.8/site-packages/aiohttp/client.py in <module>
33 from yarl import URL
34
---> 35 from . import hdrs, http, payload
36 from .abc import AbstractCookieJar
37 from .client_exceptions import (
~/cluster-env/clonedenv/lib/python3.8/site-packages/aiohttp/http.py in <module>
5 from . import __version__
6 from .http_exceptions import HttpProcessingError as HttpProcessingError
----> 7 from .http_parser import (
8 HeadersParser as HeadersParser,
9 HttpParser as HttpParser,
~/cluster-env/clonedenv/lib/python3.8/site-packages/aiohttp/http_parser.py in <module>
13 from . import hdrs
14 from .base_protocol import BaseProtocol
---> 15 from .helpers import NO_EXTENSIONS, BaseTimerContext
16 from .http_exceptions import (
17 BadStatusLine,
~/cluster-env/clonedenv/lib/python3.8/site-packages/aiohttp/helpers.py in <module>
665
666
--> 667 class CeilTimeout(async_timeout.timeout):
668 def __enter__(self) -> async_timeout.timeout:
669 if self._timeout is not None:
TypeError: function() argument 'code' must be code, not str
I am getting the above error message and am not sure how to rectify it.
Can anyone advise what the issue in my code is?
This could be because you do not have valid permission to access the container, or you may lack write permissions on it.
I have repro’d with your code and was able to write the data to CSV successfully.
df.to_csv('abfss://<container name>@<storage account name>.dfs.core.windows.net/source/sample2.csv', sep=',', encoding='utf-8', index=False)
Output:
I am using the below code :
import sklearn
from imblearn.pipeline import make_pipeline
but it is showing the below error for me:
ImportError: cannot import name 'available_if' from 'sklearn.utils.metaestimators' (/databricks/python/lib/python3.8/site-packages/sklearn/utils/metaestimators.py)
Here is the complete error:
> ImportError Traceback (most recent call
> last) <command-3963464708539101> in <module>
> 1 import sklearn
> ----> 2 from imblearn.pipeline import make_pipeline
> 3 from imblearn.over_sampling import SMOTE
> 4 from imblearn.under_sampling import NearMiss
> 5
>
> /databricks/python_shell/dbruntime/PythonPackageImportsInstrumentation/__init__.py
> in import_patch(name, globals, locals, fromlist, level)
> 160 # Import the desired module. If you’re seeing this while debugging a failed import,
> 161 # look at preceding stack frames for relevant error information.
> --> 162 original_result = python_builtin_import(name, globals, locals, fromlist, level)
> 163
> 164 is_root_import = thread_local._nest_level == 1
>
> /databricks/python/lib/python3.8/site-packages/imblearn/__init__.py in
> <module>
> 51 else:
> 52 from . import combine
> ---> 53 from . import ensemble
> 54 from . import exceptions
> 55 from . import metrics
>
> /databricks/python_shell/dbruntime/PythonPackageImportsInstrumentation/__init__.py
> in import_patch(name, globals, locals, fromlist, level)
> 160 # Import the desired module. If you’re seeing this while debugging a failed import,
> 161 # look at preceding stack frames for relevant error information.
> --> 162 original_result = python_builtin_import(name, globals, locals, fromlist, level)
> 163
> 164 is_root_import = thread_local._nest_level == 1
>
> /databricks/python/lib/python3.8/site-packages/imblearn/ensemble/__init__.py
> in <module>
> 4 """
> 5
> ----> 6 from ._easy_ensemble import EasyEnsembleClassifier
> 7 from ._bagging import BalancedBaggingClassifier
> 8 from ._forest import BalancedRandomForestClassifier
>
> /databricks/python_shell/dbruntime/PythonPackageImportsInstrumentation/__init__.py
> in import_patch(name, globals, locals, fromlist, level)
> 160 # Import the desired module. If you’re seeing this while debugging a failed import,
> 161 # look at preceding stack frames for relevant error information.
> --> 162 original_result = python_builtin_import(name, globals, locals, fromlist, level)
> 163
> 164 is_root_import = thread_local._nest_level == 1
>
> /databricks/python/lib/python3.8/site-packages/imblearn/ensemble/_easy_ensemble.py
> in <module>
> 19 from ..utils._docstring import _random_state_docstring
> 20 from ..utils._validation import _deprecate_positional_args
> ---> 21 from ..pipeline import Pipeline
> 22
> 23 MAX_INT = np.iinfo(np.int32).max
>
> /databricks/python_shell/dbruntime/PythonPackageImportsInstrumentation/__init__.py
> in import_patch(name, globals, locals, fromlist, level)
> 160 # Import the desired module. If you’re seeing this while debugging a failed import,
> 161 # look at preceding stack frames for relevant error information.
> --> 162 original_result = python_builtin_import(name, globals, locals, fromlist, level)
> 163
> 164 is_root_import = thread_local._nest_level == 1
>
> /databricks/python/lib/python3.8/site-packages/imblearn/pipeline.py in
> <module>
> 16 from sklearn.base import clone
> 17 from sklearn.utils import _print_elapsed_time
> ---> 18 from sklearn.utils.metaestimators import available_if
> 19 from sklearn.utils.validation import check_memory
> 20
>
> ImportError: cannot import name 'available_if' from
> 'sklearn.utils.metaestimators'
> (/databricks/python/lib/python3.8/site-packages/sklearn/utils/metaestimators.py)
output of sklearn.version : '0.24.1'
I have tried a lot of things, but it is not working. Please let me know if you have a solution for this. It could be a version incompatibility, but I don't know which versions work well together.
Edit:
Also, I am getting the below output:
!pip install scikit-learn==1.1.1
print(sklearn.__version__)
sklearn.__path__
Output:
Requirement already satisfied: scikit-learn==1.1.1 in /databricks/python3/lib/python3.8/site-packages (1.1.1)
Requirement already satisfied: scipy>=1.3.2 in /databricks/python3/lib/python3.8/site-packages (from scikit-learn==1.1.1) (1.6.2)
Requirement already satisfied: numpy>=1.17.3 in /databricks/python3/lib/python3.8/site-packages (from scikit-learn==1.1.1) (1.19.2)
Requirement already satisfied: joblib>=1.0.0 in /databricks/python3/lib/python3.8/site-packages (from scikit-learn==1.1.1) (1.0.1)
Requirement already satisfied: threadpoolctl>=2.0.0 in /databricks/python3/lib/python3.8/site-packages (from scikit-learn==1.1.1) (2.1.0)
Out[45]: '0.24.1'
Out[46]: ['/databricks/python/lib/python3.8/site-packages/sklearn']
Here, it is reading the version from the /python/ directory but installing into the /python3/ directory.
That mismatch might be the issue, but I don't know the solution.
I had to pin specific versions of numpy, pandas, dask-ml and scikit-learn to resolve this:
numpy 1.22.4
pandas 1.2.4
dask-ml 2022.5.27
scikit-learn 1.1.1
Did you try using a virtual environment?
To create a venv(virtual environment):
python -m venv venv_name
To activate the venv:
source venv_name/bin/activate
I am trying to run some R code in a predominantly Python script in Jupyter Notebook.
I have conda installed rpy2 on my terminal. Calling import rpy2 in my script works.
But when I try to run %load_ext rpy2.ipython (which is a necessary step to run R code), I have the following error:
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-13-fb23c6edefe4> in <module>
----> 1 get_ipython().run_line_magic('load_ext', 'rpy2.ipython')
~/opt/anaconda3/lib/python3.7/site-packages/IPython/core/interactiveshell.py in run_line_magic(self, magic_name, line, _stack_depth)
2312 kwargs['local_ns'] = sys._getframe(stack_depth).f_locals
2313 with self.builtin_trap:
-> 2314 result = fn(*args, **kwargs)
2315 return result
2316
</Users/dpatrick/opt/anaconda3/lib/python3.7/site-packages/decorator.py:decorator-gen-64> in load_ext(self, module_str)
~/opt/anaconda3/lib/python3.7/site-packages/IPython/core/magic.py in <lambda>(f, *a, **k)
185 # but it's overkill for just that one bit of state.
186 def magic_deco(arg):
--> 187 call = lambda f, *a, **k: f(*a, **k)
188
189 if callable(arg):
~/opt/anaconda3/lib/python3.7/site-packages/IPython/core/magics/extension.py in load_ext(self, module_str)
31 if not module_str:
32 raise UsageError('Missing module name.')
---> 33 res = self.shell.extension_manager.load_extension(module_str)
34
35 if res == 'already loaded':
~/opt/anaconda3/lib/python3.7/site-packages/IPython/core/extensions.py in load_extension(self, module_str)
78 if module_str not in sys.modules:
79 with prepended_to_syspath(self.ipython_extension_dir):
---> 80 mod = import_module(module_str)
81 if mod.__file__.startswith(self.ipython_extension_dir):
82 print(("Loading extensions from {dir} is deprecated. "
~/opt/anaconda3/lib/python3.7/importlib/__init__.py in import_module(name, package)
125 break
126 level += 1
--> 127 return _bootstrap._gcd_import(name[level:], package, level)
128
129
~/opt/anaconda3/lib/python3.7/importlib/_bootstrap.py in _gcd_import(name, package, level)
~/opt/anaconda3/lib/python3.7/importlib/_bootstrap.py in _find_and_load(name, import_)
~/opt/anaconda3/lib/python3.7/importlib/_bootstrap.py in _find_and_load_unlocked(name, import_)
~/opt/anaconda3/lib/python3.7/importlib/_bootstrap.py in _load_unlocked(spec)
~/opt/anaconda3/lib/python3.7/importlib/_bootstrap_external.py in exec_module(self, module)
~/opt/anaconda3/lib/python3.7/importlib/_bootstrap.py in _call_with_frames_removed(f, *args, **kwds)
~/opt/anaconda3/lib/python3.7/site-packages/rpy2/ipython/__init__.py in <module>
----> 1 from . import rmagic
2
3 load_ipython_extension = rmagic.load_ipython_extension
~/opt/anaconda3/lib/python3.7/site-packages/rpy2/ipython/rmagic.py in <module>
53 # numpy and rpy2 imports
54
---> 55 import rpy2.rinterface as ri
56 import rpy2.rinterface_lib.callbacks
57 import rpy2.robjects as ro
~/opt/anaconda3/lib/python3.7/site-packages/rpy2/rinterface.py in <module>
5 import typing
6 from rpy2.rinterface_lib import openrlib
----> 7 import rpy2.rinterface_lib._rinterface_capi as _rinterface
8 import rpy2.rinterface_lib.embedded as embedded
9 import rpy2.rinterface_lib.conversion as conversion
~/opt/anaconda3/lib/python3.7/site-packages/rpy2/rinterface_lib/_rinterface_capi.py in <module>
8 from _rinterface_cffi import ffi
9 from . import conversion
---> 10 from . import embedded
11 from . import memorymanagement
12
~/opt/anaconda3/lib/python3.7/site-packages/rpy2/rinterface_lib/embedded.py in <module>
6 from _rinterface_cffi import ffi
7 from . import openrlib
----> 8 from . import callbacks
9
10 _options = ('rpy2', '--quiet', '--no-save')
~/opt/anaconda3/lib/python3.7/site-packages/rpy2/rinterface_lib/callbacks.py in <module>
34
35 #ffi_proxy.callback(ffi_proxy._consoleflush_def,
---> 36 ffi)
37 def _consoleflush():
38 try:
~/opt/anaconda3/lib/python3.7/site-packages/rpy2/rinterface_lib/ffi_proxy.py in decorator(func)
42 'void', ('SEXP',))
43 _evaluate_in_r_def = SignatureDefinition('_evaluate_in_r',
---> 44 'SEXP', ('SEXP args',))
45
46
AttributeError: 'CompiledFFI' object has no attribute 'ffi'
What exactly is this 'CompiledFFI' error? How should I fix it?
Thanks so much for your help!
I want to check version of the dependencies; But pd.show_versions() gives AttributeError.
Here is the full traceback:
AttributeError Traceback (most recent call last)
<ipython-input-2-c4c6bc0da956> in <module>
----> 1 pd.show_versions()
C:\ProgramData\Anaconda3\lib\site-packages\pandas\util\_print_versions.py in show_versions(as_json)
97 for modname in deps:
98 mod = import_optional_dependency(
---> 99 modname, raise_on_missing=False, on_version="ignore"
100 )
101 if mod:
C:\ProgramData\Anaconda3\lib\site-packages\pandas\compat\_optional.py in import_optional_dependency(name, extra, raise_on_missing, on_version)
88 """
89 try:
---> 90 module = importlib.import_module(name)
91 except ImportError:
92 if raise_on_missing:
C:\ProgramData\Anaconda3\lib\importlib\__init__.py in import_module(name, package)
125 break
126 level += 1
--> 127 return _bootstrap._gcd_import(name[level:], package, level)
128
129
C:\ProgramData\Anaconda3\lib\importlib\_bootstrap.py in _gcd_import(name, package, level)
C:\ProgramData\Anaconda3\lib\importlib\_bootstrap.py in _find_and_load(name, import_)
C:\ProgramData\Anaconda3\lib\importlib\_bootstrap.py in _find_and_load_unlocked(name, import_)
C:\ProgramData\Anaconda3\lib\importlib\_bootstrap.py in _load_unlocked(spec)
C:\ProgramData\Anaconda3\lib\importlib\_bootstrap_external.py in exec_module(self, module)
C:\ProgramData\Anaconda3\lib\importlib\_bootstrap.py in _call_with_frames_removed(f, *args, **kwds)
C:\ProgramData\Anaconda3\lib\site-packages\gcsfs\__init__.py in <module>
2
3 from .core import GCSFileSystem
----> 4 from .dask_link import register as register_dask
5 from .mapping import GCSMap
6 from ._version import get_versions
C:\ProgramData\Anaconda3\lib\site-packages\gcsfs\dask_link.py in <module>
120
121 try:
--> 122 register()
123 except ImportError as e:
124 print(e)
C:\ProgramData\Anaconda3\lib\site-packages\gcsfs\dask_link.py in register()
115 return PyarrowWrappedGCSFS(self)
116
--> 117 dask.bytes.core._filesystems['gcs'] = DaskGCSFileSystem
118 dask.bytes.core._filesystems['gs'] = DaskGCSFileSystem
119
AttributeError: module 'dask.bytes.core' has no attribute '_filesystems'
How can I fix it?
Looks like an issue with your installation of GCSFS. There seems to be a similar issue here.
Try conda install -c conda-forge gcsfs or pip install gcsfs to reinstall GCSFS.
I downloaded Theano from GitHub and installed it.
But when I try to import theano in IPython, I get this problem:
In [1]: import theano
ImportError Traceback (most recent call last)
<ipython-input-1-3397704bd624> in <module>()
----> 1 import theano
C:\Anaconda3\lib\site-packages\theano\__init__.py in <module>()
40 from theano.version import version as version
41
---> 42 from theano.configdefaults import config
43
44 # This is the api version for ops that generate C code. External ops
C:\Anaconda3\lib\site-packages\theano\configdefaults.py in <module>()
14
15 import theano
---> 16 from theano.configparser import (AddConfigVar, BoolParam, ConfigParam, EnumStr,
17 FloatParam, IntParam, StrParam,
18 TheanoConfigParser, THEANO_FLAGS_DICT)
C:\Anaconda3\lib\site-packages\theano\configparser.py in <module>()
13
14 import theano
---> 15 from theano.compat import configparser as ConfigParser
16 from six import string_types
17
C:\Anaconda3\lib\site-packages\theano\compat\__init__.py in <module>()
4 # Python 3.x compatibility
5 from six import PY3, b, BytesIO, next
----> 6 from six.moves import configparser
7 from six.moves import reload_module as reload
8 import collections
C:\Anaconda3\lib\site-packages\six.py in __get__(self, obj, tp)
90
91 def __get__(self, obj, tp):
---> 92 result = self._resolve()
93 setattr(obj, self.name, result) # Invokes __set__.
94 try:
C:\Anaconda3\lib\site-packages\six.py in _resolve(self)
113
114 def _resolve(self):
--> 115 return _import_module(self.mod)
116
117 def __getattr__(self, attr):
C:\Anaconda3\lib\site-packages\six.py in _import_module(name)
80 def _import_module(name):
81 """Import module, returning the module after the last dot."""
---> 82 __import__(name)
83 return sys.modules[name]
84
C:\Anaconda3\Lib\site-packages\theano\configparser.py in <module>()
13
14 import theano
---> 15 from theano.compat import configparser as ConfigParser
16 from six import string_types
17
When I look into the files, I indeed cannot find configparser.py in that directory, but the original source does not have it either.
ImportError: cannot import name 'configparser'
Just found a temporary workaround: rename configparser.py to config_parser.py (or any other name that does not conflict),
and change each module that imports it to import config_parser instead.