I am setting up a local database for DataJoint following the instructions here: https://github.com/datajoint/mysql-docker.
The MySQL database works, but now I want to use MinIO for external storage. I start MySQL and MinIO as services with docker-compose and set up dj.config for external storage. I am using MinIO because it is S3-compatible.
Here is my dj.config:
import os

dj.config['stores'] = {
    'minio': {  # store in s3
        'protocol': 's3',
        'endpoint': 'minio:9001',
        'bucket': 'test',
        'location': 'test2/',
        'access_key': os.environ.get('MINIO_ROOT_USER', 'FAKEKEY'),
        'secret_key': os.environ.get('MINIO_ROOT_PASSWORD', 'FAKEKEY')
    }
}
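Independently of DataJoint, the same bucket can be probed with the minio client (the library DataJoint wraps), which helps separate configuration problems from DataJoint problems. A minimal sketch, assuming the S3 API listens on port 9000 (MinIO's default; 9001 usually serves the web console) and that TLS is off in this local docker-compose setup:
import os
from minio import Minio

# Probe the bucket directly with the same client library DataJoint uses.
client = Minio(
    'minio:9000',   # assumption: 9000 is the S3 API port; 9001 is typically the console
    access_key=os.environ.get('MINIO_ROOT_USER', 'FAKEKEY'),
    secret_key=os.environ.get('MINIO_ROOT_PASSWORD', 'FAKEKEY'),
    secure=False,   # plain HTTP for a local docker-compose setup
)
print(client.bucket_exists('test'))  # True only if endpoint and credentials are right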
But I get a weird parsing error while populating the table. Here is the error stack:
---------------------------------------------------------------------------
ParseError Traceback (most recent call last)
/usr/local/lib/python3.8/dist-packages/minio/parsers.py in fromstring(cls, root_name, data)
68 try:
---> 69 return cls(root_name, ElementTree.fromstring(data.strip()))
70 except (ParseError, AttributeError, ValueError, TypeError) as error:
/usr/lib/python3.8/xml/etree/ElementTree.py in XML(text, parser)
1319 parser = XMLParser(target=TreeBuilder())
-> 1320 parser.feed(text)
1321 return parser.close()
ParseError: syntax error: line 1, column 0
During handling of the above exception, another exception occurred:
InvalidXMLError Traceback (most recent call last)
<ipython-input-24-884d9761b97d> in <module>
----> 1 TrainedModel.populate(reserve_jobs=True)
/usr/local/lib/python3.8/dist-packages/datajoint/autopopulate.py in populate(self, suppress_errors, return_exception_objects, reserve_jobs, order, limit, max_calls, display_progress, *restrictions)
157 self.__class__._allow_insert = True
158 try:
--> 159 make(dict(key))
160 except (KeyboardInterrupt, SystemExit, Exception) as error:
161 try:
/usr/local/lib/python3.8/dist-packages/nnfabrik/templates/trained_model.py in make(self, key)
273 key["model_state"] = filepath
274
--> 275 self.ModelStorage.insert1(key, ignore_extra_fields=True)
276
277
/usr/local/lib/python3.8/dist-packages/datajoint/table.py in insert1(self, row, **kwargs)
173 For kwargs, see insert()
174 """
--> 175 self.insert((row,), **kwargs)
176
177 def insert(self, rows, replace=False, skip_duplicates=False, ignore_extra_fields=False, allow_direct_insert=None):
/usr/local/lib/python3.8/dist-packages/datajoint/table.py in insert(self, rows, replace, skip_duplicates, ignore_extra_fields, allow_direct_insert)
334 return row_to_insert
335
--> 336 rows = list(make_row_to_insert(row) for row in rows)
337 if rows:
338 try:
/usr/local/lib/python3.8/dist-packages/datajoint/table.py in <genexpr>(.0)
334 return row_to_insert
335
--> 336 rows = list(make_row_to_insert(row) for row in rows)
337 if rows:
338 try:
/usr/local/lib/python3.8/dist-packages/datajoint/table.py in make_row_to_insert(row)
304 elif isinstance(row, collections.abc.Mapping): # dict-based
305 check_fields(row)
--> 306 attributes = [make_placeholder(name, row[name]) for name in heading if name in row]
307 else: # positional
308 try:
/usr/local/lib/python3.8/dist-packages/datajoint/table.py in <listcomp>(.0)
304 elif isinstance(row, collections.abc.Mapping): # dict-based
305 check_fields(row)
--> 306 attributes = [make_placeholder(name, row[name]) for name in heading if name in row]
307 else: # positional
308 try:
/usr/local/lib/python3.8/dist-packages/datajoint/table.py in make_placeholder(name, value)
275 if attr.is_external:
276 # value is hash of contents
--> 277 value = self.external[attr.store].upload_attachment(attachment_path).bytes
278 else:
279 # value is filename + contents
/usr/local/lib/python3.8/dist-packages/datajoint/external.py in upload_attachment(self, local_path)
199 uuid = uuid_from_file(local_path, init_string=attachment_name + '\0')
200 external_path = self._make_uuid_path(uuid, '.' + attachment_name)
--> 201 self._upload_file(local_path, external_path)
202 # insert tracking info
203 self.connection.query("""
/usr/local/lib/python3.8/dist-packages/datajoint/external.py in _upload_file(self, local_path, external_path, metadata)
100 def _upload_file(self, local_path, external_path, metadata=None):
101 if self.spec['protocol'] == 's3':
--> 102 self.s3.fput(local_path, external_path, metadata)
103 elif self.spec['protocol'] == 'file':
104 safe_copy(local_path, external_path, overwrite=True)
/usr/local/lib/python3.8/dist-packages/datajoint/external.py in s3(self)
71 def s3(self):
72 if self._s3 is None:
---> 73 self._s3 = s3.Folder(**self.spec)
74 return self._s3
75
/usr/local/lib/python3.8/dist-packages/datajoint/s3.py in __init__(self, endpoint, bucket, access_key, secret_key, secure, **_)
18 secure=secure)
19 self.bucket = bucket
---> 20 if not self.client.bucket_exists(bucket):
21 raise errors.BucketInaccessible('Inaccessible s3 bucket %s' % bucket) from None
22
/usr/local/lib/python3.8/dist-packages/minio/api.py in bucket_exists(self, bucket_name)
402
403 try:
--> 404 self._url_open('HEAD', bucket_name=bucket_name)
405 return True
406 except NoSuchBucket:
/usr/local/lib/python3.8/dist-packages/minio/api.py in _url_open(self, method, bucket_name, object_name, query, body, headers, content_sha256, preload_content)
2183
2184 # Get bucket region.
-> 2185 region = self._get_bucket_region(bucket_name)
2186
2187 # Construct target url.
/usr/local/lib/python3.8/dist-packages/minio/api.py in _get_bucket_region(self, bucket_name)
2061 region = self._region or self._region_map.get(bucket_name)
2062 if not region:
-> 2063 region = self._get_bucket_location(bucket_name)
2064 self._region_map[bucket_name] = region
2065 return region
/usr/local/lib/python3.8/dist-packages/minio/api.py in _get_bucket_location(self, bucket_name)
2105 raise ResponseError(response, method, bucket_name).get_exception()
2106
-> 2107 location = parse_location_constraint(response.data)
2108 # location is empty for 'US standard region'
2109 if not location:
/usr/local/lib/python3.8/dist-packages/minio/parsers.py in parse_location_constraint(data)
425 :return: Returns location of your bucket.
426 """
--> 427 root = S3Element.fromstring('BucketLocationConstraintResult', data)
428 return root.text()
429
/usr/local/lib/python3.8/dist-packages/minio/parsers.py in fromstring(cls, root_name, data)
69 return cls(root_name, ElementTree.fromstring(data.strip()))
70 except (ParseError, AttributeError, ValueError, TypeError) as error:
---> 71 raise InvalidXMLError(
72 '"{}" XML is not parsable. Message: {}'.format(
73 root_name, error
InvalidXMLError: InvalidXMLError: message: "BucketLocationConstraintResult" XML is not parsable. Message: syntax error: line 1, column 0
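The failing call is GetBucketLocation, and the client is choking because the response body is not XML at all. One quick, hypothetical check is to fetch that URL by hand and look at what the endpoint actually returns; the MinIO web console answers with HTML, while the S3 API answers with XML even for errors:
import urllib.request, urllib.error

# Same host/port as in dj.config['stores']; '?location=' mimics GetBucketLocation.
url = 'http://minio:9001/test?location='
try:
    body = urllib.request.urlopen(url).read(200)
except urllib.error.HTTPError as e:
    body = e.read(200)
print(body)  # XML (...LocationConstraint...) = S3 API; HTML = web console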
I am trying to load a sas7bdat file in Python using pd.read_sas(), but it fails with the error below.
ValueError Traceback (most recent call last)
<ipython-input-148-64f915da8256> in <module>
----> 1 df_sas = pd.read_sas('input_sasfile.sas7bdat', format='sas7bdat')
~\.conda\envs\overloaded-new\lib\site-packages\pandas\io\sas\sasreader.py in read_sas(filepath_or_buffer, format, index, encoding, chunksize, iterator)
121
122 reader = SAS7BDATReader(
--> 123 filepath_or_buffer, index=index, encoding=encoding, chunksize=chunksize
124 )
125 else:
~\.conda\envs\overloaded-new\lib\site-packages\pandas\io\sas\sas7bdat.py in __init__(self, path_or_buf, index, convert_dates, blank_missing, chunksize, encoding, convert_text, convert_header_text)
144
145 self._get_properties()
--> 146 self._parse_metadata()
147
148 def column_data_lengths(self):
~\.conda\envs\overloaded-new\lib\site-packages\pandas\io\sas\sas7bdat.py in _parse_metadata(self)
349 self.close()
350 raise ValueError("Failed to read a meta data page from the SAS file.")
--> 351 done = self._process_page_meta()
352
353 def _process_page_meta(self):
~\.conda\envs\overloaded-new\lib\site-packages\pandas\io\sas\sas7bdat.py in _process_page_meta(self)
355 pt = [const.page_meta_type, const.page_amd_type] + const.page_mix_types
356 if self._current_page_type in pt:
--> 357 self._process_page_metadata()
358 is_data_page = self._current_page_type & const.page_data_type
359 is_mix_page = self._current_page_type in const.page_mix_types
~\.conda\envs\overloaded-new\lib\site-packages\pandas\io\sas\sas7bdat.py in _process_page_metadata(self)
388 subheader_signature = self._read_subheader_signature(pointer.offset)
389 subheader_index = self._get_subheader_index(
--> 390 subheader_signature, pointer.compression, pointer.ptype
391 )
392 self._process_subheader(subheader_index, pointer)
~\.conda\envs\overloaded-new\lib\site-packages\pandas\io\sas\sas7bdat.py in _get_subheader_index(self, signature, compression, ptype)
401 else:
402 self.close()
--> 403 raise ValueError("Unknown subheader signature")
404 return index
405
ValueError: Unknown subheader signature
I found a relevant GitHub issue (https://github.com/pandas-dev/pandas/issues/24794), but it was closed because the problem was resolved by updating pandas.
Any help is greatly appreciated.
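For what it's worth, a quick way to test whether the fix from that issue applies is to check the pandas version first and, if the file still fails, read it in chunks with an explicit encoding. A sketch, with 'input_sasfile.sas7bdat' taken from the question:
import pandas as pd

print(pd.__version__)  # the linked issue was closed as fixed by upgrading pandas

# If the upgrade alone does not help, chunked reading with an explicit encoding
# at least shows whether the failure is in the metadata or in a specific data page.
reader = pd.read_sas('input_sasfile.sas7bdat', format='sas7bdat',
                     encoding='latin-1', chunksize=10000)
for i, chunk in enumerate(reader):
    print(i, chunk.shape)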
Trying to access a shared folder using the following code:
credentials = Credentials(username=user_name, password="secret")
config = Configuration(server='outlook.office365.com', credentials=credentials, auth_type=NTLM)
account = Account(primary_smtp_address='shared_mail@domain.com', credentials=credentials, autodiscover=False, config=config, access_type=DELEGATE)
The above three lines of code work perfectly, but we are unable to get the root;
the following code, account.root.tree() or account.root, throws this error:
KeyError Traceback (most recent call last)
~\anaconda3\lib\site-packages\cached_property.py in __get__(self, obj, cls)
68 # check if the value was computed before the lock was acquired
---> 69 return obj_dict[name]
70
KeyError: 'root'
During handling of the above exception, another exception occurred:
ErrorNonExistentMailbox Traceback (most recent call last)
<ipython-input-46-a90a4f76ca21> in <module>
2 logging.basicConfig(level=logging.DEBUG)
3
----> 4 account.root.tree()
~\anaconda3\lib\site-packages\cached_property.py in __get__(self, obj, cls)
71 except KeyError:
72 # if not, do the calculation and release the lock
---> 73 return obj_dict.setdefault(name, self.func(obj))
74
75
~\anaconda3\lib\site-packages\exchangelib\account.py in root(self)
268 @threaded_cached_property
269 def root(self):
--> 270 return Root.get_distinguished(account=self)
271
272 @threaded_cached_property
~\anaconda3\lib\site-packages\exchangelib\folders\roots.py in get_distinguished(cls, account)
107 return cls.resolve(
108 account=account,
--> 109 folder=cls(account=account, name=cls.DISTINGUISHED_FOLDER_ID, is_distinguished=True)
110 )
111 except ErrorFolderNotFound:
~\anaconda3\lib\site-packages\exchangelib\folders\base.py in resolve(cls, account, folder)
485 def resolve(cls, account, folder):
486 # Resolve a single folder
--> 487 folders = list(FolderCollection(account=account, folders=[folder]).resolve())
488 if not folders:
489 raise ErrorFolderNotFound('Could not find folder %r' % folder)
~\anaconda3\lib\site-packages\exchangelib\folders\collections.py in resolve(self)
254 additional_fields = self.get_folder_fields(target_cls=self._get_target_cls(), is_complex=None)
255 for f in self.__class__(account=self.account, folders=resolveable_folders).get_folders(
--> 256 additional_fields=additional_fields
257 ):
258 yield f
~\anaconda3\lib\site-packages\exchangelib\folders\collections.py in get_folders(self, additional_fields)
317 folders=self.folders,
318 additional_fields=additional_fields,
--> 319 shape=ID_ONLY,
320 ):
321 yield f
~\anaconda3\lib\site-packages\exchangelib\services\get_folder.py in call(self, folders, additional_fields, shape)
32 **dict(
33 additional_fields=additional_fields,
---> 34 shape=shape,
35 )
36 )):
~\anaconda3\lib\site-packages\exchangelib\services\common.py in _pool_requests(self, payload_func, items, **kwargs)
538 for i, chunk in enumerate(chunkify(items, self.chunk_size), start=1):
539 log.debug('Processing %s chunk %s containing %s items', self.__class__.__name__, i, len(chunk))
--> 540 for elem in self._get_elements(payload=payload_func(chunk, **kwargs)):
541 yield elem
542
~\anaconda3\lib\site-packages\exchangelib\services\common.py in _get_elements_in_response(self, response)
401 def _get_elements_in_response(self, response):
402 for msg in response:
--> 403 container_or_exc = self._get_element_container(message=msg, name=self.element_container_name)
404 if isinstance(container_or_exc, (bool, Exception)):
405 yield container_or_exc
~\anaconda3\lib\site-packages\exchangelib\services\common.py in _get_element_container(self, message, response_message, name)
360 # rspclass == 'Error', or 'Success' and not 'NoError'
361 try:
--> 362 raise self._get_exception(code=response_code, text=msg_text, msg_xml=msg_xml)
363 except self.ERRORS_TO_CATCH_IN_RESPONSE as e:
364 return e
ErrorNonExistentMailbox: Mailbox does not exist.
The same code seems to be working here: https://medium.com/@theamazingexposure/accessing-shared-mailbox-using-exchangelib-python-f020e71a96ab
I also checked this thread (https://github.com/ecederstrand/exchangelib/issues/391) and tried almost all the solutions there, but I am facing the same error.
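ErrorNonExistentMailbox generally means Exchange cannot resolve the primary_smtp_address you passed. One hedged way to check which address Exchange itself resolves, assuming autodiscover is reachable from your network (the addresses below are placeholders):
from exchangelib import Credentials, Account, DELEGATE

credentials = Credentials(username='user_name', password='secret')
# Let autodiscover resolve the mailbox instead of hard-coding the server;
# if this succeeds, the printed address is the one to use with autodiscover=False.
account = Account(primary_smtp_address='shared_mail@domain.com',
                  credentials=credentials, autodiscover=True,
                  access_type=DELEGATE)
print(account.primary_smtp_address)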
I want to train a TensorFlow image segmentation model on COCO and thought I would leverage the dataset builder already included in tensorflow_datasets. The download seems to complete, but it crashes while extracting the zip files.
I am running TF 2.0.0 in a Jupyter Notebook under a conda environment on 64-bit Windows 10. The Oxford-IIIT Pet dataset used in the official image segmentation tutorial works fine.
Below is the error message (my local user name replaced with %user%).
---------------------------------------------------------------------------
OutOfRangeError Traceback (most recent call last)
~\.conda\envs\tf-tutorial\lib\site-packages\tensorflow_datasets\core\download\extractor.py in _sync_extract(self, from_path, method, to_path)
88 try:
---> 89 for path, handle in iter_archive(from_path, method):
90 path = tf.compat.as_text(path)
~\.conda\envs\tf-tutorial\lib\site-packages\tensorflow_datasets\core\download\extractor.py in iter_zip(arch_f)
176 with _open_or_pass(arch_f) as fobj:
--> 177 z = zipfile.ZipFile(fobj)
178 for member in z.infolist():
~\.conda\envs\tf-tutorial\lib\zipfile.py in __init__(self, file, mode, compression, allowZip64)
1130 if mode == 'r':
-> 1131 self._RealGetContents()
1132 elif mode in ('w', 'x'):
~\.conda\envs\tf-tutorial\lib\zipfile.py in _RealGetContents(self)
1193 try:
-> 1194 endrec = _EndRecData(fp)
1195 except OSError:
~\.conda\envs\tf-tutorial\lib\zipfile.py in _EndRecData(fpin)
263 # Determine file size
--> 264 fpin.seek(0, 2)
265 filesize = fpin.tell()
~\.conda\envs\tf-tutorial\lib\site-packages\tensorflow_core\python\util\deprecation.py in new_func(*args, **kwargs)
506 instructions)
--> 507 return func(*args, **kwargs)
508
~\.conda\envs\tf-tutorial\lib\site-packages\tensorflow_core\python\lib\io\file_io.py in seek(self, offset, whence, position)
166 elif whence == 2:
--> 167 offset += self.size()
168 else:
~\.conda\envs\tf-tutorial\lib\site-packages\tensorflow_core\python\lib\io\file_io.py in size(self)
101 """Returns the size of the file."""
--> 102 return stat(self.__name).length
103
~\.conda\envs\tf-tutorial\lib\site-packages\tensorflow_core\python\lib\io\file_io.py in stat(filename)
726 """
--> 727 return stat_v2(filename)
728
~\.conda\envs\tf-tutorial\lib\site-packages\tensorflow_core\python\lib\io\file_io.py in stat_v2(path)
743 file_statistics = pywrap_tensorflow.FileStatistics()
--> 744 pywrap_tensorflow.Stat(compat.as_bytes(path), file_statistics)
745 return file_statistics
OutOfRangeError: C:\Users\%user%\tensorflow_datasets\downloads\images.cocodataset.org_zips_train20147eQIfmQL3bpVDgkOrnAQklNLVUtCsFrDPwMAuYSzF3U.zip; Unknown error
During handling of the above exception, another exception occurred:
ExtractError Traceback (most recent call last)
<ipython-input-27-887fa0198611> in <module>
1 cocoBuilder = tfds.builder('coco')
2 info = cocoBuilder.info
----> 3 cocoBuilder.download_and_prepare()
~\.conda\envs\tf-tutorial\lib\site-packages\tensorflow_datasets\core\api_utils.py in disallow_positional_args_dec(fn, instance, args, kwargs)
50 _check_no_positional(fn, args, ismethod, allowed=allowed)
51 _check_required(fn, kwargs)
---> 52 return fn(*args, **kwargs)
53
54 return disallow_positional_args_dec(wrapped) # pylint: disable=no-value-for-parameter
~\.conda\envs\tf-tutorial\lib\site-packages\tensorflow_datasets\core\dataset_builder.py in download_and_prepare(self, download_dir, download_config)
285 self._download_and_prepare(
286 dl_manager=dl_manager,
--> 287 download_config=download_config)
288
289 # NOTE: If modifying the lines below to put additional information in
~\.conda\envs\tf-tutorial\lib\site-packages\tensorflow_datasets\core\dataset_builder.py in _download_and_prepare(self, dl_manager, download_config)
946 super(GeneratorBasedBuilder, self)._download_and_prepare(
947 dl_manager=dl_manager,
--> 948 max_examples_per_split=download_config.max_examples_per_split,
949 )
950
~\.conda\envs\tf-tutorial\lib\site-packages\tensorflow_datasets\core\dataset_builder.py in _download_and_prepare(self, dl_manager, **prepare_split_kwargs)
802 # Generating data for all splits
803 split_dict = splits_lib.SplitDict()
--> 804 for split_generator in self._split_generators(dl_manager):
805 if splits_lib.Split.ALL == split_generator.split_info.name:
806 raise ValueError(
~\.conda\envs\tf-tutorial\lib\site-packages\tensorflow_datasets\image\coco.py in _split_generators(self, dl_manager)
237 root_url = 'http://images.cocodataset.org/'
238 extracted_paths = dl_manager.download_and_extract({
--> 239 key: root_url + url for key, url in urls.items()
240 })
241
~\.conda\envs\tf-tutorial\lib\site-packages\tensorflow_datasets\core\download\download_manager.py in download_and_extract(self, url_or_urls)
357 with self._downloader.tqdm():
358 with self._extractor.tqdm():
--> 359 return _map_promise(self._download_extract, url_or_urls)
360
361 @property
~\.conda\envs\tf-tutorial\lib\site-packages\tensorflow_datasets\core\download\download_manager.py in _map_promise(map_fn, all_inputs)
393 """Map the function into each element and resolve the promise."""
394 all_promises = utils.map_nested(map_fn, all_inputs) # Apply the function
--> 395 res = utils.map_nested(_wait_on_promise, all_promises)
396 return res
~\.conda\envs\tf-tutorial\lib\site-packages\tensorflow_datasets\core\utils\py_utils.py in map_nested(function, data_struct, dict_only, map_tuple)
127 return {
128 k: map_nested(function, v, dict_only, map_tuple)
--> 129 for k, v in data_struct.items()
130 }
131 elif not dict_only:
~\.conda\envs\tf-tutorial\lib\site-packages\tensorflow_datasets\core\utils\py_utils.py in <dictcomp>(.0)
127 return {
128 k: map_nested(function, v, dict_only, map_tuple)
--> 129 for k, v in data_struct.items()
130 }
131 elif not dict_only:
~\.conda\envs\tf-tutorial\lib\site-packages\tensorflow_datasets\core\utils\py_utils.py in map_nested(function, data_struct, dict_only, map_tuple)
141 return tuple(mapped)
142 # Singleton
--> 143 return function(data_struct)
144
145
~\.conda\envs\tf-tutorial\lib\site-packages\tensorflow_datasets\core\download\download_manager.py in _wait_on_promise(p)
377
378 def _wait_on_promise(p):
--> 379 return p.get()
380
381 else:
~\.conda\envs\tf-tutorial\lib\site-packages\promise\promise.py in get(self, timeout)
508 target = self._target()
509 self._wait(timeout or DEFAULT_TIMEOUT)
--> 510 return self._target_settled_value(_raise=True)
511
512 def _target_settled_value(self, _raise=False):
~\.conda\envs\tf-tutorial\lib\site-packages\promise\promise.py in _target_settled_value(self, _raise)
512 def _target_settled_value(self, _raise=False):
513 # type: (bool) -> Any
--> 514 return self._target()._settled_value(_raise)
515
516 _value = _reason = _target_settled_value
~\.conda\envs\tf-tutorial\lib\site-packages\promise\promise.py in _settled_value(self, _raise)
222 if _raise:
223 raise_val = self._fulfillment_handler0
--> 224 reraise(type(raise_val), raise_val, self._traceback)
225 return self._fulfillment_handler0
226
~\.conda\envs\tf-tutorial\lib\site-packages\six.py in reraise(tp, value, tb)
694 if value.__traceback__ is not tb:
695 raise value.with_traceback(tb)
--> 696 raise value
697 finally:
698 value = None
~\.conda\envs\tf-tutorial\lib\site-packages\promise\promise.py in handle_future_result(future)
840 # type: (Any) -> None
841 try:
--> 842 resolve(future.result())
843 except Exception as e:
844 tb = exc_info()[2]
~\.conda\envs\tf-tutorial\lib\concurrent\futures\_base.py in result(self, timeout)
423 raise CancelledError()
424 elif self._state == FINISHED:
--> 425 return self.__get_result()
426
427 self._condition.wait(timeout)
~\.conda\envs\tf-tutorial\lib\concurrent\futures\_base.py in __get_result(self)
382 def __get_result(self):
383 if self._exception:
--> 384 raise self._exception
385 else:
386 return self._result
~\.conda\envs\tf-tutorial\lib\concurrent\futures\thread.py in run(self)
54
55 try:
---> 56 result = self.fn(*self.args, **self.kwargs)
57 except BaseException as exc:
58 self.future.set_exception(exc)
~\.conda\envs\tf-tutorial\lib\site-packages\tensorflow_datasets\core\download\extractor.py in _sync_extract(self, from_path, method, to_path)
92 except BaseException as err:
93 msg = 'Error while extracting %s to %s : %s' % (from_path, to_path, err)
---> 94 raise ExtractError(msg)
95 # `tf.io.gfile.Rename(overwrite=True)` doesn't work for non empty
96 # directories, so delete destination first, if it already exists.
ExtractError: Error while extracting C:\Users\%user%\tensorflow_datasets\downloads\images.cocodataset.org_zips_train20147eQIfmQL3bpVDgkOrnAQklNLVUtCsFrDPwMAuYSzF3U.zip to C:\Users\%user%\tensorflow_datasets\downloads\extracted\ZIP.images.cocodataset.org_zips_train20147eQIfmQL3bpVDgkOrnAQklNLVUtCsFrDPwMAuYSzF3U.zip : C:\Users\%user%\tensorflow_datasets\downloads\images.cocodataset.org_zips_train20147eQIfmQL3bpVDgkOrnAQklNLVUtCsFrDPwMAuYSzF3U.zip; Unknown error
The message seems cryptic to me. The folder it is trying to extract to does not exist when the notebook is started; it is created by TensorFlow, and only at that command. I obviously tried deleting it completely and running again, to no effect.
The code that leads to the error is (everything runs fine until the last line):
from __future__ import absolute_import, division, print_function, unicode_literals  # must precede all other imports

import tensorflow as tf
from tensorflow_examples.models.pix2pix import pix2pix
import tensorflow_datasets as tfds
from IPython.display import clear_output
import matplotlib.pyplot as plt

dataset, info = tfds.load('coco', with_info=True)
I also tried breaking the last command down into assigning the tfds.builder object and then running download_and_prepare, and again got the same error.
There is enough disk space: after the download there are still 50+ GB available, while the dataset is supposed to be 37 GB in its largest version (2014).
I had a similar problem with Windows 10 & COCO 2017. My solution was simple: extract the ZIP file manually into the folder path given in the error message.
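A sketch of that manual workaround, using the source and destination paths from the error message (the hashed file names are abbreviated here; copy the exact paths from your own traceback):
import zipfile

src = r'C:\Users\%user%\tensorflow_datasets\downloads\images.cocodataset.org_zips_train2014....zip'
dst = r'C:\Users\%user%\tensorflow_datasets\downloads\extracted\ZIP.images.cocodataset.org_zips_train2014....zip'

# Extract the archive to where tfds expects it, then rerun download_and_prepare();
# the extractor should find the destination already populated and skip it.
with zipfile.ZipFile(src) as zf:
    zf.extractall(dst)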
I am able to connect to my remote database (after authentication, of course), but I am not able to execute any commands or even list collections.
import pymongo

mongo_url = "blah.com:12345"
db_name = "db_name"
db_user_name = "user"
db_password = "password"
mongo_uri = "mongodb://" + db_user_name + ":" + db_password + "@" + mongo_url + "/" + db_name
connection = pymongo.MongoClient(mongo_uri)
db = connection[db_name]
print db.authenticate(db_user_name, db_password)  # Returns True
However, I am not able to use commands like db.collection_names() or anything via db.command().
I get this error stack (saying authentication failed):
---------------------------------------------------------------------------
OperationFailure Traceback (most recent call last)
<ipython-input-13-1840c0979539> in <module>()
----> 1 db.collection_names()
D:\Continuum\Anaconda2\lib\site-packages\pymongo\database.pyc in collection_names(self, include_system_collections)
515 """
516 with self.__client._socket_for_reads(
--> 517 ReadPreference.PRIMARY) as (sock_info, slave_okay):
518
519 wire_version = sock_info.max_wire_version
D:\Continuum\Anaconda2\lib\contextlib.pyc in __enter__(self)
15 def __enter__(self):
16 try:
---> 17 return self.gen.next()
18 except StopIteration:
19 raise RuntimeError("generator didn't yield")
D:\Continuum\Anaconda2\lib\site-packages\pymongo\mongo_client.pyc in _socket_for_reads(self, read_preference)
796 topology = self._get_topology()
797 single = topology.description.topology_type == TOPOLOGY_TYPE.Single
--> 798 with self._get_socket(read_preference) as sock_info:
799 slave_ok = (single and not sock_info.is_mongos) or (
800 preference != ReadPreference.PRIMARY)
D:\Continuum\Anaconda2\lib\contextlib.pyc in __enter__(self)
15 def __enter__(self):
16 try:
---> 17 return self.gen.next()
18 except StopIteration:
19 raise RuntimeError("generator didn't yield")
D:\Continuum\Anaconda2\lib\site-packages\pymongo\mongo_client.pyc in _get_socket(self, selector)
762 server = self._get_topology().select_server(selector)
763 try:
--> 764 with server.get_socket(self.__all_credentials) as sock_info:
765 yield sock_info
766 except NetworkTimeout:
D:\Continuum\Anaconda2\lib\contextlib.pyc in __enter__(self)
15 def __enter__(self):
16 try:
---> 17 return self.gen.next()
18 except StopIteration:
19 raise RuntimeError("generator didn't yield")
D:\Continuum\Anaconda2\lib\site-packages\pymongo\server.pyc in get_socket(self, all_credentials, checkout)
161 @contextlib.contextmanager
162 def get_socket(self, all_credentials, checkout=False):
--> 163 with self.pool.get_socket(all_credentials, checkout) as sock_info:
164 yield sock_info
165
D:\Continuum\Anaconda2\lib\contextlib.pyc in __enter__(self)
15 def __enter__(self):
16 try:
---> 17 return self.gen.next()
18 except StopIteration:
19 raise RuntimeError("generator didn't yield")
D:\Continuum\Anaconda2\lib\site-packages\pymongo\pool.pyc in get_socket(self, all_credentials, checkout)
582 sock_info = self._get_socket_no_auth()
583 try:
--> 584 sock_info.check_auth(all_credentials)
585 yield sock_info
586 except:
D:\Continuum\Anaconda2\lib\site-packages\pymongo\pool.pyc in check_auth(self, all_credentials)
330
331 for credentials in cached - authset:
--> 332 auth.authenticate(credentials, self)
333 self.authset.add(credentials)
334
D:\Continuum\Anaconda2\lib\site-packages\pymongo\auth.pyc in authenticate(credentials, sock_info)
462 mechanism = credentials.mechanism
463 auth_func = _AUTH_MAP.get(mechanism)
--> 464 auth_func(credentials, sock_info)
465
466
D:\Continuum\Anaconda2\lib\site-packages\pymongo\auth.pyc in _authenticate_default(credentials, sock_info)
442 def _authenticate_default(credentials, sock_info):
443 if sock_info.max_wire_version >= 3:
--> 444 return _authenticate_scram_sha1(credentials, sock_info)
445 else:
446 return _authenticate_mongo_cr(credentials, sock_info)
D:\Continuum\Anaconda2\lib\site-packages\pymongo\auth.pyc in _authenticate_scram_sha1(credentials, sock_info)
226 ('conversationId', res['conversationId']),
227 ('payload', Binary(client_final))])
--> 228 res = sock_info.command(source, cmd)
229
230 parsed = _parse_scram_response(res['payload'])
D:\Continuum\Anaconda2\lib\site-packages\pymongo\pool.pyc in command(self, dbname, spec, slave_ok, read_preference, codec_options, check, allowable_errors, check_keys, read_concern)
237 check, allowable_errors, self.address,
238 check_keys, self.listeners, self.max_bson_size,
--> 239 read_concern)
240 except OperationFailure:
241 raise
D:\Continuum\Anaconda2\lib\site-packages\pymongo\network.pyc in command(sock, dbname, spec, slave_ok, is_mongos, read_preference, codec_options, check, allowable_errors, address, check_keys, listeners, max_bson_size, read_concern)
100 response_doc = unpacked['data'][0]
101 if check:
--> 102 helpers._check_command_response(response_doc, None, allowable_errors)
103 except Exception as exc:
104 if publish:
D:\Continuum\Anaconda2\lib\site-packages\pymongo\helpers.pyc in _check_command_response(response, msg, allowable_errors)
203
204 msg = msg or "%s"
--> 205 raise OperationFailure(msg % errmsg, code, response)
206
207
OperationFailure: Authentication failed.
But I am able to do these operations from my mongo shell.
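Since the shell authenticates fine, one common culprit is that the Python URI authenticates against db_name while the user was actually created elsewhere. A sketch, reusing the variables from the snippet above; 'admin' is an assumption about where the user is defined:
import pymongo

# '?authSource=admin' tells the driver which database holds the user's credentials.
mongo_uri = ("mongodb://" + db_user_name + ":" + db_password + "@" +
             mongo_url + "/" + db_name + "?authSource=admin")
connection = pymongo.MongoClient(mongo_uri)
print(connection[db_name].collection_names())  # list_collection_names() in newer PyMongo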