MemoryError & ValueError when pandas saves to a new file - python

A few simple lines of a script that removes some columns from an Excel file and saves the result to a new file.
import pandas as pd
import numpy as np
work_file = "C:\\My Documents\\the_file.xlsx"
df = pd.read_excel(work_file, sheet_name = "Sheet1", index_col = 0)
column_list_to_remove = ["Name","Gender","Register"]
results1 = df.drop(column_list_to_remove, axis=1)
writer = pd.ExcelWriter("C:\\My Documents\\new-file.xlsx")
results1.to_excel(writer,'Sheet1')
writer.save()
The script had been working well on an old computer, on both small and big (thousands of rows) Excel files.
I have now upgraded to a new computer with more RAM (16 GB). The script still works on a small file (a few thousand rows), but when it runs on the bigger file (an Excel file with a few hundred thousand rows) it gives me the error message below.
How can I get this corrected? Thank you.
Error message:
Traceback (most recent call last):
File "C:\Python38\lib\xml\etree\ElementTree.py", line 832, in _get_writer
yield file.write
File "C:\Python38\lib\xml\etree\ElementTree.py", line 772, in write
serialize(write, self._root, qnames, namespaces,
File "C:\Python38\lib\xml\etree\ElementTree.py", line 937, in _serialize_xml
_serialize_xml(write, e, qnames, None,
File "C:\Python38\lib\xml\etree\ElementTree.py", line 937, in _serialize_xml
_serialize_xml(write, e, qnames, None,
File "C:\Python38\lib\xml\etree\ElementTree.py", line 937, in _serialize_xml
_serialize_xml(write, e, qnames, None,
File "C:\Python38\lib\xml\etree\ElementTree.py", line 931, in _serialize_xml
write(" %s=\"%s\"" % (qnames[k], v))
MemoryError
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "C:\My Documents\my_script.py", line 9, in <module>
writer.save()
File "C:\Python38\lib\site-packages\pandas\io\excel\_openpyxl.py", line 43, in save
return self.book.save(self.path)
File "C:\Python38\lib\site-packages\openpyxl\workbook\workbook.py", line 392, in save
save_workbook(self, filename)
File "C:\Python38\lib\site-packages\openpyxl\writer\excel.py", line 293, in save_workbook
writer.save()
File "C:\Python38\lib\site-packages\openpyxl\writer\excel.py", line 275, in save
self.write_data()
File "C:\Python38\lib\site-packages\openpyxl\writer\excel.py", line 75, in write_data
self._write_worksheets()
File "C:\Python38\lib\site-packages\openpyxl\writer\excel.py", line 215, in _write_worksheets
self.write_worksheet(ws)
File "C:\Python38\lib\site-packages\openpyxl\writer\excel.py", line 200, in write_worksheet
writer.write()
File "C:\Python38\lib\site-packages\openpyxl\worksheet\_writer.py", line 360, in write
self.close()
File "C:\Python38\lib\site-packages\openpyxl\worksheet\_writer.py", line 368, in close
self.xf.close()
File "C:\Python38\lib\site-packages\openpyxl\worksheet\_writer.py", line 299, in get_stream
pass
File "C:\Python38\lib\contextlib.py", line 120, in __exit__
next(self.gen)
File "C:\Python38\lib\site-packages\et_xmlfile\xmlfile.py", line 50, in element
self._write_element(el)
File "C:\Python38\lib\site-packages\et_xmlfile\xmlfile.py", line 77, in _write_element
xml = tostring(element)
File "C:\Python38\lib\xml\etree\ElementTree.py", line 1133, in tostring
ElementTree(element).write(stream, encoding,
File "C:\Python38\lib\xml\etree\ElementTree.py", line 772, in write
serialize(write, self._root, qnames, namespaces,
File "C:\Python38\lib\contextlib.py", line 131, in __exit__
self.gen.throw(type, value, traceback)
File "C:\Python38\lib\xml\etree\ElementTree.py", line 832, in _get_writer
yield file.write
File "C:\Python38\lib\contextlib.py", line 525, in __exit__
raise exc_details[1]
File "C:\Python38\lib\contextlib.py", line 510, in __exit__
if cb(*exc_details):
File "C:\Python38\lib\contextlib.py", line 382, in _exit_wrapper
callback(*args, **kwds)
ValueError: I/O operation on closed file.

Replace your last three lines of code with the following:
with pd.ExcelWriter("C:\\My Documents\\new-file.xlsx") as writer:
    results1.to_excel(writer)
The with block ensures the workbook is written out and the file handle is closed even if an error occurs, and it avoids writer.save(), which newer pandas versions have deprecated and removed.

In the end the problem was resolved by reinstalling a 64-bit Python build. No changes were made to the code.
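For reference, a quick way to check which build of Python is actually running, using only the standard library:
import platform, struct
print(platform.architecture()[0])   # '32bit' or '64bit'
print(struct.calcsize("P") * 8)     # pointer size in bits: 32 or 64
A 32-bit Python process on Windows can only address roughly 2 GB of memory regardless of how much RAM is installed, which is why the large workbook failed even on a 16 GB machine.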

Related

Errors when attempting to install CUDA through CMD

When attempting to install CUDA through Command Prompt, I get the following errors and have no idea how to fix them.
File "c:\users\silasm\appdata\local\programs\python\python39\lib\site-packages\pip\_vendor\urllib3\response.py", line 438, in _error_catcher
yield
File "c:\users\silasm\appdata\local\programs\python\python39\lib\site-packages\pip\_vendor\urllib3\response.py", line 519, in read
data = self._fp.read(amt) if not fp_closed else b""
File "c:\users\silasm\appdata\local\programs\python\python39\lib\site-packages\pip\_vendor\cachecontrol\filewrapper.py", line 62, in read
data = self.__fp.read(amt)
File "c:\users\silasm\appdata\local\programs\python\python39\lib\http\client.py", line 459, in read
n = self.readinto(b)
File "c:\users\silasm\appdata\local\programs\python\python39\lib\http\client.py", line 503, in readinto
n = self.fp.readinto(b)
File "c:\users\silasm\appdata\local\programs\python\python39\lib\socket.py", line 704, in readinto
return self._sock.recv_into(b)
File "c:\users\silasm\appdata\local\programs\python\python39\lib\ssl.py", line 1241, in recv_into
return self.read(nbytes, buffer)
File "c:\users\silasm\appdata\local\programs\python\python39\lib\ssl.py", line 1099, in read
return self._sslobj.read(len, buffer)
socket.timeout: The read operation timed out
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "c:\users\silasm\appdata\local\programs\python\python39\lib\site-packages\pip\_internal\cli\base_command.py", line 173, in _main
status = self.run(options, args)
File "c:\users\silasm\appdata\local\programs\python\python39\lib\site-packages\pip\_internal\cli\req_command.py", line 203, in wrapper
return func(self, options, args)
File "c:\users\silasm\appdata\local\programs\python\python39\lib\site-packages\pip\_internal\commands\install.py", line 315, in run
requirement_set = resolver.resolve(
File "c:\users\silasm\appdata\local\programs\python\python39\lib\site-packages\pip\_internal\resolution\resolvelib\resolver.py", line 94, in resolve
result = self._result = resolver.resolve(
File "c:\users\silasm\appdata\local\programs\python\python39\lib\site-packages\pip\_vendor\resolvelib\resolvers.py", line 472, in resolve
state = resolution.resolve(requirements, max_rounds=max_rounds)
File "c:\users\silasm\appdata\local\programs\python\python39\lib\site-packages\pip\_vendor\resolvelib\resolvers.py", line 341, in resolve
self._add_to_criteria(self.state.criteria, r, parent=None)
File "c:\users\silasm\appdata\local\programs\python\python39\lib\site-packages\pip\_vendor\resolvelib\resolvers.py", line 172, in _add_to_criteria
if not criterion.candidates:
File "c:\users\silasm\appdata\local\programs\python\python39\lib\site-packages\pip\_vendor\resolvelib\structs.py", line 151, in __bool__
return bool(self._sequence)
File "c:\users\silasm\appdata\local\programs\python\python39\lib\site-packages\pip\_internal\resolution\resolvelib\found_candidates.py", line 140, in __bool__
return any(self)
File "c:\users\silasm\appdata\local\programs\python\python39\lib\site-packages\pip\_internal\resolution\resolvelib\found_candidates.py", line 128, in <genexpr>
return (c for c in iterator if id(c) not in self._incompatible_ids)
File "c:\users\silasm\appdata\local\programs\python\python39\lib\site-packages\pip\_internal\resolution\resolvelib\found_candidates.py", line 32, in _iter_built
candidate = func()
File "c:\users\silasm\appdata\local\programs\python\python39\lib\site-packages\pip\_internal\resolution\resolvelib\factory.py", line 204, in _make_candidate_from_link
self._link_candidate_cache[link] = LinkCandidate(
File "c:\users\silasm\appdata\local\programs\python\python39\lib\site-packages\pip\_internal\resolution\resolvelib\candidates.py", line 295, in __init__
super().__init__(
File "c:\users\silasm\appdata\local\programs\python\python39\lib\site-packages\pip\_internal\resolution\resolvelib\candidates.py", line 156, in __init__
self.dist = self._prepare()
File "c:\users\silasm\appdata\local\programs\python\python39\lib\site-packages\pip\_internal\resolution\resolvelib\candidates.py", line 227, in _prepare
dist = self._prepare_distribution()
File "c:\users\silasm\appdata\local\programs\python\python39\lib\site-packages\pip\_internal\resolution\resolvelib\candidates.py", line 305, in _prepare_distribution
return self._factory.preparer.prepare_linked_requirement(
File "c:\users\silasm\appdata\local\programs\python\python39\lib\site-packages\pip\_internal\operations\prepare.py", line 508, in prepare_linked_requirement
return self._prepare_linked_requirement(req, parallel_builds)
File "c:\users\silasm\appdata\local\programs\python\python39\lib\site-packages\pip\_internal\operations\prepare.py", line 550, in _prepare_linked_requirement
local_file = unpack_url(
File "c:\users\silasm\appdata\local\programs\python\python39\lib\site-packages\pip\_internal\operations\prepare.py", line 239, in unpack_url
file = get_http_url(
File "c:\users\silasm\appdata\local\programs\python\python39\lib\site-packages\pip\_internal\operations\prepare.py", line 102, in get_http_url
from_path, content_type = download(link, temp_dir.path)
File "c:\users\silasm\appdata\local\programs\python\python39\lib\site-packages\pip\_internal\network\download.py", line 145, in __call__
for chunk in chunks:
File "c:\users\silasm\appdata\local\programs\python\python39\lib\site-packages\pip\_internal\cli\progress_bars.py", line 144, in iter
for x in it:
File "c:\users\silasm\appdata\local\programs\python\python39\lib\site-packages\pip\_internal\network\utils.py", line 63, in response_chunks
for chunk in response.raw.stream(
File "c:\users\silasm\appdata\local\programs\python\python39\lib\site-packages\pip\_vendor\urllib3\response.py", line 576, in stream
data = self.read(amt=amt, decode_content=decode_content)
File "c:\users\silasm\appdata\local\programs\python\python39\lib\site-packages\pip\_vendor\urllib3\response.py", line 541, in read
raise IncompleteRead(self._fp_bytes_read, self.length_remaining)
File "c:\users\silasm\appdata\local\programs\python\python39\lib\contextlib.py", line 135, in __exit__
self.gen.throw(type, value, traceback)
File "c:\users\silasm\appdata\local\programs\python\python39\lib\site-packages\pip\_vendor\urllib3\response.py", line 443, in _error_catcher
raise ReadTimeoutError(self._pool, None, "Read timed out.")
pip._vendor.urllib3.exceptions.ReadTimeoutError: HTTPSConnectionPool(host='download.pytorch.org', port=443): Read timed out.
Any help would be much appreciated, as I don't know how to fix this.
Also, every time I try opening a Python file, it immediately shuts down after booting up. I assume this has something to do with conflicting versions of Python, but I have uninstalled all versions and reinstalled just 3.9. If there is something I can use to completely rid my computer of all things Python so I can start from the ground up, I would appreciate it very much, as I've had more than just this problem when it comes to Python.
First, this looks like a temporary network issue: the download from download.pytorch.org timed out. You can try rerunning the same command, and hopefully it will work. Good luck out there.
Second, for managing different Python versions, it is better to use conda. Reference: https://docs.anaconda.com/anaconda/install/index.html
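If the download keeps timing out on the retry, you can also give pip a longer network timeout; torch below is just a placeholder for whatever packages you were installing:
pip install --timeout 120 torch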

BadZipFile: file is not a zip file in reading excel file using pandas

I'm trying to execute the following code and I'm constantly experiencing this issue.
import pandas as pd
df = pd.read_excel('First_Run.xlsx', engine='openpyxl')
print(df.head())
I've made sure the Excel file is there at the respective path, and have tried multiple ways to resolve the issue but failed to find the desired solution.
Here's the output of the code block.
Traceback (most recent call last):
File "c:\Users\fharookshaik\Desktop\Gmail Bot\temp.py", line 7, in <module>
df = pd.read_excel('First_Run.xlsx',engine='openpyxl')
File "C:\Users\fharookshaik\AppData\Local\Programs\Python\Python38\lib\site-packages\pandas\util\_decorators.py", line 299, in wrapper
return func(*args, **kwargs)
File "C:\Users\fharookshaik\AppData\Local\Programs\Python\Python38\lib\site-packages\pandas\io\excel\_base.py", line 336, in read_excel
io = ExcelFile(io, storage_options=storage_options, engine=engine)
File "C:\Users\fharookshaik\AppData\Local\Programs\Python\Python38\lib\site-packages\pandas\io\excel\_base.py", line 1131, in __init__
self._reader = self._engines[engine](self._io, storage_options=storage_options)
File "C:\Users\fharookshaik\AppData\Local\Programs\Python\Python38\lib\site-packages\pandas\io\excel\_openpyxl.py", line 475, in __init__
super().__init__(filepath_or_buffer, storage_options=storage_options)
File "C:\Users\fharookshaik\AppData\Local\Programs\Python\Python38\lib\site-packages\pandas\io\excel\_base.py", line 391, in __init__
self.book = self.load_workbook(self.handles.handle)
File "C:\Users\fharookshaik\AppData\Local\Programs\Python\Python38\lib\site-packages\pandas\io\excel\_openpyxl.py", line 486, in load_workbook
return load_workbook(
File "C:\Users\fharookshaik\AppData\Local\Programs\Python\Python38\lib\site-packages\openpyxl\reader\excel.py", line 315, in load_workbook
reader = ExcelReader(filename, read_only, keep_vba,
File "C:\Users\fharookshaik\AppData\Local\Programs\Python\Python38\lib\site-packages\openpyxl\reader\excel.py", line 124, in __init__
self.archive = _validate_archive(fn)
File "C:\Users\fharookshaik\AppData\Local\Programs\Python\Python38\lib\site-packages\openpyxl\reader\excel.py", line 96, in _validate_archive
archive = ZipFile(filename, 'r')
File "C:\Users\fharookshaik\AppData\Local\Programs\Python\Python38\lib\zipfile.py", line 1269, in __init__
self._RealGetContents()
File "C:\Users\fharookshaik\AppData\Local\Programs\Python\Python38\lib\zipfile.py", line 1336, in _RealGetContents
raise BadZipFile("File is not a zip file")
zipfile.BadZipFile: File is not a zip file
Hope this will be answered soon by the brilliant minds of this dynamic community.
Thanks in advance. 🙂
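One thing worth checking before anything else: a real .xlsx file is a zip container, and BadZipFile almost always means the file on disk is not one (for example an old binary .xls that was renamed to .xlsx, an HTML error page saved by a downloader, or a truncated copy). A quick check with only the standard library, using the path from the question:
import zipfile
path = 'First_Run.xlsx'
with open(path, 'rb') as f:
    print(f.read(4))                # a valid .xlsx starts with the zip signature b'PK\x03\x04'
print(zipfile.is_zipfile(path))     # False here would explain the BadZipFile error
If the check fails, re-export or re-download the file as a genuine .xlsx and the read_excel call above should work unchanged.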

Pandas Dedupe not working: Multiprocessing and Permission error

I was trying to clean up duplicates in an excel file using dedupe.
The code worked fine at first, and the code itself is simple, but whenever I run it I get the error below. It works again if I delete all the temp files and restart PyCharm or restart my computer, but then it won't run a second time.
The data file is a CSV with a list of random, similar names in column A, with the header 'Name'. Please help to resolve this. Thank you.
Code
import pandas as pd
import pandas_dedupe
#loading data
df = pd.read_csv('duplicate.csv')
#deduplication process
df_final = pandas_dedupe.dedupe_dataframe(df,['Name'])
#save to csv
df_final.to_csv('cleansed_output.csv')
I'm getting the error below:
C:\Users\Username\AppData\Roaming\Python\Python38\site-packages\pandas_dedupe\utility_functions.py:17: FutureWarning: The default value of regex will change from True to False in a future version.
df[i] = df[i].str.replace('[^\w\s\.\-\(\)\,\:\/\\\\]','')
Reading from dedupe_dataframe_learned_settings
Clustering...
Traceback (most recent call last):
File "C:\Users\Username\AppData\Roaming\Python\Python38\site-packages\dedupe\api.py", line 103, in score
matches = core.scoreDuplicates(pairs,
File "C:\Users\Username\AppData\Roaming\Python\Python38\site-packages\dedupe\core.py", line 244, in scoreDuplicates
process.start()
File "C:\Users\Username\AppData\Local\Programs\Python\Python38\lib\multiprocessing\process.py", line 121, in start
self._popen = self._Popen(self)
File "C:\Users\Username\AppData\Local\Programs\Python\Python38\lib\multiprocessing\context.py", line 224, in _Popen
return _default_context.get_context().Process._Popen(process_obj)
File "C:\Users\Username\AppData\Local\Programs\Python\Python38\lib\multiprocessing\context.py", line 327, in _Popen
return Popen(process_obj)
File "C:\Users\Username\AppData\Local\Programs\Python\Python38\lib\multiprocessing\popen_spawn_win32.py", line 45, in __init__
prep_data = spawn.get_preparation_data(process_obj._name)
File "C:\Users\Username\AppData\Local\Programs\Python\Python38\lib\multiprocessing\spawn.py", line 154, in get_preparation_data
_check_not_importing_main()
File "C:\Users\Username\AppData\Local\Programs\Python\Python38\lib\multiprocessing\spawn.py", line 134, in _check_not_importing_main
raise RuntimeError('''
RuntimeError:
An attempt has been made to start a new process before the
current process has finished its bootstrapping phase.
This probably means that you are not using fork to start your
child processes and you have forgotten to use the proper idiom
in the main module:
if __name__ == '__main__':
freeze_support()
...
The "freeze_support()" line can be omitted if the program
is not going to be frozen to produce an executable.
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "<string>", line 1, in <module>
File "C:\Users\Username\AppData\Local\Programs\Python\Python38\lib\multiprocessing\spawn.py", line 116, in spawn_main
exitcode = _main(fd, parent_sentinel)
File "C:\Users\Username\AppData\Local\Programs\Python\Python38\lib\multiprocessing\spawn.py", line 125, in _main
prepare(preparation_data)
File "C:\Users\Username\AppData\Local\Programs\Python\Python38\lib\multiprocessing\spawn.py", line 236, in prepare
_fixup_main_from_path(data['init_main_from_path'])
File "C:\Users\Username\AppData\Local\Programs\Python\Python38\lib\multiprocessing\spawn.py", line 287, in _fixup_main_from_path
main_content = runpy.run_path(main_path,
File "C:\Users\Username\AppData\Local\Programs\Python\Python38\lib\runpy.py", line 265, in run_path
return _run_module_code(code, init_globals, run_name,
File "C:\Users\Username\AppData\Local\Programs\Python\Python38\lib\runpy.py", line 97, in _run_module_code
_run_code(code, mod_globals, init_globals,
File "C:\Users\Username\AppData\Local\Programs\Python\Python38\lib\runpy.py", line 87, in _run_code
exec(code, run_globals)
File "C:\Users\Username\PycharmProjects\Duplicate\main.py", line 10, in <module>
df_final = pandas_dedupe.dedupe_dataframe(df,['Name'])
File "C:\Users\Username\AppData\Roaming\Python\Python38\site-packages\pandas_dedupe\dedupe_dataframe.py", line 249, in dedupe_dataframe
clustered_df = _cluster(deduper, data_d, threshold, canonicalize)
File "C:\Users\Username\AppData\Roaming\Python\Python38\site-packages\pandas_dedupe\dedupe_dataframe.py", line 143, in _cluster
clustered_dupes = deduper.partition(data, threshold)
File "C:\Users\Username\AppData\Roaming\Python\Python38\site-packages\dedupe\api.py", line 170, in partition
pair_scores = self.score(pairs)
File "C:\Users\Username\AppData\Roaming\Python\Python38\site-packages\dedupe\api.py", line 108, in score
raise RuntimeError('''
RuntimeError:
You need to either turn off multiprocessing or protect
the calls to the Dedupe methods with a
`if __name__ == '__main__'` in your main module, see
https://docs.python.org/3/library/multiprocessing.html#the-spawn-and-forkserver-start-methods
Traceback (most recent call last):
File "C:\Users\Username\AppData\Local\Programs\Python\Python38\lib\shutil.py", line 616, in _rmtree_unsafe
os.unlink(fullname)
PermissionError: [WinError 32] The process cannot access the file because it is being used by another process: 'C:\\Users\\USERNAME~1.KAB\\AppData\\Local\\Temp\\tmpp9123_pc\\blocks.db'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "C:\Users\Username\AppData\Local\Programs\Python\Python38\lib\tempfile.py", line 802, in onerror
_os.unlink(path)
PermissionError: [WinError 32] The process cannot access the file because it is being used by another process: 'C:\\Users\\USERNAME~1.KAB\\AppData\\Local\\Temp\\tmpp9123_pc\\blocks.db'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "C:\Users\Username\AppData\Local\Programs\Python\Python38\lib\weakref.py", line 642, in _exitfunc
f()
File "C:\Users\Username\AppData\Local\Programs\Python\Python38\lib\weakref.py", line 566, in __call__
return info.func(*info.args, **(info.kwargs or {}))
File "C:\Users\Username\AppData\Local\Programs\Python\Python38\lib\tempfile.py", line 817, in _cleanup
cls._rmtree(name)
File "C:\Users\Username\AppData\Local\Programs\Python\Python38\lib\tempfile.py", line 813, in _rmtree
_shutil.rmtree(name, onerror=onerror)
File "C:\Users\Username\AppData\Local\Programs\Python\Python38\lib\shutil.py", line 740, in rmtree
return _rmtree_unsafe(path, onerror)
File "C:\Users\Username\AppData\Local\Programs\Python\Python38\lib\shutil.py", line 618, in _rmtree_unsafe
onerror(os.unlink, fullname, sys.exc_info())
File "C:\Users\Username\AppData\Local\Programs\Python\Python38\lib\tempfile.py", line 805, in onerror
cls._rmtree(path)
File "C:\Users\Username\AppData\Local\Programs\Python\Python38\lib\tempfile.py", line 813, in _rmtree
_shutil.rmtree(name, onerror=onerror)
File "C:\Users\Username\AppData\Local\Programs\Python\Python38\lib\shutil.py", line 740, in rmtree
return _rmtree_unsafe(path, onerror)
File "C:\Users\Username\AppData\Local\Programs\Python\Python38\lib\shutil.py", line 599, in _rmtree_unsafe
onerror(os.scandir, path, sys.exc_info())
File "C:\Users\Username\AppData\Local\Programs\Python\Python38\lib\shutil.py", line 596, in _rmtree_unsafe
with os.scandir(path) as scandir_it:
NotADirectoryError: [WinError 267] The directory name is invalid: 'C:\\Users\\USERNAME~1.KAB\\AppData\\Local\\Temp\\tmpp9123_pc\\blocks.db'
Process finished with exit code -1
The answer is in the error:
You need to either turn off multiprocessing or protect the calls to the Dedupe methods with a if __name__ == '__main__' in your main module
Change your code to the following and try again. On Windows, multiprocessing uses the spawn start method, which re-imports your main module in each child process, so any code that starts worker processes must sit behind the __main__ guard:
import pandas as pd
import pandas_dedupe
if __name__ == "__main__":
#loading data
df = pd.read_csv('duplicate.csv')
#deduplication process
df_final = pandas_dedupe.dedupe_dataframe(df,['Name'])
#save to csv
df_final.to_csv('cleansed_output.csv')

Can't rebuild indexes in django-haystack

I've installed Apache Solr 4.10.4, django-haystack 2.4.0, pysolr 3.3.2 and Django 1.8.6. I'm finishing a simple blog application in the Django framework. I typed the following command in the terminal:
(my_env) pecan#tux ~/Documents/Django/mysite $ python manage.py rebuild_index
but it returns an error:
WARNING: This will irreparably remove EVERYTHING from your search index in connection 'default'.
Your choices after this are to restore from backups or rebuild via the `rebuild_index` command.
Are you sure you wish to continue? [y/N] y
Removing all documents from your index because you said so.
All documents removed.
Indexing 3 posts
ERROR:root:Error updating blog using default
Traceback (most recent call last):
File "/usr/lib64/python3.4/xml/etree/ElementTree.py", line 1080, in _escape_attrib
if "&" in text:
TypeError: argument of type 'int' is not iterable
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/pecan/Documents/Django/my_env/lib/python3.4/site-packages/haystack/management/commands/update_index.py", line 188, in handle_label
self.update_backend(label, using)
File "/home/pecan/Documents/Django/my_env/lib/python3.4/site-packages/haystack/management/commands/update_index.py", line 233, in update_backend
do_update(backend, index, qs, start, end, total, verbosity=self.verbosity, commit=self.commit)
File "/home/pecan/Documents/Django/my_env/lib/python3.4/site-packages/haystack/management/commands/update_index.py", line 96, in do_update
backend.update(index, current_qs, commit=commit)
File "/home/pecan/Documents/Django/my_env/lib/python3.4/site-packages/haystack/backends/solr_backend.py", line 75, in update
self.conn.add(docs, commit=commit, boost=index.get_field_weights())
File "/home/pecan/Documents/Django/my_env/lib/python3.4/site-packages/pysolr.py", line 807, in add
m = ET.tostring(message, encoding='utf-8')
File "/usr/lib64/python3.4/xml/etree/ElementTree.py", line 1125, in tostring
short_empty_elements=short_empty_elements)
File "/usr/lib64/python3.4/xml/etree/ElementTree.py", line 777, in write
short_empty_elements=short_empty_elements)
File "/usr/lib64/python3.4/xml/etree/ElementTree.py", line 942, in _serialize_xml
short_empty_elements=short_empty_elements)
File "/usr/lib64/python3.4/xml/etree/ElementTree.py", line 942, in _serialize_xml
short_empty_elements=short_empty_elements)
File "/usr/lib64/python3.4/xml/etree/ElementTree.py", line 934, in _serialize_xml
v = _escape_attrib(v)
File "/usr/lib64/python3.4/xml/etree/ElementTree.py", line 1092, in _escape_attrib
_raise_serialization_error(text)
File "/usr/lib64/python3.4/xml/etree/ElementTree.py", line 1058, in _raise_serialization_error
"cannot serialize %r (type %s)" % (text, type(text).__name__)
TypeError: cannot serialize 1 (type int)
Traceback (most recent call last):
File "/usr/lib64/python3.4/xml/etree/ElementTree.py", line 1080, in _escape_attrib
if "&" in text:
TypeError: argument of type 'int' is not iterable
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "manage.py", line 22, in <module>
execute_from_command_line(sys.argv)
File "/home/pecan/Documents/Django/my_env/lib/python3.4/site-packages/django/core/management/__init__.py", line 354, in execute_from_command_line
utility.execute()
File "/home/pecan/Documents/Django/my_env/lib/python3.4/site-packages/django/core/management/__init__.py", line 346, in execute
self.fetch_command(subcommand).run_from_argv(self.argv)
File "/home/pecan/Documents/Django/my_env/lib/python3.4/site-packages/django/core/management/base.py", line 394, in run_from_argv
self.execute(*args, **cmd_options)
File "/home/pecan/Documents/Django/my_env/lib/python3.4/site-packages/django/core/management/base.py", line 445, in execute
output = self.handle(*args, **options)
File "/home/pecan/Documents/Django/my_env/lib/python3.4/site-packages/haystack/management/commands/rebuild_index.py", line 26, in handle
call_command('update_index', **options)
File "/home/pecan/Documents/Django/my_env/lib/python3.4/site-packages/django/core/management/__init__.py", line 120, in call_command
return command.execute(*args, **defaults)
File "/home/pecan/Documents/Django/my_env/lib/python3.4/site-packages/django/core/management/base.py", line 445, in execute
output = self.handle(*args, **options)
File "/home/pecan/Documents/Django/my_env/lib/python3.4/site-packages/haystack/management/commands/update_index.py", line 183, in handle
return super(Command, self).handle(*items, **options)
File "/home/pecan/Documents/Django/my_env/lib/python3.4/site-packages/django/core/management/base.py", line 623, in handle
label_output = self.handle_label(label, **options)
File "/home/pecan/Documents/Django/my_env/lib/python3.4/site-packages/haystack/management/commands/update_index.py", line 188, in handle_label
self.update_backend(label, using)
File "/home/pecan/Documents/Django/my_env/lib/python3.4/site-packages/haystack/management/commands/update_index.py", line 233, in update_backend
do_update(backend, index, qs, start, end, total, verbosity=self.verbosity, commit=self.commit)
File "/home/pecan/Documents/Django/my_env/lib/python3.4/site-packages/haystack/management/commands/update_index.py", line 96, in do_update
backend.update(index, current_qs, commit=commit)
File "/home/pecan/Documents/Django/my_env/lib/python3.4/site-packages/haystack/backends/solr_backend.py", line 75, in update
self.conn.add(docs, commit=commit, boost=index.get_field_weights())
File "/home/pecan/Documents/Django/my_env/lib/python3.4/site-packages/pysolr.py", line 807, in add
m = ET.tostring(message, encoding='utf-8')
File "/usr/lib64/python3.4/xml/etree/ElementTree.py", line 1125, in tostring
short_empty_elements=short_empty_elements)
File "/usr/lib64/python3.4/xml/etree/ElementTree.py", line 777, in write
short_empty_elements=short_empty_elements)
File "/usr/lib64/python3.4/xml/etree/ElementTree.py", line 942, in _serialize_xml
short_empty_elements=short_empty_elements)
File "/usr/lib64/python3.4/xml/etree/ElementTree.py", line 942, in _serialize_xml
short_empty_elements=short_empty_elements)
File "/usr/lib64/python3.4/xml/etree/ElementTree.py", line 934, in _serialize_xml
v = _escape_attrib(v)
File "/usr/lib64/python3.4/xml/etree/ElementTree.py", line 1092, in _escape_attrib
_raise_serialization_error(text)
File "/usr/lib64/python3.4/xml/etree/ElementTree.py", line 1058, in _raise_serialization_error
"cannot serialize %r (type %s)" % (text, type(text).__name__)
TypeError: cannot serialize 1 (type int)
I had to install older versions of packages and Solr because my book is a bit ancient.
Can anybody help me?
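For what it's worth, the traceback bottoms out in ElementTree, which only accepts strings as XML attribute values. An integer attribute value, most likely a field boost coming from index.get_field_weights() in the lines above, reproduces exactly the same failure in isolation:
import xml.etree.ElementTree as ET
el = ET.Element('field', attrib={'boost': 1})   # int where a str is required
ET.tostring(el)                                 # TypeError: cannot serialize 1 (type int)
So whichever boost or field weight is arriving at pysolr as the integer 1 has to reach ElementTree as a string before the index update can be serialized.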

€ sign - xlsxwriter error

I'm trying to get some data from a web page and write it into an xlsx file. Everything seems fine, but an encoding error is raised while the data is written to the xlsx file, during CLOSING of the file.
ERROR:
Traceback (most recent call last):
File "C:/Users/Milano/PycharmProjects/distrelec/crawler.py", line 429, in <module>
temp_file_to_xlsx()
File "C:/Users/Milano/PycharmProjects/distrelec/crawler.py", line 119, in temp_file_to_xlsx
wb.close()
File "C:\Python27\lib\site-packages\xlsxwriter\workbook.py", line 295, in close
self._store_workbook()
File "C:\Python27\lib\site-packages\xlsxwriter\workbook.py", line 518, in _store_workbook
xml_files = packager._create_package()
File "C:\Python27\lib\site-packages\xlsxwriter\packager.py", line 134, in _create_package
self._write_workbook_file()
File "C:\Python27\lib\site-packages\xlsxwriter\packager.py", line 174, in _write_workbook_file
workbook._assemble_xml_file()
File "C:\Python27\lib\site-packages\xlsxwriter\workbook.py", line 464, in _assemble_xml_file
self._write_sheets()
File "C:\Python27\lib\site-packages\xlsxwriter\workbook.py", line 1455, in _write_sheets
self._write_sheet(worksheet.name, id_num, worksheet.hidden)
File "C:\Python27\lib\site-packages\xlsxwriter\workbook.py", line 1472, in _write_sheet
self._xml_empty_tag('sheet', attributes)
File "C:\Python27\lib\site-packages\xlsxwriter\xmlwriter.py", line 80, in _xml_empty_tag
self.fh.write("<%s/>" % tag)
File "C:\Python27\lib\codecs.py", line 694, in write
return self.writer.write(data)
File "C:\Python27\lib\codecs.py", line 357, in write
data, consumed = self.encode(object, self.errors)
UnicodeDecodeError: 'ascii' codec can't decode byte 0xe2 in position 23: ordinal not in range(128)
To find out where the problem is, I've edited the codecs module:
def write(self, object):
    """ Writes the object's contents encoded to self.stream.
    """
    try:
        data, consumed = self.encode(object, self.errors)
        self.stream.write(data)
    except:
        print object
        print repr(object)
        raise Exception
The output is:
<sheet name="Android PC–APC" sheetId="42" r:id="rId42"/>
'<sheet name="Android PC\xe2\x80\x93APC" sheetId="42" r:id="rId42"/>'
temp_file_to_xlsx()
File "C:/Users/Milano/PycharmProjects/distrelec/crawler.py", line 119, in temp_file_to_xlsx
wb.close()
File "C:\Python27\lib\site-packages\xlsxwriter\workbook.py", line 295, in close
self._store_workbook()
File "C:\Python27\lib\site-packages\xlsxwriter\workbook.py", line 518, in _store_workbook
xml_files = packager._create_package()
File "C:\Python27\lib\site-packages\xlsxwriter\packager.py", line 134, in _create_package
self._write_workbook_file()
File "C:\Python27\lib\site-packages\xlsxwriter\packager.py", line 174, in _write_workbook_file
workbook._assemble_xml_file()
File "C:\Python27\lib\site-packages\xlsxwriter\workbook.py", line 464, in _assemble_xml_file
self._write_sheets()
File "C:\Python27\lib\site-packages\xlsxwriter\workbook.py", line 1455, in _write_sheets
self._write_sheet(worksheet.name, id_num, worksheet.hidden)
File "C:\Python27\lib\site-packages\xlsxwriter\workbook.py", line 1472, in _write_sheet
self._xml_empty_tag('sheet', attributes)
File "C:\Python27\lib\site-packages\xlsxwriter\xmlwriter.py", line 80, in _xml_empty_tag
self.fh.write("<%s/>" % tag)
File "C:\Python27\lib\codecs.py", line 699, in write
return self.writer.write(data)
File "C:\Python27\lib\codecs.py", line 363, in write
raise Exception
Exception
What should I do about this, please?
You have to decode your input data with the correct encoding, which seems to be 'utf-8'.
You may want to look at this:
Example: Simple Unicode with Python 2
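In this case the offending value is the sheet name scraped from the page: 'Android PC–APC' contains an en dash, which is the \xe2\x80\x93 byte sequence in the repr above. A minimal Python 2 sketch of the idea, with output.xlsx as a placeholder filename:
import xlsxwriter
raw_name = 'Android PC\xe2\x80\x93APC'     # UTF-8 byte string taken from the page
sheet_name = raw_name.decode('utf-8')      # -> u'Android PC\u2013APC' (unicode)
wb = xlsxwriter.Workbook('output.xlsx')
ws = wb.add_worksheet(sheet_name)          # XlsxWriter accepts unicode sheet names
ws.write(0, 0, u'ok')
wb.close()
Decoding every scraped byte string to unicode before it reaches the workbook (sheet names and cell values alike) avoids the implicit ASCII decode that raises the UnicodeDecodeError inside codecs.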
