write() at beginning of file? - python

I'm doing it like this now, but I want it to write at the beginning of the file instead.
f = open('out.txt', 'a') # or 'w'?
f.write("string 1")
f.write("string 2")
f.write("string 3")
f.close()
so that the contents of out.txt will be:
string 3
string 2
string 1
and not (like this code does):
string 1
string 2
string 3

Take a look at this question. There are some solutions there.
Though I would probably go that same way Daniel and MAK suggest -- maybe make a lil' class to make things a little more flexible and explicit:
class Prepender:
    def __init__(self, fname, mode='w'):
        self.__write_queue = []
        self.__f = open(fname, mode)
    def write(self, s):
        self.__write_queue.insert(0, s)
    def close(self):
        self.__exit__(None, None, None)
    def __enter__(self):
        return self
    def __exit__(self, type, value, traceback):
        if self.__write_queue:
            self.__f.writelines(self.__write_queue)
        self.__f.close()

with Prepender('test_d.out') as f:
    f.write('string 1\n')
    f.write('string 2\n')
    f.write('string 3\n')

You could throw a f.seek(0) between each write (or write a wrapper function that does it for you), but there's no simple built in way of doing this.
EDIT: this doesn't work, even if you put a f.flush() in there it will continually overwrite. You may just have to queue up the writes and reverse the order yourself.
So instead of
f.write("string 1")
f.write("string 2")
f.write("string 3")
Maybe do something like:
writeList = []
writeList.append("string 1\n")
writeList.append("string 2\n")
writeList.append("string 3\n")
writeList.reverse()
f.writelines(writeList)

A variation on kdtrv's answer. This version keeps the existing file contents, and offers a write_lines method that preserves line order.
class Prepender(object):
    def __init__(self, file_path):
        # Read in the existing file, so we can write it back later
        with open(file_path, mode='r') as f:
            self.__write_queue = f.readlines()
        self.__open_file = open(file_path, mode='w')
    def write_line(self, line):
        self.__write_queue.insert(0, "%s\n" % line)
    def write_lines(self, lines):
        lines.reverse()
        for line in lines:
            self.write_line(line)
    def close(self):
        self.__exit__(None, None, None)
    def __enter__(self):
        return self
    def __exit__(self, type, value, traceback):
        if self.__write_queue:
            self.__open_file.writelines(self.__write_queue)
        self.__open_file.close()

with Prepender('test_d.out') as f:
    # Must write individual lines in reverse order
    f.write_line('This will be line 3')
    f.write_line('This will be line 2')
    f.write_line('This will be line 1')

with Prepender('test_d.out') as f:
    # Or, use write_lines instead - that maintains order.
    f.write_lines(
        ['This will be line 1',
         'This will be line 2',
         'This will be line 3',
         ]
    )

Elaborating on Daniel DiPaolo's answer:
Simply append all the lines that you want to write to a list. Reverse the list and then write its contents into the file.
f = open('output.txt', 'w')
l = []
l.append("string 1\n")
l.append("string 2\n")
l.append("string 3\n")
l.reverse()
for line in l:
    f.write(line)
f.close()
You could also use a deque and add lines at its beginning instead of using a list and reversing it.
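A minimal sketch of that deque variant (using the same example strings):
from collections import deque

lines = deque()
lines.appendleft("string 1\n")
lines.appendleft("string 2\n")
lines.appendleft("string 3\n")

with open('output.txt', 'w') as f:
    f.writelines(lines)
# output.txt now contains string 3, string 2, string 1, one per line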

How should I put these lists together?

I am creating an interface with PyQt. This interface generates a .txt file.
Basically, in text_file I generate the .txt file, where the title of the file comes from radio_value and the content of the file from clickconector. The latter is where I have my problem. relleno is a list of what was filled into the form, and form_label holds the labels for those fields. I need to join both lists into one. I tried the most basic thing, a = zip(form_label, relleno), but it gives an error: TypeError: can only join an iterable (I know it has to do with the join used when writing the text file), and many other attempts give basically the same error, and I need to write that correctly. (I know the return in the code is wrong; it will only return the first tuple. I'm trying to do the same but with all the tuples.)
I'll skip the part where I configure the buttons, to be more specific. Maybe it's a rookie mistake, but I've tried for hours and can't get what I want. Any constructive criticism is welcome.
def clickconector(self):
    relleno = [self.bitacora.text(), self.turno.text(), self.asesor.text(), self.item.text(), self.modelo.text(), self.identificacion.text(), self.rig.text(), self.horometro.text(), self.condicion.text(), self.orden.text(), self.observacion.text()]
    form_label = ["bitacora", 'turno', 'asesor', 'item', 'modelo', 'identificacion', 'rig', 'horometro', 'condicion', 'orden', 'observacion']
    for a, b in zip(form_label, relleno):
        print(a, b)

def radio_value(self):
    if self.pendiente.isChecked():
        return 'Pendiente', self.bitacora.text()
    if self.terminado.isChecked():
        return 'Terminado', self.bitacora.text()

def text_file(self):
    filename = f"{' '.join(self.radio_value())}.txt"
    with open(filename, "w") as f:
        f.write(" ".join(self.clickconector()))
    return f
Your error comes from the line
f.write(" ".join(self.clickconector()))
clickconector() doesn't return anything (it returns None), hence the TypeError about join needing an iterable.
Try returning an iterable of strings, or just the text.
----- update -----
I'd change it like this:
def clickconector(self):
    relleno = [self.bitacora.text(), self.turno.text(), self.asesor.text(), self.item.text(), self.modelo.text(), self.identificacion.text(), self.rig.text(), self.horometro.text(), self.condicion.text(), self.orden.text(), self.observacion.text()]
    form_label = ["bitacora", 'turno', 'asesor', 'item', 'modelo', 'identificacion', 'rig', 'horometro', 'condicion', 'orden', 'observacion']
    return [str(label_text) for label_text in zip(form_label, relleno)]  # just return list of tuples

def radio_value(self):
    if self.pendiente.isChecked():
        return 'Pendiente', self.bitacora.text()
    if self.terminado.isChecked():
        return 'Terminado', self.bitacora.text()

def text_file(self):
    filename = f"{' '.join(self.radio_value())}.txt"
    with open(filename, "w") as f:
        f.write(" ".join(self.clickconector()))
    return f
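If you'd rather end up with one "label: value" line per field instead of printed tuples, here's a small sketch of that idea (with made-up stand-in values for the .text() calls):
# Sketch: pair each label with its form value and build "label: value" lines.
form_label = ['bitacora', 'turno', 'asesor']
relleno = ['B-123', 'noche', 'Ana']  # hypothetical values standing in for self.xxx.text()
lines = [f"{label}: {value}" for label, value in zip(form_label, relleno)]
print("\n".join(lines))  # or f.write("\n".join(lines)) inside text_file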

How to chain file objects in Python?

I'm trying to find a simple way to chain file-like objects. I have a single CSV file which is split into a number of segments on disk. I'd like to be able to pass them to csv.DictReader without having to make a concatenated temporary first.
Something like:
files = map(io.open, filenames)
for row in csv.DictReader(io.chain(files)):
    print(row[column_name])
But I haven't been able to find anything like io.chain. If I were parsing it myself, I could do something like:
from itertools import chain

def lines(fp):
    for line in fp.readlines():
        yield line

a = open('segment-1.dat')
b = open('segment-2.dat')
for line in chain(lines(a), lines(b)):
    row = line.strip().split(',')
However DictReader needs something it can call read() on, so this method doesn't work. I can iterate over the files, copying the fieldnames property from the previous reader, but I was hoping for something which let me put all the processing within a single loop body.
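For reference, that per-file workaround might look roughly like this (a sketch; it assumes only the first segment carries a header row):
import csv

def rows_from_segments(filenames):
    fieldnames = None
    for filename in filenames:
        with open(filename, newline='') as f:
            reader = csv.DictReader(f, fieldnames=fieldnames)
            fieldnames = reader.fieldnames  # copy the header forward to later segments
            for row in reader:
                yield row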
An iterable might help
from io import BytesIO

a = BytesIO(b"1st file 1st line \n1st file 2nd line")
b = BytesIO(b"2nd file 1st line \n2nd file 2nd line")

class Reader:
    def __init__(self, *files):
        self.files = files
        self.current_idx = 0
    def __iter__(self):
        return self
    def __next__(self):
        f = self.files[self.current_idx]
        for line in f:
            return line
        else:
            if self.current_idx < len(self.files) - 1:
                self.current_idx += 1
                return next(self)
            raise StopIteration("feed me more files")

r = Reader(a, b)
for l in r:
    print(l)
Result:
b'1st file 1st line \n'
b'1st file 2nd line'
b'2nd file 1st line \n'
b'2nd file 2nd line'
Edit:
:D then there are standard library goodies.
https://docs.python.org/3.7/library/fileinput.html
with fileinput.input(files=('spam.txt', 'eggs.txt')) as f:
    for line in f:
        process(line)
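Since csv.DictReader only needs an iterable of lines, a sketch of wiring it straight to fileinput (assuming the header row appears only in the first segment) could be:
import csv
import fileinput

filenames = ['segment-1.dat', 'segment-2.dat']
with fileinput.input(files=filenames) as f:
    # DictReader treats the first line it sees as the header row
    for row in csv.DictReader(f):
        print(row)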
You could create a class that's an iterator that returns a string each time its __next__() method is called (quoting the docs).
import csv

class ChainedCSVfiles:
    def __init__(self, filenames):
        self.filenames = filenames
    def __iter__(self):
        return next(self)
    def __next__(self):
        for filename in self.filenames:
            with open(filename, 'r', newline='') as csvfile:
                for line in csvfile:
                    yield line

filenames = 'segment-1.dat', 'segment-2.dat'
reader = csv.DictReader(ChainedCSVfiles(filenames),
                        fieldnames=('field1', 'field2', 'field3'))
for row in reader:
    print(row)

Best way to read file contents of a file and set to empty string if exception happen

try:
    content = open("/tmp/out").read()
except:
    content = ""
Can I go any shorter or more elegant than this? I have to do it for more than one file, so I want something shorter.
Is writing a function the only shorter way to do it?
What I actually want is this, but I want to concatenate "" if there is any exception:
lines = (open("/var/log/log.1").read() + open("/var/log/log").read()).split("\n")
Yes, you'll have to write something like
def get_contents(filename):
    try:
        with open(filename) as f:
            return f.read()
    except EnvironmentError:
        return ''

lines = (get_contents('/var/log/log.1')
         + get_contents('/var/log/log')).split('\n')
NlightNFotis raises a valid point: if the files are big, you don't want to do this. Maybe you'd write a line generator that accepts a list of filenames:
def get_lines(filenames):
    for fname in filenames:
        try:
            with open(fname) as f:
                for line in f:
                    yield line
        except EnvironmentError:
            continue

...

for line in get_lines(["/var/log/log.1", "/var/log/log"]):
    do_stuff(line)
Another way is to use the standard fileinput.FileInput class (thanks, J.F. Sebastian):
import fileinput
import os  # for os.devnull below

def eat_errors(f, mode):
    try:
        return open(f, mode)
    except IOError:
        return open(os.devnull)

for line in fileinput.FileInput(["/var/log/log.1", "/var/log/log"], openhook=eat_errors):
    do_stuff(line)
This code will monkey patch open, swapping in a version that returns a FakeFile (which always yields an "empty" string) whenever the real open raises an IOError.
Whilst it's more code than you'd really want to write for the problem at hand, it does mean that you have a reusable context manager for faking open if the need arises again (probably twice in the next decade)
with monkey_patched_open():
...
Actual code.
#!/usr/bin/env python
from contextlib import contextmanager
from StringIO import StringIO

################################################################################

class FakeFile(StringIO):
    def __init__(self):
        StringIO.__init__(self)
        self.count = 0
    def read(self, n=-1):
        return "<empty#1>"
    def readlines(self, sizehint=0):
        return ["<empty#2>"]
    def next(self):
        if self.count == 0:
            self.count += 1
            return "<empty#3>"
        else:
            raise StopIteration

################################################################################

@contextmanager
def monkey_patched_open():
    global open
    old_open = open
    def new_fake_open(filename, mode="r"):
        try:
            fh = old_open(filename, mode)
        except IOError:
            fh = FakeFile()
        return fh
    open = new_fake_open
    try:
        yield
    finally:
        open = old_open

################################################################################

with monkey_patched_open():
    for line in open("NOSUCHFILE"):
        print "NOSUCHFILE->", line
    print "Other", open("MISSING").read()
    print "OK", open(__file__).read()[:30]
Running the above gives:
NOSUCHFILE-> <empty#3>
Other <empty#1>
OK #!/usr/bin/env python
from co
I left in the "empty" strings just to show what was happening.
StringIO would have sufficed just to read it once but I thought the OP was looking to keep reading from file, hence the need for FakeFile - unless someone knows of a better mechanism.
I know some see monkey patching as the act of a scoundrel.
You could try the following, but it's probably not the best:
import os

def chk_file(filename):
    if os.stat(filename).st_size == 0:
        return ""
    else:
        with open(filename) as f:
            return f.readlines()

if __name__ == "__main__":
    print chk_file("foobar.txt")  # populated file
    print chk_file("bar.txt")     # empty file
    print chk_file("spock.txt")   # populated
It works. You can wrap it with your try-except, if you want.
You could define a function to catch errors:
from itertools import chain

def readlines(filename):
    try:
        with open(filename) as file:
            return file.readlines()  # or just `file` to return an iterator
    except EnvironmentError:
        return []

files = (readlines(name) for name in ["/var/log/1", "/var/log/2"])
lines = list(chain.from_iterable(files))

How to read a CSV file in reverse order in Python?

I know how to do it for a TXT file, but now I am having some trouble doing it for a CSV file.
How can I read a CSV file from the bottom in Python?
Pretty much the same way as for a text file: read the whole thing into a list and then go backwards:
import csv

with open('test.csv', 'r') as textfile:
    for row in reversed(list(csv.reader(textfile))):
        print ', '.join(row)
If you want to get fancy, you could write a lot of code that reads blocks starting at the end of the file and working backwards, emitting a line at a time, and then feed that to csv.reader, but that will only work with a file that can be seeked, i.e. disk files but not standard input.
Some of us have files that do not fit into memory; could anyone come up with a solution that does not require storing the entire file in memory?
That's a bit trickier. Luckily, all csv.reader expects is an iterator-like object that returns a string (line) per call to next(). So we grab the technique Darius Bacon presented in "Most efficient way to search the last x lines of a file in python" to read the lines of a file backwards, without having to pull in the whole file:
import os

def reversed_lines(file):
    "Generate the lines of file in reverse order."
    part = ''
    for block in reversed_blocks(file):
        for c in reversed(block):
            if c == '\n' and part:
                yield part[::-1]
                part = ''
            part += c
    if part: yield part[::-1]

def reversed_blocks(file, blocksize=4096):
    "Generate blocks of file's contents in reverse order."
    file.seek(0, os.SEEK_END)
    here = file.tell()
    while 0 < here:
        delta = min(blocksize, here)
        here -= delta
        file.seek(here, os.SEEK_SET)
        yield file.read(delta)
and feed reversed_lines into the code to reverse the lines before they get to csv.reader, removing the need for reversed and list:
import csv

with open('test.csv', 'r') as textfile:
    for row in csv.reader(reversed_lines(textfile)):
        print ', '.join(row)
There is a more Pythonic solution possible, which doesn't require a character-by-character reversal of the block in memory (hint: just get a list of indices where there are line ends in the block, reverse it, and use it to slice the block), and uses chain out of itertools to glue the line clusters from successive blocks together, but that's left as an exercise for the reader.
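For the curious, a rough sketch of one such variant, splitting each block on newlines and emitting the pieces back to front instead of reversing characters (it shares the newline caveat discussed just below):
import os

def reversed_lines_sliced(file, blocksize=4096):
    "Sketch: generate the lines of file in reverse order by slicing blocks."
    file.seek(0, os.SEEK_END)
    here = file.tell()
    tail = ''  # partial line carried over from the block after this one
    while 0 < here:
        delta = min(blocksize, here)
        here -= delta
        file.seek(here, os.SEEK_SET)
        pieces = (file.read(delta) + tail).split('\n')
        tail = pieces[0]  # may be incomplete; completed by the next (earlier) block
        for line in reversed(pieces[1:]):
            if line:  # skip blank lines, as reversed_lines above does
                yield line + '\n'
    if tail:
        yield tail + '\n'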
It's worth noting that the reversed_lines() idiom above only works if the columns in the CSV file don't contain newlines.
Aargh! There's always something. Luckily, it's not too bad to fix this:
def reversed_lines(file):
    "Generate the lines of file in reverse order."
    part = ''
    quoting = False
    for block in reversed_blocks(file):
        for c in reversed(block):
            if c == '"':
                quoting = not quoting
            elif c == '\n' and part and not quoting:
                yield part[::-1]
                part = ''
            part += c
    if part: yield part[::-1]
Of course, you'll need to change the quote character if your CSV dialect doesn't use ".
Building on @mike-desimone's answer. Here's a solution that provides the same structure as a Python file object but is read in reverse, line by line:
import os

class ReversedFile(object):
    def __init__(self, f, mode='r'):
        """
        Wraps a file object with methods that make it be read in reverse line-by-line
        if ``f`` is a filename opens a new file object
        """
        if mode != 'r':
            raise ValueError("ReversedFile only supports read mode (mode='r')")
        if not type(f) == file:
            # likely a filename
            f = open(f)
        self.file = f
        self.lines = self._reversed_lines()

    def _reversed_lines(self):
        "Generate the lines of file in reverse order."
        part = ''
        for block in self._reversed_blocks():
            for c in reversed(block):
                if c == '\n' and part:
                    yield part[::-1]
                    part = ''
                part += c
        if part: yield part[::-1]

    def _reversed_blocks(self, blocksize=4096):
        "Generate blocks of file's contents in reverse order."
        file = self.file
        file.seek(0, os.SEEK_END)
        here = file.tell()
        while 0 < here:
            delta = min(blocksize, here)
            here -= delta
            file.seek(here, os.SEEK_SET)
            yield file.read(delta)

    def __getattribute__(self, name):
        """
        Allows for the underlying file attributes to come through
        """
        try:
            # ReversedFile attribute
            return super(ReversedFile, self).__getattribute__(name)
        except AttributeError:
            # self.file attribute
            return getattr(self.file, name)

    def __iter__(self):
        """
        Creates iterator
        """
        return self

    def seek(self):
        raise NotImplementedError('ReversedFile does not support seek')

    def next(self):
        """
        Next item in the sequence
        """
        return self.lines.next()

    def read(self):
        """
        Returns the entire contents of the file reversed line by line
        """
        contents = ''
        for line in self:
            contents += line
        return contents

    def readline(self):
        """
        Returns the next line from the bottom
        """
        return self.next()

    def readlines(self):
        """
        Returns all remaining lines from the bottom of the file in reverse
        """
        return [x for x in self]
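A minimal usage sketch (Python 2, to match the class above), feeding the reversed line stream straight to csv.reader:
import csv

for row in csv.reader(ReversedFile('test.csv')):
    print ', '.join(row)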
Go for it. This is a simple program to reverse the rows of a CSV file.
import csv

BC_file = open('Master.csv', 'rb')
BC_reader = csv.reader(BC_file)
next(BC_reader)
for row in reversed(list(BC_reader)):
    print row[0]

Writing string to a file on a new line every time

I want to append a newline to my string every time I call file.write(). What's the easiest way to do this in Python?
Use "\n":
file.write("My String\n")
See the Python manual for reference.
You can do this in two ways:
f.write("text to write\n")
or, depending on your Python version (2 or 3):
print >>f, "text to write" # Python 2.x
print("text to write", file=f) # Python 3.x
You can use:
file.write(your_string + '\n')
If you use it extensively (a lot of written lines), you can subclass 'file':
class cfile(file):
    # subclass file to have a more convenient use of writeline
    def __init__(self, name, mode='r'):
        file.__init__(self, name, mode)
    def wl(self, string):
        self.writelines(string + '\n')
Now it offers an additional function wl that does what you want:
with cfile('filename.txt', 'w') as fid:
    fid.wl('appends newline character')
    fid.wl('is written on a new line')
Maybe I am missing something like different newline characters (\n, \r, ...) or that the last line is also terminated with a newline, but it works for me.
you could do:
file.write(your_string + '\n')
as suggested by another answer, but why use string concatenation (slow, error-prone) when you can call file.write twice:
file.write(your_string)
file.write("\n")
note that writes are buffered so it amounts to the same thing.
Another solution that writes from a list using an f-string:
lines = ['hello','world']
with open('filename.txt', "w") as fhandle:
for line in lines:
fhandle.write(f'{line}\n')
And as a function
def write_list(fname, lines):
    with open(fname, "w") as fhandle:
        for line in lines:
            fhandle.write(f'{line}\n')

write_list('filename.txt', ['hello','world'])
file_path = "/path/to/yourfile.txt"
with open(file_path, 'a') as file:
    file.write("This will be added to the next line\n")
or
log_file = open('log.txt', 'a')
log_file.write("This will be added to the next line\n")
Unless you're writing to binary files, use print. The example below is good for formatting CSV files:
def write_row(file_, *columns):
    print(*columns, sep='\t', end='\n', file=file_)
Usage:
PHI = 45
with open('file.csv', 'a+') as f:
    write_row(f, 'header', 'phi:', PHI, 'serie no. 2')
    write_row(f)  # additional empty line
    write_row(f, data[0], data[1])
You can also use partial as a more pythonic way of creating this kind of wrapper. In the example below, row is print with predefined keyword arguments.
from functools import partial
with open('file.csv', 'a+') as f:
    row = partial(print, sep='\t', end='\n', file=f)
    row('header', 'phi:', PHI, 'serie no. 2', end='\n\n')
    row(data[0], data[1])
Notes:
print documentation
'{}, {}'.format(1, 'the_second') - https://pyformat.info/, PEP-3101
'\t' - tab character
*columns in function definition - dispatches any number of arguments to list - see question on *args & **kwargs
Just a note, file isn't supported in Python 3 and was removed. You can do the same with the open built-in function.
f = open('test.txt', 'w')
f.write('test\n')
Ok, here is a safe way of doing it.
with open('example.txt', 'w') as f:
    for i in range(10):
        f.write(str(i+1))
        f.write('\n')
This writes 1 to 10 each number on a new line.
I really didn't want to type \n every single time, and @matthause's answer didn't seem to work for me, so I created my own class:
class File():
    def __init__(self, name, mode='w'):
        self.f = open(name, mode, buffering=1)
    def write(self, string, newline=True):
        if newline:
            self.f.write(string + '\n')
        else:
            self.f.write(string)
And here it is implemented
f = File('console.log')
f.write('This is on the first line')
f.write('This is on the second line', newline=False)
f.write('This is still on the second line')
f.write('This is on the third line')
This should show in the log file as
This is on the first line
This is on the second lineThis is still on the second line
This is on the third line
This is the solution that I came up with trying to solve this problem for myself in order to systematically produce \n's as separators. It writes using a list of strings where each string is one line of the file, however it seems that it may work for you as well. (Python 3.+)
# Takes a list of strings and prints it to a file.
def writeFile(file, strList):
    line = 0
    lines = []
    while line < len(strList):
        lines.append(cheekyNew(line) + strList[line])
        line += 1
    file = open(file, "w")
    file.writelines(lines)
    file.close()

# Returns "\n" if the int entered isn't zero, otherwise "".
def cheekyNew(line):
    if line != 0:
        return "\n"
    return ""
You can decorate the write method in the specific place where you need this behavior:
# Changed behavior is localized to single place.
with open('test1.txt', 'w') as file:
    def decorate_with_new_line(method):
        def decorated(text):
            method(f'{text}\n')
        return decorated
    file.write = decorate_with_new_line(file.write)
    file.write('This will be on line 1')
    file.write('This will be on line 2')
    file.write('This will be on line 3')

# Standard behavior is not affected. No class was modified.
with open('test2.txt', 'w') as file:
    file.write('This will be on line 1')
    file.write('This will be on line 1')
    file.write('This will be on line 1')
Using append (a) with open() on a print() statement looks easier for me:
save_url = r".\test.txt"  # raw string so \t isn't interpreted as a tab
your_text = "This will be on line 1"
print(your_text, file=open(save_url, "a+"))
another_text = "This will be on line 2"
print(another_text, file=open(save_url, "a+"))
another_text = "This will be on line 3"
print(another_text, file=open(save_url, "a+"))
You could use C-style string formatting:
file.write("%s\n" % "myString")
More about String Formatting.
If write is a callback, you may need a custom writeln.
def writeln(self, string):
    self.f.write(string + '\n')
Put it inside a custom opener (a context manager). See the answers and feedback on this question: subclassing file objects (to extend open and close operations) in Python 3.
I faced this when using ftplib to "retrieve lines" from a file that was "record based" (FB80):
with open('somefile.rpt', 'w') as fp:
    ftp.retrlines('RETR USER.REPORT', fp.write)
and ended up with one long record with no newlines; this is likely a problem with ftplib, but it's obscure.
So this became:
with OpenX('somefile.rpt') as fp:
    ftp.retrlines('RETR USER.REPORT', fp.writeln)
It does the job. This is a use case a few people will be looking for.
Complete declaration (only the last two lines are mine):
class OpenX:
    def __init__(self, filename):
        self.f = open(filename, 'w')
    def __enter__(self):
        return self  # return the wrapper itself so fp.writeln is available
    def __exit__(self, exc_type, exc_value, traceback):
        self.f.close()
    def writeln(self, string):
        self.f.write(string + '\n')
In order to support multiple operating systems, use:
file.write(f'some strings and/or {variable}. {os.linesep}')
Actually, when you use the multiline syntax, like so:
f.write("""
line1
line2
line2""")
You don't need to add \n!
Usually you would use \n but for whatever reason in Visual Studio Code 2019 Individual it won't work. But you can use this:
# Workaround to \n not working
print("lorem ipsum", file=f) # Python 3.0 onwards only
print >>f, "Text" # Python 2.0 and under
