Printing results to a log file as well as the console - Python

My requirement is to print the output of the file to the console as well as to a log file. The following piece of code does it for me, except for a minor hiccup: I am calling a Perl script at the end of the file, whose output gets displayed in the console but does not get printed to the file.
import subprocess
import sys

class Tee(object):
    def __init__(self, *files):
        self.files = files
    def write(self, obj):
        # write everything to each underlying stream (console and file)
        for f in self.files:
            f.write(obj)

f = open('MyFile.txt', 'w')
original = sys.stdout
sys.stdout = Tee(sys.stdout, f)
print "Logging Started"
# My code
print "A"
subprocess.call(['./MyScript'])
sys.stdout = original
print "Logging Stopped" # Only on stdout
f.close()
Can anyone please advise how that can be achieved? Or is it possible at all?

The child process writes to the real stdout file descriptor, bypassing your Python-level sys.stdout replacement, so you need to capture its output and print it yourself. Use subprocess.check_output:
print subprocess.check_output(['./MyScript'])
In Python 2.6, either use the backport subprocess32, or copy the 2.7 source for check_output.
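For example, a minimal sketch of the backport route (assuming subprocess32 has been installed, e.g. with pip install subprocess32):
import subprocess32 as subprocess

# the backport provides check_output even on Python 2.6
print subprocess.check_output(['./MyScript'])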

If you look at how check_output is implemented in Python 2.7, you should be able to work out how to use subprocess.Popen:
def check_output(*popenargs, **kwargs):
    if 'stdout' in kwargs:
        raise ValueError('stdout argument not allowed, it will be overridden.')
    process = Popen(stdout=PIPE, *popenargs, **kwargs)
    output, unused_err = process.communicate()
    retcode = process.poll()
    if retcode:
        cmd = kwargs.get("args")
        if cmd is None:
            cmd = popenargs[0]
        raise CalledProcessError(retcode, cmd, output=output)
    return output
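Putting that together with the Tee from the question, a minimal sketch (assuming the Tee'd sys.stdout is still installed): capture the script's output with Popen, then write it through the replaced sys.stdout so it reaches both the console and the file.
process = subprocess.Popen(['./MyScript'], stdout=subprocess.PIPE)
output, _ = process.communicate()
sys.stdout.write(output)  # goes through the Tee: console and MyFile.txt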

The code below did the trick for me. Thanks, everyone, for helping.
#!/usr/bin/python
import os
import subprocess
import sys

class Tee(object):
    def __init__(self, *files):
        self.files = files
    def write(self, obj):
        for f in self.files:
            f.write(obj)

f = open('MyFile.txt', 'w')
original = sys.stdout
sys.stdout = Tee(sys.stdout, f)
print "Logging Started"
# My code
print "A"

def check_output(*popenargs, **kwargs):
    # local copy of the 2.7 implementation for older Pythons
    process = subprocess.Popen(stdout=subprocess.PIPE, *popenargs, **kwargs)
    output, unused_err = process.communicate()
    retcode = process.poll()
    if retcode:
        cmd = kwargs.get("args")
        if cmd is None:
            cmd = popenargs[0]
        error = subprocess.CalledProcessError(retcode, cmd)
        error.output = output
        raise error
    return output

location = "%s/folder" % (os.environ["HOME"])  # "Home" would raise a KeyError; the standard variable is HOME
myoutput = check_output(['./MyFile'])
print myoutput
sys.stdout = original
print "Logging Stopped" # Only on stdout
f.close()

subprocess.check_output
was introduced in Python 2.7. If you can only use previous versions, you can just use Popen and set stdout to your output stream (but in your case, since you have already overridden sys.stdout, I don't think that's needed); just change to:
p = subprocess.Popen(['./MyScript'])
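If you do want the child's output to pass through the Tee (so it also reaches MyFile.txt), a pre-2.7 sketch along the same lines: read the pipe line by line and write each line to the replaced sys.stdout.
p = subprocess.Popen(['./MyScript'], stdout=subprocess.PIPE)
for line in iter(p.stdout.readline, ''):
    sys.stdout.write(line)  # tee'd: console and MyFile.txt
p.wait()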


Is there a pythonic way to mix error handling and context management?

I'd like to make a program which saves intermediate results, to be able to reuse them in case of a crash.
In this regard, I wanted to try to open the intermediate results file with a context manager. However, a basic context manager does not include error handling, and basic error handling does not include context management. So I was forced to write my own context manager, which works fairly well, but I do not feel the result is very Pythonically satisfying:
import subprocess
import contextlib
import sys

@contextlib.contextmanager
def myopen(cmd, filename):
    try:
        f = open(filename, 'r')
    except FileNotFoundError:
        pass
        # I didn't put the file creation here to prevent the risk of
        # causing an exception during the handling of the previous one
    else:
        yield f
        f.close()
        return
    print(f'creating new {filename}', file=sys.stderr, flush=True)
    sp = subprocess.run(cmd, shell=True, executable='/bin/bash')
    try:
        f = open(filename, 'r')
    except:
        raise
    else:
        yield f
        f.close()
Which I test with
filename = "./context_exception.data"
print(f'removing {filename}', file=sys.stderr, flush=True)
sp = subprocess.run("rm "+filename, shell=True, executable='/bin/bash')

cmd = "echo -e 'spam\\neggs' >"+filename
with myopen(cmd, filename) as f:
    lines = f.readlines()
    for i, line in enumerate(lines):
        print(f'line{i:02}={line}', end='')

cmd = "echo -e 'ham\\nbanana' >"+filename
with myopen(cmd, filename) as f:
    lines = f.readlines()
    for i, line in enumerate(lines):
        print(f'line{i:02}={line}', end='')

print(f'removing {filename}', file=sys.stderr, flush=True)
sp = subprocess.run("rm "+filename, shell=True, executable='/bin/bash')
with myopen(cmd, filename) as f:
    lines = f.readlines()
    for i, line in enumerate(lines):
        print(f'line{i:02}={line}', end='')
Is there any nicer way to combine a context manager and exception handling, without having to manually rewrite the context manager?
And by the way, I'm not very satisfied with the bulkiness of my context manager. Any idea about how to make it more compact would be welcome.
EDIT (Addendum)
Obviously, as @PeterWood noticed, I could just test for the file's existence beforehand, and then would need neither to rewrite a context manager nor any exception handling:
import subprocess
import os
import sys

def create_if_nonexistent(cmd, filename):
    if not os.path.isfile(filename):
        print(f'creating new {filename}', file=sys.stderr, flush=True)
        sp = subprocess.run(cmd, shell=True, executable='/bin/bash')

filename = "./context_exception.data"
print(f'removing {filename}', file=sys.stderr, flush=True)
sp = subprocess.run("rm "+filename, shell=True, executable='/bin/bash')

cmd = "echo -e 'spam\\neggs' >"+filename
create_if_nonexistent(cmd, filename)
with open(filename) as f:
    lines = f.readlines()
    for i, line in enumerate(lines):
        print(f'line{i:02}={line}', end='')

cmd = "echo -e 'ham\\nbanana' >"+filename
create_if_nonexistent(cmd, filename)
with open(filename) as f:
    lines = f.readlines()
    for i, line in enumerate(lines):
        print(f'line{i:02}={line}', end='')

print(f'removing {filename}', file=sys.stderr, flush=True)
sp = subprocess.run("rm "+filename, shell=True, executable='/bin/bash')
create_if_nonexistent(cmd, filename)
with open(filename) as f:
    lines = f.readlines()
    for i, line in enumerate(lines):
        print(f'line{i:02}={line}', end='')
However:
1. I wanted to try this because using error handling is generally considered the Pythonic way of doing these things, and
2. the question is about how to properly combine both of them in order to learn more about Python, not about how to work around the example without actually solving the problem.
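For illustration, a more compact EAFP sketch that keeps the decorator-based context manager (an untested sketch, not from the original post; it avoids opening the file inside the except block, per the concern noted in the code above):
import contextlib
import subprocess
import sys

@contextlib.contextmanager
def myopen(cmd, filename):
    try:
        f = open(filename, 'r')
    except FileNotFoundError:
        f = None  # don't open here, to avoid chaining a second exception
    if f is None:
        print(f'creating new {filename}', file=sys.stderr, flush=True)
        subprocess.run(cmd, shell=True, executable='/bin/bash')
        f = open(filename, 'r')  # a failure here propagates normally
    with f:
        yield f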
Your exception handling can be improved, and I would suggest you don't need to use exception handling at all: just test for non-existence of the file, and also make use of the file's own context manager:
import os
import subprocess
import sys

def myopen(cmd, filename):
    if not os.path.isfile(filename):
        print(f'creating new {filename}', file=sys.stderr, flush=True)
        subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                       text=True, shell=True, executable='/bin/bash')
    return open(filename)
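Since myopen now simply returns the open file object, the file's own context manager takes care of closing it; a usage sketch with the question's test code:
cmd = "echo -e 'spam\\neggs' >" + filename
with myopen(cmd, filename) as f:
    for i, line in enumerate(f.readlines()):
        print(f'line{i:02}={line}', end='')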

String input with wrong output using Linux command

I created a Python program, test.py, below:
import subprocess
import sys, os
FolderPath = subprocess.getoutput("cd . && pwd")
ProgramName = sys.argv[0]
LogName = ProgramName[:-3]+'_printout.txt'
ProgramFile = FolderPath+'/'+ProgramName
LogFile = FolderPath+'/'+LogName
_stdin = sys.stdin
_stdout = sys.stdout
_stderr = sys.stderr
sys.stdin = open(LogFile, 'w')
sys.stdout = open(LogFile, 'a')
sys.stderr = open(LogFile, 'a')
Prog = open(ProgramFile, 'r')
print(Prog.read())
TEST = str(input("Enter the name: \n TEST_NAME: "))
print(TEST)
sys.stdin = _stdin.flush()
sys.stdout = _stdout.flush()
sys.stderr = _stderr.flush()
After I executed it on Linux with the command python test.py, I got this error in test_printout.txt:
Enter the name:
TEST_NAME: Traceback (most recent call last):
File "test.py", line 21, in <module>
TEST = str(input("Enter the name: \n TEST_NAME: "))
io.UnsupportedOperation: not readable
I modified the code:
import subprocess
import sys, os
FolderPath = subprocess.getoutput("cd . && pwd")
ProgramName = sys.argv[0]
LogName = ProgramName[:-3]+'_printout.txt'
ProgramFile = FolderPath+'/'+ProgramName
LogFile = FolderPath+'/'+LogName
_stdin = sys.stdin
_stdout = sys.stdout
_stderr = sys.stderr
sys.stdin = open(LogFile, 'w+')
sys.stdout = open(LogFile, 'a')
sys.stderr = open(LogFile, 'a')
Prog = open(ProgramFile, 'r')
print(Prog.read())
TEST = str(input("Enter the name: \n TEST_NAME: "))
print(TEST)
sys.stdin = _stdin.flush()
sys.stdout = _stdout.flush()
sys.stderr = _stderr.flush()
But got:
Enter the name:
TEST_NAME: import subprocess
It did not let me type anything. What I want is to be able to type a string and have it also saved to test_printout.txt:
Enter the name:
TEST_NAME: This Is What I Type And Save!
Does anyone know how to fix it?
Also, if I use w+ instead of w mode, it takes a longer time to write to test_printout.txt if I change the program to import pandas and manipulate a DataFrame.
Is there a way to only write the simple printed words to test_printout.txt without reading the entire thing?
UPDATE
I modified the code as below:
import subprocess, sys, os

FolderPath = subprocess.getoutput("cd . && pwd")
ProgramName = sys.argv[0]
LogName = ProgramName[:-3]+'_printout.txt'
ProgramFile = FolderPath+'/'+ProgramName
LogFile = FolderPath+'/'+LogName

_stdin = sys.stdin
_stdout = sys.stdout
_stderr = sys.stderr

class stdout_Logger(object):
    def __init__(self):
        self.stdout = sys.stdout
        self.log = open(LogFile, "a")
    def write(self, message):
        self.stdout.write(message)
        self.log.write(message)
    def flush(self):
        # this flush method is needed for Python 3 compatibility;
        # it handles the flush command by doing nothing.
        # you might want to specify some extra behavior here.
        pass

sys.stdout = stdout_Logger()

class stderr_Logger(object):
    def __init__(self):
        self.stderr = sys.stderr
        self.log = open("test_printout.txt", "a")
    def write(self, message):
        self.stderr.write(message)
        self.log.write(message)
    def flush(self):
        # this flush method is needed for Python 3 compatibility;
        # it handles the flush command by doing nothing.
        # you might want to specify some extra behavior here.
        pass

sys.stderr = stderr_Logger()

Prog = open(ProgramFile, 'r')
print(Prog.read())

##START Program
TEST = str(input("Enter the name: \n TEST_NAME: "))
print(TEST)
#END Program

sys.stdin = _stdin.flush()
sys.stdout = _stdout.flush()
sys.stderr = _stderr.flush()
This got almost what I want. It also saves my program to test_printout.txt at the top and does print(TEST) at the bottom.
However, it also prints the whole program to the Linux terminal console, which is not what I desire. I only want it to print "Enter the name: \n TEST_NAME: " in the Linux terminal so I can type a string, instead of it printing the entire program.
I think the issue comes from sys.stdin.
I think I figured it out. The problem is that when you substitute stdin with a file handle opened in write mode, you prevent input() from reading it. You would get the same error if you tried this:
file = open("foo.txt", 'w')
content = file.read()  # io.UnsupportedOperation: not readable
The way around it is to log the streams without redirecting them. So either dump your console to a file with python test.py > test_printout.txt, or create a logger class to wrap the streams (check out this answer: How to redirect stdout to both file and console with scripting?).
Perhaps it's worth looking into the logging module, as I believe it handles these issues rather neatly.
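For instance, a minimal logging-based sketch (the logger name and file name are just placeholders): one handler writes to the console, another to the log file, so nothing needs to be redirected.
import logging

logger = logging.getLogger('test')
logger.setLevel(logging.INFO)
logger.addHandler(logging.StreamHandler())                   # console
logger.addHandler(logging.FileHandler('test_printout.txt'))  # log file

logger.info('Enter the name:')  # appears in both places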
EDIT:
From what you laid out in the comments, this is what you want:
import subprocess, sys, os

FolderPath = subprocess.getoutput("cd . && pwd")
ProgramName = sys.argv[0]
LogName = ProgramName[:-3]+'_printout.txt'
ProgramFile = FolderPath+'/'+ProgramName
LogFile = FolderPath+'/'+LogName

# Write the program text straight to the log file before installing the
# loggers, so it is not echoed to the console.
Prog = open(ProgramFile, 'r')
with open(LogFile, 'w') as logfile:
    logfile.write(Prog.read())

_stdin = sys.stdin
_stdout = sys.stdout
_stderr = sys.stderr

class stdout_Logger(object):
    def __init__(self):
        self.stdout = sys.stdout
        self.log = open(LogFile, "a")
    def write(self, message):
        self.stdout.write(message)
        self.log.write(message)
    def flush(self):
        # this flush method is needed for Python 3 compatibility;
        # it handles the flush command by doing nothing.
        # you might want to specify some extra behavior here.
        pass

class stderr_Logger(object):
    def __init__(self):
        self.stderr = sys.stderr
        self.log = open("test_printout.txt", "a")
    def write(self, message):
        self.stderr.write(message)
        self.log.write(message)
    def flush(self):
        # this flush method is needed for Python 3 compatibility;
        # it handles the flush command by doing nothing.
        # you might want to specify some extra behavior here.
        pass

sys.stdout = stdout_Logger()
sys.stderr = stderr_Logger()

##START Program
TEST = str(input("Enter the name: \n TEST_NAME: "))
print(TEST)
#END Program

# restore the original streams (note: assigning _stdout.flush() would set
# sys.stdout to None, since flush() returns None)
sys.stdin = _stdin
sys.stdout = _stdout
sys.stderr = _stderr

Use StringIO as stdin with Popen

I have the following shell script that I would like to write in Python (of course grep . is actually a much more complex command):
#!/bin/bash
(cat somefile 2>/dev/null || (echo 'somefile not found'; cat logfile)) \
| grep .
I tried this (which lacks an equivalent to cat logfile anyway):
#!/usr/bin/env python
import StringIO
import subprocess

try:
    myfile = open('somefile')
except:
    myfile = StringIO.StringIO('somefile not found')
subprocess.call(['grep', '.'], stdin=myfile)
But I get the error AttributeError: StringIO instance has no attribute 'fileno'.
I know I should use subprocess.communicate() instead of StringIO to send strings to the grep process, but I don't know how to mix both strings and files.
p = subprocess.Popen(['grep', '...'], stdin=subprocess.PIPE,
                     stdout=subprocess.PIPE)
output, output_err = p.communicate(myfile.read())
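Because both a real file object and a StringIO instance support .read(), this handles either branch of the question's try/except; a minimal sketch putting the two together:
try:
    myfile = open('somefile')
except IOError:
    myfile = StringIO.StringIO('somefile not found')

p = subprocess.Popen(['grep', '.'], stdin=subprocess.PIPE,
                     stdout=subprocess.PIPE)
output, output_err = p.communicate(myfile.read())
print output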
Don't use bare except, it may catch too much. In Python 3:
#!/usr/bin/env python3
from subprocess import check_output

cmd = ['grep', '.']  # stand-in for the real command
try:
    file = open('somefile', 'rb', 0)
except FileNotFoundError:
    output = check_output(cmd, input=b'somefile not found')
else:
    with file:
        output = check_output(cmd, stdin=file)
It works for large files (the file is redirected at the file descriptor level, so there is no need to load it into memory).
If you have a file-like object (without a real .fileno()), you could write to the pipe directly using its .write() method:
#!/usr/bin/env python3
import io
from shutil import copyfileobj
from subprocess import Popen, PIPE
from threading import Thread

try:
    file = open('somefile', 'rb', 0)
except FileNotFoundError:
    file = io.BytesIO(b'somefile not found')

def write_input(source, sink):
    with source, sink:
        copyfileobj(source, sink)

cmd = ['grep', 'o']
with Popen(cmd, stdin=PIPE, stdout=PIPE) as process:
    Thread(target=write_input, args=(file, process.stdin), daemon=True).start()
    output = process.stdout.read()
The following answer uses shutil as well, which is quite efficient, but it avoids running a separate thread, which would otherwise never end and go zombie when stdin ends (as in the answer from @jfs):
import os
import subprocess
import io
from shutil import copyfileobj

file_exists = os.path.isfile(file)  # 'file' holds the input filename
with open(file) if file_exists else io.StringIO("Some text here ...\n") as string_io:
    with subprocess.Popen("cat", stdin=subprocess.PIPE, stdout=subprocess.PIPE, universal_newlines=True) as process:
        copyfileobj(string_io, process.stdin)
        # the subsequent code is not executed until copyfileobj ends,
        # ... but the subprocess is effectively using the input.
        process.stdin.close()  # close it, or the subprocess won't end
        # Do some online processing of process.stdout, for example...
        for line in process.stdout:
            print(line)  # do something
Alternatively to closing the pipe and parsing it line by line, if the output is known to fit in memory:
...
stdout_text, stderr_text = process.communicate()

How to redirect stderr of a program that is run using os.system by a third-party python library

I use external library, like this:
from some_lib import runThatProgram
infile = '/tmp/test'
outfile = '/tmp/testout'
runThatProgram(infile, outfile)
where runThatProgram is:
def runThatProgram(infile, outfile):
    os.system("%s %s > %s" % ('thatProgram', infile, outfile))
The problem is that 'thatProgram' outputs lots of stuff on STDERR. I want to redirect it to a file, but I cannot edit the runThatProgram code because it is in a third-party lib!
To illustrate what Rosh Oxymoron said, you can hack the code like this:
from some_lib import runThatProgram
infile = '/tmp/test'
outfile = '/tmp/testout 2>&1'
runThatProgram(infile, outfile)
With this, it will call
thatProgram /tmp/test > /tmp/testout 2>&1
which will redirect stderr (2) to stdout (1), so everything will be logged in your outfile.
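If smuggling 2>&1 into the outfile argument feels too fragile, an alternative sketch (not from the original answers) is to redirect the whole process's stderr at the file descriptor level before calling the library; children spawned by os.system inherit it. This assumes a POSIX system and also redirects your own script's stderr:
import os
from some_lib import runThatProgram

# point fd 2 (stderr) of this process, and of every child it spawns, at a log file
err_fd = os.open('/tmp/stderr.log', os.O_WRONLY | os.O_CREAT | os.O_TRUNC)
os.dup2(err_fd, 2)
os.close(err_fd)

runThatProgram('/tmp/test', '/tmp/testout')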
To elaborate on using subprocess: you can open it, give it a pipe, and then work from there.
import subprocess

program = "runthatprogram.py".split()
process = subprocess.Popen(program, stdout=subprocess.PIPE, stderr=open('stderr', 'w'))  # stderr to a file object
stdout_value = process.communicate()[0]  # the captured stdout

[Python] How to convert/record the output from subprocess into a file

I am using the subprocess module, whose Popen class outputs some results like:
063.245.209.093.00080-128.192.076.180.01039:HTTP/1.1 302 Found
063.245.209.093.00080-128.192.076.180.01040:HTTP/1.1 302 Found
and here is the script I wrote:
import subprocess, shlex, fileinput, filecmp
proc = subprocess.Popen('egrep \'^HTTP/\' *', shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE,)
stdout_value = proc.communicate()[0]
print 'results:'
print stdout_value
My question is: how to convert/record the results from stdout into a file?
I appreciate all your responses and help!
import subprocess
import glob

def egrep(pattern, *files):
    """ runs egrep on the files and returns the entire result as a string """
    cmd = ['egrep', pattern]
    for filespec in files:
        cmd.extend(glob.glob(filespec))
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
    return proc.communicate()[0]

results = egrep(r'^HTTP/', '*')
print 'results:'
print results

# write to file
with open('result_file.txt', 'w') as f:
    f.write(results)
One or any of the stdin, stdout, and stderr arguments to subprocess.Popen() can be file objects (or a file descriptor), which will cause the program to read from or write to the given files.
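For example, a minimal sketch (filenames are placeholders): pass an open file as stdout and the child process writes to it directly, with no pipe or communicate() needed.
import subprocess

with open('result_file.txt', 'w') as f:
    subprocess.Popen(['egrep', '^HTTP/', 'somefile'], stdout=f).wait()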
