Communicating with CMD while saving response live - Python

Is there any possible way to communicate with cmd and at the same time save all of its output to a file?
I mean that the output should be saved after every command, not at the end of the subprocess.
I want it to be something like this:
import subprocess

process = subprocess.Popen('C:\\Windows\\system32\\cmd.exe',
                           stdout=subprocess.PIPE,
                           stdin=subprocess.PIPE)
while True:
    with open("log.txt", "a+") as myfile:
        myfile.write(process.stdout.readline())
    process.stdin.write(raw_input() + '\n')

You have two ways of doing this, by creating an iterator from either the read or the readline function. Reading one character at a time:
import subprocess
import sys

with open('test.log', 'w') as f:
    process = subprocess.Popen(your_command, stdout=subprocess.PIPE)
    for c in iter(lambda: process.stdout.read(1), ''):
        sys.stdout.write(c)
        f.write(c)
or, reading line by line:
import subprocess
import sys

with open('test.log', 'w') as f:
    process = subprocess.Popen(your_command, stdout=subprocess.PIPE)
    for line in iter(process.stdout.readline, ''):
        sys.stdout.write(line)
        f.write(line)
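
Note that on Python 3 the pipe yields bytes by default, so the '' sentinel above never matches. A minimal sketch of the same idea for Python 3, assuming your_command is defined as before: pass text=True and iterate over the pipe directly:

import subprocess
import sys

# text=True makes the pipe yield str instead of bytes, and iterating
# over process.stdout yields lines as they become available.
with open('test.log', 'w') as f:
    process = subprocess.Popen(your_command, stdout=subprocess.PIPE, text=True)
    for line in process.stdout:
        sys.stdout.write(line)
        f.write(line)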

Is there a pythonic way to mix error handling and context management?

I'd like to make a program which saves intermediate results, to be able to reuse them in case of a crash.
In this regard, I wanted to try to open the intermediate results file with a context manager. However, a basic context manager does not include error handling, and basic error handling does not include context management. So I was forced to write my own context manager, which works fairly well, but I do not feel like the result is very Pythonically satisfying:
import subprocess
import contextlib
import sys

@contextlib.contextmanager
def myopen(cmd, filename):
    try:
        f = open(filename, 'r')
    except FileNotFoundError:
        pass
        # I didn't put the file creation here to prevent the risk of
        # causing an exception during the handling of the previous one
    else:
        yield f
        f.close()
        return
    print(f'creating new {filename}', file=sys.stderr, flush=True)
    sp = subprocess.run(cmd, shell=True, executable='/bin/bash')
    try:
        f = open(filename, 'r')
    except:
        raise
    else:
        yield f
        f.close()
Which I test with:
filename = "./context_exception.data"

print(f'removing {filename}', file=sys.stderr, flush=True)
sp = subprocess.run("rm "+filename, shell=True, executable='/bin/bash')

cmd = "echo -e 'spam\\neggs' >"+filename
with myopen(cmd, filename) as f:
    lines = f.readlines()
    for i, line in enumerate(lines):
        print(f'line{i:02}={line}', end='')

cmd = "echo -e 'ham\\nbanana' >"+filename
with myopen(cmd, filename) as f:
    lines = f.readlines()
    for i, line in enumerate(lines):
        print(f'line{i:02}={line}', end='')

print(f'removing {filename}', file=sys.stderr, flush=True)
sp = subprocess.run("rm "+filename, shell=True, executable='/bin/bash')
with myopen(cmd, filename) as f:
    lines = f.readlines()
    for i, line in enumerate(lines):
        print(f'line{i:02}={line}', end='')
Is there any nicer way to combine context management and exception handling, without having to manually rewrite the context manager?
And by the way, I'm not very satisfied with the bulkiness of my context manager. Any idea about how to make it more compact would be welcome.
EDIT (Addendum)
Obviously, as @PeterWood noticed, I could just test for the file's existence beforehand, and then wouldn't need to write a context manager or any exception handling at all:
import subprocess
import os
import sys

def create_if_nonexistent(cmd, filename):
    if not os.path.isfile(filename):
        print(f'creating new {filename}', file=sys.stderr, flush=True)
        sp = subprocess.run(cmd, shell=True, executable='/bin/bash')

filename = "./context_exception.data"

print(f'removing {filename}', file=sys.stderr, flush=True)
sp = subprocess.run("rm "+filename, shell=True, executable='/bin/bash')

cmd = "echo -e 'spam\\neggs' >"+filename
create_if_nonexistent(cmd, filename)
with open(filename) as f:
    lines = f.readlines()
    for i, line in enumerate(lines):
        print(f'line{i:02}={line}', end='')

cmd = "echo -e 'ham\\nbanana' >"+filename
create_if_nonexistent(cmd, filename)
with open(filename) as f:
    lines = f.readlines()
    for i, line in enumerate(lines):
        print(f'line{i:02}={line}', end='')

print(f'removing {filename}', file=sys.stderr, flush=True)
sp = subprocess.run("rm "+filename, shell=True, executable='/bin/bash')
create_if_nonexistent(cmd, filename)
with open(filename) as f:
    lines = f.readlines()
    for i, line in enumerate(lines):
        print(f'line{i:02}={line}', end='')
However:
- I wanted to try this because using exception handling is generally considered the Pythonic way of doing these things, and
- the question is about how to properly combine both of them, in order to learn more about Python, not about how to work around the example without actually solving the problem.
Your exception handling can be improved. I would suggest you don't need exception handling at all here; just test for non-existence of the file, and make use of the file's own context manager:
import os
import subprocess
import sys

def myopen(cmd, filename):
    if not os.path.isfile(filename):
        print(f'creating new {filename}', file=sys.stderr, flush=True)
        subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                       text=True, shell=True, executable='/bin/bash')
    return open(filename)
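
Because open() returns a file object, which is itself a context manager, this version of myopen() can be used in a with statement exactly like the original. A minimal usage sketch, reusing cmd and filename from the test code above:

with myopen(cmd, filename) as f:
    # the file object's own __enter__/__exit__ handle the cleanup
    for i, line in enumerate(f):
        print(f'line{i:02}={line}', end='')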

Unable to read a file with Python

I'm trying to read the content of a file with Python 3.8.5, but the output is empty. I don't understand what I'm doing wrong.
Here is the code:
import subprocess
import os

filename = "ls.out"
ls_command = "ls -la"

file = open(filename, "w")
subprocess.Popen(ls_command, stdout=file, shell=True)
file.close()

# So far, all is ok. The file "ls.out" is correctly created and filled
# with the output of the "ls -la" command.

file = open(filename, "r")
for line in file:
    print(line)
file.close()
The output of this script is empty; it doesn't print anything, and I'm not able to see the content of ls.out.
What is not correct here?
Popen creates a new process and launches it, but returns immediately. So the end result is that you've forked your code and have both processes running at once. Your Python code executes faster than ls can start and finish, so the file is still empty when you read it. Thus, you need to wait for the process to finish by adding a call to wait():
import subprocess
import os

filename = "ls.out"
ls_command = "ls -la"

file = open(filename, "w")
proc = subprocess.Popen(ls_command, stdout=file, shell=True)
proc.wait()
file.close()

file = open(filename, "r")
for line in file:
    print(line)
file.close()
Popen merely starts the subprocess. Chances are the file is not yet populated when you open it.
If you want to wait for the Popen object to finish, you have to call its wait method, etc.; but a much better and simpler solution is to use subprocess.check_call() or one of the other higher-level wrappers.
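
For example, a minimal sketch of the same script using check_call(), which blocks until the command exits and raises CalledProcessError on failure (filename and command taken from the question):

import subprocess

# check_call() only returns once "ls -la" has finished, so the file is
# fully written before it is reopened for reading.
with open("ls.out", "w") as out:
    subprocess.check_call("ls -la", stdout=out, shell=True)

with open("ls.out") as f:
    for line in f:
        print(line, end='')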
If the command prints to standard output, why don't you read it directly?
import subprocess
import shlex

result = subprocess.run(
    shlex.split(ls_command),  # avoid shell=True
    check=True, text=True, capture_output=True)
line = result.stdout

How to clear the STDOUT of 'cmd.exe' with subprocess popen?

Problem
The code below is a simulation of a real terminal, in this case a CMD terminal. The problem is that "cls" doesn't clear the stdout of cmd.exe, so the captured stdout string just keeps growing.
Example of problem
Microsoft Windows [versão 10.0.19042.746]
(c) 2020 Microsoft Corporation. Todos os direitos reservados.
C:\Users\Lsy\PycharmProjects\Others>chdir
C:\Users\Lsy\PycharmProjects\Others
C:\Users\Lsy\PycharmProjects\Others>echo test
test
C:\Users\Lsy\PycharmProjects\Others>cls
Type:
Question
How to clear the STDOUT?
Script
import subprocess
f = open('output.txt', 'w')
proc = subprocess.Popen('cmd.exe', stderr=subprocess.STDOUT, stdin=subprocess.PIPE, stdout=f, shell=True)
while True:
command = input('Type:')
command = command.encode('utf-8') + b'\n'
proc.stdin.write(command)
proc.stdin.flush()
with open('output.txt', 'r') as ff:
print(ff.read())
ff.close()
This is not how I recommend using subprocesses, but I'm assuming you have some reason for doing things this way...
Given:
- You've directed the CMD subprocess's stdout to a file called "output.txt".
- The cls command's output is captured in output.txt rather than clearing anything.
- Your terminal then displays the contents of the "output.txt" file (which is never cleared) and leaves a mess.
Therefore: if you want to "clear" your subprocess terminal, you will have to flush your "output.txt" file.
You can trivially do this by checking the "command" variable before encoding and sending it to the subprocess, e.g.:
import subprocess
import os

f = open('output.txt', 'w')
proc = subprocess.Popen('cmd.exe', stderr=subprocess.STDOUT,
                        stdin=subprocess.PIPE, stdout=f, shell=True)
while True:
    command = input('Type:')
    if command == "cls":
        open('output.txt', 'w').close()  # truncate the log file
        os.system('cls' if os.name == 'nt' else 'clear')
    else:
        command = command.encode('utf-8') + b'\n'
        proc.stdin.write(command)
        proc.stdin.flush()
        with open('output.txt', 'r+') as ff:
            print(ff.read())
You could maybe also not redirect the output to a text file...
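
For instance, here is a rough sketch of the no-file approach: read the subprocess's stdout from a pipe on a background thread and echo it as it arrives, so there is nothing on disk to clear. This is only an illustration of the idea, not production code; pipe buffering can still delay interactive output:

import subprocess
import threading

proc = subprocess.Popen('cmd.exe', stdin=subprocess.PIPE,
                        stdout=subprocess.PIPE, stderr=subprocess.STDOUT)

def pump():
    # Echo output as it arrives instead of re-reading a file.
    for line in iter(proc.stdout.readline, b''):
        print(line.decode('utf-8', errors='replace'), end='')

threading.Thread(target=pump, daemon=True).start()

while True:
    command = input('Type:')
    proc.stdin.write(command.encode('utf-8') + b'\n')
    proc.stdin.flush()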

Use StringIO as stdin with Popen

I have the following shell script that I would like to write in Python (of course grep . is actually a much more complex command):
#!/bin/bash
(cat somefile 2>/dev/null || (echo 'somefile not found'; cat logfile)) \
| grep .
I tried this (which lacks an equivalent to cat logfile anyway):
#!/usr/bin/env python
import StringIO
import subprocess

try:
    myfile = open('somefile')
except:
    myfile = StringIO.StringIO('somefile not found')
subprocess.call(['grep', '.'], stdin=myfile)
But I get the error AttributeError: StringIO instance has no attribute 'fileno'.
I know I should use subprocess.communicate() instead of StringIO to send strings to the grep process, but I don't know how to mix both strings and files.
p = subprocess.Popen(['grep', '...'], stdin=subprocess.PIPE,
                     stdout=subprocess.PIPE)
output, output_err = p.communicate(myfile.read())
Don't use bare except, it may catch too much. In Python 3:
#!/usr/bin/env python3
from subprocess import check_output

try:
    file = open('somefile', 'rb', 0)
except FileNotFoundError:
    output = check_output(cmd, input=b'somefile not found')
else:
    with file:
        output = check_output(cmd, stdin=file)
It works for large files (the file is redirected at the file descriptor level; there is no need to load it into memory).
If you have a file-like object (without a real .fileno()), you could write to the pipe directly using the .write() method:
#!/usr/bin/env python3
import io
from shutil import copyfileobj
from subprocess import Popen, PIPE
from threading import Thread

try:
    file = open('somefile', 'rb', 0)
except FileNotFoundError:
    file = io.BytesIO(b'somefile not found')

def write_input(source, sink):
    with source, sink:
        copyfileobj(source, sink)

cmd = ['grep', 'o']
with Popen(cmd, stdin=PIPE, stdout=PIPE) as process:
    Thread(target=write_input, args=(file, process.stdin), daemon=True).start()
    output = process.stdout.read()
The following answer also uses shutil, which is quite efficient, but avoids running a separate thread, which would otherwise never end and go zombie when stdin ends (as in the answer from @jfs):
import os
import subprocess
import io
from shutil import copyfileobj

# 'file' is the path to read, defined elsewhere
file_exists = os.path.isfile(file)
with (open(file) if file_exists else io.StringIO("Some text here ...\n")) as string_io:
    with subprocess.Popen("cat", stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                          universal_newlines=True) as process:
        copyfileobj(string_io, process.stdin)
        # the subsequent code is not executed until copyfileobj ends,
        # ... but the subprocess is effectively using the input.
        process.stdin.close()  # close, or the pipe won't signal EOF
        # Do some online processing on process.stdout, for example...
        for line in process.stdout:
            print(line)  # do something
Alternatively to closing and parsing line by line, if the output is known to fit in memory:
...
stdout_text, stderr_text = process.communicate()

[Python] How to convert/record the output from subprocess into a file

I am using the subprocess module, whose Popen class outputs some results like:
063.245.209.093.00080-128.192.076.180.01039:HTTP/1.1 302 Found
063.245.209.093.00080-128.192.076.180.01040:HTTP/1.1 302 Found
and here is the script I wrote:
import subprocess, shlex, fileinput, filecmp

proc = subprocess.Popen('egrep \'^HTTP/\' *', shell=True,
                        stdin=subprocess.PIPE, stdout=subprocess.PIPE)
stdout_value = proc.communicate()[0]
print 'results:'
print stdout_value
My question is: how do I convert/record the results from stdout into a file?
I appreciate all your responses and help!
import subprocess
import glob

def egrep(pattern, *files):
    """Runs egrep on the files and returns the entire result as a string."""
    cmd = ['egrep', pattern]
    for filespec in files:
        cmd.extend(glob.glob(filespec))
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
    return proc.communicate()[0]

results = egrep(r'^HTTP/', '*')
print 'results:'
print results

# write to file
with open('result_file.txt', 'w') as f:
    f.write(results)
Any of the stdin, stdout, and stderr arguments to subprocess.Popen() can be a file object (or a file descriptor), which will cause the program to read from or write to the given file.
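
For example, a minimal sketch of the same egrep call that lets the output go straight to the file, without the data passing through Python (pattern and output filename taken from the answer above):

import subprocess
import glob

# Popen inherits the file's descriptor as its stdout, so egrep writes
# directly into result_file.txt.
with open('result_file.txt', 'w') as f:
    proc = subprocess.Popen(['egrep', '^HTTP/'] + glob.glob('*'), stdout=f)
    proc.wait()  # block until egrep finishes so the file is complete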
