I know how to run a command using cmd = subprocess.Popen and then subprocess.communicate.
Most of the time I use a string tokenized with shlex.split as 'argv' argument for Popen.
Example with "ls -l":
# Working example: run a single command ("ls -l") and capture its stdout.
import subprocess
import shlex
# shlex.split turns the command string into the argv list Popen expects.
# (Python 2 print statement.)
print subprocess.Popen(shlex.split(r'ls -l'), stdin = subprocess.PIPE, stdout = subprocess.PIPE, stderr = subprocess.PIPE).communicate()[0]
However, pipes do not seem to work... For instance, the following example returns nothing:
# Failing example: Popen runs a single program without a shell, so the
# "|" is passed to "ls" as a literal argument instead of creating a
# pipeline -- "ls" then tries to list files named "|", "sed", "s/a/b/g".
import subprocess
import shlex
print subprocess.Popen(shlex.split(r'ls -l | sed "s/a/b/g"'), stdin = subprocess.PIPE, stdout = subprocess.PIPE, stderr = subprocess.PIPE).communicate()[0]
Can you tell me what I am doing wrong please?
Thanks
I think you want to instantiate two separate Popen objects here, one for 'ls' and the other for 'sed'. You'll want to pass the first Popen object's stdout attribute as the stdin argument to the 2nd Popen object.
Example:
# NOTE: 'ls ...' / 'sed ...' are placeholders -- pass a real argv list
# (e.g. shlex.split('ls -l')) in actual code.
p1 = subprocess.Popen('ls ...', stdout=subprocess.PIPE)
# p1's stdout becomes p2's stdin, exactly like "ls | sed" in a shell.
p2 = subprocess.Popen('sed ...', stdin=p1.stdout, stdout=subprocess.PIPE)
print p2.communicate()
You can keep chaining this way if you have more commands:
p3 = subprocess.Popen('prog', stdin=p2.stdout, ...)
See the subprocess documentation for more info on how to work with subprocesses.
I've made a little function to help with the piping, hope it helps. It will chain Popens as needed.
from subprocess import Popen, PIPE
import shlex
def run(cmd):
    """Run *cmd* locally, supporting shell-style "|" pipelines.

    The command string is split on "|" and each stage is started with
    Popen, wiring every stage's stdout to the next stage's stdin --
    the same plumbing a shell performs for "a | b | c".

    Returns ``(output, err, exit_code)`` where *output* and *err* are
    the decoded stdout/stderr of the LAST stage and *exit_code* is the
    last stage's return code (matching shell pipeline semantics).
    """
    # str.split already returns [cmd] when there is no "|", so no
    # special case for a single command is needed.
    cmd_parts = [part.strip() for part in cmd.split('|')]
    procs = []
    for i, cmd_part in enumerate(cmd_parts):
        # The first stage inherits our stdin (None); later stages read
        # from the previous stage's stdout pipe.
        stdin = None if i == 0 else procs[i - 1].stdout
        procs.append(Popen(shlex.split(cmd_part), stdin=stdin,
                           stdout=PIPE, stderr=PIPE))
        if i > 0:
            # Close our copy of the previous stdout so the earlier
            # stage receives SIGPIPE/EOF if this stage exits early.
            procs[i - 1].stdout.close()
    # communicate() reads until EOF and waits for the last stage.
    output, err = procs[-1].communicate()
    # BUG FIX: the exit code must come from the LAST stage; the
    # original returned p[0].wait() -- the first command's status.
    exit_code = procs[-1].returncode
    # Decode so callers get text, not the repr of a bytes object.
    return output.decode(errors='replace'), err.decode(errors='replace'), exit_code
# Example: a three-stage pipeline, checking the exit status afterwards.
output, err, exit_code = run("ls -lha /var/log | grep syslog | grep gz")
if exit_code != 0:
    print "Output:"
    print output
    print "Error:"
    print err
    # Handle error here
else:
    # Be happy :D
    print output
shlex only splits up spaces according to the shell rules, but does not deal with pipes.
It should, however, work this way:
import subprocess
import shlex
# Build the pipeline by hand: ls's stdout feeds sed's stdin.
sp_ls = subprocess.Popen(shlex.split(r'ls -l'), stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
sp_sed = subprocess.Popen(shlex.split(r'sed "s/a/b/g"'), stdin=sp_ls.stdout, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
sp_ls.stdin.close()  # no input for ls; makes its stdin similar to /dev/null
# BUG FIX: the pipeline's result comes out of the LAST stage, so read
# sp_sed's output -- the original called sp_ls.communicate(), which
# competed with sed for ls's stdout and discarded sed's output entirely.
output = sp_sed.communicate()[0]
print(output)
according to help(subprocess)'s
Replacing shell pipe line
-------------------------
output=`dmesg | grep hda`
==>
p1 = Popen(["dmesg"], stdout=PIPE)
p2 = Popen(["grep", "hda"], stdin=p1.stdout, stdout=PIPE)
output = p2.communicate()[0]
HTH
"""
Why don't you use shell
"""
def output_shell(line):
    """Run *line* through the shell and return its stdout as text.

    Returns None if the process cannot be started or exits non-zero.
    NOTE(review): shell=True means *line* is interpreted by /bin/sh --
    never pass untrusted input to this function.
    """
    try:
        shell_command = Popen(line, stdout=PIPE, stderr=PIPE, shell=True)
    except (OSError, ValueError):
        # OSError: the shell/child could not be spawned;
        # ValueError: invalid Popen arguments.
        return None
    output, err = shell_command.communicate()
    # communicate() already waits for the process and sets returncode,
    # so the extra wait() call of the original was redundant.
    if shell_command.returncode != 0:
        print("Shell command failed to execute")
        return None
    # Decode so callers get text rather than the repr of a bytes object.
    return output.decode(errors='replace')
Thanks to @hernvnc, @glglgl, and @Jacques Gaudin for the answers. I fixed the code from @hernvnc; his version can hang in some scenarios.
import shlex
from subprocess import PIPE
from subprocess import Popen
def run(cmd, input=None):
"""Runs the given command locally and returns the output, err and exit_code."""
if "|" in cmd:
cmd_parts = cmd.split('|')
else:
cmd_parts = []
cmd_parts.append(cmd)
i = 0
p = {}
for cmd_part in cmd_parts:
cmd_part = cmd_part.strip()
if i == 0:
if input:
p[i]=Popen(shlex.split(cmd_part),stdin=PIPE, stdout=PIPE, stderr=PIPE)
else:
p[i]=Popen(shlex.split(cmd_part),stdin=None, stdout=PIPE, stderr=PIPE)
else:
p[i]=Popen(shlex.split(cmd_part),stdin=p[i-1].stdout, stdout=PIPE, stderr=PIPE)
i = i +1
# close the stdin explicitly, otherwise, the following case will hang.
if input:
p[0].stdin.write(input)
p[0].stdin.close()
(output, err) = p[i-1].communicate()
exit_code = p[0].wait()
return str(output), str(err), exit_code
# test case below
# Sample cluster-status report (CMServer/ETCD/Datanode sections) fed in
# as the pipeline's stdin.
inp = b'[ CMServer State ]\n\nnode node_ip instance state\n--------------------------------------------\n1 linux172 10.90.56.172 1 Primary\n2 linux173 10.90.56.173 2 Standby\n3 linux174 10.90.56.174 3 Standby\n\n[ ETCD State ]\n\nnode node_ip instance state\n--------------------------------------------------\n1 linux172 10.90.56.172 7001 StateFollower\n2 linux173 10.90.56.173 7002 StateLeader\n3 linux174 10.90.56.174 7003 StateFollower\n\n[ Cluster State ]\n\ncluster_state : Normal\nredistributing : No\nbalanced : No\ncurrent_az : AZ_ALL\n\n[ Datanode State ]\n\nnode node_ip instance state | node node_ip instance state | node node_ip instance state\n------------------------------------------------------------------------------------------------------------------------------------------------------------------------\n1 linux172 10.90.56.172 6001 P Standby Normal | 2 linux173 10.90.56.173 6002 S Primary Normal | 3 linux174 10.90.56.174 6003 S Standby Normal'
# Without closing stdin, the grep stage would block forever on this input.
cmd = "grep -E 'Primary' | tail -1 | awk '{print $3}'"
run(cmd, input=inp)
Related
On trying to run the grep for the output of previous command using popen returning blank without any error
proc1cmd = "grep " + fetchname1
p1 = subprocess.Popen(['kubectl', 'get', 'abr', '-A'], stdout=subprocess.PIPE)
p2 = subprocess.Popen(proc1cmd, shell=True, stdin=p1.stdout, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
p1.stdout.close()
stdout_list = p2.communicate()[0]
stdout_list = stdout_list.decode()
print(p2.stdout)
print(p2.communicate())
The output I got:
<_io.BufferedReader name=7>
(b'', b'')
You don't need to concoct a pipeline of kubectl + grep here.
# Grab kubectl's full output as text, then filter it in Python instead
# of shelling out to grep.
command = ["kubectl", "get", "abr", "-A"]
# check_output raises on a non-zero exit and, with encoding set, returns str.
kubectl_output = subprocess.check_output(command, encoding="utf-8")
matching_lines = [row for row in kubectl_output.splitlines()
                  if fetchname1 in row]
print(matching_lines)
As in the question, I would like to pass the output of process A (the code below, which reads the contents of outfile.txt) to process B, which will consume process A's output and create a new file from it.
How can I do this, or can someone give me an idea?
import subprocess
# NOTE(review): this runs "python outfile.txt", i.e. tries to execute the
# text file as a script -- presumably the intent is to read/print its
# contents; verify the command.
proc = subprocess.Popen(['python','outfile.txt'],stdout=subprocess.PIPE)
while True:
    line = proc.stdout.readline()
    if not line:
        # readline() returns b'' at EOF (child closed its stdout).
        break
    print("test:", line.rstrip())
I don't know if I understand what you want to do.
For me Process A and Process B means two Popen in one script.
if you want to send directly from one process to another then you can use
process_B = subprocess.Popen(..., stdin=process_A.stdout)
Minimal working code. It runs ls | sort -r on Linux
import subprocess
#import sys
# Wire "ls" straight into "sort -r": sort's stdin is ls's stdout pipe.
process_A = subprocess.Popen(['ls'], stdout=subprocess.PIPE)
process_B = subprocess.Popen(['sort', '-r'], stdin=process_A.stdout, stdout=subprocess.PIPE)
# - show result -
while True:
    line = process_B.stdout.readline()
    if not line:  # b'' means sort closed its stdout
        break
    print(line.decode().rstrip())
or show the result with `communicate()`:
import subprocess
# Same pipeline as above, but collect everything at once.
process_A = subprocess.Popen(['ls'], stdout=subprocess.PIPE)
process_B = subprocess.Popen(['sort', '-r'], stdin=process_A.stdout, stdout=subprocess.PIPE)
# - show result -
# communicate() reads sort's entire output and waits for it to exit.
stout, stderr = process_B.communicate()
result_text = stout.decode()
print(result_text)
EDIT:
If you want to modify data between processes
import subprocess
# Stand up both ends first; B gets a plain stdin pipe we write into.
process_A = subprocess.Popen(['ls', '/'], stdout=subprocess.PIPE)
process_B = subprocess.Popen(['sort', '-r'], stdin=subprocess.PIPE, stdout=subprocess.PIPE)
# Drain A completely, transform the bytes, then feed them all to B.
raw_listing, _ = process_A.communicate()
stdout_A = raw_listing.upper()
stout_B, stderr_B = process_B.communicate(stdout_A)
print(stout_B.decode())
or
import subprocess
process_A = subprocess.Popen(['ls', '/'], stdout=subprocess.PIPE)
process_B = subprocess.Popen(['sort', '-r'], stdout=subprocess.PIPE, stdin=subprocess.PIPE)
# - send from one process to another line by line -
# NOTE(review): writing to B while nobody reads B's stdout could block
# if B produced output early; sort reads all input before writing, so
# this is fine here.  communicate() below closes B's stdin for us.
for line in process_A.stdout:
    line = line.upper()
    process_B.stdin.write(line)
# - get result -
stout_B, stderr_B = process_B.communicate()
print(stout_B.decode())
Consider the following snippet that runs three different subprocesses one after the other with subprocess.run (and notably all with defaulted kwargs):
import subprocess
# Three subprocess.run calls, strictly one after another.
# (args1/args2/args3 and error() are presumably defined elsewhere.)
p1 = subprocess.run(args1)
if p1.returncode != 0:
    error()
p2 = subprocess.run(args2)
if p2.returncode != 0:
    error()
p3 = subprocess.run(args3)
if p3.returncode != 0:
    error()
How can we rewrite this so that the subprocesses are run in parallel to each other?
With Popen right? What does that exactly look like?
For reference, the implementation of subprocess.run is essentially:
# Quoted (essentially) from CPython's implementation of subprocess.run.
with Popen(*popenargs, **kwargs) as process:
    try:
        stdout, stderr = process.communicate(input, timeout=timeout)
    except TimeoutExpired as exc:
        process.kill()
        if _mswindows:
            # On Windows, read whatever output remains after the kill.
            exc.stdout, exc.stderr = process.communicate()
        else:
            process.wait()
        raise
    except:
        process.kill()
        raise
    retcode = process.poll()
return CompletedProcess(process.args, retcode, stdout, stderr)
So something like...
with Popen(args1) as p1:
    with Popen(args2) as p2:
        with Popen(args3) as p3:
            try:
                # All three are already running at this point;
                # communicate() just waits for each in turn.
                p1.communicate(None, timeout=None)
                p2.communicate(None, timeout=None)
                p3.communicate(None, timeout=None)
            except:
                p1.kill()
                p2.kill()
                p3.kill()
                raise
            # NOTE(review): after communicate() the returncodes are set,
            # so poll() is non-None here; "!= 0" would also treat a
            # None (still-running) result as failure.
            if p1.poll() != 0 or p2.poll() != 0 or p3.poll() != 0:
                error()
Is that along the right lines?
I would just use multiprocessing to accomplish your mission but ensuring that your invocation of subprocess.run uses capture_output=True so that the output from the 3 commands running in parallel are not interlaced:
import multiprocessing
import subprocess
def runner(args):
    """Run *args* with output captured; return (stdout, stderr).

    Raises Exception if the command exits with a non-zero status.
    """
    p = subprocess.run(args, capture_output=True, text=True)
    if p.returncode != 0:
        # BUG FIX: this was a raw string (r'...'), so {p.returncode}
        # was never interpolated; an f-string does the substitution.
        raise Exception(f'Return code was {p.returncode}.')
    return p.stdout, p.stderr
def main():
    """Launch the three git commands in a 3-worker pool and print each
    command's stdout (or its error) in submission order."""
    commands = [
        ['git', 'status'],
        ['git', 'log', '-3'],
        ['git', 'branch'],
    ]
    with multiprocessing.Pool(3) as pool:
        pending = [pool.apply_async(runner, args=(c,)) for c in commands]
        for job in pending:
            try:
                out, err = job.get()
            except Exception as exc:  # runner completed with an Exception
                print(exc)
            else:
                print(out, end='')
if __name__ == '__main__':  # required for Windows
    main()
Update
With just subprocess we have something like:
import subprocess
args1 = ['git', 'status']
args2 = ['git', 'log', '-3']
args3 = ['git', 'branch']
# All three children start immediately; Popen does not wait.
p1 = subprocess.Popen(args1)
p2 = subprocess.Popen(args2)
p3 = subprocess.Popen(args3)
# Each communicate() waits for its process and sets returncode.
p1.communicate()
rc1 = p1.returncode
p2.communicate()
rc2 = p2.returncode
p3.communicate()
rc3 = p3.returncode
But, for whatever reason on my Windows platform I never saw the output from the third subprocess command ('git branch'), so there must be some limitation there. Also, if the command you were running required input from stdin before proceeding, that input would have to be provided to the communicate method. But the communicate method would not complete until the entire subprocess has completed and you would get no parallelism, so as a general solution this is not really very good. In the multiprocessing code, there is no problem with having stdin input to communicate.
Update 2
When I recode it as follows, I now get all the expected output. I am not sure why it makes a difference, however. According to the documentation, Popen.communicate:
Interact with process: Send data to stdin. Read data from stdout and stderr, until end-of-file is reached. Wait for process to terminate and set the returncode attribute. The optional input argument should be data to be sent to the child process, or None, if no data should be sent to the child. If streams were opened in text mode, input must be a string. Otherwise, it must be bytes.
So the call should be waiting for the process to terminate. Nevertheless, my preceding comment about the situation where the command you are executing requiring stdin input (via a pipe) would not run in parallel without using multiprocessing.
import subprocess
args1 = ['git', 'status']
args2 = ['git', 'log', '-3']
args3 = ['git', 'branch']
# The with-blocks guarantee each Popen's resources are released, even
# if one of the communicate() calls raises.
with subprocess.Popen(args1) as p1:
    with subprocess.Popen(args2) as p2:
        with subprocess.Popen(args3) as p3:
            p1.communicate()
            rc1 = p1.returncode
            p2.communicate()
            rc2 = p2.returncode
            p3.communicate()
            rc3 = p3.returncode
The following code is meant to redirect the output of the pipe to a file "CONTENT", which already has some content; I want to overwrite it with the output of "sort CONTENT1 | uniq".
But I'm not able to overwrite it, and I also don't know whether the following code is actually redirecting to CONTENT (i.e. whether it is correct). Please help me out.
f1=open('CONTENT','w')
# NOTE(review): rebinding sys.stdout only redirects Python-level prints;
# the child processes inherit the real stdout, so this does not send
# their output to the file.
sys.stdout=f1
p1 = subprocess.Popen(["sort", "CONTENT1"], stdout=subprocess.PIPE)
p2 = subprocess.Popen(["uniq"], stdin=p1.stdout, stdout=subprocess.PIPE)
p1.stdout.close()
# uniq's output goes to a pipe here and is then simply discarded.
p2.communicate()
sys.stdout=sys.__stdout__
Here is how you can catch the output of the first process and pass it to the second, which will then write its output to the file:
import subprocess

# uniq writes straight into the file; sort's output is buffered in
# Python and handed over in one go.
with open('CONTENT','w') as f1:
    p1 = subprocess.Popen(["sort", "CONTENT1"], stdout=subprocess.PIPE)
    p2 = subprocess.Popen(["uniq"], stdin=subprocess.PIPE, stdout=f1)
    sorted_bytes, _ = p1.communicate()  # catch output
    p2.communicate(sorted_bytes)  # pass input
You should not tinker with sys.stdout at all. Note that you need one
call to the method communicate for each process. Note also that
communicate() will buffer all output of p1 before it is passed to
p2.
Here is how you can pass the output of p1 line-by-line to p2:
import subprocess

# Stream sort's output into uniq one line at a time.
with open('CONTENT','w') as f1:
    p1 = subprocess.Popen(["sort", "CONTENT1"], stdout=subprocess.PIPE)
    p2 = subprocess.Popen(["uniq"], stdin=subprocess.PIPE, stdout=f1)
    # readline() returns b'' at EOF, which terminates the iteration.
    for out_line in iter(p1.stdout.readline, b''):
        p2.stdin.write(out_line)
The cleanest way to do the pipe would be the following:
import subprocess

with open('CONTENT','w') as f1:
    # Connect the two processes kernel-side: uniq reads sort directly.
    sort_proc = subprocess.Popen(["sort", "CONTENT1"], stdout=subprocess.PIPE)
    uniq_proc = subprocess.Popen(["uniq"], stdin=sort_proc.stdout, stdout=f1)
    # Drop our reference so sort gets SIGPIPE if uniq exits early.
    sort_proc.stdout.close()
Alternatively, of course, you could just use the facilities of the
shell, which is just made for these tasks:
import subprocess

with open('CONTENT','w') as f1:
    # Let /bin/sh build the pipeline; we only redirect its stdout.
    pipeline = subprocess.Popen("sort CONTENT1 | uniq",
                                shell=True, stdout=f1)
Reference: http://docs.python.org/2/library/subprocess.html
Ok. I've got two shell commands that return string results, which I then need to process further. The first command produces a result (hostnames), and then the next command takes the result of the first, to expand another, more extensive list of hostnames. This is in order to generate a list of hosts to run another shell command on.
I don't need a complete example, just some steering in the right direction.
import subprocess
def getMasterList():
    """Return the master list as a list of words, dropping any lines
    that contain '[' via grep -v."""
    lister = subprocess.Popen(['get_master_list'], stdout=subprocess.PIPE)
    cleaner = subprocess.Popen(['grep', '-v', '\['], stdin=lister.stdout, stdout=subprocess.PIPE)
    raw = cleaner.communicate()[0]
    return raw.split()
def getHostsFromList(master):
    # NOTE(review): this prints the host list instead of returning it,
    # which is why the caller below cannot iterate over the hosts.
    p1 = subprocess.Popen(['get_hosts_from_master', master], stdout=subprocess.PIPE)
    p2 = subprocess.Popen(['grep', '-v', '\['], stdin=p1.stdout, stdout=subprocess.PIPE)
    print p2.communicate()[0]
def doStuff(host):
    # NOTE(review): 'run_shell_command' is a placeholder; the host
    # argument is not actually used in the command yet.
    p = subprocess.Popen('run_shell_command', shell=True, stdout=subprocess.PIPE)
    print p.stdout.read()
for master in getMasterList():
    getHostsFromList(master)
    # How do I iterate further to run doStuff() over the hosts from getHostsFromList(master)?
Maybe I'm missing something, but isn't it just:
def getHostsFromList(master):
    """Return the hosts reported for *master* as a list of hostnames."""
    p1 = subprocess.Popen(['get_hosts_from_master', master], stdout=subprocess.PIPE)
    p2 = subprocess.Popen(['grep', '-v', '\['], stdin=p1.stdout, stdout=subprocess.PIPE)
    # BUG FIX: split the output like getMasterList() does.  Returning
    # the raw string would make the for-loop below iterate over single
    # CHARACTERS, not hostnames.
    return p2.communicate()[0].split()

for master in getMasterList():
    for host in getHostsFromList(master):
        doStuff(host)