I'm using subprocess to communicate with an interactive command-line application, but after I send the first command to the application, no further input seems to reach the subprocess. Can anyone show me where my mistake is?
Here's the code:
from subprocess import Popen, PIPE, STDOUT
from threading import Thread
from queue import Queue, Empty
import time
class Prolog(object):
def __init__(self):
"""
Opens a subprocess running swi-prolog and reads all the header stuff that it writes
"""
self.prolog = Popen(r"C:\Program Files\swipl\bin\swipl.exe", stdin=PIPE, stdout=PIPE, stderr=STDOUT, bufsize=1)
def enqueue_output(out, queue):
for line in iter(out.readline, b''):
queue.put(line)
out.close()
        # This thread runs in the background for as long as the program does, enqueueing all the output from Prolog
self.q = Queue()
t = Thread(target=enqueue_output, args=(self.prolog.stdout, self.q))
t.daemon = True # thread dies with the program
t.start()
out = True
while out:
out = self.get_line()
def get_line(self):
"""
read line without blocking
:return: the next line in the output, else False if no more output
"""
try:
            line = self.q.get(timeout=.1)
except Empty:
return False
else: # got line
return line
def send_query(self, query):
"""
Sends a query to the Prolog shell
:param query: string containing the query to be sent to the prolog shell
:return: None
"""
query = query + "\n"
query = bytes(query, encoding="utf-8")
self.prolog.stdin.write(query)
self.prolog.stdin.flush()
def get_output(self):
output = self.get_line()
if not output:
return False
else:
return output[:-2]
def query(self, query):
output = []
self.send_query(query)
temp = self.get_output()
print(temp)
while not temp:
time.sleep(.1)
temp = self.get_output()
output.append(temp)
while not temp == b'true.' and not temp == b'false.':
self.send_query(";")
temp = self.get_output()
print(temp)
while not temp:
time.sleep(.1)
temp = self.get_output()
output.append(temp)
print(output)
if __name__ == "__main__":
p = Prolog()
p.query('[\"GCD.pl\"].')
p.get_output()
p.query("permut([a, b, c], X).")
The problem comes during the second call to p.query. The command doesn't seem to be passed to the shell at all, so there is never any output, and the program just gets stuck in the "while not temp" loop of the query method.
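In case it helps anyone reproduce this: bounding the wait makes the hang show up as a timeout instead of a freeze. A rough sketch, assuming the Prolog class above (wait_for_output is a hypothetical helper, not part of the class):

import time

def wait_for_output(prolog, timeout=5.0, poll=0.1):
    """Poll prolog.get_output() until a line arrives or `timeout` elapses.
    Returns the line, or None on timeout, so a missing response surfaces
    as a timeout instead of an infinite loop."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        line = prolog.get_output()
        if line:
            return line
        time.sleep(poll)
    return None  # no output arrived: the command may never have reached swipl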
I am trying to write a script that reads the output of another Python file. Because the called script may prompt for input, the main script handles stdin.
I've run into a strange phenomenon: the called script's prompt ('please input something') is displayed only after the user has typed their input. Any ideas?
eating.py (main py)
# coding: utf-8
import subprocess
from queue import Queue
from threading import Thread
class TextStreaming(Thread):
def __init__(self, stream, queue):
super().__init__()
self._streaming = stream
self.queue = queue
def run(self) -> None:
        for line in iter(self._streaming.readline, b''):  # the stream is binary, so the EOF sentinel must be b''
self.queue.put(line)
cmd = 'python working.py'
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, stdin=subprocess.PIPE)
queue = Queue()
mytest = TextStreaming(p.stdout, queue)
mytest.daemon = True  # daemon thread dies with the program
mytest.start()
is_reading = True
while is_reading and mytest.is_alive():
while is_reading and not queue.empty():
line = queue.get()
line = line.decode('utf-8')
if line:
print(line)
if 'counting' in line:
data = input()
data = data.encode('utf-8')
p.stdin.write(data)
p.stdin.write(b'\n')
p.stdin.flush()
working.py (calling script)
# coding: utf-8
import time
timeout = 30
deadtime = time.time() + timeout
count = 0
while time.time() < deadtime:
count = count + 1
print(f'{time.time()} I am working')
time.sleep(1)
if count == 5:
print('counting came\n')
data = input('please input something\n')
print(f'{data} inputted')
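A likely explanation, just from reading the two scripts: when count hits 5, the child emits both the 'counting came' line and the 'please input something' prompt, but the parent triggers on 'counting' and calls input() immediately, before it has printed the prompt line that is most likely already sitting in the queue; the prompt therefore only appears after the user has typed. Draining the queue before prompting should restore the expected order. A rough sketch, assuming the queue and reader loop from eating.py above (the timeout value is illustrative):

import queue as queue_module  # alias: `queue` above is an instance name

def drain(q, timeout=0.5):
    """Print everything currently queued, waiting briefly for stragglers."""
    while True:
        try:
            line = q.get(timeout=timeout)
        except queue_module.Empty:
            return
        print(line.decode('utf-8'), end='')

# inside the reader loop, instead of prompting immediately:
#     if 'counting' in line:
#         drain(queue)   # show the child's prompt first
#         data = input()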
I'm writing a subprocess-based Python program that acts as a proxy between the user input and the subprocess (trying to go beyond pexpect). I've taken this thread as a reference, along with a code chunk from pexpect (the _read_incoming() method of popen_spawn) to read the output (the fcntl method worked, but not satisfactorily).
The code runs but has a problem: there seems to be an additional carriage return being sent to the process. This causes me issues when I try to do things like sending passwords to ssh, etc.
Could you look into what might be the issue? Thanks!
The code is as follows:
from queue import Queue, Empty
from threading import Thread
import subprocess
import signal
import fcntl
import os
global terminating
terminating = False
def setNonBlocking(fd):
"""
Set the file description of the given file descriptor to non-blocking.
"""
print(fd)
flags = fcntl.fcntl(fd, fcntl.F_GETFL)
flags = flags | os.O_NONBLOCK
fcntl.fcntl(fd, fcntl.F_SETFL, flags)
def enqueue(out, q):
fileno = out.fileno()
while not terminating:
buf = b''
try:
buf = os.read(fileno, 1024)
if buf and len(buf)>0:
q.put(buf)
except OSError as e:
#print("OS error: {0}".format(e))
pass
if not buf:
q.put(buf)
# for line in iter(out.readline, b''):
# if len(line.strip()) > 0:
# print(line)
# q.put(line)
out.close()
print('Terminating')
return
def get_output(q):
out_str = bytes()
while True:
try:
incoming = q.get_nowait()
except Empty:
break
else:
if incoming is None:
break
else:
out_str += incoming
if out_str:
return out_str
else:
return b''
def explore(cmd="/bin/bash"):
global terminating
universal_newlines = False
p = subprocess.Popen([cmd], stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.STDOUT,
bufsize=0, shell=False, universal_newlines=universal_newlines)
#setNonBlocking(p.stdout)
outQueue = Queue()
outThread = Thread(target=enqueue, args=(p.stdout, outQueue))
outThread.daemon = True
outThread.start()
while True:
try:
someInput = input()
print('[In]:'+someInput)
someInput += '\n'
if not universal_newlines:
p.stdin.write(someInput.encode('utf-8'))
else:
p.stdin.write(someInput)
p.stdin.flush()
out = get_output(outQueue).decode('utf-8')
print('[Out]:'+out)
#p.communicate(someInput+'\n')
except KeyboardInterrupt:
print('Interrupting')
p.send_signal(signal.SIGINT)
terminating = True
outThread.join()
break
p.wait()
if __name__ == '__main__':
explore()
Example run:
ls
[In]:ls
[Out]:
[In]:
[Out]:explorer.py
__init__.py
^CInterrupting
Terminating
The second [In] was an Enter pressed by the user.
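The lag in the example run has a plausible mechanical cause: after 'ls\n' is written, get_output() drains the queue immediately and non-blockingly, before bash has produced anything, so each command's output only shows up on the next iteration (hence the Enter needed to flush it out). A rough sketch of a quiescence-based replacement, assuming the outQueue and the Empty import above (the timeout values are illustrative):

def get_output(q, first_timeout=1.0, grace=0.1):
    """Collect output until the stream has been quiet for `grace` seconds."""
    chunks = []
    timeout = first_timeout  # allow time for the first bytes to appear
    while True:
        try:
            chunks.append(q.get(timeout=timeout))
        except Empty:
            break
        timeout = grace  # keep reading while data keeps arriving
    return b''.join(chunks)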
Update:
I tested the alternative using pexpect's popen_spawn module. Same result:
from pexpect.popen_spawn import PopenSpawn as Spawn
import signal
def explore(cmd="/bin/bash"):
p = Spawn(cmd)
while True:
try:
someInput = input()
print('[In]:'+someInput)
p.sendline(someInput)
out = p.read_nonblocking(size=1024, timeout=-1).decode('utf-8')
print('[Out]:'+out)
#p.communicate(someInput+'\n')
except KeyboardInterrupt:
print('Interrupting')
p.sendeof()
p.kill(signal.SIGINT)
break
if __name__ == '__main__':
explore()
When I use subprocess in Python to redirect stdout, I get very slow throughput. Am I doing it wrong?
Basically, I pipe the standard output of an external program into a queue. Then, in another function, I print it to the console.
Here is some sample code, using hexdump to generate random output:
from subprocess import Popen, PIPE
from queue import Queue
import sys
from threading import Thread, Event
import threading
class Buffer(Queue):
def __init__(self, *args, **kwargs):
Queue.__init__(self, *args, **kwargs)
def write(self, line):
self.put_nowait(line)
self.join()
def read(self):
element = self.get_nowait()
self.task_done()
return element
def write_output(buffer, stopped):
hexdump = Popen(['hexdump', '-C', '/dev/urandom'], stdout=PIPE)
while hexdump.returncode is None:
for line in hexdump.stdout.readlines(8192):
buffer.write(line)
if stopped.is_set():
hexdump.terminate()
hexdump.wait()
print('process terminated.')
break
def read_output(buffer, stopped):
while not stopped.is_set():
while not buffer.empty():
output = buffer.read()
print('********* output: {}'.format(output))
sys.stdout.flush()
print('stopped')
sys.stdout.flush()
buffer = Buffer()
stopped = Event()
generate_random_output = Thread(target=write_output, args=(buffer, stopped))
generate_random_output.name = 'generate_random_output'
generate_random_output.start()
process_output = Thread(target=read_output, args=(buffer, stopped))
process_output.name = 'process_output'
process_output.start()
try:
while True:
continue
except KeyboardInterrupt:
stopped.set()
generate_random_output.join()
process_output.join()
print('finished generating')
print('finished processing')
I would appreciate any help.
Instead of redirecting your output to a Queue, process it directly. Note that Buffer.write() calls self.join(), which blocks until the reader has called task_done() for every item, so the producer and consumer run in lockstep, one line at a time; skipping the queue avoids that:
def write_output(buffer, stopped):
hexdump = Popen(['hexdump', '-C', '/dev/urandom'], stdout=PIPE)
while hexdump.poll() is None:
while not stopped.is_set():
for line in iter(hexdump.stdout.readline, b''):
print('********* output: %s' % line.decode(), end='')
sys.stdout.flush()
hexdump.terminate()
hexdump.wait()
print('process terminated.')
break
I need to run an interactive Bash instance in a separate process in Python with its own dedicated TTY (I can't use pexpect).
I used this code snippet I commonly see used in similar programs:
master, slave = pty.openpty()
p = subprocess.Popen(["/bin/bash", "-i"], stdin=slave, stdout=slave, stderr=slave)
os.close(slave)
x = os.read(master, 1026)
print x
subprocess.Popen.kill(p)
os.close(master)
But when I run it I get the following output:
$ ./pty_try.py
bash: cannot set terminal process group (10790): Inappropriate ioctl for device
bash: no job control in this shell
Strace of the run shows some errors:
...
readlink("/usr/bin/python2.7", 0x7ffc8db02510, 4096) = -1 EINVAL (Invalid argument)
...
ioctl(3, SNDCTL_TMR_TIMEBASE or SNDRV_TIMER_IOCTL_NEXT_DEVICE or TCGETS, 0x7ffc8db03590) = -1 ENOTTY (Inappropriate ioctl for device)
...
readlink("./pty_try.py", 0x7ffc8db00610, 4096) = -1 EINVAL (Invalid argument)
The code snippet seems pretty straightforward; is Bash not getting something it needs? What could be the problem here?
This is a solution for running an interactive command in a subprocess. It uses a pseudo-terminal to make stdout non-blocking (some commands also need a tty device, e.g. bash), and select to handle input and output to the subprocess.
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import select
import termios
import tty
import pty
from subprocess import Popen
command = 'bash'
# command = 'docker run -it --rm centos /bin/bash'.split()
# save original tty setting then set it to raw mode
old_tty = termios.tcgetattr(sys.stdin)
tty.setraw(sys.stdin.fileno())
# open pseudo-terminal to interact with subprocess
master_fd, slave_fd = pty.openpty()
try:
    # use os.setsid() to make it run in a new process group, or bash job control will not be enabled
p = Popen(command,
preexec_fn=os.setsid,
stdin=slave_fd,
stdout=slave_fd,
stderr=slave_fd,
universal_newlines=True)
while p.poll() is None:
r, w, e = select.select([sys.stdin, master_fd], [], [])
if sys.stdin in r:
d = os.read(sys.stdin.fileno(), 10240)
os.write(master_fd, d)
elif master_fd in r:
o = os.read(master_fd, 10240)
if o:
os.write(sys.stdout.fileno(), o)
finally:
# restore tty settings back
termios.tcsetattr(sys.stdin, termios.TCSADRAIN, old_tty)
This is the solution that worked for me in the end (as suggested by qarma):
import ctypes
import os
import pty
import subprocess

libc = ctypes.CDLL('libc.so.6')
master, slave = pty.openpty()
p = subprocess.Popen(["/bin/bash", "-i"], preexec_fn=libc.setsid, stdin=slave, stdout=slave, stderr=slave)
os.close(slave)
... do stuff here ...
x = os.read(master, 1026)
print x
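On Python 3 the ctypes call shouldn't be needed: Popen can put the child into its own session directly via start_new_session=True (or preexec_fn=os.setsid, as in the select-based answer above). A minimal sketch of the same snippet without ctypes:

import os
import pty
import subprocess

master, slave = pty.openpty()
# start_new_session=True makes the child call setsid(), so bash gets its own
# session and job control works without touching libc
p = subprocess.Popen(["/bin/bash", "-i"], start_new_session=True,
                     stdin=slave, stdout=slave, stderr=slave)
os.close(slave)
x = os.read(master, 1026)
print(x)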
Here is a full object-oriented solution for running interactive shell commands with TTYs, using threads and queues for stdout and stderr IO handling. It took me a while to build from multiple sources, but it has worked perfectly so far on Unix/Linux systems, including as part of a Juniper op script. I thought I would post it here to save others the time of trying to build something like this.
import pty
import re
import select
import threading
from datetime import datetime, timedelta
import os
import logging
import subprocess
import time
from queue import Queue, Empty
lib_logger = logging.getLogger("lib")
# Default wait in seconds for open()/wait_for(); an assumed value, since this
# snippet uses DEVICE_TIMEOUT without defining it
DEVICE_TIMEOUT = 30
# Handler function to be run as a thread for pulling pty channels from an interactive shell
def _pty_handler(pty_master, logger, queue, stop):
poller = select.poll()
poller.register(pty_master, select.POLLIN)
while True:
# Stop handler if flagged
if stop():
logger.debug("Disabling pty handler for interactive shell")
break
fd_event = poller.poll(100)
for descriptor, event in fd_event:
# Read data from pipe and send to queue if there is data to read
if event == select.POLLIN:
data = os.read(descriptor, 1).decode("utf-8")
if not data:
break
# logger.debug("Reading in to handler queue: " + data)
queue.put(data)
# Exit handler if stdout is closing
elif event == select.POLLHUP:
logger.debug("Disabling pty handler for interactive shell")
break
# Function for reading outputs from the given queue by draining it and returning the output
def _get_queue_output(queue: Queue) -> str:
value = ""
try:
while True:
value += queue.get_nowait()
except Empty:
return value
# Helper function to create the needed list for popen and print the command run to the logger
def popen_command(command, logger, *args):
popen_list = list()
popen_list.append(command)
command_output = command
for arg in args:
popen_list.append(arg)
command_output += " " + arg
lib_logger.debug("Making Popen call using: " + str(popen_list))
logger.debug("")
logger.debug(command_output)
logger.debug("")
return popen_list
# Class for creating an interactive shell, sending commands to it, and logging its output to loggers
class InteractiveShell(object):
def __init__(self, command, logger, *args):
self.logger = logger
self.command = command
self.process = None
self.popen_list = popen_command(command, logger, *args)
self.master_stdout = None
self.slave_stdout = None
self.master_stderr = None
self.slave_stderr = None
self.stdout_handler = None
self.stderr_handler = None
self.stdout_queue = None
self.stderr_queue = None
self.stop_handlers = False
# Open interactive shell and setup all threaded IO handlers
def open(self, shell_prompt, timeout=DEVICE_TIMEOUT):
# Create PTYs
self.master_stdout, self.slave_stdout = pty.openpty()
self.master_stderr, self.slave_stderr = pty.openpty()
# Create shell subprocess
self.process = subprocess.Popen(self.popen_list, stdin=self.slave_stdout, stdout=self.slave_stdout,
stderr=self.slave_stderr, bufsize=0, start_new_session=True)
lib_logger.debug("")
lib_logger.debug("Started interactive shell for command " + self.command)
lib_logger.debug("")
# Create thread and queues for handling pty output and start them
self.stdout_queue = Queue()
self.stderr_queue = Queue()
self.stdout_handler = threading.Thread(target=_pty_handler, args=(self.master_stdout,
lib_logger,
self.stdout_queue,
lambda: self.stop_handlers))
self.stderr_handler = threading.Thread(target=_pty_handler, args=(self.master_stderr,
lib_logger,
self.stderr_queue,
lambda: self.stop_handlers))
self.stdout_handler.daemon = True
self.stderr_handler.daemon = True
lib_logger.debug("Enabling stderr handler for interactive shell " + self.command)
self.stderr_handler.start()
lib_logger.debug("Enabling stdout handler for interactive shell " + self.command)
self.stdout_handler.start()
# Wait for shell prompt
lib_logger.debug("Waiting for shell prompt: " + shell_prompt)
return self.wait_for(shell_prompt, timeout)
# Close interactive shell which should also kill all threaded IO handlers
def close(self):
# Wait 5 seconds before closing to let shell handle all input and outputs
time.sleep(5)
# Stop IO handler threads and terminate the process then wait another 5 seconds for cleanup to happen
self.stop_handlers = True
self.process.terminate()
time.sleep(5)
# Check for any additional output from the stdout handler
output = ""
while True:
data = _get_queue_output(self.stdout_queue)
if data != "":
output += data
else:
break
for line in iter(output.splitlines()):
self.logger.debug(line)
# Check for any additional output from the stderr handler
output = ""
while True:
data = _get_queue_output(self.stderr_queue)
if data != "":
output += data
else:
break
for line in iter(output.splitlines()):
self.logger.error(line)
# Cleanup PTYs
os.close(self.master_stdout)
os.close(self.master_stderr)
os.close(self.slave_stdout)
os.close(self.slave_stderr)
lib_logger.debug("Interactive shell command " + self.command + " terminated")
    # Run a series of commands given as a list of [command, wait_for] pairs. If no wait_for is
    # needed, provide only the command. Returns whether all the commands completed successfully.
# Ex:
# [
# ["ssh jsas#" + vnf_ip, r"jsas#.*:"],
# ["juniper123", r"jsas#.*\$"],
# ["sudo su", r".*jsas:"],
# ["juniper123", r"root#.*#"],
# ["usermod -p 'blah' jsas"]
# ]
def run_commands(self, commands_list):
shell_status = True
for command in commands_list:
shell_status = self.run(command[0])
if shell_status and len(command) == 2:
shell_status = self.wait_for(command[1])
# Break out of running commands if a command failed
if not shell_status:
break
return shell_status
# Run given command and return False if error occurs otherwise return True
def run(self, command, sleep=0):
# Check process to make sure it is still running and if not grab the stderr output
        if self.process.poll() is not None:
            self.logger.error("Interactive shell command " + self.command + " closed with return code: " +
                              str(self.process.returncode))
data = _get_queue_output(self.stderr_queue)
if data != "":
self.logger.error("Interactive shell error messages:")
for line in iter(data.splitlines()):
self.logger.error(line)
return False
# Write command to process and check to make sure a newline is in command otherwise add it
if "\n" not in command:
command += "\n"
os.write(self.master_stdout, command.encode("utf-8"))
if sleep:
time.sleep(sleep)
return True
    # Wait for a specific regex in the output before continuing. Return False if the wait time
    # expires, otherwise return True.
def wait_for(self, this, timeout=DEVICE_TIMEOUT):
timeout = datetime.now() + timedelta(seconds=timeout)
output = ""
# Keep searching for output until timeout occurs
while timeout > datetime.now():
data = _get_queue_output(self.stdout_queue)
if data != "":
# Add to output line and check for match to regex given and if match then break and send output to
# logger
output += data
lib_logger.debug("Checking for " + this + " in data: ")
for line in iter(output.splitlines()):
lib_logger.debug(line)
if re.search(r"{}\s?$".format(this), output):
break
time.sleep(1)
# Send output to logger
for line in iter(output.splitlines()):
self.logger.debug(line)
# If wait time expired print error message and return False
if timeout < datetime.now():
self.logger.error("Wait time expired when waiting for " + this)
return False
return True
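A hypothetical usage sketch, using only the methods defined above (the command, prompt regexes, and logger name are illustrative, not part of a fixed API):

logger = logging.getLogger("session")
shell = InteractiveShell("/bin/bash", logger)
if shell.open(r"\$"):                  # wait for the shell prompt
    shell.run_commands([
        ["echo hello", r"\$"],         # [command, regex to wait for]
        ["uname -a", r"\$"],
    ])
shell.close()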
I'm trying to run a lengthy command within Python that outputs to both stdout and stderr. I'd like to poll the subprocess and write the output to separate files.
I tried the following, based on this answer: Non-blocking read on a subprocess.PIPE in python
import os
import shlex
import subprocess
from Queue import Queue, Empty
from threading import Thread
def send_cmd(cmd, shell=False):
"""
Send cmd to the shell
"""
if not isinstance(cmd, list): cmd = shlex.split(cmd)
params = {'args' : cmd,
'stdout' : subprocess.PIPE,
'stderr' : subprocess.PIPE,
'shell' : shell}
proc = subprocess.Popen(**params)
return proc
def monitor_command(process, stdout_log=os.devnull, stderr_log=os.devnull):
"""
Monitor the process that is running, and log it if desired
"""
def enqueue_output(out, queue):
for line in iter(out.readline, b''):
queue.put(line)
def setup_process(log_name, proc):
FID = open(log_name, 'w')
queue = Queue()
thread = Thread(target=enqueue_output, args=(proc, queue))
thread.daemon = True # Thread dies with program
thread.start()
return (queue, FID)
def check_queues(queue_list, errors):
for queue, FID in queue_list:
try:
line = queue.get_nowait()
if 'error' in line.lower() or 'failed' in line.lower():
errors.append(line)
except Empty:
pass
else:
FID.write(line)
errors = []
queue_list = []
for log, proc in [(stdout_log, process.stdout), (stderr_log, process.stderr)]:
        queue_list.append(setup_process(log, proc))
while process.poll() is None:
check_queues(queue_list, errors)
    while not (queue_list[0][0].empty() and queue_list[1][0].empty()):
check_queues(queue_list, errors)
for queue, FID in queue_list:
FID.close()
return errors
process = send_cmd('long_program.exe')
errors = monitor_command(process, stdout_log='stdout.log', stderr_log='stderr.log')
But the output file for stdout is empty, and the output file for stderr is only a few lines long, whereas both should be quite large.
What am I missing?
I did that once... here is some old code I wrote:
class Process_Communicator():
    def join(self):
        self.te.join()
        self.to.join()
        self.running = False
        self.aggregator.join()
        # no thread is started for enqueue_in; update() services stdin_queue instead
def enqueue_in(self):
while self.running and self.p.stdin is not None:
while not self.stdin_queue.empty():
s = self.stdin_queue.get()
                self.p.stdin.write(str(s) + '\n')
pass
def enqueue_output(self):
if not self.p.stdout or self.p.stdout.closed:
return
out = self.p.stdout
for line in iter(out.readline, b''):
self.qo.put(line)
# out.flush()
def enqueue_err(self):
if not self.p.stderr or self.p.stderr.closed:
return
err = self.p.stderr
for line in iter(err.readline, b''):
self.qe.put(line)
def aggregate(self):
while (self.running):
self.update()
self.update()
def update(self):
line = ""
try:
            while True:  # not_empty is a Condition (always truthy); exit via the Empty exception below
                line = self.qe.get_nowait()
self.unbblocked_err += line
except Queue.Empty:
pass
line = ""
try:
            while True:  # as above, exit via the Empty exception
                line = self.qo.get_nowait()
self.unbblocked_out += line
except Queue.Empty:
pass
while not self.stdin_queue.empty():
s = self.stdin_queue.get()
self.p.stdin.write(str(s))
def get_stdout(self, clear=True):
ret = self.unbblocked_out
if clear:
self.unbblocked_out = ""
return ret
def has_stdout(self):
ret = self.get_stdout(False)
if ret == '':
return None
else:
return ret
    def get_stderr(self, clear=True):
        ret = self.unbblocked_err  # read the stderr buffer, not stdout's
        if clear:
            self.unbblocked_err = ""
        return ret
    def has_stderr(self):
        ret = self.get_stderr(False)
        if ret == '':
            return None
        else:
            return ret
def __init__(self, subp):
'''This is a simple class that collects and aggregates the
output from a subprocess so that you can more reliably use
the class without having to block for subprocess.communicate.'''
self.p = subp
self.unbblocked_out = ""
self.unbblocked_err = ""
self.running = True
self.qo = Queue.Queue()
self.to = threading.Thread(name="out_read",
target=self.enqueue_output,
args=())
self.to.daemon = True # thread dies with the program
self.to.start()
self.qe = Queue.Queue()
self.te = threading.Thread(name="err_read",
target=self.enqueue_err,
args=())
self.te.daemon = True # thread dies with the program
self.te.start()
self.stdin_queue = Queue.Queue()
self.aggregator = threading.Thread(name="aggregate",
target=self.aggregate,
args=())
self.aggregator.daemon = True # thread dies with the program
self.aggregator.start()
pass
You may not need the whole example, but feel free to cut, copy and paste what you need. It also shows how I did the threading.
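A hypothetical usage sketch (Python 2, to match the Queue.Queue imports above; 'cat' and the sleep are illustrative):

import subprocess
import time

p = subprocess.Popen(['cat'], stdin=subprocess.PIPE,
                     stdout=subprocess.PIPE, stderr=subprocess.PIPE)
pc = Process_Communicator(p)
pc.stdin_queue.put("hello\n")  # the aggregator thread forwards this to cat
time.sleep(0.5)                # give the reader threads a moment
print pc.get_stdout()          # non-blocking: returns whatever has arrived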
The code looks more complicated than the task requires. I don't see why you need to call process.poll() or queue.get_nowait() here. To deliver a subprocess' stdout/stderr to several sinks, you could start with teed_call(), which accepts arbitrary file-like objects: you could pass log files, plus special file-like objects that accumulate errors in their .write() methods.
To fix your code with minimal changes, call .join() on the reader threads: even once process.poll() is not None, i.e. the subprocess has exited, there can still be pending output, and joining the reader threads ensures that all of it is read.
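A rough sketch combining both suggestions, under the question's assumptions (the log-file handling and the error heuristic mirror the question's code; teed_call itself is not shown):

import subprocess
from threading import Thread

def tee_pipe(pipe, log_name, errors):
    """Read `pipe` to EOF, writing each line to the log and collecting error lines."""
    with open(log_name, 'w') as log:
        for line in iter(pipe.readline, b''):
            text = line.decode(errors='replace')
            if 'error' in text.lower() or 'failed' in text.lower():
                errors.append(text)
            log.write(text)

def monitor_command(cmd, stdout_log, stderr_log):
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    errors = []
    threads = [Thread(target=tee_pipe, args=(proc.stdout, stdout_log, errors)),
               Thread(target=tee_pipe, args=(proc.stderr, stderr_log, errors))]
    for t in threads:
        t.start()
    proc.wait()
    for t in threads:
        t.join()  # the readers drain any pending output before errors are reported
    return errors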