Use Python's pty to create a live console - python

I'm trying to create an execution environment/shell that will remotely execute on a server and stream stdout/stderr/stdin over a socket to be rendered in a browser. I have currently tried the approach of using subprocess.run with a PIPE. The problem is that I get stdout only after the process has completed. What I want to achieve is a line-by-line, pseudo-terminal sort of implementation.
My current implementation
test.py
def greeter():
    for _ in range(10):
        print('hello world')

greeter()
and in the shell
>>> import subprocess
>>> result = subprocess.run(['python3', 'test.py'], stdout=subprocess.PIPE)
>>> print(result.stdout.decode('utf-8'))
hello world
hello world
hello world
hello world
hello world
hello world
hello world
hello world
hello world
hello world
How would one implement even this simple example with pty?
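For reference, a minimal sketch of what I imagine the pty approach looks like (untested; Linux-only; because the child sees a tty, its stdout is line-buffered):
import os
import pty
import subprocess

# Run the child on the slave end of a pseudo-terminal, then read lines
# from the master end as they appear.
master_fd, slave_fd = pty.openpty()
proc = subprocess.Popen(['python3', 'test.py'],
                        stdout=slave_fd, stderr=slave_fd)
os.close(slave_fd)  # only the child needs the slave end now

try:
    while True:
        data = os.read(master_fd, 1024)  # blocks until output is available
        if not data:
            break
        print(data.decode(), end='')
except OSError:
    # Linux raises EIO on the master once the child closes the slave end.
    pass
finally:
    proc.wait()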

If your application is going to work asynchronously with multiple tasks, like reading data from stdout and then writing it to a websocket, I suggest using asyncio.
Here is an example that runs a process and redirects its output into a websocket:
import asyncio
import asyncio.subprocess
import sys

from aiohttp.web import Application, WebSocketResponse, run_app

async def on_websocket(request):
    # Prepare aiohttp's websocket...
    resp = WebSocketResponse()
    await resp.prepare(request)
    # ... and store it in a global list so it can be closed on shutdown.
    request.app['sockets'].append(resp)

    p = await asyncio.create_subprocess_exec(sys.executable,
                                             '/tmp/test.py',
                                             stdout=asyncio.subprocess.PIPE,
                                             stderr=asyncio.subprocess.PIPE)

    # Schedule reading from stdout and stderr as asynchronous tasks.
    stdout_f = asyncio.ensure_future(p.stdout.readline())
    stderr_f = asyncio.ensure_future(p.stderr.readline())

    # returncode will be set upon the process's termination.
    while p.returncode is None:
        # Wait for a line in either stdout or stderr.
        await asyncio.wait((stdout_f, stderr_f), return_when=asyncio.FIRST_COMPLETED)
        # If a task is done, then a line is available.
        if stdout_f.done():
            line = stdout_f.result().decode()
            stdout_f = asyncio.ensure_future(p.stdout.readline())
            await resp.send_str(f'stdout: {line}')
        if stderr_f.done():
            line = stderr_f.result().decode()
            stderr_f = asyncio.ensure_future(p.stderr.readline())
            await resp.send_str(f'stderr: {line}')
    return resp

async def on_shutdown(app):
    for ws in app['sockets']:
        await ws.close()

async def init():
    app = Application()
    app['sockets'] = []
    app.router.add_get('/', on_websocket)
    app.on_shutdown.append(on_shutdown)
    return app

loop = asyncio.get_event_loop()
app = loop.run_until_complete(init())
run_app(app)
It uses aiohttp and is based on the web_ws and subprocess streams examples.
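To see the stream, connect with any websocket client; for instance, a small aiohttp-based client (a sketch, assuming the server above runs on run_app's default port 8080):
import asyncio

import aiohttp

async def main():
    async with aiohttp.ClientSession() as session:
        # Connect to the server defined above and print each frame it sends.
        async with session.ws_connect('http://localhost:8080/') as ws:
            async for msg in ws:
                print(msg.data)

asyncio.get_event_loop().run_until_complete(main())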

I'm sure there's a dupe around somewhere, but I couldn't find it quickly:
import subprocess

# cmd is your command, e.g. ['python3', 'test.py']
process = subprocess.Popen(cmd, stderr=subprocess.PIPE, stdout=subprocess.PIPE, bufsize=0)
for out in iter(process.stdout.readline, b""):
    print(out)

If you are on Windows then you will be fighting an uphill battle for a very long time, and I am sorry for the pain you will endure (been there). If you are on Linux, however, you can use the pexpect module. Pexpect allows you to spawn a background child process which you can perform bidirectional communication with. This is useful for all types of system automation, but a very common use case is ssh.
import pexpect

child = pexpect.spawn('python3 test.py')
message = 'hello world'
while True:
    try:
        child.expect(message)
    except pexpect.exceptions.EOF:
        break
    input('child sent: "%s"\nHit enter to continue: ' %
          (message + child.before.decode()))
print('reached end of file!')
I have found it very useful to create a class to handle something complicated like an ssh connection, but if your use case is simple enough, that might not be appropriate or necessary. The way child.before is of type bytes and omits the pattern you are searching for can be awkward, so it may make sense to create a function that handles this for you, at the very least:
def get_output(child, message):
    return message + child.before.decode()
If you want to send messages to the child process, you can use child.sendline(line). For more details, check out the documentation I linked.
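For instance, two-way interaction might look like this (a sketch; echo.py is a hypothetical script that reads one line from stdin and prints it back):
import pexpect

# echo.py (hypothetical): print(input())
child = pexpect.spawn('python3 echo.py')
child.sendline('ping')  # write a line to the child's stdin
child.expect('ping')    # block until the child echoes it back
print('child replied:', child.after.decode())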
I hope I was able to help!

I don't know if you can render this in a browser, but you can run a program as a module, so you get stdout immediately, like this:
import importlib.util
from importlib.machinery import SourceFileLoader

class Program:
    def __init__(self, path, name=''):
        self.path = path
        self.name = name
        if self.path:
            if not self.name:
                self.get_name()
            self.loader = SourceFileLoader(self.name, self.path)
            self.spec = importlib.util.spec_from_loader(self.loader.name, self.loader)
            self.mod = importlib.util.module_from_spec(self.spec)

    def get_name(self):
        # derive the module name from the file name;
        # change this if self.path is not a Python program with a .py extension
        self.name = self.path.split('\\')[-1].rsplit('.py', 1)[0]

    def load(self):
        self.check()
        self.loader.exec_module(self.mod)

    def check(self):
        if not self.path:
            raise ValueError('self.path is not defined.')

file_path = 'C:\\Users\\RICHGang\\Documents\\projects\\stackoverflow\\ptyconsole\\test.py'
file_name = 'test'
prog = Program(file_path, file_name)
prog.load()
You can add sleep in test.py to see the difference:
from time import sleep

def greeter():
    for i in range(10):
        sleep(0.3)
        print('hello world')

greeter()

Take a look at terminado. It works on Windows and Linux.
JupyterLab uses it.
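A minimal single-terminal server, adapted from terminado's documented example (a sketch; assumes tornado and terminado are installed, plus a browser-side xterm.js/term.js client to speak the websocket protocol):
import tornado.ioloop
import tornado.web
from terminado import TermSocket, SingleTermManager

if __name__ == '__main__':
    # One shared terminal running bash, served over a websocket.
    term_manager = SingleTermManager(shell_command=['bash'])
    app = tornado.web.Application(
        [(r"/websocket", TermSocket, {'term_manager': term_manager})])
    app.listen(8765)
    tornado.ioloop.IOLoop.current().start()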

Related


Getting latest lines of streaming stdout from a Python subprocess

My goal: to read the latest "chunk" (N lines) of streaming stdout from a subprocess every M seconds.
Current code:
start the subprocess
read stdout
once there is a chunk of N lines, print it (or save it as the current chunk)
wait M seconds
repeat
I have also put in code, for the moment, to terminate the subprocess (which otherwise streams endlessly until you hit Ctrl-C).
What I want to achieve: after waiting M seconds, it should always read the latest N lines, not the subsequent N lines in stdout (those can be discarded, as I'm only interested in the latest).
My end goal would be to spawn a thread to run the process and keep saving the latest lines and then call from the main process whenever I need the latest results of the stream.
Any help would be greatly appreciated!
#!/usr/bin/env python3
import signal
import time
from subprocess import Popen, PIPE

sig = signal.SIGTERM
N = 9
M = 5
countlines = 0

p = Popen(["myprogram"], stdout=PIPE, bufsize=1, universal_newlines=True)
chunk = []
for line in p.stdout:
    countlines += 1
    chunk.append(line)
    if len(chunk) == N:
        print(chunk)
        chunk = []
        time.sleep(M)
    if countlines > 100:
        p.send_signal(sig)
        break
print("done")
After much searching, I stumbled upon a solution here:
https://eli.thegreenplace.net/2017/interacting-with-a-long-running-child-process-in-python/
Eli's "Launch, interact, get output in real time, terminate" code section worked for me.
So far it's the most elegant solution I've found.
Adapted to my problem above, and written within a class (not shown here):
# Assumes the surrounding class and these imports: subprocess, threading, time,
# plus N defined as above.
def output_reader(self, proc):
    chunk = []
    countlines = 0
    for line in iter(proc.stdout.readline, b''):
        countlines += 1
        chunk.append(line.decode("utf-8"))
        if countlines == N:
            self.current_chunk = chunk
            chunk = []
            countlines = 0

def main(self):
    proc = subprocess.Popen(['myprocess'],
                            stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT)
    t = threading.Thread(target=self.output_reader, args=(proc,))
    t.start()
    try:
        time.sleep(0.2)
        for i in range(10):
            time.sleep(1)  # wait a while before getting the latest lines
            print(self.current_chunk)
    finally:
        proc.terminate()
        try:
            proc.wait(timeout=0.2)
            print('== subprocess exited with rc =', proc.returncode)
        except subprocess.TimeoutExpired:
            print('subprocess did not terminate in time')
    t.join()
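A side note, not part of the original answer: collections.deque with maxlen expresses the "keep only the latest N lines" part more directly than manual list resets. A sketch (myprocess is a placeholder, as above):
import collections
import subprocess

N = 9
latest = collections.deque(maxlen=N)  # appending beyond N discards the oldest line

proc = subprocess.Popen(['myprocess'], stdout=subprocess.PIPE)
for line in iter(proc.stdout.readline, b''):
    latest.append(line.decode('utf-8'))
    # list(latest) is always the most recent N (or fewer) lines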
Here is another possible solution. It is a program that you would run as a separate process in the pipeline. It presents a REST API that, when queried, returns the last N lines it read on stdin (N and the port number are supplied on the command line). It uses Flask's built-in development server (app.run), so it should not be used in situations where the outside world has access to the local server port to make requests, though this could be adapted.
import sys
import threading
import argparse

from flask import Flask, request
from flask_restful import Resource, Api

class Server:
    def __init__(self):
        self.data = {'at_eof': False,
                     'lines_read': 0,
                     'latest_lines': []}
        self.thread = None
        self.args = None
        self.stop = False

    def parse_args(self):
        parser = argparse.ArgumentParser()
        parser.add_argument("num_lines", type=int,
                            help="number of lines to cache")
        parser.add_argument("port", type=int,
                            help="port to serve on")
        self.args = parser.parse_args()

    def start_updater(self):
        def updater():
            lines = self.data['latest_lines']
            while True:
                if self.stop:
                    return
                line = sys.stdin.readline()
                if not line:
                    break
                self.data['lines_read'] += 1
                lines.append(line)
                while len(lines) > self.args.num_lines:
                    lines.pop(0)
            self.data['at_eof'] = True
        self.thread = threading.Thread(target=updater)
        self.thread.start()

    def get_data(self):
        return self.data

    def shutdown(self):
        self.stop = True
        func = request.environ.get('werkzeug.server.shutdown')
        if func:
            func()
            return 'Shutting down'
        else:
            return 'shutdown failed'

    def add_apis(self, app):
        class GetData(Resource):
            get = self.get_data

        class Shutdown(Resource):
            get = self.shutdown

        api = Api(app)
        api.add_resource(GetData, "/getdata")
        api.add_resource(Shutdown, "/shutdown")

    def run(self):
        self.parse_args()
        self.start_updater()
        app = Flask(__name__)
        self.add_apis(app)
        app.run(port=self.args.port)

server = Server()
server.run()
Example usage: here is a test program whose output we want to serve:
import sys
import time

for i in range(100):
    print("this is line {}".format(i))
    sys.stdout.flush()
    time.sleep(.1)
And a simple pipeline to launch it (here from the Linux shell prompt, but it could be done via subprocess.Popen, as sketched below), serving the last 5 lines, on port 8001:
python ./writer.py | python ./server.py 5 8001
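The same pipeline via subprocess.Popen might look like this (a sketch, assuming writer.py and server.py sit in the current directory):
import subprocess
import sys

writer = subprocess.Popen([sys.executable, 'writer.py'],
                          stdout=subprocess.PIPE)
server = subprocess.Popen([sys.executable, 'server.py', '5', '8001'],
                          stdin=writer.stdout)
writer.stdout.close()  # let the server process own the read end of the pipe
server.wait()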
An example query, here using curl as the client but it could be done via Python requests:
$ curl -s http://localhost:8001/getdata
{"at_eof": false, "lines_read": 30, "latest_lines": ["this is line 25\n", "this is line 26\n", "this is line 27\n", "this is line 28\n", "this is line 29\n"]}
The server also provides an http://localhost:<port>/shutdown URL to terminate it, though if you call it before you first see "at_eof": true, then expect the writer to die with a broken pipe.
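As mentioned, the query can also be done via Python requests (a small sketch):
import requests

data = requests.get('http://localhost:8001/getdata').json()
print(data['lines_read'], data['latest_lines'])

requests.get('http://localhost:8001/shutdown')  # stop the server when done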

How to attend several request in parallel with twisted Klein

I am creating an API to execute command line commands. The server practically has only two methods, "run" and "stop". The main function of "run" is to run a command line program on the server side and return a list with the system output. The function of "stop", on the other hand, is just to kill the running process. Here is the code:
import sys
import json
import subprocess

from klein import Klein

class ItemStore(object):
    app = Klein()
    current_process = None

    def __init__(self):
        self._items = {}

    def create_process(self, exe):
        """
        Run a command and return the system output inside a JSON string.
        """
        print("COMMAND: ", exe)
        process = subprocess.Popen(exe, shell=True, stdout=subprocess.PIPE,
                                   stderr=subprocess.STDOUT)
        self.current_process = process
        # Poll the process for new output until it finishes.
        output_lines = []
        while True:
            nextline = process.stdout.readline()
            if process.poll() is not None:
                break
            output_lines.append(nextline.decode("utf-8"))
            sys.stdout.flush()
        print("RETURN CODE: ", process.returncode)
        return json.dumps(output_lines)

    @app.route('/run/<command>', methods=['POST'])
    def run(self, request, command):
        """
        Execute a command line process.
        """
        exe = command
        print("COMMAND: ", exe)
        output_lines = self.create_process(exe)
        request.setHeader("Content-Type", "application/json")
        request.setResponseCode(200)
        return output_lines

    @app.route('/stop', methods=['POST'])
    def stop(self, request):
        """
        Kill the current execution.
        """
        self.current_process.kill()
        request.setResponseCode(200)
        return None

if __name__ == '__main__':
    store = ItemStore()
    store.app.run('0.0.0.0', 15508)
Well, the problem with this is that if I need to stop the current execution, the "stop" request will not be attended to until the "run" request has finished, so it makes no sense to work this way. I have been reading several pages about async/await solutions, but I cannot get them to work! I think the most prominent solution is on this webpage: https://crossbario.com/blog/Going-Asynchronous-from-Flask-to-Twisted-Klein/ ; however, "run" is still a synchronous process there. I just posted my main and original code in order not to confuse things with the webpage's changes.
Best regards
Everything to do with Klein in this example is already handling requests concurrently. However, your application code blocks until it has fully responded to a request.
You have to write your application code to be non-blocking instead of blocking.
Switch your code from the subprocess module to Twisted's process support.
Use Klein's feature of being able to return a Deferred instead of a result (if you want incremental results while the process is running, also look at the request interface - in particular, the write method - so you can write those results before the Deferred fires with a final result).
After Deferreds make sense to you, then you might want to think about syntactic sugar that's available in the form of async/await. Until you understand what Deferreds are doing, async/await is just going to be black magic that will only ever work by accident in your programs.
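To make that concrete, here is a rough sketch of the non-blocking shape this could take, using Twisted's process support and returning a Deferred from the route (my illustration of the advice above, not the asker's code; error handling omitted):
from klein import Klein
from twisted.internet import defer, protocol, reactor

class OutputCollector(protocol.ProcessProtocol):
    """Accumulate child output; fire a Deferred with it when the child exits."""
    def __init__(self):
        self.done = defer.Deferred()
        self.chunks = []

    def outReceived(self, data):
        self.chunks.append(data)

    errReceived = outReceived  # treat stderr like stdout, as in the original

    def processEnded(self, reason):
        self.done.callback(b''.join(self.chunks))

app = Klein()

@app.route('/run/<command>', methods=['POST'])
def run(request, command):
    proto = OutputCollector()
    reactor.spawnProcess(proto, '/bin/sh', ['/bin/sh', '-c', command])
    # Returning the Deferred frees the reactor to serve other requests
    # (such as /stop) while the child process is still running.
    return proto.done

app.run('0.0.0.0', 15508)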

How can I open two consoles from a single script

Apart from the script's own console (which does nothing), I want to open two consoles and print the variables con1 and con2 in different consoles. How can I achieve this?
con1 = 'This is Console1'
con2 = 'This is Console2'
I've no idea how to achieve this and have spent several hours trying to do so with modules such as subprocess, but with no luck. I'm on Windows, by the way.
Edit:
Would the threading module do the job, or is multiprocessing needed?
Eg:
If you don't want to reconsider your problem and use a GUI such as in @Kevin's answer, then you could use the subprocess module to start two new consoles concurrently and display two given strings in the opened windows:
#!/usr/bin/env python3
import sys
import time
from subprocess import Popen, PIPE, CREATE_NEW_CONSOLE

messages = 'This is Console1', 'This is Console2'

# open new consoles
processes = [Popen([sys.executable, "-c", """import sys

for line in sys.stdin: # poor man's `cat`
    sys.stdout.write(line)
    sys.stdout.flush()
"""],
                   stdin=PIPE, bufsize=1, universal_newlines=True,
                   # assume the parent script is started from a console itself, e.g.,
                   # this code is _not_ run as a *.pyw file
                   creationflags=CREATE_NEW_CONSOLE)
             for _ in range(len(messages))]

# display messages
for proc, msg in zip(processes, messages):
    proc.stdin.write(msg + "\n")
    proc.stdin.flush()

time.sleep(10)  # keep the windows open for a while

# close windows
for proc in processes:
    proc.communicate("bye\n")
Here's a simplified version that doesn't rely on CREATE_NEW_CONSOLE:
#!/usr/bin/env python
"""Show messages in two new console windows simultaneously."""
import sys
import platform
from subprocess import Popen

messages = 'This is Console1', 'This is Console2'

# define a command that starts a new terminal
if platform.system() == "Windows":
    new_window_command = "cmd.exe /c start".split()
else:  # XXX this can be made more portable
    new_window_command = "x-terminal-emulator -e".split()

# open new consoles, display messages
echo = [sys.executable, "-c",
        "import sys; print(sys.argv[1]); input('Press Enter..')"]
processes = [Popen(new_window_command + echo + [msg]) for msg in messages]

# wait for the windows to be closed
for proc in processes:
    proc.wait()
You can get something like two consoles using two Tkinter Text widgets.
from Tkinter import *
import threading

class FakeConsole(Frame):
    def __init__(self, root, *args, **kargs):
        Frame.__init__(self, root, *args, **kargs)
        # white text on black background,
        # for extra verisimilitude
        self.text = Text(self, bg="black", fg="white")
        self.text.pack()
        # list of things not yet printed
        self.printQueue = []
        # one thread will be adding to the print queue,
        # and another will be iterating through it.
        # better make sure one doesn't interfere with the other.
        self.printQueueLock = threading.Lock()
        self.after(5, self.on_idle)

    # check for new messages every five milliseconds
    def on_idle(self):
        with self.printQueueLock:
            for msg in self.printQueue:
                self.text.insert(END, msg)
                self.text.see(END)
            self.printQueue = []
        self.after(5, self.on_idle)

    # print msg to the console
    def show(self, msg, sep="\n"):
        with self.printQueueLock:
            self.printQueue.append(str(msg) + sep)

# warning! Calling this more than once per program is a bad idea.
# Tkinter throws a fit when two roots each have a mainloop in different threads.
def makeConsoles(amount):
    root = Tk()
    consoles = [FakeConsole(root) for n in range(amount)]
    for c in consoles:
        c.pack()
    threading.Thread(target=root.mainloop).start()
    return consoles

a, b = makeConsoles(2)

a.show("This is Console 1")
b.show("This is Console 2")

a.show("I've got a lovely bunch of coconuts")
a.show("Here they are standing in a row")

b.show("Lorem ipsum dolor sit amet")
b.show("consectetur adipisicing elit")
I don't know if it suits you, but you can open two Python interpreters using the Windows start command:
from subprocess import Popen

p1 = Popen('start c:\\python27\\python.exe', shell=True)
p2 = Popen('start c:\\python27\\python.exe', shell=True)
Of course there is a problem: Python now runs in interactive mode, which is not what you want (you can also pass a file as a parameter, and that file will be executed).
On Linux I would try to make a named pipe, pass the name of the file to python.exe, and write Python commands to that file. Maybe it will work ;)
But I don't know how to create a named pipe on Windows; that would take the Windows API.
pymux
pymux gets close to what you want: https://github.com/jonathanslenders/pymux
Unfortunately it is mostly a CLI tool replacement for tmux and does not have a decent programmatic API yet.
But hacking it up to expose that API is likely the most robust option if you are serious about this.
The README says:
Parts of pymux could become a library, so that any prompt_toolkit application can embed a vt100 terminal. (Imagine a terminal emulator embedded in pyvim.)
If you are on Windows, you can use the win32console module to open a second console for your thread or subprocess output. This is the simplest and easiest way that works if you are on Windows.
Here is sample code:
import win32console
import multiprocessing

def child(queue):
    win32console.FreeConsole()   # detach the child from the main console
    win32console.AllocConsole()  # create a new console; the child's input and output go there
    while True:
        # print whatever the main script passes in through the queue
        print(queue.get())

if __name__ == '__main__':
    queue = multiprocessing.Queue()
    multiprocessing.Process(target=child, args=[queue]).start()
    while True:
        print("Hello World")
        # ...and whatever else you want to do in your main process
You can also do this with threading; use the queue module if you want the queue functionality, since the threading module doesn't have its own.
Here is the win32console module documentation
I used jfs' response. Here is my embellishment/theft of jfs' response.
This is tailored to run on Win10 and also handles Unicode:
# https://stackoverflow.com/questions/19479504/how-can-i-open-two-consoles-from-a-single-script
import sys, time
from subprocess import Popen, PIPE, CREATE_NEW_CONSOLE

class console(Popen):
    NumConsoles = 0

    def __init__(self, color=None, title=None):
        console.NumConsoles += 1
        cmd = "import sys, os, locale"
        cmd += "\nos.system('color " + color + "')" if color is not None else ""
        title = title if title is not None else "console #" + str(console.NumConsoles)
        cmd += "\nos.system(\"title " + title + "\")"
        # poor man's `cat`
        cmd += """
print(sys.stdout.encoding, locale.getpreferredencoding())
encoding = locale.getpreferredencoding()
for line in sys.stdin:
    sys.stdout.buffer.write(line.encode(encoding))
    sys.stdout.flush()
"""
        cmd = sys.executable, "-c", cmd
        super().__init__(cmd, stdin=PIPE, bufsize=1, universal_newlines=True,
                         creationflags=CREATE_NEW_CONSOLE, encoding='utf-8')

    def write(self, msg):
        self.stdin.write(msg + "\n")

if __name__ == "__main__":
    myConsole = console(color="c0", title="test error console")
    myConsole.write("Thank you jfs. Cool explanation")
    NoTitle = console()
    NoTitle.write("default color and title! This answer uses Windows 10")
    NoTitle.write(u"♥♥♥♥♥♥♥♥")
    NoTitle.write("♥")
    time.sleep(5)
    myConsole.terminate()
    NoTitle.write("some more text. Run this at the python console.")
    time.sleep(4)
    NoTitle.terminate()
    time.sleep(5)
Do you know about screen/tmux?
How about tmuxp? For example, you can try to run cat in split panes and use send-keys to send output (but dig through the docs; maybe there are even easier ways to achieve this).
As a side bonus, this will work in the text console or a GUI.
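To illustrate the idea, a sketch using libtmux (the library underneath tmuxp) could look like the following; treat the exact API as an assumption and check the libtmux docs, and note it requires tmux to be installed:
import libtmux

server = libtmux.Server()
session = server.new_session(session_name='demo', kill_session=True)
window = session.attached_window

pane1 = window.attached_pane
pane2 = window.split_window()  # a second pane in the same window

# send-keys: type a command into each pane as if at the keyboard
pane1.send_keys("echo 'This is Console1'")
pane2.send_keys("echo 'This is Console2'")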

python interprocess querying/control

I have this Python based service daemon which is doing a lot of multiplexed IO (select).
From another script (also Python) I want to query this service daemon about status/information and/or control the processing (e.g. pause it, shut it down, change some parameters, etc).
What is the best way to send control messages ("from now on you process like this!") and query processed data ("what was the result of that?") using Python?
I read somewhere that named pipes might work, but I don't know that much about named pipes, especially in Python, or whether there are any better alternatives.
Both the background service daemon AND the frontend will be programmed by me, so all options are open :)
I am using Linux.
Pipes and named pipes are a good solution for communicating between different processes.
Pipes work like a shared memory buffer, but with an interface that mimics a simple file on each of the two ends. One process writes data on one end of the pipe, and another reads that data on the other end.
Named pipes are similar, except that the pipe is actually associated with a real file on your computer.
More details at
http://www.softpanorama.org/Scripting/pipes.shtml
In Python, named pipe files are created with the os.mkfifo call (note that mkfifo returns None, so there is nothing useful to assign):
os.mkfifo(filename)
In the child and the parent, open this pipe as a file:
out = os.open(filename, os.O_WRONLY)
fin = open(filename, 'r')
To write (os.write takes bytes):
os.write(out, b'xxxx')
To read:
line = fin.readline()
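Putting the pieces together, a self-contained sketch (with a hypothetical /tmp/demo_fifo path) could look like this:
import os

fifo_path = '/tmp/demo_fifo'  # hypothetical path for this sketch
if not os.path.exists(fifo_path):
    os.mkfifo(fifo_path)

pid = os.fork()
if pid == 0:
    # Child: write one message into the pipe and exit.
    fd = os.open(fifo_path, os.O_WRONLY)  # blocks until a reader opens the pipe
    os.write(fd, b'hello from the child\n')
    os.close(fd)
    os._exit(0)
else:
    # Parent: read the message back.
    with open(fifo_path) as fin:  # blocks until a writer opens the pipe
        print(fin.readline().strip())
    os.waitpid(pid, 0)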
Edit: Adding links from SO
Create a temporary FIFO (named pipe) in Python?
https://stackoverflow.com/search?q=python+named+pipes
You may want to read more on "IPC and Python"
http://www.freenetpages.co.uk/hp/alan.gauld/tutipc.htm
The best way to do IPC is using a message queue in Python, as below.
Server process, server.py (run this before running client.py and interact.py):
from multiprocessing.managers import BaseManager
import Queue

queue1 = Queue.Queue()
queue2 = Queue.Queue()

class QueueManager(BaseManager):
    pass

QueueManager.register('get_queue1', callable=lambda: queue1)
QueueManager.register('get_queue2', callable=lambda: queue2)

m = QueueManager(address=('', 50000), authkey='abracadabra')
s = m.get_server()
s.serve_forever()
The inter-actor, which handles the I/O, interact.py:
from multiprocessing.managers import BaseManager
import threading
import sys

class QueueManager(BaseManager):
    pass

QueueManager.register('get_queue1')
QueueManager.register('get_queue2')

m = QueueManager(address=('localhost', 50000), authkey='abracadabra')
m.connect()
queue1 = m.get_queue1()
queue2 = m.get_queue2()

def read():
    while True:
        sys.stdout.write(queue2.get())

def write():
    while True:
        queue1.put(sys.stdin.readline())

threads = []
threadr = threading.Thread(target=read)
threadr.start()
threads.append(threadr)
threadw = threading.Thread(target=write)
threadw.start()
threads.append(threadw)
for thread in threads:
    thread.join()
The client program, client.py:
from multiprocessing.managers import BaseManager
import sys

class QueueManager(BaseManager):
    pass

QueueManager.register('get_queue1')
QueueManager.register('get_queue2')

m = QueueManager(address=('localhost', 50000), authkey='abracadabra')
m.connect()
queue1 = m.get_queue1()
queue2 = m.get_queue2()

class RedirectOutput:
    def __init__(self, stdout):
        self.stdout = stdout

    def write(self, s):
        queue2.put(s)

class RedirectInput:
    def __init__(self, stdin):
        self.stdin = stdin

    def readline(self):
        return queue1.get()

# redirect standard input and output through the shared queues
sys.stdout = RedirectOutput(sys.stdout)
sys.stdin = RedirectInput(sys.stdin)

# The test program, which takes input and produces output
Text = raw_input("Enter Text:")
print "you have entered:", Text

def x():
    while True:
        x = raw_input("Enter 'exit' to end and something else to continue")
        print x
        if 'exit' in x:
            break

x()
This can be used to communicate between two processes on a network or on the same machine.
Remember that the inter-actor and server processes will not terminate until you kill them manually.
