I'm writing a subprocess-based Python program that acts as a proxy between the user input and the subprocess (trying to go beyond pexpect). I've taken this thread as a reference, along with a code chunk from pexpect (the _read_incoming() method of popen_spawn) to read output (the fcntl approach worked, but not satisfactorily).
The code runs but has a problem: there seems to be an additional carriage return being sent to the process. This causes issues when I try to do things like sending passwords to ssh, etc.
Could you look into what might be the issue? Thanks!
The code is as follows:
from queue import Queue, Empty
from threading import Thread
import subprocess
import signal
import fcntl
import os
global terminating
terminating = False
def setNonBlocking(fd):
"""
    Set the given file descriptor to non-blocking.
"""
print(fd)
flags = fcntl.fcntl(fd, fcntl.F_GETFL)
flags = flags | os.O_NONBLOCK
fcntl.fcntl(fd, fcntl.F_SETFL, flags)
def enqueue(out, q):
fileno = out.fileno()
while not terminating:
buf = b''
try:
buf = os.read(fileno, 1024)
if buf and len(buf)>0:
q.put(buf)
except OSError as e:
#print("OS error: {0}".format(e))
pass
if not buf:
q.put(buf)
# for line in iter(out.readline, b''):
# if len(line.strip()) > 0:
# print(line)
# q.put(line)
out.close()
print('Terminating')
return
def get_output(q):
out_str = bytes()
while True:
try:
incoming = q.get_nowait()
except Empty:
break
else:
if incoming is None:
break
else:
out_str += incoming
if out_str:
return out_str
else:
return b''
def explore(cmd="/bin/bash"):
global terminating
universal_newlines = False
p = subprocess.Popen([cmd], stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.STDOUT,
bufsize=0, shell=False, universal_newlines=universal_newlines)
#setNonBlocking(p.stdout)
outQueue = Queue()
outThread = Thread(target=enqueue, args=(p.stdout, outQueue))
outThread.daemon = True
outThread.start()
while True:
try:
someInput = input()
print('[In]:'+someInput)
someInput += '\n'
if not universal_newlines:
p.stdin.write(someInput.encode('utf-8'))
else:
p.stdin.write(someInput)
p.stdin.flush()
out = get_output(outQueue).decode('utf-8')
print('[Out]:'+out)
#p.communicate(someInput+'\n')
except KeyboardInterrupt:
print('Interrupting')
p.send_signal(signal.SIGINT)
terminating = True
outThread.join()
break
p.wait()
if __name__ == '__main__':
explore()
Example run:
ls
[In]:ls
[Out]:
[In]:
[Out]:explorer.py
__init__.py
^CInterrupting
Terminating
The second [In] was an Enter keypress from the user.
Update:
I tested the alternative using pexpect's popen_spawn module. Same result:
from pexpect.popen_spawn import PopenSpawn as Spawn
import signal
def explore(cmd="/bin/bash"):
p = Spawn(cmd)
while True:
try:
someInput = input()
print('[In]:'+someInput)
p.sendline(someInput)
out = p.read_nonblocking(size=1024, timeout=-1).decode('utf-8')
print('[Out]:'+out)
#p.communicate(someInput+'\n')
except KeyboardInterrupt:
print('Interrupting')
p.sendeof()
p.kill(signal.SIGINT)
break
if __name__ == '__main__':
explore()
I need to run an interactive Bash instance in a separate process in Python with its own dedicated TTY (I can't use pexpect).
I used this code snippet, which I commonly see used in similar programs:
master, slave = pty.openpty()
p = subprocess.Popen(["/bin/bash", "-i"], stdin=slave, stdout=slave, stderr=slave)
os.close(slave)
x = os.read(master, 1026)
print x
subprocess.Popen.kill(p)
os.close(master)
But when I run it I get the following output:
$ ./pty_try.py
bash: cannot set terminal process group (10790): Inappropriate ioctl for device
bash: no job control in this shell
Strace of the run shows some errors:
...
readlink("/usr/bin/python2.7", 0x7ffc8db02510, 4096) = -1 EINVAL (Invalid argument)
...
ioctl(3, SNDCTL_TMR_TIMEBASE or SNDRV_TIMER_IOCTL_NEXT_DEVICE or TCGETS, 0x7ffc8db03590) = -1 ENOTTY (Inappropriate ioctl for device)
...
readlink("./pty_try.py", 0x7ffc8db00610, 4096) = -1 EINVAL (Invalid argument)
The code snippet seems pretty straightforward. Is Bash not getting something it needs? What could be the problem here?
This is a solution for running an interactive command in a subprocess. It uses a pseudo-terminal to make stdout non-blocking (some commands also need a tty device, e.g. bash), and it uses select to handle input and output to the subprocess.
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import select
import termios
import tty
import pty
from subprocess import Popen
command = 'bash'
# command = 'docker run -it --rm centos /bin/bash'.split()
# save original tty setting then set it to raw mode
old_tty = termios.tcgetattr(sys.stdin)
tty.setraw(sys.stdin.fileno())
# open pseudo-terminal to interact with subprocess
master_fd, slave_fd = pty.openpty()
try:
    # use os.setsid() to make it run in a new process group, or bash job control will not be enabled
p = Popen(command,
preexec_fn=os.setsid,
stdin=slave_fd,
stdout=slave_fd,
stderr=slave_fd,
universal_newlines=True)
while p.poll() is None:
r, w, e = select.select([sys.stdin, master_fd], [], [])
if sys.stdin in r:
d = os.read(sys.stdin.fileno(), 10240)
os.write(master_fd, d)
elif master_fd in r:
o = os.read(master_fd, 10240)
if o:
os.write(sys.stdout.fileno(), o)
finally:
# restore tty settings back
termios.tcsetattr(sys.stdin, termios.TCSADRAIN, old_tty)
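One thing the snippet above does not do, and which may be worth adding (this is my own addition, not part of the original answer), is copy the real terminal's window size onto the pseudo-terminal; otherwise full-screen programs run inside the spawned bash assume a default 80x24 window. A minimal sketch, reusing sys, termios and master_fd from the snippet:
import fcntl

# TIOCGWINSZ fills a struct winsize (4 unsigned shorts: rows, cols, xpixel, ypixel);
# read it from the controlling terminal and write it onto the pty master
winsize = fcntl.ioctl(sys.stdin.fileno(), termios.TIOCGWINSZ, b"\x00" * 8)
fcntl.ioctl(master_fd, termios.TIOCSWINSZ, winsize)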
This is the solution that worked for me in the end (as suggested by qarma):
libc = ctypes.CDLL('libc.so.6')
master, slave = pty.openpty()
p = subprocess.Popen(["/bin/bash", "-i"], preexec_fn=libc.setsid, stdin=slave, stdout=slave, stderr=slave)
os.close(slave)
... do stuff here ...
x = os.read(master, 1026)
print x
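For what it's worth, the same thing can be done without going through ctypes: the select-based answer above already uses preexec_fn=os.setsid, and on Python 3 subprocess also accepts start_new_session=True. A minimal equivalent sketch (my rewrite of the snippet above, not the original poster's code):
import os
import pty
import subprocess

master, slave = pty.openpty()
# making the child a session leader lets bash take the slave pty as its
# controlling terminal, which is what enables job control here
p = subprocess.Popen(["/bin/bash", "-i"], start_new_session=True,
                     stdin=slave, stdout=slave, stderr=slave)
os.close(slave)
x = os.read(master, 1026)
print(x)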
Here is a full object-oriented solution for running interactive shell commands with TTYs, using threads and queues for stdout and stderr IO handling. It took me a while to piece this together from multiple sources, but it has worked perfectly so far on Unix/Linux systems, including as part of a Juniper op script. I thought I would post it here to save others the time of building something like this.
import pty
import re
import select
import threading
from datetime import datetime, timedelta
import os
import logging
import subprocess
import time
from queue import Queue, Empty
lib_logger = logging.getLogger("lib")
# DEVICE_TIMEOUT is used below as the default timeout (in seconds) for open() and
# wait_for(); it was not defined in the original post, so the value here is an assumption
DEVICE_TIMEOUT = 30
# Handler function to be run as a thread for pulling pty channels from an interactive shell
def _pty_handler(pty_master, logger, queue, stop):
poller = select.poll()
poller.register(pty_master, select.POLLIN)
while True:
# Stop handler if flagged
if stop():
logger.debug("Disabling pty handler for interactive shell")
break
fd_event = poller.poll(100)
for descriptor, event in fd_event:
# Read data from pipe and send to queue if there is data to read
if event == select.POLLIN:
data = os.read(descriptor, 1).decode("utf-8")
if not data:
break
# logger.debug("Reading in to handler queue: " + data)
queue.put(data)
# Exit handler if stdout is closing
elif event == select.POLLHUP:
logger.debug("Disabling pty handler for interactive shell")
break
# Function for reading outputs from the given queue by draining it and returning the output
def _get_queue_output(queue: Queue) -> str:
value = ""
try:
while True:
value += queue.get_nowait()
except Empty:
return value
# Helper function to create the needed list for popen and print the command run to the logger
def popen_command(command, logger, *args):
popen_list = list()
popen_list.append(command)
command_output = command
for arg in args:
popen_list.append(arg)
command_output += " " + arg
lib_logger.debug("Making Popen call using: " + str(popen_list))
logger.debug("")
logger.debug(command_output)
logger.debug("")
return popen_list
# Class for creating an interactive shell and sending commands to it, along with logging output to loggers
class InteractiveShell(object):
def __init__(self, command, logger, *args):
self.logger = logger
self.command = command
self.process = None
self.popen_list = popen_command(command, logger, *args)
self.master_stdout = None
self.slave_stdout = None
self.master_stderr = None
self.slave_stderr = None
self.stdout_handler = None
self.stderr_handler = None
self.stdout_queue = None
self.stderr_queue = None
self.stop_handlers = False
# Open interactive shell and setup all threaded IO handlers
def open(self, shell_prompt, timeout=DEVICE_TIMEOUT):
# Create PTYs
self.master_stdout, self.slave_stdout = pty.openpty()
self.master_stderr, self.slave_stderr = pty.openpty()
# Create shell subprocess
self.process = subprocess.Popen(self.popen_list, stdin=self.slave_stdout, stdout=self.slave_stdout,
stderr=self.slave_stderr, bufsize=0, start_new_session=True)
lib_logger.debug("")
lib_logger.debug("Started interactive shell for command " + self.command)
lib_logger.debug("")
# Create thread and queues for handling pty output and start them
self.stdout_queue = Queue()
self.stderr_queue = Queue()
self.stdout_handler = threading.Thread(target=_pty_handler, args=(self.master_stdout,
lib_logger,
self.stdout_queue,
lambda: self.stop_handlers))
self.stderr_handler = threading.Thread(target=_pty_handler, args=(self.master_stderr,
lib_logger,
self.stderr_queue,
lambda: self.stop_handlers))
self.stdout_handler.daemon = True
self.stderr_handler.daemon = True
lib_logger.debug("Enabling stderr handler for interactive shell " + self.command)
self.stderr_handler.start()
lib_logger.debug("Enabling stdout handler for interactive shell " + self.command)
self.stdout_handler.start()
# Wait for shell prompt
lib_logger.debug("Waiting for shell prompt: " + shell_prompt)
return self.wait_for(shell_prompt, timeout)
# Close interactive shell which should also kill all threaded IO handlers
def close(self):
# Wait 5 seconds before closing to let shell handle all input and outputs
time.sleep(5)
# Stop IO handler threads and terminate the process then wait another 5 seconds for cleanup to happen
self.stop_handlers = True
self.process.terminate()
time.sleep(5)
# Check for any additional output from the stdout handler
output = ""
while True:
data = _get_queue_output(self.stdout_queue)
if data != "":
output += data
else:
break
for line in iter(output.splitlines()):
self.logger.debug(line)
# Check for any additional output from the stderr handler
output = ""
while True:
data = _get_queue_output(self.stderr_queue)
if data != "":
output += data
else:
break
for line in iter(output.splitlines()):
self.logger.error(line)
# Cleanup PTYs
os.close(self.master_stdout)
os.close(self.master_stderr)
os.close(self.slave_stdout)
os.close(self.slave_stderr)
lib_logger.debug("Interactive shell command " + self.command + " terminated")
# Run series of commands given as a list of a list of commands and wait_for strings. If no wait_for is needed then
# only provide the command. Return if all the commands completed successfully or not.
# Ex:
# [
# ["ssh jsas#" + vnf_ip, r"jsas#.*:"],
# ["juniper123", r"jsas#.*\$"],
# ["sudo su", r".*jsas:"],
# ["juniper123", r"root#.*#"],
# ["usermod -p 'blah' jsas"]
# ]
def run_commands(self, commands_list):
shell_status = True
for command in commands_list:
shell_status = self.run(command[0])
if shell_status and len(command) == 2:
shell_status = self.wait_for(command[1])
# Break out of running commands if a command failed
if not shell_status:
break
return shell_status
# Run given command and return False if error occurs otherwise return True
def run(self, command, sleep=0):
# Check process to make sure it is still running and if not grab the stderr output
if self.process.poll():
            self.logger.error("Interactive shell command " + self.command + " closed with return code: " +
                              str(self.process.returncode))
data = _get_queue_output(self.stderr_queue)
if data != "":
self.logger.error("Interactive shell error messages:")
for line in iter(data.splitlines()):
self.logger.error(line)
return False
# Write command to process and check to make sure a newline is in command otherwise add it
if "\n" not in command:
command += "\n"
os.write(self.master_stdout, command.encode("utf-8"))
if sleep:
time.sleep(sleep)
return True
# Wait for specific regex expression in output before continuing return False if wait time expires otherwise return
# True
def wait_for(self, this, timeout=DEVICE_TIMEOUT):
timeout = datetime.now() + timedelta(seconds=timeout)
output = ""
# Keep searching for output until timeout occurs
while timeout > datetime.now():
data = _get_queue_output(self.stdout_queue)
if data != "":
# Add to output line and check for match to regex given and if match then break and send output to
# logger
output += data
lib_logger.debug("Checking for " + this + " in data: ")
for line in iter(output.splitlines()):
lib_logger.debug(line)
if re.search(r"{}\s?$".format(this), output):
break
time.sleep(1)
# Send output to logger
for line in iter(output.splitlines()):
self.logger.debug(line)
# If wait time expired print error message and return False
if timeout < datetime.now():
self.logger.error("Wait time expired when waiting for " + this)
return False
return True
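A minimal usage sketch for the class above (the command, prompt regular expressions, and timeout are made-up placeholders; it assumes the DEVICE_TIMEOUT constant and a logging setup as shown earlier):
import logging

logging.basicConfig(level=logging.DEBUG)
example_logger = logging.getLogger("example")

# spawn an interactive bash and wait for a prompt ending in "$"
shell = InteractiveShell("/bin/bash", example_logger, "-i")
if shell.open(r"\$", timeout=30):
    # each entry is [command, regex_to_wait_for]; the regex may be omitted
    shell.run_commands([
        ["echo hello", r"\$"],
        ["ls -l", r"\$"],
    ])
shell.close()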
I wrote a simple agent in Python that does nothing but check for an internet connection.
When it finds that there is no connection, it writes the hour and date to a text log file and then just exits the program.
I want it to keep testing whether there is a connection, even when there is none. How can I do this without the program exiting?
This is the code:
import os
import time
def Main():
ping =os.system('ping -n 1 -l 1000 8.8.8.8 ')
while ping ==0:
time.sleep(4)
ping = os.system('ping -n 1 -l 1000 8.8.8.8 ')
if ping ==1:
print 'no connection'
CT =time.strftime("%H:%M:%S %d/%m/%y")
alert=' No Connection'
f = open('logfile.txt','a+')
f.write('\n'+CT)
f.write(alert)
f.close()
if __name__ == "__main__":
Main()
Thanks a lot.
Wrap the Main call in an infinite loop?
if __name__ == "__main__":
while True:
Main()
time.sleep(1) # optional, as Main already contains a sleep time
This code should set you on your way. Just substitute the host with that of your choosing in the call to the LogPing object.
Check out the comments inline and please ask me if you have any questions.
from datetime import datetime
import os
import shlex
import subprocess
from time import sleep
class LogPing:
def __init__(self, host, count=1, timeout_seconds=10, logfile="ping_log.txt"):
self.host = host
self.count = count
self.timeout_seconds = timeout_seconds
self.logfile = logfile
self.output_blackhole = open(os.devnull, 'wb')
def _command(self):
command_string = "ping -c {count} -t {timeout} {host}".format(
count=self.count,
timeout=self.timeout_seconds,
host=self.host
)
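        # Note: the -t flag used above is the per-run timeout on BSD/macOS ping;
        # on Linux (iputils) -t sets the TTL and the reply-timeout flag is -W instead.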
try:
# we don't actually care about the output, just the return code,
# so trash the output. result == 0 on success
result = subprocess.check_call(
shlex.split(command_string),
stdout=self.output_blackhole,
stderr=subprocess.STDOUT
)
except subprocess.CalledProcessError:
# if here, that means that the host couldn't be reached for some reason.
result = -1
return result
def run(self):
ping_command_result = self._command()
if ping_command_result == 0:
status = "OK"
else:
status = "NOK"
# The time won't be exact, but close enough
message = "{status} : {time} : {host}\n".format(
status=status,
time=datetime.utcnow().strftime("%Y-%m-%d_%T"),
host=self.host
)
# open file in a context manager for writing, creating if not exists
# using a+ so that we append to the end of the last line.
with open(self.logfile, 'a+') as f:
f.write(message)
if __name__ == "__main__":
while True:
ping_instance = LogPing("example.org").run()
sleep(4)
If I understand you correctly, this will do the job:
import os
import time
def Main():
while True:
ping = os.system('ping -n 1 -l 1000 8.8.8.8 ')
if ping:
print 'no connection'
CT =time.strftime("%H:%M:%S %d/%m/%y")
alert=' No Connection'
with open('logfile.txt','a+') as f:
f.write('\n'+CT)
f.write(alert)
time.sleep(4)
if __name__ == "__main__":
Main()
I am playing around with a little netcat tool of my own, but I keep getting "Connection refused" and a reference to a specific line; I've highlighted that line below.
First I run the server, with the following command:
python Netstatx.py -l -p 9999 -c
Then I run the "client" which tries to make a connection to the server, which is listening on port 9999:
python Netstatx.py -t localhost -p 9999
As mentioned, the above gives me a "Connection refused" exception. How come?
import sys
import socket
import getopt
import threading
import subprocess
# Define globals
listen = False
command = False
upload = False
execute = ""
target = ""
upload_destination = ""
port = 0
def usage():
print "Netstatx - Net Tool for your convenience"
print
print "Usage: Netstatx.py -t target_host -p port"
    print "-l --listen              - Listen on [host]:[port] for incoming connections"
    print "-e --execute=file_to_run - Execute the given file upon receiving a connection"
    print "-c --command              - Initialize a command shell"
    print "-u --upload=destination   - Upon receiving connection, upload a file and write to [destination]"
print
print
print "Examples: "
print "Netstatx.py -t 192.168.0.1 -p 5555 -l -c"
print "Netstatx.py -t 192.168.0.1 -p 5555 -l -u=\\target.exe"
print "Netstatx.py -t 192.168.0.1 -p 5555 -l -e=\"cat /etc/passwd\""
sys.exit(0)
def client_sender(buffer):
client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
print "%s:%s" % (target, port)
# Connect to our target host
    client.connect((target, port))  # <-- This is the line that fails.
if len(buffer):
client.send(buffer)
while True:
# Now wait for data back
recv_len = 1
response = ""
while recv_len:
data = client.recv(4096)
recv_len = len(data)
response += data
if recv_len < 4096:
break
print response,
# Wait for more input
buffer = raw_input("")
buffer += "\n"
# Send it off
client.send(buffer)
def server_loop():
global target
# If no target is defined, we listen on all interfaces
if not len(target):
target = "0.0.0.0"
server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server.bind((target, port))
server.listen(5)
while True:
client_socket, addr = server.accept()
# Spin off a thread to handle our new client
client_thread = threading.Thread(target=client_handler,
args=(client_socket,))
client_thread.start()
def main():
global listen
global port
global execute
global command
global upload_destination
global target
if not len(sys.argv[1:]):
usage()
# Read the commandline options
try:
opts, args = getopt.getopt(sys.argv[1:], "hle:t:p:cu:",
["help","listen","execute","target","port","command",
"upload"])
except getopt.GetoptError as err:
print str(err)
usage()
for o,a in opts:
if o in ("-h", "--help"):
usage()
elif o in ("-l", "--listen"):
listen = True
elif o in ("-e", "--execute"):
execute = a
elif o in ("-c", "--commandshell"):
command = True
elif o in ("-u", "--upload"):
upload_destination = a
elif o in ("-t", "--target"):
target = a
elif o in ("-p", "--port"):
port = int(a)
else:
assert False, "Unhandled option!"
# Are we going to listen or just send data?
# if not listen and len(target) and port > 0
# Read in the buffer from the commandline
# this will block, so send CTRL-D if not sending input
# to stdin
buffer = sys.stdin.read()
# Send data off
client_sender(buffer)
# We are going to listen and potentially
# upload things, execute commands, and drop a shell back
# depending on our command line options above
if listen:
server_loop()
main()
def run_command(command):
# trim the newline
command = command.rstrip()
# Run the command and get the output back
try:
output = subprocess.check_output(command,
stderr=subprocess.STDOUT, shell=True)
except:
output = "Failed to execute command. \r\n"
# Send the output back to the client return output
return output
def client_handler(client_socket):
global upload
global execute
global command
# Check for upload
if len(upload_destination):
# Read on all of the bytes and write to our destination
file_buffer = ""
# Keep reading data until none is available
while True:
data = client_socket.recv(1024)
if not data:
break
else:
file_buffer += data
# Now we take these bytes and try to write them out
try:
file_descriptor = open(upload_destination, "wb")
file_descriptor.write(file_buffer)
file_descriptor.close()
            # Acknowledge that we wrote the file out
client_socket.send("Successfully saved file to %s\r\n" %
upload_destination)
except:
client_socket.send("Failed to save file to %s\r\n" %
upload_destination)
# Check for command execution
if len(execute):
# Run the command
output = run_command(execute)
client_socket.send(output)
# Now we go into another loop if a command shell was requested
if command:
while True:
# Show a simple prompt
client_socket.send("<Netstatx:#> ")
# Now we receive until we see a linefeed (enter key)
cmd_buffer = ""
while "\n" not in cmd_buffer:
cmd_buffer += client_socket.recv(1024)
# Send back the command output
response = run_command(cmd_buffer)
# Send back the response
client_socket.send(response)
import sys
import socket
import getopt
import threading
import subprocess
#define some global variables
listen = False
command = False
upload = False
execute = ""
target = ""
upload_destination = ""
port = 0
def usage():
print "Net Tool"
print
print "Usage : netcat.py -t target_host -p port"
print "-l --listen -listen on [host]:[port] for incoming connections"
print "-e --execute=file_to_run -execute the given file upon receiving a connection "
print "-c --command -intialize a command shell"
print "-u --upload=destination -upon receiving connection upload a file and write to [destination]"
print
print
print "Examples : "
print "netcat.py -t 192.168.0.1 -p 5555 -l -c"
print "netcat.py -t 192.168.0.1 -p 5555 -l -u=c:\\target.exe"
print "netcat.py -t 192.168.0.1 -p 5555 -l -e=\"cat /etc/passwd\""
print "echo 'ABCDEEGHI' | ./netcat.py -t 192.168.11.12 -p 135"
sys.exit(0)
def run_command(command):
#trim the newline
command= command.rstrip()
#run the command get the output back
try:
output = subprocess.check_output(command, stderr=subprocess.STDOUT, shell=True)
except:
output = "Failed to execute command.\r\n"
#send the output back to the client
return output
def client_handler(client_socket):
global upload
global execute
global command
#check for upload
if len(upload_destination):
#read in all of the bytes and write to our destination
file_buffer= ""
#keep reading data until none is available
while True:
            data = client_socket.recv(1024)
if not data:
break
else:
file_buffer += data
#now we take these bytes and try to write them out
try:
file_descriptor=open(upload_destination,"wb")
file_descriptor.write(file_buffer)
file_descriptor.close()
            # acknowledge that we wrote the file out
client_socket.send("Successfully saved file to %s \r\n" % upload_destination)
except:
client_socket.send("Failed to save file to %s \r\n" % upload_destination)
# check for command execution
if len(execute):
# run the command
output = run_command(execute)
client_socket.send(output)
# now we go into another loop if a command shell was requested
if command:
while True:
# show a simple prompt
client_socket.send("<BHP:#> ")
# now we receive until we see a linefeed (enter key)
cmd_buffer = ""
while "\n" not in cmd_buffer:
cmd_buffer += client_socket.recv(1024)
# send back the command output
response = run_command(cmd_buffer)
# send back the response
client_socket.send(response)
def client_sender(buffer):
client= socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
#connect to our target host
client.connect((target,port))
if len(buffer):
client.send(buffer)
while True:
#now wait for data back
recv_len=1
response=""
while recv_len:
data = client.recv(4096)
recv_len= len(data)
response+=data
if recv_len<4096:
break
print response,
#wait for more input
buffer = raw_input("")
buffer+= "\n"
# send it off
client.send(buffer)
except:
print "[*] Exception! Exiting."
client.close()
def server_loop():
global target
#if no target is defined , we listen on all interfaces
if not len(target):
target ="0.0.0.0"
server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server.bind((target, port))
server.listen(5)
while True:
client_socket, addr = server.accept()
        # spin off a thread to handle our new client
client_thread= threading.Thread(target=client_handler, args=(client_socket,))
client_thread.start()
def main():
global listen
global port
global execute
global command
global upload_destination
global target
if not len(sys.argv[1:]):
usage()
#read the commandline options
try:
        opts, args = getopt.getopt(sys.argv[1:],"hle:t:p:cu:",["help","listen","execute","target","port","command","upload"])
except getopt.GetoptError as err:
print str(err)
usage()
for o,a in opts:
if o in ("-h", "--help"):
usage()
elif o in ("-l","--listen"):
listen=True
elif o in ("-e", "--execute"):
execute =a
elif o in ("-c", "--commandshell"):
command= True
elif o in ("-u", "--upload"):
upload_destination = a
elif o in ("-t", "--target"):
target =a
elif o in ("-p", "--port"):
port=int(a)
else :
assert False, "unhandled option"
# are we going to listen or just send data from stdin?
if not listen and len(target) and port> 0 :
#read in the buffer from the cmdline
#this will block, so send CTRL-D if not sending input
#to stdin
buffer = sys.stdin.read()
client_sender(buffer)
#we are goin to listen and potentially
#upload things, execute commands, and drop a shell back
#depending on our command line options above
if listen :
server_loop()
main()
I found some syntax errors when running your script (they may just be from copy-paste); anyway, I made some small edits and it works for me (note that I'm on Linux).
Your problem may be that a firewall is refusing connections on that port; try checking that.
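One quick way to check whether anything is actually listening on the port, independent of the netcat script (a small sketch; localhost and 9999 are the values from the question):
import socket

s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.settimeout(2)
# connect_ex returns 0 on success and an errno value on failure
result = s.connect_ex(("localhost", 9999))
if result == 0:
    print "something is accepting connections on port 9999"
else:
    print "connect failed with errno %d (refused or filtered?)" % result
s.close()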
I'm trying to write a program that will run a program on a remote machine over ssh and pass it SIGINT signals without killing my ssh connection. If I were running in a terminal, this would be easy with something like
ssh -t -t host "command to run"
I tried this with the following script:
#!/bin/bash
ssh -t -t host "program $#"
and when I run that from the terminal it works fine, but when Proof General runs this script and sends it SIGINT, it just ends up killing the program (I guess it can't allocate a terminal?).
I've got the following mess of code:
import os
import select
import signal
import subprocess
import sys

CTRL_C = "<CTRL-C>"
def endpoint(proc, handler):
signal.signal(2, signal.SIG_IGN)
inp = ""
out = ""
err = ""
inp_from_fd = sys.stdin.fileno()
inp_to_fd = proc.stdin.fileno()
out_from_fd = proc.stdout.fileno()
out_to_fd = sys.stdout.fileno()
err_from_fd = proc.stderr.fileno()
err_to_fd = sys.stderr.fileno()
while True:
try:
ins,outs,_ = select.select([inp_from_fd, out_from_fd, err_from_fd],
[inp_to_fd, out_to_fd, err_to_fd],
[])
for fd in ins:
if fd == inp_from_fd:
k = os.read(fd, 1024)
if k == "":
os.close(proc.stdin.fileno())
return
inp = inp + k
elif fd == out_from_fd:
k = os.read(fd, 1024)
out = out + k
elif fd == err_from_fd:
k = os.read(fd, 1024)
err = err + k
else:
assert False
for fd in outs:
if fd == inp_to_fd:
while CTRL_C in inp:
proc.send_signal(2)
p = inp.find(CTRL_C)
inp = inp[0:p] + inp[p+len(CTRL_C):]
k = os.write(fd, inp)
inp = inp[k:]
elif fd == out_to_fd:
k = os.write(fd, out)
out = out[k:]
elif fd == err_to_fd:
k = os.write(fd, err)
err = err[k:]
else:
assert False
except select.error:
pass
except KeyboardInterrupt:
handler()
except IOError, e:
pass
def usage(args):
print "must specify --client or --server"
if __name__ == '__main__':
if len(sys.argv) == 1:
usage(sys.argv)
elif sys.argv[1] == '--server':
proc = subprocess.Popen(sys.argv[2:],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
def INT():
proc.stdin.write(CTRL_C)
proc.stdin.flush()
endpoint(proc, INT)
elif '--client' in sys.argv:
proc = subprocess.Popen(sys.argv[2:],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
import time
time.sleep(1)
def INT():
pass
endpoint(proc, INT)
else:
usage(sys.argv)
which I'm invoking using something like:
remote.py --client ssh -t -t host "remote.py --server <program-to-run> <args>"
Is there something I'm doing wrong here in handling the signal? I've tried putting a print in the signal 2 handler and it does print, but it is also killing ssh (I'm getting "Killed by signal 2." printed on the console). Is Python forwarding the signal to its children? Is there a way to get around this? Is there an easier way to do this?
Thanks for any pointers.
Use os.setpgrp (see the setpgrp man page) in the child; otherwise the signal is propagated to the children.
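A minimal sketch of what that looks like with subprocess (the command line is only illustrative): start the child in its own process group via preexec_fn=os.setpgrp, so a Ctrl-C delivered to the script's foreground process group no longer reaches ssh, and forward SIGINT explicitly only when you want to:
import os
import signal
import subprocess

# run ssh in its own process group so the terminal's SIGINT is not propagated to it
proc = subprocess.Popen(["ssh", "-t", "-t", "host", "program"],
                        stdin=subprocess.PIPE,
                        stdout=subprocess.PIPE,
                        stderr=subprocess.PIPE,
                        preexec_fn=os.setpgrp)

# deliver SIGINT deliberately, e.g. when the <CTRL-C> marker is seen:
# proc.send_signal(signal.SIGINT)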