Can we use paramiko to execute a command over multiple shells - Python

import paramiko
from datetime import datetime

hosts = ['10.101.x.x']
username = 'root'
password = ''

# Build a date stamp such as "Log-01Jan2016" to look for in the log listing
date = datetime.now()
dates = date.strftime('%d%b%Y')
print(dates)
CheckStr = "Log-" + dates
print(CheckStr)

cmd1 = 'cd /usr/local/element/log/global/log/;ls -ltr'
cmd2 = '/usr/local/element/rel/RAN_RCS_1.0.0.31/bin/linux-x86_pcm64/execCmdCli'
cmd3 = 'element add filter rule SPNLOGS enable class SPNAPP severity Informational,Major,Debug,Critical,Minor target log'

def work_comp():
    global ssh  # exe_comm() reads the client through this global
    for host in hosts:
        print(host)
        ssh = paramiko.SSHClient()
        ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        ssh.connect(host, username=username, password=password, timeout=5.0)
        exe_comm(cmd1)
        exe_comm(cmd2)
        ssh.close()

def exe_comm(cmd):
    stdin, stdout, stderr = ssh.exec_command(cmd)
    for line in stdout:
        print(line)
    print("Done")

work_comp()

There is a parallel SSH client, parallel-ssh (which uses paramiko), that can do this; it is also asynchronous.
from pssh import ParallelSSHClient

client = ParallelSSHClient(['10.101.x.x'], user='root', password='')
cmd1 = 'cd /usr/local/element/log/global/log/;ls -ltr'
cmd2 = '/usr/local/element/rel/RAN_RCS_1.0.0.31/bin/linux-x86_pcm64/execCmdCli'
cmd3 = 'element add filter rule SPNLOGS enable class SPNAPP severity Informational,Major,Debug,Critical,Minor target log'

for cmd in [cmd1, cmd2, cmd3]:
    output = client.run_command(cmd)
    for host, host_out in output.items():
        for line in host_out.stdout:
            print("[%s] - %s" % (host, line))
print("done")


Connect to host and run ps aux command

I am trying to SSH to an Ubuntu machine and get process details. I can connect and log in to the device, but I am unable to get the command output. I get the output below:
connected to device x.x.x.x
1
('Command: ', 'ps aux | grep vmstat')
2
Did not connect to x.x.x.x
import os
import time
import paramiko

processname = 'vmstat'

def connect_to_devices(ip, procname):
    ssh = paramiko.SSHClient()
    ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    try:
        ssh.connect(ip, username='xxxxx', password='xxxxx', timeout=10)
        time.sleep(.25)
        channel = ssh.invoke_shell()
        output = channel.recv(10000)
        print(output)
        print("connected to device")
        proc = findprocess(procname)
        time.sleep(10)
        print('2')
        print(proc)
        return proc
    except Exception:
        # any error raised above lands here and is reported as a connect failure
        print("Did not connect to %s" % ip)
        return 'failed'

def findprocess(p):
    print('1')
    # NOTE: os.popen runs the command on the *local* machine,
    # not over the SSH connection opened above
    command = 'ps aux | grep ' + p
    print('Command: ', command)
    print('2')
    for line in os.popen(command, 'r', 1):
        print('3')
        fields = line.split()
        print('4')
        print(fields)
    return fields

hosts = ['x.x.x.x']
for h in hosts:
    t = connect_to_devices(h, processname)
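The likely culprit (not confirmed in the original post) is that findprocess calls os.popen, which runs ps aux on the local machine rather than over the SSH connection, and the broad except then reports whatever error follows as a connection failure. A minimal sketch that runs the command on the remote host via exec_command instead, keeping the credential placeholders from the question:

import paramiko

def remote_process_details(ip, procname):
    ssh = paramiko.SSHClient()
    ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    ssh.connect(ip, username='xxxxx', password='xxxxx', timeout=10)
    try:
        # Run the command on the remote host, not locally
        stdin, stdout, stderr = ssh.exec_command('ps aux | grep ' + procname)
        return [line.split() for line in stdout]
    finally:
        ssh.close()

for fields in remote_process_details('x.x.x.x', 'vmstat'):
    print(fields)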

paramiko SSHException: Channel Closed when using exec_command on an Alcatel-Lucent OmniSwitch 6000

When I run:
import paramiko

def conn_string(cmd, switch_IP, uname, pword):
    try:
        ssh = paramiko.SSHClient()
        ssh.load_system_host_keys()
        ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        ssh.connect(switch_IP, username=uname, password=pword, port=22)
        stdin, stdout, stderr = ssh.exec_command(command=cmd, timeout=60)
        output = stdout.read()
        current_res = output.decode('ascii')
        print(current_res)
        return current_res
    except paramiko.AuthenticationException:
        print("\n Authentication Failed!")
        exit()
    except paramiko.SSHException:
        print("\n Issues with SSH service!!!")
        exit()
    except BaseException as e:
        print(str(e))
        exit()

if __name__ == '__main__':
    switch_IP = input("Enter switch IP address: ")
    u_name = "abc"
    p_word = "xyz"
    res = conn_string("show configuration snapshot", switch_IP, u_name, p_word)
I get SSHException: Channel Closed.
But when I run:
conn = paramiko.SSHClient()
conn.set_missing_host_key_policy(paramiko.AutoAddPolicy())
conn.connect(switch_IPAddress, port=22, username=u_name, password=p_word,
             look_for_keys=False, allow_agent=False)
con = conn.invoke_shell()
output = con.recv(65535)
final_output = output.decode('ascii')
print(final_output)
con.send("show configuration snapshot")
time.sleep(.5)
output = con.recv(65535)
final_output = output.decode('ascii')
print(final_output)
it only shows me the device banner and the echoed command, with no output or errors.
The command works fine on the switch console.
If I use the same code on a Cisco switch (with a different command, of course) it works perfectly, but it fails on the Alcatel switch. Please help me out.
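Two hedged observations, since this is not confirmed against an actual OmniSwitch. First, some network-device SSH servers never implement the exec channel that exec_command needs and close it immediately, which matches the Channel Closed error; the interactive invoke_shell path is the usual workaround. Second, in the shell-based snippet the command is sent without a trailing newline, so the switch never executes it, and 0.5 s may be too short for the snapshot to print. A minimal sketch of the shell approach with those two fixes, reusing the variables from the snippets above:

import time
import paramiko

conn = paramiko.SSHClient()
conn.set_missing_host_key_policy(paramiko.AutoAddPolicy())
conn.connect(switch_IPAddress, port=22, username=u_name, password=p_word,
             look_for_keys=False, allow_agent=False)
shell = conn.invoke_shell()
time.sleep(1)
shell.recv(65535)                            # discard the login banner
shell.send("show configuration snapshot\n")  # the newline actually submits the command
buf = b""
deadline = time.time() + 10                  # crude overall timeout
while time.time() < deadline:
    if shell.recv_ready():
        buf += shell.recv(65535)
    else:
        time.sleep(0.2)
print(buf.decode('ascii', errors='replace'))
conn.close()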

How to run a PSCP cmd-window step from my Python script

I am running Hadoop MapReduce and other SSH commands from a Python script using the paramiko module (code can be seen here). Once the MapReduce job is complete, I run the getmerge step to get the output into a text file.
The problem is, I then have to open a cmd window and run PSCP to download the output.txt file from the HDFS environment to my computer. For example:
pscp xxxx@xx.xx.xx.xx:/nfs_home/appers/cnielsen/MROutput_121815_0.txt C:\Users\cnielsen\Desktop\MR_Test
How can I incorporate this pscp step into my script so that I don't have to open a cmd window to run this after my MapReduce and getmerge tasks are complete? I would like my script to be able to run the MR task, getmerge task, and then automatically save the MR output to my computer.
Here is my code.
I have solved this problem with the following code. The trick was to use the scp module and import SCPClient; see the scp_download(ssh) function below.
When the MapReduce job completes, the getmerge command is run, followed by the scp_download step.
import time
import paramiko
from scp import SCPClient

# Define connection info
host_ip = 'xx.xx.xx.xx'
user = 'xxxxxxxx'
pw = 'xxxxxxxx'
port = 22

# Paths
input_loc = '/nfs_home/appers/extracts/*/*.xml'
output_loc = '/user/lcmsprod/output/cnielsen/'
python_path = "/usr/lib/python_2.7.3/bin/python"
hdfs_home = '/nfs_home/appers/cnielsen/'
output_log = r'C:\Users\cnielsen\Desktop\MR_Test\MRtest011316_0.txt'
local_dir = r'C:\Users\cnielsen\Desktop\MR_Test'  # download target; not defined in the original code

# File names
xml_lookup_file = 'product_lookups.xml'
mapper = 'Mapper.py'
reducer = 'Reducer.py'
helper_script = 'Process.py'
product_name = 'test1'
output_ref = 'test65'
target_file = 'test_011416_3.txt'

# ----------------------------------------------------
def createSSHClient(host_ip, port, user, pw):
    client = paramiko.SSHClient()
    client.load_system_host_keys()
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    client.connect(host_ip, port, user, pw)
    return client

# ----------------------------------------------------
def buildMRcommand(product_name):
    # Assemble the hadoop-streaming invocation as a single shell command line
    mr_command_list = ['hadoop', 'jar', '/share/hadoop/tools/lib/hadoop-streaming.jar',
                       '-files', hdfs_home+xml_lookup_file,
                       '-file', hdfs_home+mapper,
                       '-file', hdfs_home+reducer,
                       '-mapper', "'"+python_path, mapper, product_name+"'",
                       '-file', hdfs_home+helper_script,
                       '-reducer', "'"+python_path, reducer+"'",
                       '-input', input_loc,
                       '-output', output_loc+output_ref]
    MR_command = " ".join(mr_command_list)
    print(MR_command)
    return MR_command

# ----------------------------------------------------
def unbuffered_lines(f):
    # Yield lines from a paramiko channel file as they arrive,
    # one byte at a time, until the remote command exits
    line_buf = b""
    while not f.channel.exit_status_ready():
        line_buf += f.read(1)
        if line_buf.endswith(b'\n'):
            yield line_buf.decode()
            line_buf = b""

# ----------------------------------------------------
def stream_output(stdin, stdout, stderr):
    writer = open(output_log, 'w')
    # Stream stderr live using the line-buffer function
    for l in unbuffered_lines(stderr):
        e = '[stderr] ' + l
        print('[stderr] ' + l.strip('\n'))
        writer.write(e)
    # Gives the full listing
    for line in stdout:
        r = '[stdout]' + line
        print('[stdout]' + line.strip('\n'))
        writer.write(r)
    writer.close()

# ----------------------------------------------------
def run_MapReduce(ssh):
    stdin, stdout, stderr = ssh.exec_command(buildMRcommand(product_name))
    stream_output(stdin, stdout, stderr)
    return 1

# ----------------------------------------------------
def run_list_dir(ssh):
    list_dir = "ls "+hdfs_home+" -l"
    stdin, stdout, stderr = ssh.exec_command(list_dir)
    stream_output(stdin, stdout, stderr)

# ----------------------------------------------------
def run_getmerge(ssh):
    getmerge = "hadoop fs -getmerge "+output_loc+output_ref+" "+hdfs_home+target_file
    print(getmerge)
    stdin, stdout, stderr = ssh.exec_command(getmerge)
    for line in stdout:
        print('[stdout]' + line.strip('\n'))
    time.sleep(1.5)
    return 1

# ----------------------------------------------------
def scp_download(ssh):
    # Reuse the existing SSH transport for the SCP transfer
    scp = SCPClient(ssh.get_transport())
    print("Fetching SCP data..")
    scp.get(hdfs_home+target_file, local_dir)
    print("File download complete.")

# ----------------------------------------------------
def main():
    # Get the ssh connection
    ssh = createSSHClient(host_ip, port, user, pw)
    print("Executing command...")
    # Command list
    ##run_list_dir(ssh)
    ##run_getmerge(ssh)
    ##scp_download(ssh)
    # Run MapReduce
    MR_status = run_MapReduce(ssh)
    if MR_status == 1:
        gs = run_getmerge(ssh)
        if gs == 1:
            scp_download(ssh)
    # Close ssh connection
    ssh.close()

if __name__ == '__main__':
    main()
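As a side note, paramiko's built-in SFTP support can do the same download without the extra scp package. A minimal sketch under the same assumptions as the script above (hdfs_home, target_file, and the assumed local_dir download directory):

import os

def sftp_download(ssh):
    # SSHClient.open_sftp() reuses the existing transport
    sftp = ssh.open_sftp()
    try:
        sftp.get(hdfs_home + target_file,
                 os.path.join(local_dir, target_file))
    finally:
        sftp.close()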

ssh command with argument execution in Python

I need to execute an ssh command with arguments in Python. I have been able to execute the ssh command, but I cannot figure out how to pass the arguments.
The command:
ssh -L 22222:localhost:5434 sayan@155.97.73.252
Here is the code:
import paramiko

ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect("155.97.73.252", username="sayan", password="#####")
paramiko Example
import cmd
import paramiko

class RunCommand(cmd.Cmd):
    """ Simple shell to run a command on the host """
    prompt = 'ssh > '

    def __init__(self):
        cmd.Cmd.__init__(self)
        self.hosts = []
        self.connections = []

    def do_add_host(self, args):
        """add_host
        Add the host to the host list"""
        if args:
            self.hosts.append(args.split(','))
        else:
            print("usage: add_host <host>,<username>,<password>")

    def do_connect(self, args):
        """Connect to all hosts in the hosts list"""
        for host in self.hosts:
            client = paramiko.SSHClient()
            client.set_missing_host_key_policy(
                paramiko.AutoAddPolicy())
            client.connect(host[0],
                           username=host[1],
                           password=host[2])
            self.connections.append(client)

    def do_run(self, command):
        """run
        Execute this command on all hosts in the list"""
        if command:
            for host, conn in zip(self.hosts, self.connections):
                stdin, stdout, stderr = conn.exec_command(command)
                stdin.close()
                for line in stdout.read().decode().splitlines():
                    print('host: %s: %s' % (host[0], line))
        else:
            print("usage: run <command>")

    def do_close(self, args):
        for conn in self.connections:
            conn.close()

if __name__ == '__main__':
    RunCommand().cmdloop()
Example output:
ssh > add_host 127.0.0.1,jesse,lol
ssh > connect
ssh > run uptime
host: 127.0.0.1: 14:49 up 11 days, 4:27, 8 users,
load averages: 0.36 0.25 0.19
ssh > close
fabric example
import re
from fabric import tasks
from fabric.api import env, run

env.hosts = ['localhost', 'sunflower.heliotropic.us']
pattern = re.compile(r'up (\d+) days')

# No need to decorate this function with @task
def uptime():
    res = run('uptime')
    match = pattern.search(res)
    if match:
        days = int(match.group(1))
        env['uts'].append(days)

def main():
    env['uts'] = []
    tasks.execute(uptime)
    uts_list = env['uts']
    if not uts_list:
        return  # perhaps we should print a notice here?
    avg = sum(uts_list) / float(len(uts_list))
    print('-' * 80)
    print('Average uptime: %s days' % avg)
    print('-' * 80)

if __name__ == '__main__':
    main()

Setting date/time on a unix based server with Python (Paramiko module)

Okay, can someone tell me what I'm doing wrong with this simple request to change the time? I'm on a Windows 7 machine, trying to change the time on a Linux box. I can log in, search logs and run other commands (adjusting my code below, of course), but this simple command is not changing the date/time. I must be overlooking something?
datetime_string = raw_input("Enter date and time in format 11/1/2011 1600")
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect(iP_address, username='root', password='******')
apath = '/'
apattern = datetime_string
rawcommand = 'date -s' + datetime_string
command1 = rawcommand.format(pattern=apattern)
stdin, stdout, stderr = ssh.exec_command(command1)
dateresult = stdout.read().splitlines()
Try changing this:
rawcommand = 'date -s' + datetime_string
To this:
rawcommand = 'date -s "%s"' % datetime_string
And I'm not positive, but I don't think rawcommand.format(pattern=apattern) is necessary:
datetime_string = raw_input("Enter date and time in format 11/1/2011 1600")
command1 = 'date -s "%s"' % datetime_string
stdin, stdout, stderr = ssh.exec_command(command1)
dateresult = stdout.read().splitlines()
You should validate user input, especially if it might be passed unescaped to the shell.
#!/usr/bin/env python
from datetime import datetime
import paramiko

# read new date from stdin
datetime_format = "%m/%d/%Y %H%M"
newdate_string = input("Enter date and time in format 11/1/2011 1600")

# validate that newdate_string is in datetime_format
newdate = datetime.strptime(newdate_string, datetime_format)

# print the date (change `-d` to `-s` to actually set it)
command = "date -d '%s'" % newdate.strftime(datetime_format)

ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect("localhost")  # use ssh keys to authenticate

# run it
stdin, stdout, stderr = ssh.exec_command(command)
stdin.close()

# get output of the command
print()
print("stdout: %r" % (stdout.read().decode(),))
print('*' * 79)
print("stderr: %r" % (stderr.read().decode(),))
Output
$ echo 1/11/2011 1600 | python set-date.py
Enter date and time in format 11/1/2011 1600
stdout: 'Tue Jan 11 16:00:00 EST 2011\n'
*******************************************************************************
stderr: ''
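For the escaping concern specifically, the standard library's shlex.quote (pipes.quote on Python 2) wraps a user-supplied string so the remote shell cannot interpret it. A minimal sketch:

import shlex

newdate_string = input("Enter date and time in format 11/1/2011 1600: ")
# shlex.quote single-quotes the string and escapes any embedded quotes
command = "date -s %s" % shlex.quote(newdate_string)
print(command)  # e.g. date -s '11/1/2011 1600'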
