Knowing if my server is up or down using ping - Python

Thanks to all for your time.
I'm trying to find out whether several servers are up or down using ping, and the ping itself works... but when I try to convert the result into "up" or "down", something goes wrong and the result is always "down".
I don't know what else to try. I don't need anything else, just up or down and the IP.
import os
import datetime
import platform
import subprocess
import string

date = datetime.datetime.now()
day = date.day
hour = date.hour

def writedoc ():
    os.chdir ('Path')
    wresult = open ("pingresults_{}_{}.txt".format(day, hour), 'a')
    wresult.write ('{}-{}\n'.format(ips, rping))
    wresult.close ()

os.chdir ('Path')
openips = open ("ips.txt", "r")
ipfile = openips.readlines()
for ips in ipfile:
    ips = ips.strip()
    print (ips)
    args = ["ping", "-n", "4", "-l", "1", "-w", "1000", ips]
    pping = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    rping = pping.stdout
    for line in rping:
        print (line)
    if (rping.find("(100% perdidos)" != -1)):
        result = "down"
        print (result)
    else:
        result = "up"
        print (result)
    writedoc()

if (rping.find("(100% perdidos)" != -1))
Should this instead be
if (rping.find("(100% perdidos)") != -1)
so that it checks whether rping.find("(100% perdidos)") returns something other than -1?
With your version you are effectively calling rping.find(True), because "(100% perdidos)" does not equal -1, so the comparison inside the parentheses is evaluated before find() ever runs.

Python on Crontab does not execute bash script

import subprocess as sub
import re
import os
from datetime import datetime as influx_timestap
from influxdb import InfluxDBClient
from collections import OrderedDict

insert_json = []
hostname = str(sub.check_output('hostname')).strip()
location = str(sub.check_output(['ps -ef | grep mgr'], shell=True)).split()
current_dir = os.getcwd()
print("script executed")
gg_location_pattern = re.compile(r'mgr\.prm$')
gg_process_pattertn = re.compile(r'^REPLICAT|^EXTRACT')

for index in location:
    if gg_location_pattern.search(index) != None:
        gg_location = index[:-14]
        os.chdir(gg_location)
        print("checkpoint1")
        get_lag = sub.check_output(str(current_dir) + '/ggsci_test.sh', shell=True)
        print("checkpoint2")
        processes = get_lag.split("\n")
        for process in processes:
            if gg_process_pattertn.search(process) != None:
                lag_at_chkpnt = int((process.split()[3]).split(":")[0]) * 3600 + int((process.split()[3]).split(":")[1]) * 60 + int((process.split()[3]).split(":")[2])
                time_since_chkpnt = int((process.split()[4]).split(":")[0]) * 3600 + int((process.split()[4]).split(":")[1]) * 60 + int((process.split()[4]).split(":")[2])
                process_dict = OrderedDict({"measurement": "GoldenGate_Mon_" + str(hostname) + "_Graph",
                                            "tags": {"hostname": hostname, "process_name": process.split()[2]},
                                            "time": influx_timestap.now().isoformat('T'),
                                            "fields": {"process_type": process.split()[0], "process_status": process.split()[1],
                                                       "lag_at_chkpnt": lag_at_chkpnt, "time_since_chkpnt": time_since_chkpnt}})
                insert_json.append(process_dict)

host = 'xxxxxxxx'
port = 'x'
user = 'x'
password = 'x'
dbname = 'x'
print("before client")
client = InfluxDBClient(host, port, user, password, dbname)
client.write_points(insert_json)
print("after client")
This code works perfectly when run manually, but from crontab it does not work. After searching on the internet I found suggestions to set or change the "PATH" variable in the crontab. I changed the "PATH" variable and it is still not working.
The crontab log file shows "checkpoint1" and after that there is nothing, so the line that fails is "get_lag = sub.check_output(str(current_dir) + '/ggsci_test.sh', shell=True)".
What can I do from here?
Take care,
It looks like your external script (ggsci_test.sh) has some issue with its paths, or fails outright, when run under cron.
From the Python subprocess documentation about subprocess.check_output:
If the return code was non-zero it raises a CalledProcessError. The CalledProcessError object will have the return code in the returncode attribute and any output in the output attribute.
That is why you see the error when you catch it but cannot continue.
You should therefore check whether your shell script itself has issues that need to be fixed first.
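One debugging step, not part of the original answer: catch the CalledProcessError and log its returncode and output, and call the script by an absolute path, since cron does not give the job the same working directory, PATH, or environment as an interactive shell. The path below is hypothetical; adjust it to your setup.
import subprocess as sub

script = '/full/path/to/ggsci_test.sh'   # hypothetical absolute path
try:
    get_lag = sub.check_output(script, shell=True, stderr=sub.STDOUT)
    print("checkpoint2")
except sub.CalledProcessError as err:
    # Log why the script failed under cron before giving up
    print("ggsci_test.sh failed with return code", err.returncode)
    print("output was:", err.output)
    raise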

Trying to ping several PCs with Python

I'm trying to read a file that contains the IPs of 300 computers and then write to a new file whether each one exists or not.
import os
import datetime
import platform
import subprocess

date = datetime.datetime.now()
day = date.day
hour = date.hour

os.chdir ('Path')
openips = open ("ips.txt", "r")
ipfile = openips.readlines()
print (ipfile)
for ips in ipfile():
    ips = ips.strip()
    print (ips)
    args = ["ping", "-n", "4", "-l", "1", "-w", "1000", ips]
    pping = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    for line in pping.stdout:
        print (line)
    os.chdir ('Path')
    presult = open ("pingresults_{}_{}.txt".format(day, hour), 'a')
    presult.write ('{}_{}\n'.format(ips, line))
    presult.close ()
I don't know why, but every time I test my code the result is:
TypeError: 'list' object is not callable
I've tried everything; even when I change the variable to a string it says the same thing, just with "str" instead of "list".
Change:
for ips in ipfile:
...
No parentheses: ipfile is a list, and writing ipfile() tries to call it, which is what raises the TypeError.
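For completeness, a minimal sketch of the fixed loop (names follow the question's code); the file object itself is iterable, so readlines() is not even needed:
with open("ips.txt", "r") as openips:
    for ips in openips:      # iterate directly, no call parentheses
        ips = ips.strip()
        print(ips)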

Pythonic way of passing values between processes

I need a simple way to pass the stdout of a subprocess as a list to another function using multiprocessing:
The first function that invokes subprocess:
def beginRecvTest():
    command = ["receivetest", "-f=/dev/pcan33"]
    incoming = Popen(command, stdout=PIPE)
    processing = iter(incoming.stdout.readline, "")
    lines = list(processing)
    return lines
The function that should receive lines:
def readByLine(lines):
    i = 0
    while (i < len(lines)):
        system("clear")
        if(lines[i][0].isdigit()):
            line = lines[i].split()
            dictAdd(line)
        else:
            next
        print ; print "-" * 80
        for _i in mydict.keys():
            printMsg(mydict, _i)
        print "Keys: ", ; print mydict.keys()
        print ; print "-" * 80
        sleep(0.3)
        i += 1
and the main from my program:
if __name__ == "__main__":
    dataStream = beginRecvTest()
    p = Process(target=dataStream)
    reader = Process(target=readByLine, args=(dataStream,))
    p.start()
    reader.start()
I've read up on using queues, but I don't think that's exactly what I need.
The subprocess called returns data indefinitely, so some people have suggested using tempfile, but I am totally confused about how to do that.
At the moment the script only returns the first line read, and all my attempts at looping the beginRecvTest() function have ended in errors.
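No answer is captured here, but a minimal sketch of the queue-based approach the question mentions, assuming the same receivetest command and that each line should be consumed as it arrives rather than after the subprocess finishes (which it never does for an endless stream):
from multiprocessing import Process, Queue
from subprocess import Popen, PIPE

def producer(q):
    # Stream subprocess output into the queue line by line
    incoming = Popen(["receivetest", "-f=/dev/pcan33"], stdout=PIPE)
    for line in iter(incoming.stdout.readline, b""):
        q.put(line)

def consumer(q):
    # Handle each line as soon as the producer puts it on the queue
    while True:
        line = q.get()
        print(line)

if __name__ == "__main__":
    q = Queue()
    Process(target=producer, args=(q,)).start()
    Process(target=consumer, args=(q,)).start()
The queue decouples the two processes, so the reader never has to wait for the whole (infinite) output to be collected into a list first.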

Python subprocess: capture output of ffmpeg and run regular expression against it

I have the following code
import subprocess
import re
from itertools import *

command = ['ffprobe', '-i', '/media/some_file.mp4']
p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
text = p.stderr.read()
retcode = p.wait()
text = text.decode('utf-8')
p = re.compile("Duration(.*)")
num = 0  # for debugging
for line in iter(text.splitlines()):
    print(str(num) + line)  # for debugging
    m = p.match(str(line))
    if m != None:
        print(m.group(1))
When I look at the output there is a line that says "Duration" on it, however it is not captured, print(m.group(1)) is never reached. If I change the text variable to a hardcoded string of "Duration blahblah" I get " blahblah", which is what I expect. It seems like the regex doesn't recognize the text coming back from stderr. How can I get the text into a format that the regex will recognize and match on?
I have come up with the following solution, in case it helps anyone else attempting to capture the duration from ffmpeg using Python:
import subprocess
import re
command = ['ffprobe', '-i', '/media/some_file.mp4']
p = subprocess.Popen(command, stderr=subprocess.PIPE)
text = p.stderr.read()
retcode = p.wait()
text = text.decode('utf-8')
p = re.compile(r".*Duration:\s([0-9:\.]*),", re.MULTILINE|re.DOTALL)
m = p.match(text)
print(m.group(1))
p = re.compile(r".*?Duration(.*)")
Try this. match starts from the beginning of the string, and there may be something before "Duration".
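A related point, not from the original answers: re.search scans the whole string instead of anchoring at its start, so no leading .*? is needed at all. The sample line below is an assumed ffprobe output fragment:
import re

sample = "  Duration: 00:03:25.37, start: 0.000000, bitrate: 1205 kb/s"
m = re.search(r"Duration:\s*([0-9:.]+)", sample)
if m is not None:
    print(m.group(1))   # prints 00:03:25.37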

Broken Pipe - Trying to display progress of dd on LCD display

I am trying to use Python to create a tool for imaging CF cards with a Raspberry Pi.
I had most of it working until I implemented compressed images with dd.
When I pipe the output of gzip to dd, I lose the ability to poke the dd process and get its progress.
I have tried using multiple subprocesses but keep getting broken pipe or "no such file" errors.
Below is my code:
#!/usr/bin/env python
from Adafruit_CharLCD import Adafruit_CharLCD
import os
import sys
import time
import signal
from subprocess import Popen, PIPE

lcd = Adafruit_CharLCD()
lcd.begin(16, 2)
imgpth = '/home/pi/image/image_V11.img.gz'
line0 = ""
line1 = ""
q = 0
r = 0
s = 0

def lcdPrint(column, row, message, clear=False):
    if ( clear == True ):
        lcd.clear()
    lcd.setCursor(column, row)
    lcd.message(message)

lcd.clear()
lcdPrint(0, 0, 'Preparing Copy', True)
lcdPrint(0, 1, '')
gz = Popen(['gunzip -c /home/pi/image/image_V11.img.gz'], stdout=PIPE)
dd = Popen(['dd of=/dev/sda'], stdin=gz.stdout, stderr=PIPE)
filebyte = os.path.getsize(imgpth)
flsz = filebyte / 1024000
while dd.poll() is None:
    time.sleep(1)
    dd.send_signal(signal.SIGUSR1)
    while 1:
        l = dd.stderr.readline()
        if '(' in l:
            param, value = l.split('b', 1)
            line1 = param.rstrip()
            r = float(line1)
            s = r / 1024000
            break
    lcdPrint(0, 0, 'Copying....', True)
    q = round(s / flsz * 100, 2)
    per = str(q)
    lcdPrint(0, 1, per + '% Complete',)
lcdPrint(0, 0, 'Copy Complete', True)
time.sleep(1)
exit()
How can I fix this?
I stumbled across this question because I am doing exactly the same. My complete solution is here:
http://github.com/jrmhaig/Bakery
I've tried to pick out some differences between what I have and yours that might show you the solution.
When starting the dd I redirected both stderr and stdout to the pipe.
dd = subprocess.Popen(['dd', 'of=/dev/sda', 'bs=1M'], bufsize=1, stdin=unzip.stdout, stdout=PIPE, stderr=STDOUT)
I don't think this should really make a difference. Everything you need should go to stderr but for some reason it appeared to get mixed up for me.
I use a separate thread to pick up the output from dd:
def read_pipe(out, queue):
    for line in iter(out.readline, b''):
        queue.put(str(line))
    out.close()
dd_queue = queue.Queue()
dd_thread = threading.Thread(target = read_pipe, args=(dd.stdout, dd_queue))
dd_thread.daemon = True
dd_thread.start()
Then when you call:
dd.send_signal(signal.SIGUSR1)
the output gets caught on dd_queue.
I also found that the uncompressed size of a gzipped file is stored in its last 4 bytes:
fl = open(str(imgpath), 'rb')
fl.seek(-4, 2)
r = fl.read()
fl.close()
size = struct.unpack('<I', r)[0]
os.path.getsize(imgpth) will only give you the compressed size so the percentage calculation will be wrong.
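To tie the pieces together, a rough sketch (not from the original answer) of how the queued dd output and the uncompressed size could be combined into a percentage; it assumes Python 3, the dd_queue from the snippet above, and GNU dd status lines of the form "123456789 bytes (123 MB) copied, ...":
import queue

def latest_progress(dd_queue, total_bytes):
    # Drain everything dd printed since the last SIGUSR1 and keep the newest byte count
    copied = None
    while True:
        try:
            line = dd_queue.get_nowait()
        except queue.Empty:
            break
        if "bytes" in line:
            copied = float(line.split()[0])
    if copied is None:
        return None               # no new status line yet
    return round(copied / total_bytes * 100, 2)
Each call made after dd.send_signal(signal.SIGUSR1) then returns the current percentage, or None if dd has not printed a new status line yet.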
