How to write output without a newline sequence in Python - python

I have the ip_file.txt with the following input fields.
8.8.8.8,8.8.4.4
www.google.com,www.yahoo.com
And I am getting the output as below:
8.8.8.8,UP,20,2022-08-11 22:58:16
8.8.4.4,UP,17,2022-08-11 22:58:16
www.google.com,UP,17,2022-08-11 22:58:16
www.yahoo.com,UP,364,2022-08-11 22:58:16
Here is my code in Python.
import subprocess
import threading
import time
import re

# Timestamp used in the results file name (no colons: not legal in Windows file names).
timestr = time.strftime("%Y-%m-%d %H%M%S")
# Human-readable timestamp written into each CSV row.
timesec = time.strftime("%Y-%m-%d %H:%M:%S")
raw_list = []  # kept for compatibility; not used below


def ping(host):
    """Ping *host* five times (Windows `ping -n 5`) and append one CSV row
    "host,UP/Down,latency,timestamp" to this run's results file.

    Latency is taken from the "Minimum = Xms, ..." summary line of the
    Windows ping output; 0 is written when no such line is found.
    """
    # An argument list does not need shell=True; dropping it avoids an extra
    # shell process and any quoting issues in *host*.
    p = subprocess.Popen(["ping", host, "-n", "5"],
                         universal_newlines=True, stdout=subprocess.PIPE)
    response = p.communicate()[0]
    # Default so the write below can never hit an unbound name (the original
    # raised NameError for hosts whose output had no "Minimum =" line).
    latfin1 = "0"
    for line in response.split("\n"):
        para = line.split("=")
        try:
            # Summary line: "Minimum = 1ms, Maximum = 2ms, Average = 1ms"
            if para[0].strip() == "Minimum":
                latency = para[3].strip()
                print(latency)
                latfin1 = re.findall(r"\d+", latency)[0]
        except (IndexError, ValueError):
            print("time run")
    # NOTE(review): the original condition `"Received = 1" and "Approximate"
    # in response` only ever tested the second operand ("Received = 1" is a
    # truthy literal); the effective check is kept here.
    with open("results_bng_" + str(timestr) + ".txt", "a") as results_file:
        if "Approximate" in response:
            print(f"UP {host} Ping Successful")
            results_file.write(f"{host},UP,{latfin1},{timesec}" + "\n")
        else:
            print(f"Down {host} Ping Unsuccessful")
            results_file.write(f"{host},Down,0,{timesec}" + "\n")


with open(r'ip_file.txt', "r") as server_list_file:
    hosts = server_list_file.read()
hosts_list = hosts.replace('\n', ',').split(',')
print(hosts_list)

# Start every worker first, then join them all.  The original joined each
# thread immediately after starting it, which serialised the pings.
threads = []
for host in hosts_list:
    if host:  # skip empty entries produced by a trailing newline
        t = threading.Thread(target=ping, args=(host,))
        t.start()
        threads.append(t)
for t in threads:
    t.join()
Is there any chance I get the output format as noted below:
8.8.8.8,UP,20,2022-08-11 22:58:16,8.8.4.4,UP,17,2022-08-11 22:58:16
www.google.com,UP,17,2022-08-11 22:58:16,www.yahoo.com,UP,364,2022-08-11 22:58:16
I tried multiple ways, but the input file is read as a single array, so I am not able to write the output as shown above. Can anybody help with this? Thank you for your valuable time.

Related

how can I extract Average in this each ping result and store in file

Here I can ping each IP but am not able to extract the Average of the result. Can anyone help, please?
import subprocess
import threading

ip_list = []  # collected "host returncode" strings (returncode 0 = reachable)


def ping(host):
    """Ping *host* once and append "host <returncode>" to ip_list."""
    # Pass an argument list rather than a concatenated command string:
    # portable across platforms and immune to shell metacharacters in *host*.
    rc = subprocess.run(["ping", host, "-n", "1"]).returncode
    ip_list.append(host + ' ' + str(rc))


with open(r'input.txt', "r") as input_file:
    hosts_list = input_file.read().split('\n')

# Start all worker threads before joining any of them; the original joined
# each thread right after start(), so the pings ran one at a time.
threads = [threading.Thread(target=ping, args=(h,)) for h in hosts_list if h]
for t in threads:
    t.start()
for t in threads:
    t.join()
After doing some research I found out that when using subprocess.run and then getting the returncode, you don't get the output but only the return code — usually 0 for a successful run with no error.
If you want to get the output of the process you have to use subprocess.Popen and then communicate.
Then if you only want the average you have to do some string manipulation with the output to only get the number after "Average".
Here's an example:
def ping(host):
    """Ping *host* once and append "host <average-ms>" to the global ip_list.

    Parses the Windows ping summary line
    "Minimum = Xms, Maximum = Yms, Average = Zms".
    """
    # universal_newlines=True makes communicate() return decoded text.  The
    # original called str() on the raw bytes, which parses the repr
    # ("b'...\\r\\n'") rather than the actual output.
    output = subprocess.Popen(["ping", host, "-n", "1"],
                              stdout=subprocess.PIPE,
                              universal_newlines=True).communicate()[0]
    words = output.split(sep=" ")
    # "Average" is followed by "=" and then the value token, e.g. "9ms".
    average = words[words.index("Average") + 2].split("ms")[0]
    ip_list.append(host + ' ' + average)

multithread pinging of IP address in Python

I have a list of about 1,000 IP addresses. I am reading ip_file.txt and storing the result file as result_date.txt. Below is the code with which I achieved the result. But my issue is that it's taking too long to process all the entries. Can anyone suggest multithreading, please, so that the desired result can be achieved quickly? Thanks in advance.
#!/usr/bin/env python
# NOTE(review): csv, sys, re and defaultdict are imported but never used in
# this script; paramiko is a third-party dependency that is also unused here.
import os
import csv
import paramiko
from datetime import datetime
import time
import sys
import re
from collections import defaultdict
# Verifies your os type
# NOTE(review): this import is unused and shadows the name `file`
# (a builtin in Python 2) — consider removing it.
from paramiko import file
OS_TYPE = os.name  # 'nt' on Windows, 'posix' on Unix-like systems
# Sets the count modifier to the os type
# Windows ping uses -n for the packet count; Unix ping uses -c.
count = '-n' if OS_TYPE == 'nt' else '-c'
def create_ip_list():
    """Read ip_file.txt from the working directory and return a list of its
    lines with surrounding whitespace stripped."""
    with open("ip_file.txt", "r") as handle:
        return [entry.strip() for entry in handle]
# fetching data
now = datetime.now()
dat = now.strftime("%d/%m/%Y")  # e.g. "11/08/2022"
# time = now.strftime("%H:%M:%S")
# Dashes instead of slashes so the value is safe to use in file names.
# NOTE(review): date_string is computed but not used elsewhere in this script.
date_string = dat.replace('/', '-')
# Timestamp embedded in the results file name below.
timestr = time.strftime("%d%m%Y-%H%M%S")
def ping_device(ip_list):
    """Ping every address in *ip_list* once and write one line per host
    to results_<timestamp>.txt ("ip,UP,latency" or "ip Down").

    :param ip_list: iterable of IP address / hostname strings
    :return: None
    """
    # `with` guarantees the results file is closed even if a ping raises.
    with open("results_" + str(timestr) + ".txt", "w") as results_file:
        for ip in ip_list:
            response = os.popen(f"ping {ip} {count} 1").read()
            time.sleep(1.5)  # pacing between pings, kept from the original
            # fetch Average time
            print(response)
            # Default so the write below can never hit an unbound name (the
            # original raised NameError when no "Minimum =" line appeared).
            latency = "0"
            for line in response.split("\n"):
                para = line.split("=")
                try:
                    # Windows summary: "Minimum = Xms, Maximum = Yms, Average = Zms"
                    if para[0].strip() == "Minimum":
                        latency = para[3].strip()
                        print(latency)
                except IndexError:
                    print("time run")
            # NOTE(review): the original condition `"Received = 1" and
            # "Approximate" in response` only tested the second operand;
            # the effective check is kept here.
            if "Approximate" in response:
                # print(f"UP {ip} Ping Successful")
                results_file.write(f"{ip},UP,{latency}" + "\n")
            else:
                print(f"Down {ip} Ping Unsuccessful")
                results_file.write(f"{ip} Down" + "\n")


if __name__ == "__main__":
    ping_device(create_ip_list())
Write a function ping_one_device that takes a single ip and returns a single string giving the status. It should be easy to pull this out of ping_device.
Then
# `executor.map` (not the builtin `map`) is what actually dispatches the
# calls to the pool's worker threads — with the plain builtin the pings
# still run sequentially and the pool does nothing.
with open(results_file, "w") as results_file:
    with ThreadPoolExecutor() as executor:
        for result in executor.map(ping_one_device, ip_list):
            results_file.write(result)

Python Repeated IP-Scan with threads

After repeated IP scans the program crashes. The threads are not killed and I don't know how to do this. Any idea?
import time
import datetime
import subprocess,os,threading
from queue import Queue
# Address template and the (inclusive start, exclusive stop) range of the
# final octet to scan: 192.168.101.20 .. 192.168.101.99.
ipbase = "192.168.101.{0}"
startadd = 20
stoppadd = 100
def ipscan(): #looking for available IP adresses
    """Ping every host in the configured range and print those that answer.

    Worker threads are told to exit (via a None sentinel) before this
    function returns.  The original left every worker blocked forever on
    q.get(), so each repeated call leaked another batch of threads until
    the process hit the thread limit and crashed.
    """
    lock = threading.Lock()
    _start = time.time()

    def check(n):
        # Discard ping's console output; only the exit status matters.
        with open(os.devnull, "wb") as limbo:
            ip = ipbase.format(n)
            result = subprocess.Popen(["ping", "-n", "2", "-w", "300", ip],
                                      stdout=limbo, stderr=limbo).wait(timeout=10)
        with lock:
            if not result:  # exit status 0 -> host replied
                print (ip, "active")

    def threader():
        while True:
            worker = q.get()
            if worker is None:  # sentinel: scan is done, leave the thread
                q.task_done()
                break
            check(worker)
            q.task_done()

    print("Scan IP...")
    print("Address scan from " + ipbase + str(startadd) + " until " + ipbase + str(stoppadd))
    q = Queue()
    threads = []
    for _ in range(startadd, stoppadd):
        t = threading.Thread(target=threader)
        t.daemon = True
        t.start()
        threads.append(t)
    for worker in range(startadd, stoppadd):
        q.put(worker)
    q.join()
    # Shut the pool down: one sentinel per worker, then wait for them to exit.
    for _ in threads:
        q.put(None)
    for t in threads:
        t.join()
if __name__ == "__main__":
    # Note when the program started, then run the first scan right away.
    starttime = datetime.datetime.now()
    print (starttime)
    print("first check of available ip adresses..")
    ipscan() #looking for available IP adresses
    cyclebegin = datetime.datetime.now()
    sampletime = 3
    interval = datetime.timedelta(seconds=sampletime)
    # Re-run the scan whenever more than `sampletime` seconds have passed
    # since the previous one.
    # NOTE(review): this loop busy-waits between scans (no sleep).
    while True:
        acttime = datetime.datetime.now()
        if acttime - cyclebegin > interval:
            dtime = acttime - cyclebegin
            print ("delta-seconds: ",dtime.seconds)
            cyclebegin = datetime.datetime.now()
            ipscan()
After ipscan is finished with q.join(), the tasks are finished but the threads are not killed, as I understand it. With each repeated call of ipscan the limit of threads is exceeded. What do I have to modify to prevent this?

Understanding named Pipes (FIFO) in Python

I am running Python 2.7 on a Unix environment (tested on Ubuntu and OSX)
I have the following programs:
With os.open():
[SCRIPT 1]
# Python 2 script (print statements, str writes to os.write).
import os

pipe_1_name = "pipe_1"
pipe_2_name = "pipe_2"
pipe_3_name = "pipe_3"

def set_connection():
    # Server side: (re)create the three FIFOs, removing stale ones first.
    pipe_names = [pipe_1_name, pipe_2_name, pipe_3_name]
    for pipe_name in pipe_names:
        if os.path.exists(pipe_name):
            os.remove(pipe_name)
            os.mkfifo(pipe_name)
        else:
            os.mkfifo(pipe_name)
    # os.open() yields an unbuffered file descriptor, so the write below
    # reaches the client immediately.  Opening O_WRONLY blocks until the
    # client opens the FIFO for reading.
    pipe_1 = os.open(pipe_1_name, os.O_WRONLY)
    os.write(pipe_1, "server_message_0\n")
    # Read one reply line from each client pipe; [:-1] drops the newline.
    pipe_2 = open(pipe_2_name, 'r')
    received = pipe_2.readline()[:-1]
    print "[0] Now processing if received is correct: " + received
    pipe_3 = open(pipe_3_name, 'r')
    received = pipe_3.readline()[:-1]
    print "[1] Now processing if received is correct: " + received
    print "Connection established."
    return pipe_1,pipe_2,pipe_3

def main():
    # Perform the handshake, then show the pipe objects/descriptors.
    pipe_1, pipe_2, pipe_3 = set_connection()
    print str(pipe_1)
    print str(pipe_2)
    print str(pipe_3)

if __name__ == "__main__":
    main()
[SCRIPT 2]
# Python 2 script — the client counterpart to Script 1.
import os

pipe_1_name = "pipe_1"
pipe_2_name = "pipe_2"
pipe_3_name = "pipe_3"

def get_connection():
    # The server is responsible for creating the FIFOs; fail loudly if any
    # of them is missing.
    pipe_names = [pipe_1_name, pipe_2_name, pipe_3_name]
    for pipe_name in pipe_names:
        if not os.path.exists(pipe_name):
            raise Exception("Pipe "+pipe_name+" does not exist!")
    # Read the server's greeting first; [:-1] drops the trailing newline.
    pipe_1 = open(pipe_1_name, 'r')
    received = pipe_1.readline()[:-1]
    print "[0] Now processing if received is correct: " + received
    # Reply on the two write pipes.  os.open() gives unbuffered descriptors,
    # so each os.write() is delivered to the server immediately.
    pipe_2 = os.open(pipe_2_name, os.O_WRONLY)
    os.write(pipe_2, "client_message_0\n")
    pipe_3 = os.open(pipe_3_name, os.O_WRONLY)
    os.write(pipe_3, "client_message_1\n")
    print "Connection established."
    return pipe_1,pipe_2,pipe_3

def main():
    # Perform the handshake, then show the pipe objects/descriptors.
    pipe_1, pipe_2, pipe_3 = get_connection()
    print str(pipe_1)
    print str(pipe_2)
    print str(pipe_3)

if __name__ == "__main__":
    main()
The logic is simple:
[Pipe 1]
1. Script 1 opens a write pipe to Script 2.
2. Script 2 reads from the pipe.
[Pipe 2]
3. Script 2 open a write pipe to Script 1.
4. Script 1 reads from the pipe.
[Pipe 3]
5. Script 2 open a write pipe to Script 1.
6. Script 1 reads from the pipe.
Works exactly as expected.
Here is the problem. I don't want to use os.open(). I would like to receive a file object and use it to interface with the pipe. Clearly, it is not impossible, since I can read from a pipe with a file object. However, the following script does not work.
Without os.open()
[Script 1]
import os
pipe_1_name = "pipe_1"
pipe_2_name = "pipe_2"
pipe_3_name = "pipe_3"
def set_connection():
pipe_names = [pipe_1_name, pipe_2_name, pipe_3_name]
for pipe_name in pipe_names:
if os.path.exists(pipe_name):
os.remove(pipe_name)
os.mkfifo(pipe_name)
else:
os.mkfifo(pipe_name)
pipe_1 = open(pipe_1_name, 'w')
pipe_1.write("server_message_0\n")
pipe_2 = open(pipe_2_name, 'r')
received = pipe_2.readline()[:-1]
print "[0] Now processing if received is correct: " + received
pipe_3 = open(pipe_3_name, 'r')
received = pipe_3.readline()[:-1]
print "[1] Now processing if received is correct: " + received
print "Connection established."
return pipe_1,pipe_2,pipe_3
def main():
pipe_1, pipe_2, pipe_3 = set_connection()
print str(pipe_1)
print str(pipe_2)
print str(pipe_3)
if __name__ == "__main__":
main()
[Script 2]
import os
pipe_1_name = "pipe_1"
pipe_2_name = "pipe_2"
pipe_3_name = "pipe_3"
def get_connection():
pipe_names = [pipe_1_name, pipe_2_name, pipe_3_name]
for pipe_name in pipe_names:
if not os.path.exists(pipe_name):
raise Exception("Pipe "+pipe_name+" does not exist!")
pipe_1 = open(pipe_1_name, 'r')
received = pipe_1.readline()[:-1]
print "[0] Now processing if received is correct: " + received
pipe_2 = open(pipe_2_name, 'w')
pipe_2.write("client_message_0\n")
pipe_3 = open(pipe_3_name, 'w')
pipe_3.write("client_message_1\n")
print "Connection established."
return pipe_1,pipe_2,pipe_3
def main():
pipe_1, pipe_2, pipe_3 = get_connection()
print str(pipe_1)
print str(pipe_2)
print str(pipe_3)
if __name__ == "__main__":
main()
They look the same, don't they? The only difference is how I open the fifo. Instead of os.open(pipe_name,os.O_WRONLY) I use pipe = open(pipe_name, 'w').
What happens in the second set of scripts, the ones that don't use os.open(), Script 1 blocks at pipe_2 = open(pipe_2_name, 'r') while Script 2 blocks at pipe_2 = open(pipe_2_name, 'w').
Why is this happening?
Sorry for the wall of text. I am really confused about this issue.
What happens in the second set of scripts, the ones that don't use
os.open(), Script 1 blocks at pipe_2 = open(pipe_2_name, 'r') while Script 2 blocks at pipe_2 = open(pipe_2_name, 'w').
No, Script 2 blocks at received = pipe_1.readline()[:-1].
Why is this happening?
It's because Script 1's open(pipe_1_name, 'w') causes the written message to be buffered in fixed-size chunks (typically 4096 or 8192 bytes), so the pipe_1.write("server_message_0\n") does not yet write anything to the pipe, but only to the buffer, and Script 2 doesn't get anything to read. See open() and also How often does python flush to a file?
To cure this, since your messages are complete lines, it suffices to use line buffering, e. g.
pipe_1 = open(pipe_1_name, 'w', 1)
(as well for the other write pipes).

Tab Error python

# Python 2 script ("from Queue import Queue", print statements).
# NOTE(review): requests is a third-party dependency; livestreamer (used
# below) is an external command-line tool.
import requests
import subprocess
import json
import sys
import threading
import time
from Queue import Queue
# Command-line parameters: target viewer count and number of builder threads.
numberOfViewers = int(sys.argv[1])
builderThreads = int(sys.argv[2])
startTime = time.time()
numberOfSockets = 0  # how many viewer tokens are currently held
concurrent = 25      # number of view() worker threads per batch
urls = []            # stream URLs waiting to be used
urlsUsed = []        # stream URLs already hit once
def getURL(): # Get tokens
    # livestreamer prints the stream descriptor as JSON on stdout; the
    # "worst" quality variant's URL is extracted from it.
    output = subprocess.Popen(["livestreamer", "http://www.twitch.tv/gbowtheking", "-j"], stdout=subprocess.PIPE).communicate()[0] #replace twitch.tv/??? with your channel
    return json.loads(output)['streams']['worst']['url'] # Parse json and return the URL parameter
def build(): # Builds a set of tokens, aka viewers
    global numberOfSockets
    global numberOfViewers
    # Keep fetching stream URLs until the target viewer count is reached.
    # NOTE(review): once the target is reached this loop spins without
    # sleeping — it busy-waits at full CPU.
    while True:
        if numberOfSockets < numberOfViewers:
            numberOfSockets += 1
            print "Building viewers " + str(numberOfSockets) + "/" + str(numberOfViewers)
            urls.append(getURL())
def view(): # Opens connections to send views
    global numberOfSockets
    while True:
        # `q` is the module-level Queue created in the __main__ block below.
        url=q.get()
        requests.head(url)
        # A URL seen a second time is retired; its token is released.
        if (url in urlsUsed):
            urls.remove(url)
            urlsUsed.remove(url)
            numberOfSockets -= 1
        else:
            urlsUsed.append(url)
        q.task_done()
if __name__ == '__main__':
    # Start the builder threads that collect stream URLs.
    for i in range(0, builderThreads):
        threading.Thread(target = build).start()
    while True:
        while (numberOfViewers != numberOfSockets): # Wait until sockets are built
            time.sleep(1)
        # Bounded queue feeding the view() workers started below.
        q=Queue(concurrent*2)
        for i in range(concurrent):
            # NOTE(review): bare except hides any real thread-creation error.
            try:
                t=threading.Thread(target=view)
                t.daemon=True
                t.start()
            except:
                print 'thread error'
        try:
            # Enqueue every collected URL and wait for the workers to drain it.
            for url in urls:
                print url
                q.put(url.strip())
            q.join()
        except KeyboardInterrupt:
            sys.exit(1)
I get a taberror on this line
return json.loads(output)['streams']['worst']['url']
full error is "TabError: inconsistent use of tabs and spaces in indentation"
It seems my spacing is accurate but apparently not.
Any ideas on how to fix it? Could I be using an incorrect version of python?
No tab is required at the return statement. (line 19)
Change This
def getURL(): # Get tokens
    output = subprocess.Popen(["livestreamer", "http://www.twitch.tv/gbowtheking", "-j"], stdout=subprocess.PIPE).communicate()[0] #replace twitch.tv/??? with your channel
    # (per the answer: in this "before" version the `return` line is
    # indented with a tab, which triggers the TabError)
    return json.loads(output)['streams']['worst']['url'] # Parse json and return the URL parameter
To
def getURL(): # Get tokens
    output = subprocess.Popen(["livestreamer", "http://www.twitch.tv/gbowtheking", "-j"], stdout=subprocess.PIPE).communicate()[0] #replace twitch.tv/??? with your channel
    # (per the answer: in this "after" version the `return` line is indented
    # with spaces only — the difference is whitespace and invisible here)
    return json.loads(output)['streams']['worst']['url'] # Parse json and return the URL parameter
You can also check your future python code for indentation on http://pythoniter.appspot.com

Categories

Resources