HTTP requests on localhost via Python urllib and urllib2 are very slow

I have written a simple class using urllib and urllib2 to send HTTP requests and read the response. However, any request to localhost via its LAN IP address is very slow.
The IP address of the local host is 192.168.158.27.
import urllib2, urllib, re, datetime, time

class HTTPRequest():
    def __init__(self, **kargs):
        self._response = None
        self._buffer = None
        self._conn = urllib2.build_opener(urllib2.HTTPCookieProcessor())
        urllib2.install_opener(self._conn)

    def _encode_url(self, **kargs):
        try:
            params = urllib.urlencode(kargs)
        except:
            raise HTTPError("Failed to encode URL parameters..")
        return str(params)

    def _request(self, url=None, params=None):
        try:
            self._buffer = self._conn.open(url, params)
            self._response = self._buffer.read()
        except ValueError:
            raise HTTPError("Invalid URL %s" % url)
        except:
            raise HTTPError("Failed to send HTTP(s) Request")
        return str(self._response)

class HTTPError(Exception):
    pass

PARAM_PASSWORD = 'password'
PARAM_USER = 'userName'
PARAM_ACTION = 'a'
PARAM_RID = 'rid'
PARAM_XO = 'xo'
PARAM_START_TIME = 't1'
PARAM_END_TIME = 't2'
PARAM_PATH = 'path'

BOOLEAN_TRUE = 'true'
BOOLEAN_FALSE = 'false'

ACTION_SIGNIN = 'signIn'
ACTION_SEARCH = 'search'
ACTION_GET_NEXT_RESULTS = 'getNextResults'

STATUS_SUCCEEDED = 'succeeded'
DEFAULT_WAIT = 5

host = "192.168.158.27"
user = "admin"
password = "admin"
protocol = "https"
port = 8443
query = "vm[=name rx (?i) *]&[#cpuUsage rx b .+][#cpuUsagemhz rx b .+]"
start_time = "10/05/2013 16:16:00"
end_time = "10/05/2013 17:16:00"

base_url = "%s://%s:%d" % (protocol, host, port)
login_url = "%s/user" % base_url

http = HTTPRequest()
attributes = {PARAM_PASSWORD: password,
              PARAM_USER: user,
              PARAM_ACTION: ACTION_SIGNIN,
              PARAM_RID: 1000,
              PARAM_XO: BOOLEAN_TRUE}
params = http._encode_url(**attributes)
if not http._request(login_url, params):
    print "Login Failed.."
else:
    print "Login Successful.. \n"

rid = 1000
search_url = "%s/Search" % base_url
status = STATUS_SUCCEEDED
hasMoreData = BOOLEAN_TRUE
completed = BOOLEAN_FALSE
total = 0
processed = 1
matched = "0"  # initialised here so the checks below cannot raise NameError
responseContent = ""
xml_dict = {}
_response = ""

attributes = {PARAM_START_TIME: start_time,
              PARAM_END_TIME: end_time,
              PARAM_ACTION: ACTION_SEARCH,
              PARAM_RID: rid,
              PARAM_PATH: query}

print "URL PARAMETERS :"
print "\tBase url = %s" % base_url
for param in attributes:
    print "\t%s = %s" % (param, attributes[param])

# Query execution start time
start = datetime.datetime.now()
while True:
    params = http._encode_url(**attributes)
    if hasMoreData == BOOLEAN_TRUE:
        # Delay 10 ms (note: the original 10/1000 is integer division in
        # Python 2 and evaluates to 0, i.e. no delay at all)
        time.sleep(0.01)
        # Send HTTP request
        response = http._request(search_url, params)
        pattern = re.match(".*?hasMoreData=\"(.*?)\".*?", response)
        if pattern:
            hasMoreData = pattern.group(1)
        pattern = re.match(".*?status=\"(.*?)\".*?", response)
        if pattern:
            status = pattern.group(1)
        pattern = re.match(".*?completed=\"(.*?)\".*?", response)
        if pattern:
            completed = pattern.group(1)
        pattern = re.match(".*?processed=\"(.*?)\".*?", response)
        if pattern:
            processed = pattern.group(1)
        pattern = re.match(".*?total=\"(.*?)\".*?", response)
        if pattern:
            total = pattern.group(1)
        pattern = re.match(".*?matched=\"(.*?)\".*?", response)
        if pattern:
            matched = pattern.group(1)
        attributes = {PARAM_ACTION: ACTION_GET_NEXT_RESULTS,
                      PARAM_RID: rid}
        if matched != "0":
            response = re.sub(r'\n', "", response)
            matchObj = re.search(r'(<Resource.*</Resource>)', response)
            resp_match = ""
            if matchObj:
                resp_match = matchObj.group(1)
            responseContent = str(responseContent) + str(resp_match)
    else:
        # Query execution completed; record the end time
        end = datetime.datetime.now()
        print "RESULTS : "
        print "\tStatus = %s" % status
        print "\tHas More Data = %s" % hasMoreData
        print "\tCompleted = %s" % completed
        print "\tProcessed = %s" % processed
        print "\tTotal = %s" % total
        print "\tMatched = %s" % matched
        print "\nQuery Execution Started : %s" % start
        print "Query Execution Ended : %s\n" % end
        if total != processed:
            err = "The number of records processed did not match"
            err += " the number of records completed."
            print err
        if not status == STATUS_SUCCEEDED:
            err = "The response status is not 'succeeded'"
            print err
        if completed == BOOLEAN_FALSE:
            err = "The response is completed. "
            err += "However, the flag is set to 'False'"
            print err
        break

Try using 127.0.0.1 instead of your local network IP.
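A quick way to confirm this is to time the same request against both addresses. A minimal sketch (Python 2 to match the code above; the port and path are taken from the question, and the bare try/except is only there to keep the timing loop going):

import time
import urllib2

for host in ("192.168.158.27", "127.0.0.1"):
    url = "https://%s:8443/user" % host
    start = time.time()
    try:
        urllib2.urlopen(url, timeout=10).read()
    except Exception as e:
        print "%s failed: %s" % (host, e)
    print "%s took %.3f s" % (host, time.time() - start)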

Related

python script to run splunk query and get output as text output

The code below runs but does not give the correct value; I am expecting a single value like 492. I also tried the Splunk library but was unable to use it. Any help is appreciated.
import urllib.parse
import httplib2  # import library
import json
import pprint
import time
import re
from xml.dom import minidom

searchquery = 'search index="movable_in" sourcetype="movable:in:assets" | stats avg(exposure_score)'
myhttp = httplib2.Http()
baseurl = 'https://xxxx.splunkxxx.com:8089'
usernamesp = 'xxxx'
passwordsp = 'xxxx'

def get_splunk_result(searchquery):
    # Step 1: Get a session key
    servercontent = myhttp.request(f'{baseurl}/services/auth/login', 'POST', headers={},
                                   body=urllib.parse.urlencode({'username': usernamesp, 'password': passwordsp}))[1]
    sessionkey = minidom.parseString(servercontent).getElementsByTagName('sessionKey')[0].childNodes[0].nodeValue
    # print("====>sessionkey: %s <====" % sessionkey)
    sid = ''
    # ------------------
    if not searchquery.startswith('search'):
        searchquery = f'search {searchquery}'
    # Step 2: Get a sid with the search query
    i = 0
    while True:
        time.sleep(1)
        try:
            searchjob = myhttp.request(f'{baseurl}/services/search/jobs', 'POST',
                                       headers={'Authorization': f'Splunk {sessionkey}'},
                                       body=urllib.parse.urlencode({'search': searchquery}))[1]
            sid = minidom.parseString(searchjob).getElementsByTagName('sid')[0].childNodes[0].nodeValue
            break
        except:
            i = i + 1
            # print(i)
            if i > 30:
                break
    # print("====>SID: %s <====" % sid)
    # Step 3: Get the search status
    myhttp.add_credentials(usernamesp, passwordsp)
    servicessearchstatusstr = '/services/search/jobs/%s/' % sid
    isnotdone = True
    while isnotdone:
        searchstatus = myhttp.request(f'{baseurl}{servicessearchstatusstr}', 'GET')[1]
        isdonestatus = re.compile('isDone">(0|1)')
        strstatus = str(searchstatus)
        isdonestatus = isdonestatus.search(strstatus).groups()[0]
        if isdonestatus == '1':
            isnotdone = False
    # Step 4: Get the search result
    services_search_results_str = '/services/search/jobs/%s/results?output_mode=json_rows&count=0' % sid
    searchresults = myhttp.request(f'{baseurl}{services_search_results_str}', 'GET')[1]
    searchresults = json.loads(searchresults)
    # searchresults = splunk_result(searchresults)
    return searchresults

output = get_splunk_result(searchquery)
print(output)
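If the search itself is right, the missing piece is extracting the single statistic from the response. With output_mode=json_rows the Splunk REST API (as far as I know) returns a dict with "fields" and "rows" keys, so the value should sit in the first cell of the first row; a hedged sketch:

output = get_splunk_result(searchquery)
rows = output.get('rows', [])
if rows:
    avg_exposure = rows[0][0]  # the single stats cell, e.g. '492'
    print(avg_exposure)
else:
    print('No rows returned:', output)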

trying to send data from one script in python to another script

I am trying to write a script so that when an IP address can't be reached, a Telegram message is sent letting me know which computer is offline.
I have the Telegram side working, but I have not been able to pass the data from the main script where the IP address is tested. At the moment I have test data in there, but I would like it to send the error with the computer name.
main.py
import socket
import ssl
from datetime import datetime
import pickle
import subprocess
import platform

class Server:
    def __init__(self, name, port, connection, priority):
        self.name = name
        self.port = port
        self.connection = connection.lower()
        self.priority = priority.lower()
        self.history = []
        self.alert = False

    def check_connection(self):
        msg = ""
        success = False
        now = datetime.now().strftime("%d-%m-%Y %H:%M")
        try:
            if self.connection == "plain":
                socket.create_connection((self.name, self.port), timeout=10)
                msg = f"{self.name} is up. On port {self.port} with {self.connection}"
                success = True
                self.alert = False
            elif self.connection == "ssl":
                ssl.wrap_socket(socket.create_connection((self.name, self.port), timeout=10))
                msg = f"{self.name} is up. On port {self.port} with {self.connection}"
                success = True
                self.alert = False
            else:
                if self.ping():
                    msg = f"{self.name} is up. On port {self.port} with {self.connection}"
                    success = True
                    self.alert = False
        except socket.timeout:
            msg = f"server: {self.name} timeout. On port {self.port}"
        except (ConnectionRefusedError, ConnectionResetError) as e:
            msg = f"server: {self.name} {e}"
        except Exception as e:
            msg = f"No Clue??: {e}"
        if success == False and self.alert == False:
            # Send alert
            self.alert = True
            import tg_start
            tg_start.send_message("Happy days")
        self.create_history(msg, success, now)

    def create_history(self, msg, success, now):
        history_max = 100
        self.history.append((msg, success, now))
        while len(self.history) > history_max:
            self.history.pop(0)

    def ping(self):
        try:
            output = subprocess.check_output("ping -{} 1 {}".format('n' if platform.system().lower(
            ) == "windows" else 'c', self.name), shell=True, universal_newlines=True)
            if 'unreachable' in output:
                return False
            else:
                return True
        except Exception:
            return False

if __name__ == "__main__":
    try:
        servers = pickle.load(open("servers.pickle", "rb"))
    except:
        servers = [
            # Server("ifmc-repserver", 80, "plain", "high"),
            # Server("ifmc-repserver", 80, "plain", "high"),
            # Server("ifmc-repserver", 465, "ssl", "high"),
            # Server("ifmc-repserver", 80, "ping", "high"),
            Server("ifmc-repserver", 80, "ping", "high")
        ]
    for server in servers:
        server.check_connection()
        print(len(server.history))
        print(server.history[-1])
    pickle.dump(servers, open("servers.pickle", "wb"))
and tg_start.py
import requests

message = "global"
alert = ""

def send_message(text):
    global alert
    global message
    print("this is text message" + " " + text)
    # text = "Superman"
    alert = text
    print("Sending ALERT ...")
    token = "token"
    chat_id = "chat_id"
    print("test message" + " " + alert)
    url_req = "https://api.telegram.org/bot" + token + "/sendMessage" + "?chat_id=" + chat_id + "&text=" + alert
    print(url_req)
    # results = requests.get(url_req)
    results = requests.post(url_req)  # this request is a POST, not a GET
    print(results.json())
    # text = "my name" + text

send_message(alert)
Your code worked with a slight change: sendMessage requires a POST request, not a GET request.
def send_message(text):
    global alert
    global message
    print("this is text message" + " " + text)
    alert = text
    print("Sending ALERT ...")
    token = "token"
    chat_id = "chat_id"
    print("test message" + " " + alert)
    url_req = f"https://api.telegram.org/bot{token}/sendMessage?chat_id={chat_id}&text={alert}"
    print(url_req)
    results = requests.post(url_req)  # this request is a POST, not a GET
    print(results.json())
    # text = "my name" + text

return all data from a loop

I have code that logs into devices. I can print the information from the devices inside the loop just fine, but I can only return (not print) the data from the last device in the list. How can I return the data from all devices in the loop?
from flask import Flask, jsonify, request
import netmiko
from netmiko.ssh_autodetect import SSHDetect
from netmiko.ssh_exception import NetMikoTimeoutException
import time

app = Flask(__name__)

@app.route('/firewall', methods=['GET', 'POST', 'DELETE'])
def firewall():
    # Authentication
    headers = request.headers
    auth = headers.get("xxxxx")
    if auth == 'xxxx':
        data = request.get_json(force=True)
        fw_a = data["DeviceAddressList"]
        src_a = data['SourceAddressList']
        src_p = data['SourcePortList']
        dst_a = data['DestinationAddressList']
        dst_p = data['DestinationPortList']
        policy = data["PolicyAllow"]
        p_col = data['Protocol']
        p_show = data['show']
        p_push = data['push']
        config = data['config']
        # Juniper: normalize the data for the command line interface
        juniper_command = '"({})"'.format('|'.join(src_a + src_p + dst_a + dst_p))
        username = "xxxx"
        password = "Pxxxx"
        try:
            ip_list = fw_a
            for ip in ip_list:
                # print(ip)
                device = {"device_type": "autodetect", "username": username, "host": ip, "password": password}
                guesser = SSHDetect(**device)
                best_match = guesser.autodetect()
                print(best_match)
                if "None" in str(best_match):
                    continue
                # else:
                if "true" in str(p_show) and "juniper_junos" in str(best_match):
                    device["device_type"] = best_match
                    connection = netmiko.ConnectHandler(**device)
                    connection.find_prompt(delay_factor=2)
                    time.sleep(1)
                    connection.enable()
                    resp = connection.send_command(
                        'show configuration | display xml | match ' + str(juniper_command), delay_factor=2)
                    print(ip + '\n' + best_match + resp)
                if "true" in str(p_push) and "juniper_junos" in str(best_match):
                    device["device_type"] = best_match
                    connection = netmiko.ConnectHandler(**device)
                    connection.find_prompt(delay_factor=2)
                    time.sleep(1)
                    connection.enable()
                    push_resp = connection.send_command(config, delay_factor=2)
                    connection.disconnect()
                    print(push_resp)
            return ip + '\n' + best_match + resp
        except NetMikoTimeoutException:
            return "This Network Device is not reachable"
    else:
        return jsonify({"message": "ERROR: Unauthorized"}), 401
Code example: loop over the IPs, get the value you want to return for each IP, and push it into a dict. Return the dict to the caller of the function firewall:
def firewall():
    result = dict()
    for ip in ip_list:
        push_resp = dummy_get_push_resp()
        result[ip] = push_resp
    return result
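Applied to the question's code, the same pattern looks roughly like this (a hedged sketch reusing ip_list, username, password, juniper_command and the netmiko imports from above; error handling trimmed for brevity):

def firewall():
    results = {}
    for ip in ip_list:
        device = {"device_type": "autodetect", "username": username,
                  "host": ip, "password": password}
        best_match = SSHDetect(**device).autodetect()
        if best_match is None:
            continue
        device["device_type"] = best_match
        connection = netmiko.ConnectHandler(**device)
        resp = connection.send_command(
            'show configuration | display xml | match ' + str(juniper_command))
        connection.disconnect()
        results[ip] = best_match + '\n' + resp
    return jsonify(results)  # return once, after every device was visited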

running parallel process on page submit in django

I want to create a simple website for PNR checks. The code works, but it performs only one task at a time: either rendering the result to the other page or sending mail. When I use a thread, it sends mail in the background, but the page keeps loading until then.
Can anyone give me a suggestion? I also want to run this on Google App Engine, so I haven't tried Celery.
from django.http import HttpResponse
from bs4 import BeautifulSoup
import re
import requests
from django.shortcuts import render
from functools import partial, wraps
from django.core.mail import send_mail
import time
import thread

def checkpnr(request):
    return render(request, 'checkpnr.html')

# signature fixed to match the start_new_thread call below
def check(string, pnr, email, sleeptime, lock, *args):
    while 1:
        # entering critical section
        lock.acquire()
        # time1=request.get_all("notif")
        url_pnr = "pnr url"
        r = requests.get(url_pnr)
        data = r.text
        soup = BeautifulSoup(data)
        train = str(soup.find("ul", attrs={"class": "train_info"}))
        train_number = soup.find("li", attrs={"class": "first"}).text
        source = str(soup.find("travellers"))
        route = str(soup.findAll("li")[1]).replace(
            '<li>', '').replace('</li>', '')
        # head, sep, tail = route.partition(' -')
        travel_date = str(soup.findAll("li")[2].text)
        date, sep, total = travel_date.partition('|')
        rows = soup.findAll("td", attrs={"class": "pax"})
        rowlength = len(rows)
        chart_status = str(soup.findAll("tr")[rowlength + 1].findAll("td")[0]).replace(
            '<td colspan="3"><strong>', '').replace('</strong>', '').replace('</td>', '')
        passengers = []
        status = []
        coach = []
        tot = []
        w = ''
        i = 1
        j = 1
        while i <= rowlength:
            j = str(soup.findAll("tr")[i].findAll(
                "td")[0].text).replace(':', '')
            passengers.append(j)
            s = str(soup.findAll("tr")[i].findAll("td")[1].text)
            w = w + ',' + s
            status.append(s)
            c = str(soup.findAll("tr")[i].findAll("td")[2].text)
            coach.append(c)
            tot.append(i)
            i += 1
        time.sleep(sleeptime)
        emailMsg = status
        subject = pnr + '-' + w
        send_mail(
            subject, 'emailMsg', 'email-from',
            [email], fail_silently=False)
        lock.release()
        if (status[rowlength - 1] == "CONFIRMED"):
            time.sleep(sleeptime)
        else:
            time.sleep(1000000000000000000000000)

def fetch(request):
    pnr = request.POST['pnr']
    if len(pnr) != 10:
        msg = "PNR must be of 10 digits ..."
        return render(request, 'checkpnr.html', {'msg': msg})
    email = request.POST['email']
    e = request.POST['ntime']
    if (e != ''):
        n_time = int(e)
        n = request.POST['notify']
        if (n != ''):
            notify = int(n)
            sleeptim = notify * n_time
    sleeptime = 10
    # time1=request.get_all("notif")
    url_pnr = "pnr url"
    try:
        r = requests.get(url_pnr)
        data = r.text
        soup = BeautifulSoup(data)
        train = str(soup.find("ul", attrs={"class": "train_info"}))
        train_number = soup.find("li", attrs={"class": "first"}).text
        source = str(soup.find("travellers"))
        route = str(soup.findAll("li")[1]).replace(
            '<li>', '').replace('</li>', '')
        # head, sep, tail = route.partition(' -')
        travel_date = str(soup.findAll("li")[2].text)
        date, sep, total = travel_date.partition('|')
        rows = soup.findAll("td", attrs={"class": "pax"})
        rowlength = len(rows)
        chart_status = str(soup.findAll("tr")[rowlength + 1].findAll("td")[0]).replace(
            '<td colspan="3"><strong>', '').replace('</strong>', '').replace('</td>', '')
        passengers = []
        status = []
        coach = []
        tot = []
        w = ''
        i = 1
        j = 1
        while i <= rowlength:
            j = str(soup.findAll("tr")[i].findAll(
                "td")[0].text).replace(':', '')
            passengers.append(j)
            s = str(soup.findAll("tr")[i].findAll("td")[1].text)
            w = w + ',' + s
            status.append(s)
            c = str(soup.findAll("tr")[i].findAll("td")[2].text)
            coach.append(c)
            tot.append(i)
            i += 1
        msg = "Mail not Sent"
        msg1 = ''
        if (email != ''):
            emailMsg = status
            subject = pnr + '-' + w
            send_mail(
                subject, 'emailMsg', 'ashutosh8nitjsr@gmail.com',
                [email], fail_silently=False)
            msg = "mail sent.."
        if __name__ == "__main__":
            lock = thread.allocate_lock()
            thread.start_new_thread(
                check, ("Thread No:1", pnr, email, sleeptime, lock))
            msg1 = "thread created"
            time.sleep(sleeptime)
            while 1:
                pass
        detail2 = {
            'train_number': train_number, 'route': route, 'date': date, 'chart_status': chart_status, 'tot': tot,
            'passengers': passengers, 'status': status, 'coach': coach, 'msg': msg}
        return render(request, 'status.html', detail2)
    except:
        msg = "there was error. please try again..."
        return render(request, 'checkpnr.html', {'msg': msg})
You can try using Task Queues for this purpose on App Engine: see the Task Queue API.
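A hedged sketch of that idea on the classic App Engine Python runtime: the view enqueues the slow scrape-and-mail work and returns immediately. The /tasks/check-pnr URL and its handler are assumptions; a worker mapped there would do what check() does now.

from django.shortcuts import render
from google.appengine.api import taskqueue

def fetch(request):
    pnr = request.POST['pnr']
    email = request.POST['email']
    # Enqueue the slow work; the handler behind /tasks/check-pnr (assumed)
    # performs the scraping and mailing asynchronously.
    taskqueue.add(url='/tasks/check-pnr', params={'pnr': pnr, 'email': email})
    return render(request, 'checkpnr.html', {'msg': 'Check scheduled..'})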

Pass array into class in Python

I'm a PHP guy and I don't understand Python very well, so excuse me if my question is stupid.
I have two PHP scripts (client.php and worker.php) using Gearman that I need to convert to Python. I have been able to do it partially, but I'm stuck. First, here are my two scripts:
client.py
#!/usr/bin/env python
import gearman
import json
from gearman.constants import JOB_UNKNOWN

RBLS = [
    'b.barracudacentral.org',
    'bl.emailbasura.org'
]
IP = '1.2.3.4'

def check_request_status(job_request):
    if job_request.complete:
        print "Job %s finished! Result: %s - %s" % (job_request.job.unique, job_request.state, job_request.result)
    elif job_request.timed_out:
        print "Job %s timed out!" % job_request.unique
    elif job_request.state == JOB_UNKNOWN:
        print "Job %s connection failed!" % job_request.unique

data = {"ip": IP, "rbls": RBLS}
serialized_data = json.dumps(data)

gm_client = gearman.GearmanClient(['localhost:4730'])
completed_job_request = gm_client.submit_job("runcheck", serialized_data)
check_request_status(completed_job_request)
worker.py
#!/usr/bin/env python
import gearman
import sys
import socket
import re
import json
from dns.resolver import Resolver, NXDOMAIN, NoNameservers, Timeout, NoAnswer
from threading import Thread

# These hardcoded RBLS need to be passed in by the Gearman client script
# RBLS = ['xbl.spamhaus.org', 'zen.spamhaus.org']

class Lookup(Thread):
    def __init__(self, host, dnslist, listed, resolver):
        Thread.__init__(self)
        self.host = host
        self.listed = listed
        self.dnslist = dnslist
        self.resolver = resolver

    def run(self):
        try:
            host_record = self.resolver.query(self.host, "A")
            if len(host_record) > 0:
                self.listed[self.dnslist]['LISTED'] = True
                self.listed[self.dnslist]['HOST'] = host_record[0].address
                text_record = self.resolver.query(self.host, "TXT")
                if len(text_record) > 0:
                    self.listed[self.dnslist]['TEXT'] = "\n".join(text_record[0].strings)
            self.listed[self.dnslist]['ERROR'] = False
        except NXDOMAIN:
            self.listed[self.dnslist]['ERROR'] = True
            self.listed[self.dnslist]['ERRORTYPE'] = NXDOMAIN
        except NoNameservers:
            self.listed[self.dnslist]['ERROR'] = True
            self.listed[self.dnslist]['ERRORTYPE'] = NoNameservers
        except Timeout:
            self.listed[self.dnslist]['ERROR'] = True
            self.listed[self.dnslist]['ERRORTYPE'] = Timeout
        except NameError:
            self.listed[self.dnslist]['ERROR'] = True
            self.listed[self.dnslist]['ERRORTYPE'] = NameError
        except NoAnswer:
            self.listed[self.dnslist]['ERROR'] = True
            self.listed[self.dnslist]['ERRORTYPE'] = NoAnswer

class RBLSearch(object):
    def __init__(self, lookup_host):
        self.lookup_host = lookup_host
        self._listed = None
        self.resolver = Resolver()
        self.resolver.timeout = 0.2
        self.resolver.lifetime = 1.0

    def search(self):
        if self._listed is not None:
            pass
        else:
            host = self.lookup_host.split(".")
            host = ".".join(list(reversed(host)))
            self._listed = {'SEARCH_HOST': self.lookup_host}
            threads = []
            for LIST in RBLS:
                self._listed[LIST] = {'LISTED': False}
                query = Lookup("%s.%s" % (host, LIST), LIST, self._listed, self.resolver)
                threads.append(query)
                query.start()
            for thread in threads:
                thread.join()
        return self._listed

    listed = property(search)

    def print_results(self):
        listed = self.listed
        print("")
        print("--- DNSBL Report for %s ---" % listed['SEARCH_HOST'])
        for key in listed:
            if key == 'SEARCH_HOST':
                continue
            if not listed[key].get('ERROR'):
                if listed[key]['LISTED']:
                    print("Results for %s: %s" % (key, listed[key]['LISTED']))
                    print("  + Host information: %s" %
                          (listed[key]['HOST']))
                    if 'TEXT' in listed[key].keys():
                        print("  + Additional information: %s" %
                              (listed[key]['TEXT']))
            else:
                # print("*** Error contacting %s ***" % key)
                pass

def task_listener_runcheck(gearman_worker, gearman_job):
    jdata = json.loads(gearman_job.data)
    host = jdata['ip']
    ip = host
    RBLS = jdata['rbls']  # binds a local name; the class above cannot see it
    print("Looking up: %s (please wait)" % host)
    pat = re.compile("\d{1,3}.\d{1,3}.\d{1,3}.\d{1,3}")
    is_ip_address = pat.match(host)
    if not is_ip_address:
        try:
            ip = socket.gethostbyname(host)
            print("Hostname %s resolved to ip %s" % (host, ip))
        except socket.error:
            print("Hostname %s can't be resolved" % host)
            ip = ""
    if ip:
        searcher = RBLSearch(ip)
        searcher.print_results()
    return "RunCheck was successful"

gm_worker = gearman.GearmanWorker(['localhost:4730'])
gm_worker.set_client_id('python-worker')
gm_worker.register_task('runcheck', task_listener_runcheck)
gm_worker.work()
Here is how these two scripts work: client.py passes the IP address and the array of RBLs to worker.py. The worker then takes the IP address and checks it against all the RBLs.
The problem is that I don't know how to use RBLS inside the RBLSearch class. It works if I hardcode RBLS at the beginning of the script (see worker.py, line 12), but it does not work if I define RBLS in task_listener_runcheck.
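The underlying issue is Python's name scoping: assigning to RBLS inside task_listener_runcheck binds a new local name, so the module-level RBLS that RBLSearch.search reads never changes. A minimal illustration, independent of Gearman:

RBLS = ['hardcoded.example']      # module-level name, visible to the class

def task_listener(data):
    RBLS = data['rbls']           # new *local* name; the module-level RBLS
                                  # stays exactly as it was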
I have been able to solve it by passing the list into the class. Here is the edited version, in case anyone wants it:
worker.py
#!/usr/bin/env python
import gearman
import sys
import socket
import re
import json
from dns.resolver import Resolver, NXDOMAIN, NoNameservers, Timeout, NoAnswer
from threading import Thread

class Lookup(Thread):
    def __init__(self, host, dnslist, listed, resolver):
        Thread.__init__(self)
        self.host = host
        self.listed = listed
        self.dnslist = dnslist
        self.resolver = resolver

    def run(self):
        try:
            host_record = self.resolver.query(self.host, "A")
            if len(host_record) > 0:
                self.listed[self.dnslist]['LISTED'] = True
                self.listed[self.dnslist]['HOST'] = host_record[0].address
                text_record = self.resolver.query(self.host, "TXT")
                if len(text_record) > 0:
                    self.listed[self.dnslist]['TEXT'] = "\n".join(text_record[0].strings)
            self.listed[self.dnslist]['ERROR'] = False
        except NXDOMAIN:
            self.listed[self.dnslist]['ERROR'] = False
            self.listed[self.dnslist]['ERRORTYPE'] = NXDOMAIN
        except NoNameservers:
            self.listed[self.dnslist]['ERROR'] = True
            self.listed[self.dnslist]['ERRORTYPE'] = NoNameservers
            self.listed[self.dnslist]['TEXT'] = "%s - The operation timed out." % self.host
        except Timeout:
            self.listed[self.dnslist]['ERROR'] = True
            self.listed[self.dnslist]['ERRORTYPE'] = Timeout
            self.listed[self.dnslist]['TEXT'] = "%s - The operation timed out." % self.host
        except NameError:
            self.listed[self.dnslist]['ERROR'] = True
            self.listed[self.dnslist]['ERRORTYPE'] = NameError
            self.listed[self.dnslist]['TEXT'] = "%s - NameError" % self.host
        except NoAnswer:
            self.listed[self.dnslist]['ERROR'] = True
            self.listed[self.dnslist]['ERRORTYPE'] = NoAnswer
            self.listed[self.dnslist]['TEXT'] = "%s - The response did not contain an answer to the question." % self.host

class RBLSearch(object):
    def __init__(self, lookup_host, rbls):
        self.lookup_host = lookup_host
        self.rbls = rbls
        self._listed = None
        self.resolver = Resolver()
        self.resolver.timeout = 0.2
        self.resolver.lifetime = 1.0

    def search(self):
        if self._listed is not None:
            pass
        else:
            host = self.lookup_host.split(".")
            host = ".".join(list(reversed(host)))
            self._listed = {'SEARCH_HOST': self.lookup_host}
            threads = []
            for LIST in self.rbls:
                self._listed[LIST] = {'LISTED': False}
                query = Lookup("%s.%s" % (host, LIST), LIST, self._listed, self.resolver)
                threads.append(query)
                query.start()
            for thread in threads:
                thread.join()
        return self._listed

    listed = property(search)

    def print_results(self):
        listed = self.listed
        print("")
        print("--- DNSBL Report for %s ---" % listed['SEARCH_HOST'])
        for key in listed:
            if key == 'SEARCH_HOST':
                continue
            if not listed[key].get('ERROR'):
                if listed[key]['LISTED']:
                    print("Results for %s: %s" % (key, listed[key]['LISTED']))
                    print("  + Host information: %s" %
                          (listed[key]['HOST']))
                    if 'TEXT' in listed[key].keys():
                        print("  + Additional information: %s" %
                              (listed[key]['TEXT']))
                else:
                    print("Not listed in %s" % (key))
            else:
                # print("*** Error contacting %s ***" % key)
                pass

def task_listener_runcheck(gearman_worker, gearman_job):
    jdata = json.loads(gearman_job.data)
    host = jdata['ip']
    rbls = jdata['rbls']
    ip = host
    print("Looking up: %s (please wait)" % host)
    pat = re.compile("\d{1,3}.\d{1,3}.\d{1,3}.\d{1,3}")
    is_ip_address = pat.match(host)
    if not is_ip_address:
        try:
            ip = socket.gethostbyname(host)
            print("Hostname %s resolved to ip %s" % (host, ip))
        except socket.error:
            print("Hostname %s can't be resolved" % host)
            ip = ""
    if ip:
        searcher = RBLSearch(ip, rbls)
        searcher.print_results()
    return "RunCheck was successful"

gm_worker = gearman.GearmanWorker(['localhost:4730'])
gm_worker.set_client_id('python-worker')
gm_worker.register_task('runcheck', task_listener_runcheck)
gm_worker.work()
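One usage note: worker.py blocks in gm_worker.work(), so start it in one terminal before running client.py in another; both scripts expect a gearmand instance listening on localhost:4730.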
