Parse a URL in Python

I want to create an HTTP link from DB info such as an IP and port number. I tried this, but I keep getting an error that it can't parse the port. I hope someone can help me out.
@app.route('/link_test/<string:id>')
@is_logged_in
def link_test(id):
    # Create cursor
    cur = mysql.get_db().cursor()
    # Get item by id
    result = cur.execute("SELECT * FROM testlist WHERE id = %s", [id])
    doc = cur.fetchone()
    doc = {"id": test[0], "rdno": test[1], "ipno": test[2], "port": test[3]}
    cur.close()
    ipno = test['ipno']
    port = test['port']
    url_uptime = 'https://<string:ipno>:<string:port>/stats/uptime/'
    return render_template('link_test.html', uptime=json.loads(r_uptime))

<string:ipno>:<string:port> is a Flask route pattern, not something to be used literally as a URL.
If you want to create this string, that's called formatting, not parsing:
ipno = doc['ipno']
port = doc['port']
url_uptime = 'https://{}:{}/stats/uptime/'.format(ipno, port)
r_uptime = requests.get(url_uptime, verify=False).content
You'll also want to define test somewhere before doc is created.
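Putting the pieces together, a minimal sketch of the corrected view might look like this (the testlist column order and the /stats/uptime/ endpoint are taken from your snippet; error handling is omitted):
@app.route('/link_test/<string:id>')
@is_logged_in
def link_test(id):
    cur = mysql.get_db().cursor()
    cur.execute("SELECT * FROM testlist WHERE id = %s", [id])
    test = cur.fetchone()  # define test before building doc from it
    cur.close()
    doc = {"id": test[0], "rdno": test[1], "ipno": test[2], "port": test[3]}
    # Format the URL from the row values instead of Flask route syntax
    url_uptime = 'https://{}:{}/stats/uptime/'.format(doc['ipno'], doc['port'])
    r_uptime = requests.get(url_uptime, verify=False).content
    return render_template('link_test.html', uptime=json.loads(r_uptime))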

getVars1 = doc[2]
url = '/stats/uptime/'
url_uptime = ('https://' + getVars1 + url)
Great, thanks mate!

Related

Loop for JSON data extraction

This is a webhook. I receive JSON from an API with it, and I need to extract the JSON tags and send them to a MySQL database. The problem is that not all JSON tags get sent; I think it needs a loop at the step I tagged in the code. Thanks.
import json
import urllib.parse
import urllib.request
import mysql.connector
import web  # web.py is used below but was missing from the imports

urls = ('/.*', 'hooks')
app = web.application(urls, globals())

class hooks:
    def POST(self):
        data = web.data()
        print()
        print('DATA RECEIVED:')
        print(data)
        print()
        cts = data.decode('utf-8')  # decode bytes to string
        r1 = cts.replace(cts[:9], '')  # strip the leading 'messages=' prefix
        parsed = urllib.parse.unquote_plus(r1)  # ready for post
        print(parsed)
        print(cts)
        print(type(cts))
        myurl = "https://webhook.site/c0e861b0-3cc1-42c2-a0c6-54ad980b01b0"
        req = urllib.request.Request(myurl)
        req.add_header('Content-Type', 'application/json; charset=utf-8')
        jsondata = parsed
        jsondataasbytes = jsondata.encode('utf-8')  # convert to bytes
        req.add_header('Content-Length', len(jsondataasbytes))
        print(jsondataasbytes)
        response = urllib.request.urlopen(req, jsondataasbytes)
        test_dict = json.loads(parsed)[0]  # only the first message is taken here
        print(type(test_dict))
        # Extracting specific keys from dictionary <<<<<<THIS STEP>>>>>>>>>
        indic_label = test_dict['indicator_label']
        status = test_dict['status']
        creation_date = test_dict['creation_date']
        laststatus = test_dict['last_status']
        base = test_dict['base_currency']
        quote_currency = test_dict['quote_currency']
        indic = test_dict['indicator']
        prices = test_dict['prices']
        mydb = mysql.connector.connect(
            host="*",
            user="*",
            password="*",
            database="*"
        )
        cursor = mydb.cursor()
        cursor.execute("""INSERT INTO allcoins
            (base, quote_currency, indic, status, laststatus, creation_date, prices, indic_label)
            VALUES (%s,%s,%s,%s,%s,%s,%s,%s)""",
            (base, quote_currency, indic, status, laststatus, creation_date, prices, indic_label))
        mydb.commit()
        cursor.close()
        mydb.close()
        return 'OK'

if __name__ == '__main__':
    app.run()
You can post test data to this hook with this curl command:
curl -d "messages=%5B%7B%22values%22%3A+%7B%22momentum%22%3A+%220.00%22%7D%2C+%22exchange%22%3A+%22binance%22%2C+%22market%22%3A+%22BNT%2FETH%22%2C+%22base_currency%22%3A+%22BNT%22%2C+%22quote_currency%22%3A+%22ETH%22%2C+%22indicator%22%3A+%22momentum%22%2C+%22indicator_number%22%3A+0%2C+%22analysis%22%3A+%7B%22config%22%3A+%7B%22enabled%22%3A+true%2C+%22alert_enabled%22%3A+true%2C+%22alert_frequency%22%3A+%22once%22%2C+%22signal%22%3A+%5B%22momentum%22%5D%2C+%22hot%22%3A+0%2C+%22cold%22%3A+0%2C+%22candle_period%22%3A+%224h%22%2C+%22period_count%22%3A+10%7D%2C+%22status%22%3A+%22hot%22%7D%2C+%22status%22%3A+%22hot%22%2C+%22last_status%22%3A+%22hot%22%2C+%22prices%22%3A+%22+Open%3A+0.000989+High%3A+0.000998+Low%3A+0.000980+Close%3A+0.000998%22%2C+%22lrsi%22%3A+%22%22%2C+%22creation_date%22%3A+%222020-05-10+16%3A16%3A23%22%2C+%22hot_cold_label%22%3A+%22%22%2C+%22indicator_label%22%3A+%22%22%2C+%22price_value%22%3A+%7B%22open%22%3A+0.000989%2C+%22high%22%3A+0.000998%2C+%22low%22%3A+0.00098%2C+%22close%22%3A+0.000998%7D%2C+%22decimal_format%22%3A+%22%25.6f%22%7D%2C+%7B%22values%22%3A+%7B%22leading_span_a%22%3A+%220.00%22%2C+%22leading_span_b%22%3A+%220.00%22%7D%2C+%22exchange%22%3A+%22binance%22%2C+%22market%22%3A+%22BNT%2FETH%22%2C+%22base_currency%22%3A+%22BNT%22%2C+%22quote_currency%22%3A+%22ETH%22%2C+%22indicator%22%3A+%22ichimoku%22%2C+%22indicator_number%22%3A+1%2C+%22analysis%22%3A+%7B%22config%22%3A+%7B%22enabled%22%3A+true%2C+%22alert_enabled%22%3A+true%2C+%22alert_frequency%22%3A+%22once%22%2C+%22signal%22%3A+%5B%22leading_span_a%22%2C+%22leading_span_b%22%5D%2C+%22hot%22%3A+true%2C+%22cold%22%3A+true%2C+%22candle_period%22%3A+%224h%22%2C+%22hot_label%22%3A+%22Bullish+Alert%22%2C+%22cold_label%22%3A+%22Bearish+Alert%22%2C+%22indicator_label%22%3A+%22ICHIMOKU+4+hr%22%2C+%22mute_cold%22%3A+false%7D%2C+%22status%22%3A+%22cold%22%7D%2C+%22status%22%3A+%22cold%22%2C+%22last_status%22%3A+%22cold%22%2C+%22prices%22%3A+%22+Open%3A+0.000989+High%3A+0.000998+Low%3A+0.000980+Close%3A+0.000998%22%2C+%22lrsi%22%3A+%22%22%2C+%22creation_date%22%3A+%222020-05-10+16%3A16%3A23%22%2C+%22hot_cold_label%22%3A+%22Bearish+Alert%22%2C+%22indicator_label%22%3A+%22ICHIMOKU+4+hr%22%2C+%22price_value%22%3A+%7B%22open%22%3A+0.000989%2C+%22high%22%3A+0.000998%2C+%22low%22%3A+0.00098%2C+%22close%22%3A+0.000998%7D%2C+%22decimal_format%22%3A+%22%25.6f%22%7D%2C+%7B%22values%22%3A+%7B%22bbp%22%3A+%220.96%22%2C+%22mfi%22%3A+%2298.05%22%7D%2C+%22exchange%22%3A+%22binance%22%2C+%22market%22%3A+%22BNT%2FETH%22%2C+%22base_currency%22%3A+%22BNT%22%2C+%22quote_currency%22%3A+%22ETH%22%2C+%22indicator%22%3A+%22bbp%22%2C+%22indicator_number%22%3A+1%2C+%22analysis%22%3A+%7B%22config%22%3A+%7B%22enabled%22%3A+true%2C+%22alert_enabled%22%3A+true%2C+%22alert_frequency%22%3A+%22once%22%2C+%22candle_period%22%3A+%224h%22%2C+%22period_count%22%3A+20%2C+%22hot%22%3A+0.09%2C+%22cold%22%3A+0.8%2C+%22std_dev%22%3A+2%2C+%22signal%22%3A+%5B%22bbp%22%2C+%22mfi%22%5D%2C+%22hot_label%22%3A+%22Lower+Band%22%2C+%22cold_label%22%3A+%22Upper+Band+BB%22%2C+%22indicator_label%22%3A+%22Bollinger+4+hr%22%2C+%22mute_cold%22%3A+false%7D%2C+%22status%22%3A+%22cold%22%7D%2C+%22status%22%3A+%22cold%22%2C+%22last_status%22%3A+%22cold%22%2C+%22prices%22%3A+%22+Open%3A+0.000989+High%3A+0.000998+Low%3A+0.000980+Close%3A+0.000998%22%2C+%22lrsi%22%3A+%22%22%2C+%22creation_date%22%3A+%222020-05-10+16%3A16%3A23%22%2C+%22hot_cold_label%22%3A+%22Upper+Band+BB%22%2C+%22indicator_label%22%3A+%22Bollinger+4+hr%22%2C+%22price_value%22%3A+%7B%22open%22%3A+0.000989%2C+%22high%22%3A+0.000998%2C+%22low%22%3A+0.00098%2C+%22close%22%3A+0.000998%7D%2C+%22decimal_format%22%3A+%22%25.6f%22%7D%5D" -X POST http://192.168.30.1
Perhaps define all the variables upfront, because you still need them to write to the DB, then check whether they are in the response and update:
indic_label = ''
status = ''
creation_date = ''
laststatus = ''
base = ''
quote_currency = ''
indic = ''
prices = ''
if 'indicator_label' in test_dict:
    indic_label = test_dict['indicator_label']
if 'status' in test_dict:
    status = test_dict['status']
...
...
If it's an all-or-none situation, you can check for one variable and then exit; otherwise, check each of them.
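As for the tagged step itself, a minimal sketch of the missing loop, assuming every element of the decoded list carries the same keys: iterate over json.loads(parsed) instead of taking only element [0], and insert one row per message.
for test_dict in json.loads(parsed):  # one dict per message, not just the first
    cursor.execute("""INSERT INTO allcoins
        (base, quote_currency, indic, status, laststatus, creation_date, prices, indic_label)
        VALUES (%s,%s,%s,%s,%s,%s,%s,%s)""",
        (test_dict.get('base_currency', ''),
         test_dict.get('quote_currency', ''),
         test_dict.get('indicator', ''),
         test_dict.get('status', ''),
         test_dict.get('last_status', ''),
         test_dict.get('creation_date', ''),
         test_dict.get('prices', ''),
         test_dict.get('indicator_label', '')))
mydb.commit()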

Django Celery Error Message Cannot Serialize

I keep getting this error message when I run my script in my Django app: Object of type WSGIRequest is not JSON serializable (with my serializer set to JSON). If I change it to pickle, I get this error instead: cannot serialize '_io.BufferedReader' object. I have spent days trying to figure out how to fix this. I appreciate any help. Thanks.
Here is the script I'm sending to Celery:
import xml.dom.minidom
import xml.etree.ElementTree as ET
import requests
# School and APIInformation are this app's Django models

def ImportSchools(request):
    print("Getting school data from SIS")
    url = ""
    payload = {}
    token = APIInformation.objects.get(api_name="PowerSchool")
    key = token.key
    headers = {'Authorization': 'Bearer {}'.format(key)}
    response = requests.request("GET", url, headers=headers, data=payload)
    encode_xml = response.text.encode('utf8')
    pretty_xml = xml.dom.minidom.parseString(encode_xml)
    pretty_xml_str = pretty_xml.toprettyxml()
    xml_string = ET.fromstring(encode_xml)
    schools = xml_string.findall("school")
    for school in schools:
        psid = school.find("id").text
        name = school.find("name").text
        school_number = school.find("school_number").text
        low_grade = school.find("low_grade").text
        high_grade = school.find("high_grade").text
        if not School.objects.filter(schoolpsid=psid):
            print("Record doesn't exist in DB, creating record.")
            x = School.objects.create(schoolpsid=psid, school_name=name, school_number=school_number, low_grade=low_grade, high_grade=high_grade)
            x.save()
        elif School.objects.filter(schoolpsid=psid).exists():
            print("Record exists in DB, updating record.")
            School.objects.filter(schoolpsid=psid).update(school_name=name, school_number=school_number, low_grade=low_grade, high_grade=high_grade)
    print("School Data Pull Complete")
    return "Done"
The solution was to remove request as an argument; that fixed the issue.
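For illustration, a minimal sketch of that fix, assuming a standard Celery setup (the names import_schools and import_schools_view are hypothetical): the task takes no request, so only plain, serializable arguments ever cross the broker.
from celery import shared_task
from django.http import HttpResponse

@shared_task
def import_schools():
    # same body as ImportSchools above, just without the request parameter
    ...
    return "Done"

def import_schools_view(request):
    import_schools.delay()  # the WSGIRequest stays in the view
    return HttpResponse("Import queued")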

Python LDAP module limits search to 1000 users, how to use simple paged search?

I am using the ldap3 module in Python to query AD; however, I only get a result if the AD account is in the first 1000 entries. I have more than 25,000 user IDs in AD. How can I change my code here to a simple paged search? Please help me.
import sys
from ldap3 import Server, Connection, ALL, NTLM, ALL_ATTRIBUTES, ALL_OPERATIONAL_ATTRIBUTES, AUTO_BIND_NO_TLS, SUBTREE
from ldap3.core.exceptions import LDAPCursorError

server = Server('bluepedia.com', get_info=ALL)
conn = Connection(server, user='bluepedia\\administrator', password='Vihaan#2016', authentication=NTLM)
conn.bind()
fusers = ['vihaan', 'phani', 'rammohan', 'raja', 'bindu', 'MA977956', 'MA625841', 'gopal', 'govind', 'MA177397', 'MA259156']
status = ''
usermail = ''
format_string = '{:25} {:21} {}'
print(format_string.format('User ID', 'Account Status', 'E-mail Address'))
conn.search('dc=bluepedia,dc=com', '(objectclass=person)', attributes=['sAMAccountName', 'mail'])
for x in fusers:
    for e in conn.entries:
        usermail = e.mail
        if x in e.sAMAccountName:
            # status = 'active' + "\t\t\t\t" + str(usermail)
            status = 'active'
            break
        else:
            status = "Not Active"
            usermail = ''
    print(format_string.format(str(x), str(status), str(usermail)))
The result I need: I want to look up user IDs from a list using the ldap3 module, and I have 10,000 users in the list.
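For reference, ldap3 ships a simple paged search helper; a minimal sketch using it (the page size and base DN here are assumptions) would replace the single conn.search call above:
entries = conn.extend.standard.paged_search(
    'dc=bluepedia,dc=com',
    '(objectclass=person)',
    attributes=['sAMAccountName', 'mail'],
    paged_size=500,   # ask the server for 500 entries per page
    generator=True)   # yield entries lazily instead of buffering 25,000+
for entry in entries:
    attrs = entry.get('attributes', {})
    print(attrs.get('sAMAccountName'), attrs.get('mail'))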

Python LDAP3 Search in multiple search_base at once

I have code like this:
from ldap3 import Server, Connection
uri = 'ldaps://ca1.ad.xxx.com:123'
bind_user = 'CN=svc_bind_user,OU=Service Accounts,DC=subdomain1,DC=ad,DC=xxx,DC=com'
bind_password = 'svc_bind_p4$$'
server = Server(uri)
conn = Connection(server, bind_user, bind_password)
conn.bind()
user_filter = 'objectClass=*'
user_name_attr = 'sAMAccountName'
search_scope = 'SUBTREE'
I can successfully search for user1 (user1@subdomain1.ad.xxx.com) like this:
username = 'user1'
search_base= 'DC=subdomain1,DC=ad,DC=xxx,DC=com'
search_filter = "(&({0})({1}={2}))".format(
    user_filter,
    user_name_attr,
    username
)
res = conn.search(search_base,
                  search_filter,
                  search_scope)
as well as for user2 (user2@subdomain2.ad.xxx.com) like this:
username = 'user2'
search_base= 'DC=subdomain2,DC=ad,DC=xxx,DC=com'
search_filter = "(&({0})({1}={2}))".format(
    user_filter,
    user_name_attr,
    username
)
res = conn.search(search_base,
                  search_filter,
                  search_scope)
As you can see, the snippets above are tailored per user to look into a different search_base: subdomain1 and subdomain2 respectively.
I tried to search for both user1 and user2 in one go with a higher-level search_base = 'DC=ad,DC=xxx,DC=com':
username = 'user1'
search_base= 'DC=ad,DC=xxx,DC=com'
search_filter = "(&({0})({1}={2}))".format(
    user_filter,
    user_name_attr,
    username
)
res = conn.search(search_base,
                  search_filter,
                  search_scope)
but the code above doesn't find the user; it only returns a list of subdomains.
So the question is, assuming I am not doing anything wrong here: is there a way to search within multiple domains, perhaps with a special search_base syntax that combines multiple subdomains?
I don't want to do multiple searches, and as I mentioned, the SUBTREE search with a higher-level search_base does not seem to work for me either.
Thanks
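A hedged sketch of one common approach: Active Directory exposes a Global Catalog on port 3268 (3269 for LDAPS) that can answer forest-wide queries, so the higher-level search_base works when you point the connection there. The host and port below are assumptions:
from ldap3 import Server, Connection

gc_server = Server('ldaps://ca1.ad.xxx.com:3269')  # Global Catalog over SSL
gc_conn = Connection(gc_server, bind_user, bind_password)
gc_conn.bind()
res = gc_conn.search('DC=ad,DC=xxx,DC=com',  # forest root as the search_base
                     '(&(objectClass=*)(sAMAccountName=user1))',
                     'SUBTREE')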

python imaplib gmail fetching multiple results from list

I am trying to obtain email IDs and then fetch all of them. How do I do this? Thanks!
The following is my code:
import imaplib
import re

user = 'user'
pwd = 'password'
imap_server = imaplib.IMAP4_SSL('imap.gmail.com', 993)
imap_server.login(user, pwd)
imap_server.select('Inbox')
typ, response = imap_server.search(None, '(SUBJECT "Hello")')
response = str(response[0])
response_re = re.compile(r'\d+')
response_pat = re.findall(response_re, response)
for i in response_pat:
    results, datas = imap_server.fetch(i, "(RFC822)")
for i in datas:
    print i
This still only prints one value of datas, even though I have iterated through a list of multiple values.
You made a mistake with the command: it should be RFC822 instead of RCF822. Simply change one line of your code, from
results, datas = imap_server.fetch(i, "(RCF822)")
to
results, datas = imap_server.fetch(i, "(RFC822)")
Also, don't use a regex when simple string methods will do. Instead of the regex, simply do this in your loop:
for i in response[0].split():
    results, datas = imap_server.fetch(i, "(RFC822)")
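Putting both fixes together, a minimal sketch of the corrected loop, printing inside the loop so every fetched message is shown rather than only the last datas:
typ, response = imap_server.search(None, '(SUBJECT "Hello")')
for num in response[0].split():  # message ids come back space-separated
    results, datas = imap_server.fetch(num, "(RFC822)")
    for part in datas:
        if isinstance(part, tuple):  # the raw message bytes live inside tuples
            print(part[1])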
