urllib not working with api request - python

I'm trying to write this code where I request information from openweathermap.org through an API and print the temperature and location at the current time.
Most of the code is a mix of tips I could find on the internet.
Now I get this error and I'm stuck. Can anyone help me get on the right path again?
Heres my code:
import urllib.request, urllib.parse, urllib.error
import json
# Repeatedly prompt for a Dutch zip code and print the current temperature and
# location name reported by the OpenWeatherMap API. Empty input ends the loop.
while True:
zipcode = input('Enter zipcode: ')
if len(zipcode) < 1: break
# NOTE(review): the URL literal below is wrapped across two lines by the
# site's formatting — in the real script it is one string. The trailing '?'
# after the appid is sent to the server verbatim, corrupting the API key and
# causing the HTTP 401 Unauthorized shown in the traceback.
url = 'http://api.openweathermap.org/data/2.5/weather?
zip='+zipcode+',nl&appid=db071ece9a338a36e9d7a660ec4f0e37?'
print('Retrieving', url)
uh = urllib.request.urlopen(url)
data = uh.read().decode()
print('Retrieved', len(data), 'characters')
try:
js = json.loads(data)
# NOTE(review): bare except silently swallows parse errors and leaves js
# as None, so the subscripts below would raise TypeError.
except:
js = None
temp = js["main"]["temp"]
loc = js["name"]
print("temperatuur:", temp)
print("locatie:", loc)
So the url is this: http://api.openweathermap.org/data/2.5/weather?zip=3032,nl&appid=db071ece9a338a36e9d7a660ec4f0e37
The error I'm getting is:
Enter zipcode: 3343
Retrieving http://api.openweathermap.org/data/2.5/weather?zip=3343,nl&appid=db071ece9a338a36e9d7a660ec4f0e37?
Traceback (most recent call last):
File "weatherapi2.py", line 12, in
uh = urllib.request.urlopen(url)
File "C:\Users\ErfanNariman\AppData\Local\Programs\Python\Python36\lib\urllib\request.py", line 223, in urlopen
return opener.open(url, data, timeout)
File "C:\Users\ErfanNariman\AppData\Local\Programs\Python\Python36\lib\urllib\request.py", line 532, in open
response = meth(req, response)
File "C:\Users\ErfanNariman\AppData\Local\Programs\Python\Python36\lib\urllib\request.py", line 642, in http_response
'http', request, response, code, msg, hdrs)
File "C:\Users\ErfanNariman\AppData\Local\Programs\Python\Python36\lib\urllib\request.py", line 570, in error
return self._call_chain(*args)
File "C:\Users\ErfanNariman\AppData\Local\Programs\Python\Python36\lib\urllib\request.py", line 504, in _call_chain
result = func(*args)
File "C:\Users\ErfanNariman\AppData\Local\Programs\Python\Python36\lib\urllib\request.py", line 650, in http_error_default
raise HTTPError(req.full_url, code, msg, hdrs, fp)
urllib.error.HTTPError: HTTP Error 401: Unauthorized

After some troubleshooting I found your issue: you have an extra '?' at the end of the URL in your Python code.
Remove that and your request works fine. I tried it with this code and it worked:
import urllib.request, urllib.parse, urllib.error
import json
# Repeatedly prompt for a Dutch zip code and print the current temperature and
# location name from the OpenWeatherMap API. Empty input ends the loop.
while True:
    zipcode = input('Enter zipcode: ')
    if len(zipcode) < 1:
        break
    # No trailing '?' after the appid — the stray one in the question
    # corrupted the API key and produced HTTP 401 Unauthorized.
    url = 'http://api.openweathermap.org/data/2.5/weather?zip='+zipcode+',nl&appid=db071ece9a338a36e9d7a660ec4f0e37'
    print('Retrieving', url)
    uh = urllib.request.urlopen(url)
    data = uh.read().decode()
    print('Retrieved', len(data), 'characters')
    try:
        js = json.loads(data)
    except json.JSONDecodeError:  # only a malformed body is recoverable here
        js = None
    if js is None:
        # Previously js stayed None and the subscripts below raised
        # TypeError; report the problem and ask for another zip code instead.
        print('Could not parse server response')
        continue
    temp = js["main"]["temp"]
    loc = js["name"]
    print("temperatuur:", temp)
    print("locatie:", loc)

Related

Issue with BingX Swap Api Python HTTP Error 405: Method Not Allowed

I'm using urllib.request.
I'm trying to use the BingX Swap API with Python and I'm getting this error:
Traceback (most recent call last):
File "Desktop\MyBot\MyApi.py", line 118, in <module>
main()
File "Desktop\MyBot\MyApi.py", line 103, in main
print("getLatestPrice:", getLatestPrice())
^^^^^^^^^^^^^^^^
File "Desktop\MyBot\MyApi.py", line 70, in getLatestPrice
return post(url, paramsStr)
^^^^^^^^^^^^^^^^^^^^
File "Desktop\MyBot\MyApi.py", line 19, in post
return urllib.request.urlopen(req).read()
^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "Python\Python311\Lib\urllib\request.py", line 216, in urlopen
return opener.open(url, data, timeout)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "Python\Python311\Lib\urllib\request.py", line 525, in open
response = meth(req, response)
^^^^^^^^^^^^^^^^^^^
File "Python\Python311\Lib\urllib\request.py", line 634, in http_response
response = self.parent.error(
^^^^^^^^^^^^^^^^^^
File "Python\Python311\Lib\urllib\request.py", line 563, in error
return self._call_chain(*args)
^^^^^^^^^^^^^^^^^^^^^^^
File "Python\Python311\Lib\urllib\request.py", line 496, in _call_chain
result = func(*args)
^^^^^^^^^^^
File "Python\Python311\Lib\urllib\request.py", line 643, in http_error_default
raise HTTPError(req.full_url, code, msg, hdrs, fp)
urllib.error.HTTPError: HTTP Error 405: Method Not Allowed
I don't know what I'm doing wrong, I think that my code is ok.
This is My Code:
import urllib.request
import json
import base64
import hmac
import time
# Base REST endpoint for BingX perpetual swaps.
APIURL = "https://api-swap-rest.bingbon.pro"
# Account credentials (redacted); SECRETKEY signs every request in genSignature.
APIKEY = "xxxxxxxxxxxxx"
SECRETKEY = "xxxxxxxxxxxxx"
def genSignature(path, method, paramsMap):
    """Return the HMAC-SHA256 digest of method + path + sorted query string.

    Parameters are serialized as key=value pairs joined by '&' in sorted key
    order, prefixed with the HTTP method and API path, then signed with the
    account SECRETKEY.
    """
    payload = "&".join(f"{key}={paramsMap[key]}" for key in sorted(paramsMap))
    message = f"{method}{path}{payload}"
    return hmac.new(SECRETKEY.encode("utf-8"), message.encode("utf-8"), digestmod="sha256").digest()
def post(url, body):
# NOTE(review): passing data= makes urllib issue an HTTP POST. The
# getLatestPrice endpoint below appears to be a GET endpoint, which would
# explain the HTTP 405 Method Not Allowed in the traceback — the signed
# parameters should travel in the query string of a GET request instead.
req = urllib.request.Request(url, data=body.encode("utf-8"), headers={'User-Agent': 'Mozilla/5.0'})
return urllib.request.urlopen(req).read()
def getLatestPrice():
    """Fetch the latest BTC-USDT price from the BingX swap API.

    The market-data endpoint accepts GET, so the signed parameters are
    appended to the URL as a query string. The original sent them as a
    request body, which made urllib issue a POST and drew the server's
    HTTP 405 Method Not Allowed.
    """
    paramsMap = {
        "symbol": "BTC-USDT",
    }
    sortedKeys = sorted(paramsMap)
    paramsStr = "&".join(["%s=%s" % (x, paramsMap[x]) for x in sortedKeys])
    # The signature covers method + path + sorted params (see genSignature).
    paramsStr += "&sign=" + urllib.parse.quote(base64.b64encode(genSignature("/api/v1/market/getLatestPrice", "GET", paramsMap)))
    url = "%s/api/v1/market/getLatestPrice?%s" % (APIURL, paramsStr)
    # No data= argument: this stays a GET, parameters live in the URL.
    req = urllib.request.Request(url, headers={'User-Agent': 'Mozilla/5.0'})
    return urllib.request.urlopen(req).read()
def main():
    """Entry point: fetch and display the latest BTC-USDT price."""
    latest = getLatestPrice()
    print("getLatestPrice:", latest)


if __name__ == "__main__":
    main()

Raise HTTPError(req.full_url, code, msg, hdrs, fp) urllib.error.HTTPError: HTTP Error 400: Bad Request

So i was trying to follow this tutorial https://www.youtube.com/watch?v=Lg4T9iJkwhE&t=155s to achieve image detection with YOLO.
and when trying to run this code to auto download images of cell phones but it didn't work
import os
import urllib.request as ulib
from bs4 import BeautifulSoup as Soup
import json
# Template for a Google Images AJAX search URL; the four fragments are joined
# into one string and the two {} placeholders take the query and the result
# offset. The original fragments began with a stray backslash ('\&tbm=...'):
# Python keeps an unrecognized escape as a literal backslash, so the request
# URL contained '\' characters and the server answered HTTP 400 Bad Request.
url_a = 'https://www.google.com/search?ei=1m7NWePfFYaGmQG51q7IBg&hl=en&q={}'
url_b = '&tbm=isch&ved=0ahUKEwjjovnD7sjWAhUGQyYKHTmrC2kQuT0I7gEoAQ&start={}'
url_c = '&yv=2&vet=10ahUKEwjjovnD7sjWAhUGQyYKHTmrC2kQuT0I7gEoAQ.1m7NWePfFYaGmQG51q7IBg'
url_d = '.i&ijn=1&asearch=ichunk&async=_id:rg_s,_pms:s'
url_base = ''.join((url_a, url_b, url_c, url_d))
# Minimal browser-like header so the request is not rejected as a script.
headers = {'User-Agent': 'Chrome/41.0.2228.0 Safari/537.36'}
def get_links(search_name):
    """Return the image src URLs on the Google Images AJAX page for
    *search_name* (spaces become '+')."""
    query = search_name.replace(' ', '+')
    request = ulib.Request(url_base.format(query, 0), None, headers)
    json_string = ulib.urlopen(request).read()
    payload = json.loads(json_string)
    # The image markup sits in the second element of the second entry.
    soup = Soup(payload[1][1], 'lxml')
    return [img['src'] for img in soup.find_all('img')]
def save_images(links, search_name):
    """Download every URL in *links* into a directory named after the search
    term (spaces become underscores) as 000000.png, 000001.png, ..."""
    directory = search_name.replace(' ', '_')
    if not os.path.isdir(directory):
        os.mkdir(directory)
    for index, link in enumerate(links):
        target = os.path.join(directory, '{:06}.png'.format(index))
        ulib.urlretrieve(link, target)
if __name__ == '__main__':
    # Fetch the link list and download everything it points to.
    search_name = 'cell phones'
    save_images(get_links(search_name), search_name)
I got bunch of errors like this :
C:\Python36\python.exe "C:/dark/darkflow-master/new_model_data/part5 - get_images.py"
Traceback (most recent call last):
File "C:/dark/darkflow-master/new_model_data/part5 - get_images.py", line 39, in <module>
links = get_links(search_name)
File "C:/dark/darkflow-master/new_model_data/part5 - get_images.py", line 19, in get_links
json_string = ulib.urlopen(request).read()
File "C:\Python36\lib\urllib\request.py", line 223, in urlopen
return opener.open(url, data, timeout)
File "C:\Python36\lib\urllib\request.py", line 532, in open
response = meth(req, response)
File "C:\Python36\lib\urllib\request.py", line 642, in http_response
'http', request, response, code, msg, hdrs)
File "C:\Python36\lib\urllib\request.py", line 570, in error
return self._call_chain(*args)
File "C:\Python36\lib\urllib\request.py", line 504, in _call_chain
result = func(*args)
File "C:\Python36\lib\urllib\request.py", line 650, in http_error_default
raise HTTPError(req.full_url, code, msg, hdrs, fp)
urllib.error.HTTPError: HTTP Error 400: Bad Request
Process finished with exit code 1
Someone please help me fix this mess, thanks.

Using Python Script to post data to web server

I am using Python 2.7.3 and I am trying to post data to my local web server. The data I am posting is temperature readings from my raspberry pi. I know the url is right because if I use the postman chrome plugin the data is successfully posted and I get a return message. In postman I can only use form-data though and NOT x-www-form-urlencoded which is how my python script has the content type setup. Can I change it to form-data?
Python Code:
import os
import glob
import time
import threading
import urllib
import urllib2
# Load the 1-Wire kernel modules that expose the temperature sensor.
os.system('modprobe w1-gpio')
os.system('modprobe w1-therm')
# Sensors appear under /sys/bus/w1/devices/ with a '28' family-code prefix.
base_dir = '/sys/bus/w1/devices/'
# NOTE(review): [0] raises IndexError if no sensor is attached — confirm a
# sensor is always present before this script runs.
device_folder = glob.glob(base_dir + '28*')[0]
device_file = device_folder + '/w1_slave'
def read_temp_raw():
    """Return all lines of the sensor's w1_slave file as a list of strings."""
    # 'with' guarantees the handle is closed even if readlines() raises;
    # the original leaked the open file on an exception.
    with open(device_file, 'r') as f:
        return f.readlines()
def read_temp():
# Poll the sensor file until the CRC line ends in 'YES' (read is valid),
# then parse the raw reading from the 't=' field.
lines = read_temp_raw()
# NOTE(review): busy-waits forever if the sensor never reports a valid CRC.
while lines[0].strip()[-3:] != 'YES':
time.sleep(0.2)
lines = read_temp_raw()
equals_pos = lines[1].find('t=')
if equals_pos != -1:
temp_string = lines[1][equals_pos+2:]
# The raw value is thousandths of a degree Celsius; derive C and F.
temp_c = float(temp_string) / 1000.0
temp_f = temp_c * 9.0 / 5.0 + 32.0
temperature = {'tempf':temp_f, 'tempc':temp_c}
return temperature
# NOTE(review): falls through and returns None when 't=' is absent; the
# caller then crashes on data['room'] — consider raising instead.
def post():
# Re-arm a timer so this function runs again in 30 minutes (1800 s), then
# read the sensor and POST the readings to the web server.
threading.Timer(1800.0, post).start()
temperature = read_temp()
data = temperature
data['room'] = 'Server Room'
print(data)
# urlencode produces an application/x-www-form-urlencoded body, matching
# the Content-type header set below.
data=urllib.urlencode(data)
path='http://client.pathtophppage' #the url you want to POST to
req=urllib2.Request(path, data)
req.add_header("Content-type", "application/x-www-form-urlencoded")
# NOTE(review): the HTTP 500 in the traceback is the server rejecting this
# request — the endpoint apparently expects multipart/form-data (what
# Postman's "form-data" mode sends), not urlencoded fields. Verify against
# the PHP page.
page=urllib2.urlopen(req).read()
post()
And the Error:
pi#raspberrypi ~/Documents $ python Temperature.py
{'tempc': 22.0, 'tempf': 71.6, 'room': 'Server Room'}
Traceback (most recent call last):
File "Temperature.py", line 49, in <module>
post()
File "Temperature.py", line 45, in post
page=urllib2.urlopen(req).read()
File "/usr/lib/python2.7/urllib2.py", line 127, in urlopen
return _opener.open(url, data, timeout)
File "/usr/lib/python2.7/urllib2.py", line 407, in open
response = meth(req, response)
File "/usr/lib/python2.7/urllib2.py", line 520, in http_response
'http', request, response, code, msg, hdrs)
File "/usr/lib/python2.7/urllib2.py", line 439, in error
result = self._call_chain(*args)
File "/usr/lib/python2.7/urllib2.py", line 379, in _call_chain
result = func(*args)
File "/usr/lib/python2.7/urllib2.py", line 626, in http_error_302
return self.parent.open(new, timeout=req.timeout)
File "/usr/lib/python2.7/urllib2.py", line 407, in open
response = meth(req, response)
File "/usr/lib/python2.7/urllib2.py", line 520, in http_response
'http', request, response, code, msg, hdrs)
File "/usr/lib/python2.7/urllib2.py", line 445, in error
return self._call_chain(*args)
File "/usr/lib/python2.7/urllib2.py", line 379, in _call_chain
result = func(*args)
File "/usr/lib/python2.7/urllib2.py", line 528, in http_error_default
raise HTTPError(req.get_full_url(), code, msg, hdrs, fp)
urllib2.HTTPError: HTTP Error 500: Internal Server Error
save your time, use this requests lib for httpRequests,
simple app
import requests

# Minimal POST with the third-party 'requests' library — far less
# boilerplate than urllib2 for form submissions.
url = 'http://url.com'
# The original used a bare name `value`, which raises NameError; use your
# actual field value here.
query = {'field': 'value'}
res = requests.post(url, data=query)
print(res.text)

Finding File size on a Website using python3

This is My Code >>
When I use this code, I am getting the correct output:
url = "http://songs.djmazadownload.com/music/indian_movies/Creature%20%282014%29/01%20-%20Creature%20-%20Sawan%20Aaya%20Hai%20%5BDJMaza.Info%5D.mp3"
site = urllib.request.urlopen(url)
print ( ( round((site.length / 1024),2) / 1024) , "Mb")
But , When I use this code
url = "http://songs.djmazadownload.com/music/indian_movies/Creature (2014)/01 - Creature - Sawan Aaya Hai [DJMaza.Info].mp3"
site = urllib.request.urlopen(url)
print ( ( round((site.length / 1024),2) / 1024) , "Mb")
I am getting this error:
Traceback (most recent call last):
File "C:\Python_Mass_downloader\New folder\download.py", line 35, in <module>
site = urllib.request.urlopen(url)
File "C:\Python34\lib\urllib\request.py", line 153, in urlopen
return opener.open(url, data, timeout)
File "C:\Python34\lib\urllib\request.py", line 461, in open
response = meth(req, response)
File "C:\Python34\lib\urllib\request.py", line 571, in http_response
'http', request, response, code, msg, hdrs)
File "C:\Python34\lib\urllib\request.py", line 499, in error
return self._call_chain(*args)
File "C:\Python34\lib\urllib\request.py", line 433, in _call_chain
result = func(*args)
File "C:\Python34\lib\urllib\request.py", line 579, in http_error_default
raise HTTPError(req.full_url, code, msg, hdrs, fp)
urllib.error.HTTPError: HTTP Error 400: Bad Request
My Full Code is >>
def audios(link):
# Scrape *link* for anchors pointing at audio files and download each one,
# filtered by a user-supplied size threshold in MB.
print("Enter the Minimum size of file to be downloaded\n")
size1 = input('>\t')
url = urllib.request.urlopen(link)
content = url.read()
soup = BeautifulSoup(content)
# Collect hrefs with common audio extensions.
links = [a['href'] for a in soup.find_all('a',href=re.compile('http.*\.(mp3|wav|ogg|wma|flac)'))]
print (str(len(links)) + " Audios Found ")
print("\n".join(links))
#For Downloading
dest = "C:\\Downloads\\" # or '~/Downloads/' on linux
for i in range(len(links)):
a_link = links[i]
#a_link2 = urllib.request.urlopen(a_link)
# Open the URL so its Content-Length (bytes) is known before downloading;
# convert to whole MB.
site = urllib.request.urlopen(a_link)
size2 = int((site.length / 1024) / 1024)
# NOTE(review): 'size 1' is a SyntaxError — should be 'size1' (see the
# answer below). Also input() returns a string, so compare int(size1);
# and size1 >= size2 keeps files *smaller* than the threshold, which
# looks inverted relative to "minimum size" — verify the intent.
if size 1 >= size2:
obj = SmartDL(a_link, dest)
obj.start()
path = obj.get_dest()
So I need to find the size of the file before downloading. But the links I got from the page are in this format: http://songs.djmazadownload.com/music/indian_movies/Creature (2014)/01 - Creature - Sawan Aaya Hai [DJMaza.Info].mp3
How do I convert the links to the percent-encoded format?
http://songs.djmazadownload.com/music/indian_movies/Creature%20%282014%29/01%20-%20Creature%20-%20Sawan%20Aaya%20Hai%20%5BDJMaza.Info%5D.mp3
use
from urllib.parse import quote_plus
....
# NOTE(review): quoting the *entire* URL also encodes '://' and '/', which
# produces an invalid URL — quote only the path portion, e.g.
# urllib.parse.quote(url, safe=':/'), rather than quote_plus on the whole
# string. Verify before relying on this snippet.
url = quote_plus(url)
site = urllib.request.urlopen(url)
print ( ( round((site.length / 1024),2) / 1024) , "Mb")
...
before opening it, and tell me what you get.
you also have syntax error here:
if size 1 >= size2:
#should be:
if size1 >= size2:

404 being returned by urllib2.urlopen(req) even though HTTP 200 being sent

I have the below python script that is making a successful rest web service request but then throwing a 404 after the server sends a HTTP 200 back. I can see the rest web service web app log returning the below JSON response successfully. Any ideas on how to further troubleshoot this or what could be causing the below error?
Last Python line executed:
result = urllib2.urlopen(req)
JSON response from Rest API Endpoint:
response{"status":"SUCCESS","reason":null,"location":null,"details":null,"errorDetails":{}}
Python Script:
#!/home/user/activepython-2.7.2.5_x86_64/bin/python
import sys
import os
import logging
import subprocess
import tempfile
import json
import urllib2
# define SVN — repository path and transaction id arrive as hook arguments;
# the -t flag puts svnlook into "transaction" (pre-commit) mode.
svn_repo = sys.argv[1]
svn_txn = sys.argv[2]
svn_opt = '-t'
# handle temp file
# delete=False so the file survives close(); it is removed explicitly in the
# finally block at the bottom of the script.
tmp_file = tempfile.NamedTemporaryFile(prefix="app_",
suffix=".tmp", dir="/tmp", delete=False)
delete_file = True
rest_url = 'https://host/rest/api/endpoint'
# setup logging
log_level = logging.DEBUG
logger = logging.getLogger("myapp")
logger.setLevel(log_level)
handler = logging.StreamHandler(sys.stderr)
handler.setLevel(log_level)
logger.addHandler(handler)
def get_svn_changes():
    """Return the output of 'svnlook changed --copy-info' for this transaction."""
    command = "/home/user/bin/svnlook changed --copy-info %s %s %s" % (svn_opt, svn_txn, svn_repo)
    out, _return_code = command_output(command)
    return out
def get_author():
    """Return the commit author reported by 'svnlook author', whitespace-stripped."""
    command = "/home/csvn/bin/svnlook author %s %s %s" % (svn_opt, svn_txn, svn_repo)
    raw_author, _return_code = command_output(command)
    return raw_author.strip()
def call_webservice():
# POST the temp file's name as JSON to the REST endpoint and return the
# parsed 'response' payload.
req = urllib2.Request(rest_url)
# NOTE(review): 'arpplication/json' is a typo for 'application/json' — the
# server cannot honour this Accept header, which matches the accepted
# answer's diagnosis of the 404.
req.add_header('Accept', 'arpplication/json')
req.add_header('Content-Type', 'application/json')
logger.debug("file=" + tmp_file.name)
data = json.dumps({"name" : "file", "value" : tmp_file.name})
logger.debug("data=" + data)
# Attaching a body turns this request into an HTTP POST.
req.add_data(data)
logger.debug("request")
result = urllib2.urlopen(req)
logger.debug("result")
# The endpoint wraps its JSON in a top-level 'response' field that itself
# holds a JSON string, hence the second loads() below.
json_result = json.load(result)
logger.debug("json_result")
result_data = json.loads(json_result['response'])
logger.debug("result_data")
return result_data
if __name__ == "__main__":
exit_code = 0;
out_message = ''
author = get_author()
try:
tmp_file.write("author=%s\n" % author)
output = get_svn_changes()
tmp_file.write(output)
tmp_file.close()
output = call_webservice()
if (output['status'] == 'ERROR'):
out_message = output['reason']
exit_code = 1
except Exception, ex:
out_message = str(ex)
exit_code = 1
finally:
if (exit_code == 1):
sys.stderr.write("Error: %s" % out_message)
if delete_file:
os.remove(tmp_file.name)
sys.exit(exit_code)
Traceback Exception:
//startlogger output
file=/tmp/app_rrOgN0.tmp
data={"name": "file", "value": "/tmp/app_rrOgN0.tmp"}
request
//stop logger output
Traceback (most recent call last):
File "/home/csvn/data/repositories/repo/hooks/pre-commit", line 85, in <module>
output = call_webservice()
File "/home/csvn/data/repositories/repo/hooks/pre-commit", line 59, in call_webservice
result = urllib2.urlopen(req)
File "/home/activepython-2.7.2.5_x86_64/lib/python2.7/urllib2.py", line 126, in urlopen
return _opener.open(url, data, timeout)
File "/home/activepython-2.7.2.5_x86_64/lib/python2.7/urllib2.py", line 400, in open
response = meth(req, response)
File "/home/activepython-2.7.2.5_x86_64/lib/python2.7/urllib2.py", line 513, in http_response
'http', request, response, code, msg, hdrs)
File "/home/activepython-2.7.2.5_x86_64/lib/python2.7/urllib2.py", line 438, in error
return self._call_chain(*args)
File "/home/activepython-2.7.2.5_x86_64/lib/python2.7/urllib2.py", line 372, in _call_chain
result = func(*args)
File "/home/activepython-2.7.2.5_x86_64/lib/python2.7/urllib2.py", line 521, in http_error_default
raise HTTPError(req.get_full_url(), code, msg, hdrs, fp)
HTTPError: HTTP Error 404: Not Found
Error: HTTP Error 404: Not Found
You misspelled the Accept header:
req.add_header('Accept', 'arpplication/json')
Correct the spelling of application:
req.add_header('Accept', 'application/json')

Categories

Resources