Finding File size on a Website using python3 - python

This is my code. When I use this code, I get the correct output:
url = "http://songs.djmazadownload.com/music/indian_movies/Creature%20%282014%29/01%20-%20Creature%20-%20Sawan%20Aaya%20Hai%20%5BDJMaza.Info%5D.mp3"
site = urllib.request.urlopen(url)
print ( ( round((site.length / 1024),2) / 1024) , "Mb")
But when I use this code:
url = "http://songs.djmazadownload.com/music/indian_movies/Creature (2014)/01 - Creature - Sawan Aaya Hai [DJMaza.Info].mp3"
site = urllib.request.urlopen(url)
print ( ( round((site.length / 1024),2) / 1024) , "Mb")
I get this error:
Traceback (most recent call last):
File "C:\Python_Mass_downloader\New folder\download.py", line 35, in <module>
site = urllib.request.urlopen(url)
File "C:\Python34\lib\urllib\request.py", line 153, in urlopen
return opener.open(url, data, timeout)
File "C:\Python34\lib\urllib\request.py", line 461, in open
response = meth(req, response)
File "C:\Python34\lib\urllib\request.py", line 571, in http_response
'http', request, response, code, msg, hdrs)
File "C:\Python34\lib\urllib\request.py", line 499, in error
return self._call_chain(*args)
File "C:\Python34\lib\urllib\request.py", line 433, in _call_chain
result = func(*args)
File "C:\Python34\lib\urllib\request.py", line 579, in http_error_default
raise HTTPError(req.full_url, code, msg, hdrs, fp)
urllib.error.HTTPError: HTTP Error 400: Bad Request
My full code is:
def audios(link):
    print("Enter the Minimum size of file to be downloaded\n")
    size1 = input('>\t')
    url = urllib.request.urlopen(link)
    content = url.read()
    soup = BeautifulSoup(content)
    links = [a['href'] for a in soup.find_all('a', href=re.compile('http.*\.(mp3|wav|ogg|wma|flac)'))]
    print(str(len(links)) + " Audios Found ")
    print("\n".join(links))
    # For Downloading
    dest = "C:\\Downloads\\"  # or '~/Downloads/' on linux
    for i in range(len(links)):
        a_link = links[i]
        # a_link2 = urllib.request.urlopen(a_link)
        site = urllib.request.urlopen(a_link)
        size2 = int((site.length / 1024) / 1024)
        if size 1 >= size2:
            obj = SmartDL(a_link, dest)
            obj.start()
            path = obj.get_dest()
So I need to find the size of the file before downloading it. But the links I get from the page are in this format:
http://songs.djmazadownload.com/music/indian_movies/Creature (2014)/01 - Creature - Sawan Aaya Hai [DJMaza.Info].mp3
How do I get the links in the other format?
http://songs.djmazadownload.com/music/indian_movies/Creature%20%282014%29/01%20-%20Creature%20-%20Sawan%20Aaya%20Hai%20%5BDJMaza.Info%5D.mp3

use
from urllib.parse import quote_plus
....
url = quote_plus(url)
site = urllib.request.urlopen(url)
print ( ( round((site.length / 1024),2) / 1024) , "Mb")
...
before opening and tell me what you get.
You also have a syntax error here:
if size 1 >= size2:
#should be:
if size1 >= size2:
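One caveat with quote_plus on the full URL is that it also percent-encodes the :// after http, so another option is to encode only the path and query and leave the scheme and host alone. Below is a minimal sketch of that idea which then reads the size from site.length (the Content-Length header); it assumes the server actually sends that header, and the example URL in the comment is hypothetical.
from urllib.parse import urlsplit, urlunsplit, quote
import urllib.request

def size_in_mb(raw_url):
    # Percent-encode only the path and query, keeping scheme and host intact.
    parts = urlsplit(raw_url)
    safe_url = urlunsplit((parts.scheme, parts.netloc, quote(parts.path),
                           quote(parts.query, safe='=&'), parts.fragment))
    site = urllib.request.urlopen(safe_url)
    size = round(site.length / 1024 / 1024, 2)  # bytes -> Mb
    site.close()
    return size

# Example (hypothetical URL):
# print(size_in_mb("http://example.com/music/Creature (2014)/01 - Song.mp3"), "Mb")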

Related

Issue with BingX Swap Api Python HTTP Error 405: Method Not Allowed

I'm using urllib.request.
I'm trying to use the BingX Swap Api with Python and I'm getting this error:
Traceback (most recent call last):
File "Desktop\MyBot\MyApi.py", line 118, in <module>
main()
File "Desktop\MyBot\MyApi.py", line 103, in main
print("getLatestPrice:", getLatestPrice())
^^^^^^^^^^^^^^^^
File "Desktop\MyBot\MyApi.py", line 70, in getLatestPrice
return post(url, paramsStr)
^^^^^^^^^^^^^^^^^^^^
File "Desktop\MyBot\MyApi.py", line 19, in post
return urllib.request.urlopen(req).read()
^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "Python\Python311\Lib\urllib\request.py", line 216, in urlopen
return opener.open(url, data, timeout)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "Python\Python311\Lib\urllib\request.py", line 525, in open
response = meth(req, response)
^^^^^^^^^^^^^^^^^^^
File "Python\Python311\Lib\urllib\request.py", line 634, in http_response
response = self.parent.error(
^^^^^^^^^^^^^^^^^^
File "Python\Python311\Lib\urllib\request.py", line 563, in error
return self._call_chain(*args)
^^^^^^^^^^^^^^^^^^^^^^^
File "Python\Python311\Lib\urllib\request.py", line 496, in _call_chain
result = func(*args)
^^^^^^^^^^^
File "Python\Python311\Lib\urllib\request.py", line 643, in http_error_default
raise HTTPError(req.full_url, code, msg, hdrs, fp)
urllib.error.HTTPError: HTTP Error 405: Method Not Allowed
I don't know what I'm doing wrong; I think my code is OK.
This is my code:
import urllib.request
import json
import base64
import hmac
import time
APIURL = "https://api-swap-rest.bingbon.pro"
APIKEY = "xxxxxxxxxxxxx"
SECRETKEY = "xxxxxxxxxxxxx"
def genSignature(path, method, paramsMap):
    sortedKeys = sorted(paramsMap)
    paramsStr = "&".join(["%s=%s" % (x, paramsMap[x]) for x in sortedKeys])
    paramsStr = method + path + paramsStr
    return hmac.new(SECRETKEY.encode("utf-8"), paramsStr.encode("utf-8"), digestmod="sha256").digest()

def post(url, body):
    req = urllib.request.Request(url, data=body.encode("utf-8"), headers={'User-Agent': 'Mozilla/5.0'})
    return urllib.request.urlopen(req).read()

def getLatestPrice():
    paramsMap = {
        "symbol": "BTC-USDT",
    }
    sortedKeys = sorted(paramsMap)
    paramsStr = "&".join(["%s=%s" % (x, paramsMap[x]) for x in sortedKeys])
    paramsStr += "&sign=" + urllib.parse.quote(base64.b64encode(genSignature("/api/v1/market/getLatestPrice", "GET", paramsMap)))
    url = "%s/api/v1/market/getLatestPrice" % APIURL
    return post(url, paramsStr)

def main():
    print("getLatestPrice:", getLatestPrice())

if __name__ == "__main__":
    main()
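One detail that may explain the 405: in urllib.request, passing data= to Request turns the request into a POST, while the signature above is computed for "GET", and an endpoint that expects GET will commonly reject a POST with 405 Method Not Allowed. Here is a minimal sketch of sending the same parameters as a GET query string instead; whether BingX accepts the signed parameters this way is an assumption, not something confirmed here.
import urllib.request

def get(url, paramsStr):
    # Appending the query string to the URL (instead of passing data=)
    # makes urllib issue a GET rather than a POST.
    req = urllib.request.Request(url + "?" + paramsStr,
                                 headers={'User-Agent': 'Mozilla/5.0'})
    return urllib.request.urlopen(req).read()

# Hypothetical usage, mirroring getLatestPrice() above:
# return get("%s/api/v1/market/getLatestPrice" % APIURL, paramsStr)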

Raise HTTPError(req.full_url, code, msg, hdrs, fp) urllib.error.HTTPError: HTTP Error 400: Bad Request

So I was trying to follow this tutorial https://www.youtube.com/watch?v=Lg4T9iJkwhE&t=155s to achieve image detection with YOLO, and when I tried to run this code to auto-download images of cell phones, it didn't work:
import os
import urllib.request as ulib
from bs4 import BeautifulSoup as Soup
import json
url_a = 'https://www.google.com/search?ei=1m7NWePfFYaGmQG51q7IBg&hl=en&q={}'
url_b = '\&tbm=isch&ved=0ahUKEwjjovnD7sjWAhUGQyYKHTmrC2kQuT0I7gEoAQ&start={}'
url_c = '\&yv=2&vet=10ahUKEwjjovnD7sjWAhUGQyYKHTmrC2kQuT0I7gEoAQ.1m7NWePfFYaGmQG51q7IBg'
url_d = '\.i&ijn=1&asearch=ichunk&async=_id:rg_s,_pms:s'
url_base = ''.join((url_a, url_b, url_c, url_d))
headers = {'User-Agent': 'Chrome/41.0.2228.0 Safari/537.36'}
def get_links(search_name):
    search_name = search_name.replace(' ', '+')
    url = url_base.format(search_name, 0)
    request = ulib.Request(url, None, headers)
    json_string = ulib.urlopen(request).read()
    page = json.loads(json_string)
    new_soup = Soup(page[1][1], 'lxml')
    images = new_soup.find_all('img')
    links = [image['src'] for image in images]
    return links

def save_images(links, search_name):
    directory = search_name.replace(' ', '_')
    if not os.path.isdir(directory):
        os.mkdir(directory)
    for i, link in enumerate(links):
        savepath = os.path.join(directory, '{:06}.png'.format(i))
        ulib.urlretrieve(link, savepath)

if __name__ == '__main__':
    search_name = 'cell phones'
    links = get_links(search_name)
    save_images(links, search_name)
I got a bunch of errors like this:
C:\Python36\python.exe "C:/dark/darkflow-master/new_model_data/part5 - get_images.py"
Traceback (most recent call last):
File "C:/dark/darkflow-master/new_model_data/part5 - get_images.py", line 39, in <module>
links = get_links(search_name)
File "C:/dark/darkflow-master/new_model_data/part5 - get_images.py", line 19, in get_links
json_string = ulib.urlopen(request).read()
File "C:\Python36\lib\urllib\request.py", line 223, in urlopen
return opener.open(url, data, timeout)
File "C:\Python36\lib\urllib\request.py", line 532, in open
response = meth(req, response)
File "C:\Python36\lib\urllib\request.py", line 642, in http_response
'http', request, response, code, msg, hdrs)
File "C:\Python36\lib\urllib\request.py", line 570, in error
return self._call_chain(*args)
File "C:\Python36\lib\urllib\request.py", line 504, in _call_chain
result = func(*args)
File "C:\Python36\lib\urllib\request.py", line 650, in http_error_default
raise HTTPError(req.full_url, code, msg, hdrs, fp)
urllib.error.HTTPError: HTTP Error 400: Bad Request
Process finished with exit code 1
Someone please help me fix this mess, thanks.
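One thing worth checking with a 400 like this: the request line must not contain raw spaces or stray backslashes, and the url_b, url_c and url_d fragments above each start with a literal backslash. A minimal, illustrative sketch of letting urllib.parse.urlencode build the query part so every value is percent-encoded (the parameter names here are only illustrative, not a documented Google API):
from urllib.parse import urlencode

# urlencode percent-encodes every value, so spaces and punctuation
# never reach the request line unescaped.
params = {'q': 'cell phones', 'tbm': 'isch', 'start': 0}
url = 'https://www.google.com/search?' + urlencode(params)
print(url)  # https://www.google.com/search?q=cell+phones&tbm=isch&start=0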

urllib not working with api request

I'm trying to write this code where I request information from openweathermap.org through an API and print the temperature and location at the current time.
Most of the code is kind of a mix of the tips I could find on the internet.
Now I get this error and I'm stuck. Can anyone help me onto the right path again?
Here's my code:
import urllib.request, urllib.parse, urllib.error
import json
while True:
    zipcode = input('Enter zipcode: ')
    if len(zipcode) < 1: break
    url = 'http://api.openweathermap.org/data/2.5/weather?zip='+zipcode+',nl&appid=db071ece9a338a36e9d7a660ec4f0e37?'
    print('Retrieving', url)
    uh = urllib.request.urlopen(url)
    data = uh.read().decode()
    print('Retrieved', len(data), 'characters')
    try:
        js = json.loads(data)
    except:
        js = None
    temp = js["main"]["temp"]
    loc = js["name"]
    print("temperatuur:", temp)
    print("locatie:", loc)
So the url is this: http://api.openweathermap.org/data/2.5/weather?zip=3032,nl&appid=db071ece9a338a36e9d7a660ec4f0e37
The error I'm getting is:
Enter zipcode: 3343
Retrieving http://api.openweathermap.org/data/2.5/weather?zip=3343,nl&appid=db071ece9a338a36e9d7a660ec4f0e37?
Traceback (most recent call last):
File "weatherapi2.py", line 12, in
uh = urllib.request.urlopen(url)
File "C:\Users\ErfanNariman\AppData\Local\Programs\Python\Python36\lib\urllib\request.py", line 223, in urlopen
return opener.open(url, data, timeout)
File "C:\Users\ErfanNariman\AppData\Local\Programs\Python\Python36\lib\urllib\request.py", line 532, in open
response = meth(req, response)
File "C:\Users\ErfanNariman\AppData\Local\Programs\Python\Python36\lib\urllib\request.py", line 642, in http_response
'http', request, response, code, msg, hdrs)
File "C:\Users\ErfanNariman\AppData\Local\Programs\Python\Python36\lib\urllib\request.py", line 570, in error
return self._call_chain(*args)
File "C:\Users\ErfanNariman\AppData\Local\Programs\Python\Python36\lib\urllib\request.py", line 504, in _call_chain
result = func(*args)
File "C:\Users\ErfanNariman\AppData\Local\Programs\Python\Python36\lib\urllib\request.py", line 650, in http_error_default
raise HTTPError(req.full_url, code, msg, hdrs, fp)
urllib.error.HTTPError: HTTP Error 401: Unauthorized
After some troubleshooting I found your issue. You have an extra '?' at the end of the URL in your Python code.
Remove that and your request works fine. I tried it with this code and it worked:
import urllib.request, urllib.parse, urllib.error
import json
while True:
    zipcode = input('Enter zipcode: ')
    if len(zipcode) < 1: break
    url = 'http://api.openweathermap.org/data/2.5/weather?zip='+zipcode+',nl&appid=db071ece9a338a36e9d7a660ec4f0e37'
    print('Retrieving', url)
    uh = urllib.request.urlopen(url)
    data = uh.read().decode()
    print('Retrieved', len(data), 'characters')
    try:
        js = json.loads(data)
    except:
        js = None
    temp = js["main"]["temp"]
    loc = js["name"]
    print("temperatuur:", temp)
    print("locatie:", loc)

Using Python Script to post data to web server

I am using Python 2.7.3 and I am trying to post data to my local web server. The data I am posting is temperature readings from my Raspberry Pi. I know the URL is right because if I use the Postman Chrome plugin the data is posted successfully and I get a return message. In Postman I can only use form-data, though, and NOT x-www-form-urlencoded, which is how my Python script has the content type set up. Can I change it to form-data?
Python Code:
import os
import glob
import time
import threading
import urllib
import urllib2
os.system('modprobe w1-gpio')
os.system('modprobe w1-therm')
base_dir = '/sys/bus/w1/devices/'
device_folder = glob.glob(base_dir + '28*')[0]
device_file = device_folder + '/w1_slave'
def read_temp_raw():
    f = open(device_file, 'r')
    lines = f.readlines()
    f.close()
    return lines

def read_temp():
    lines = read_temp_raw()
    while lines[0].strip()[-3:] != 'YES':
        time.sleep(0.2)
        lines = read_temp_raw()
    equals_pos = lines[1].find('t=')
    if equals_pos != -1:
        temp_string = lines[1][equals_pos+2:]
        temp_c = float(temp_string) / 1000.0
        temp_f = temp_c * 9.0 / 5.0 + 32.0
        temperature = {'tempf': temp_f, 'tempc': temp_c}
        return temperature

def post():
    threading.Timer(1800.0, post).start()
    temperature = read_temp()
    data = temperature
    data['room'] = 'Server Room'
    print(data)
    data = urllib.urlencode(data)
    path = 'http://client.pathtophppage'  # the url you want to POST to
    req = urllib2.Request(path, data)
    req.add_header("Content-type", "application/x-www-form-urlencoded")
    page = urllib2.urlopen(req).read()

post()
And the error:
pi#raspberrypi ~/Documents $ python Temperature.py
{'tempc': 22.0, 'tempf': 71.6, 'room': 'Server Room'}
Traceback (most recent call last):
File "Temperature.py", line 49, in <module>
post()
File "Temperature.py", line 45, in post
page=urllib2.urlopen(req).read()
File "/usr/lib/python2.7/urllib2.py", line 127, in urlopen
return _opener.open(url, data, timeout)
File "/usr/lib/python2.7/urllib2.py", line 407, in open
response = meth(req, response)
File "/usr/lib/python2.7/urllib2.py", line 520, in http_response
'http', request, response, code, msg, hdrs)
File "/usr/lib/python2.7/urllib2.py", line 439, in error
result = self._call_chain(*args)
File "/usr/lib/python2.7/urllib2.py", line 379, in _call_chain
result = func(*args)
File "/usr/lib/python2.7/urllib2.py", line 626, in http_error_302
return self.parent.open(new, timeout=req.timeout)
File "/usr/lib/python2.7/urllib2.py", line 407, in open
response = meth(req, response)
File "/usr/lib/python2.7/urllib2.py", line 520, in http_response
'http', request, response, code, msg, hdrs)
File "/usr/lib/python2.7/urllib2.py", line 445, in error
return self._call_chain(*args)
File "/usr/lib/python2.7/urllib2.py", line 379, in _call_chain
result = func(*args)
File "/usr/lib/python2.7/urllib2.py", line 528, in http_error_default
raise HTTPError(req.get_full_url(), code, msg, hdrs, fp)
urllib2.HTTPError: HTTP Error 500: Internal Server Error
Save your time and use the requests lib for HTTP requests. A simple example:
import requests
url = 'http://url.com'
query = {'field': value}
res = requests.post(url, data=query)
print(res.text)
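If the server really does expect multipart form-data (which is what Postman's form-data option sends, as opposed to x-www-form-urlencoded), requests can produce that too by passing the fields through files= with (None, value) tuples. A minimal sketch, with the placeholder URL from the question and field names taken from the temperature dict:
import requests

url = 'http://client.pathtophppage'  # placeholder URL from the question
fields = {'tempf': '71.6', 'tempc': '22.0', 'room': 'Server Room'}

# files= with (None, value) tuples sends each entry as a plain form field
# in a multipart/form-data body instead of application/x-www-form-urlencoded.
res = requests.post(url, files={k: (None, v) for k, v in fields.items()})
print(res.status_code, res.text)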

urllib2.HTTPError: HTTP Error 500: Internal Server Error

if data.find('!exploits') != -1:
    nick = data.split('!')[0].replace(':', '')
    results = api.exploitdb.search(arg)
    sck.send('PRIVMSG ' + chan + " :" + ' Results found: %s' % results['total'] + '\r\n')
    for exploit in results['matches'][:5]:
        sck.send('PRIVMSG ' + chan + "" + '%s:' % (exploit['description'] + '\r\n'))
This little script searches exploit-db for known exploits, but it seems not to work when I try to use it within IRC. It's fine when I run it alone,
and by alone I mean just this:
from shodan import WebAPI
SHODAN_API_KEY = "MY API KEY"
api = WebAPI(SHODAN_API_KEY)
results = api.exploitdb.search('PHP')
print 'Results found: %s' % results['total']
for exploit in results['matches'][:5]:
    print '%s:' % (exploit['description'])
That one works perfectly, but when I use it with IRC
I get this error:
Traceback (most recent call last):
File "C:\Users\Rabia\Documents\scripts\client.py", line 232, in <module>
results = api.exploitdb.search(arg)
File "C:\Python26\lib\site-packages\shodan\api.py", line 63, in search
return self.parent._request('exploitdb/search', dict(q=query, **kwargs))
File "C:\Python26\lib\site-packages\shodan\api.py", line 116, in _request
data = urlopen(self.base_url + function + '?' + urlencode(params)).read()
File "C:\Python26\lib\urllib2.py", line 126, in urlopen
return _opener.open(url, data, timeout)
File "C:\Python26\lib\urllib2.py", line 397, in open
response = meth(req, response)
File "C:\Python26\lib\urllib2.py", line 510, in http_response
'http', request, response, code, msg, hdrs)
File "C:\Python26\lib\urllib2.py", line 435, in error
return self._call_chain(*args)
File "C:\Python26\lib\urllib2.py", line 369, in _call_chain
result = func(*args)
File "C:\Python26\lib\urllib2.py", line 518, in http_error_default
raise HTTPError(req.get_full_url(), code, msg, hdrs, fp)
urllib2.HTTPError: HTTP Error 500: Internal Server Error
Your code should be OK. That should be a problem on the server side:
Internal Error - the server could not fulfill the request because of an unexpected condition.
You can check the returned status code to see what it means.
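To see what the server actually returned, you can catch the HTTPError around the search call and inspect its status code and body. A minimal sketch for this Python 2 / urllib2 setup; api and the query mirror the variables in the snippet above.
import urllib2

def safe_search(api, query):
    # api is the shodan WebAPI instance from the question.
    try:
        return api.exploitdb.search(query)
    except urllib2.HTTPError as e:
        # e.code is the HTTP status; e.read() is the server's error body.
        print('Server returned %s: %s' % (e.code, e.read()))
        return None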
