I'm trying to connect to one webpage. This is my code:
class AddNewSite():
def __init__(self, url, user=None, email=None, category=None,
subcategory=None, date_end=None):
self.url = url
self.user = user
self.email = email
req = Request(self.url, headers={'user-agent': 'Mozilla/5.0
(Macintosh; Intel Mac OS X 10_9_3) \
AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.47 \
Safari/537.36'})
self.page = urlopen(req).read()
self.soup = BeautifulSoup(self.page)
self.meta = self.soup.find_all('meta')
In my views.py (It is a Django project) I have:
try:
page = AddNewSite(siteurl, user)
except Exception as e:
print(e)
messages.add_message(request, messages.ERROR,
'ERROR')
return render(request, 'mainapp/subcategory.html', context)
Everything works fine on localhost. It connects to site without eny errors. On production server I get an exception. Below I paste error from logs.
HTTPConnectionPool(host='exeo.pl', port=80): Max retries exceeded with url:
/ (Caused by
NewConnectionError('<requests.packages.urllib3.connection.HTTPConnection
object at 0x80897ff98>: Failed to establish a new connection: [Errno 60]
Operation timed out',))
HTTPConnectionPool(host='exeo.pl', port=80): Max retries exceeded with url:
/ (Caused by
NewConnectionError('<requests.packages.urllib3.connection.HTTPConnection
object at 0x80940aeb8>: Failed to establish a new connection: [Errno 60]
Operation timed out',))
HTTPConnectionPool(host='exeo.pl', port=80): Max retries exceeded with url:
/ (Caused by
NewConnectionError('<requests.packages.urllib3.connection.HTTPConnection
object at 0x80949fdd8>: Failed to establish a new connection: [Errno 60]
Operation timed out',))
<urllib.request.Request object at 0x808b92048>
Why problem exists only on production server?
Related
I'm trying to mock an http request that uses an http proxy
import requests
import responses
import logging
import http.client as http_client
http_client.HTTPConnection.debuglevel = 1
logging.basicConfig()
logging.getLogger().setLevel(logging.DEBUG)
requests_log = logging.getLogger("requests.packages.urllib3")
requests_log.setLevel(logging.DEBUG)
requests_log.propagate = True
proxies = {
'http': 'http://proxy_host.com/'
}
headers = {'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36'}
responses.add(responses.GET, 'http://proxy_host.com/', body = "Proxy request succeeded", status=200)
responses.add(responses.GET, 'http://actual_host.com/', body = "Actual request succeeded", status=200)
response = requests.get('http://actual_host.com/', proxies = proxies, headers = headers)
I get this error message:
requests.exceptions.ProxyError: HTTPConnectionPool(host='proxy_host.com', port=80): Max retries exceeded with url: http://actual_host.com/ (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError('<urllib3.connection.HTTPConnection object at 0x10ab62820>: Failed to establish a new connection: [Errno 8] nodename nor servname provided, or not known')))
I'm not sure if there is anything else that I'm supposed to add to my code to make this work. The documentation for the responses library doesn't make any mention of proxying, so I'm not sure if this is even possible
I'm trying to use requests to connect my python client to HP ALM so I can export Defects and Requirements.
My problem is that when I try to connect to ALM I get this error.
requests.exceptions.SSLError: HTTPSConnectionPool(host='hpalm.xxx.com', port=443): Max retries exceeded with url: /qcbin/authentication-point/authenticate (Caused by SSLError(SSLError("bad handshake: Error([('SSL routines', 'tls_process_server_certificate', 'certificate verify failed')])")))
my function is the following
def connection():
#HardCoded login to be deleted when tested and able to run with login verification
user = userPass.user()
pwd = userPass.passwd()
#Not sure if needed, need to test
encPwd = base64.standard_b64encode(pwd.encode('utf-8'))
print(encPwd)
userToEncode=user+':'+pwd
print("user2Encode : {0}", userToEncode)
#requests lib to ALM connection
headers = {
'cache-control': "no-cache",
'Accept': "application/json",
'Content-Type': "application/json"
}
authurl = almURL + "/authentication-point/authenticate"
res = requests.post(authurl, auth=HTTPBasicAuth(user,encPwd),headers = headers)
so far I've tried to follow this example:https://github.com/vkosuri/py-hpalm/blob/master/hpalm/hpalm.py
but when I do the verify=False I get :
InsecureRequestWarning: Unverified HTTPS request is being made. Adding certificate verification is strongly advised. See: https://urllib3.readthedocs.io/en/latest/advanced-usage.html#ssl-warnings
InsecureRequestWarning)
and the following print of the log:
{'Date': 'Fri, 05 Mar 2021 17:36:51 GMT', 'X-Frame-Options': 'SAMEORIGIN', 'Content-Type': 'text/html; charset=ISO-8859-1', 'Cache-Control': 'must-revalidate,no-cache,no-store', 'Content-Length': '5937', 'Connection': 'close'}
any ideas what I'm doing wrong?
Thank you
There are alot of answers here on stack overflow about connecting to QC.
I would recommended you go through,
https://github.com/macroking/ALM-Integration/blob/master/ALM_Integration_Util.py
HP ALM results attachment and status update using python
These will help you understand the login process
I'm trying to fetch a request from urllib3 and my code works. However, few websites like https://hackershala.com and etc which uses different TLS version are not being able to be fetched.
I tried changing useragent, but it didn't work for obvious reasons.
urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='hackershala.com', port=443): Max retries exceeded with url: / (Caused by SSLError(SSLError(1, '[SSL: TLSV1_ALERT_PROTOCOL_VERSION] tlsv1 alert protocol version (_ssl.c:852)'),))
My code is
import urllib3
http = urllib3.PoolManager()
url = input("Website URL: ")
r = http.request("GET", url, headers={
'User-Agent': 'Mozilla/5.0'
})
rp = r.status
print(rp)
You should probably add this line to configure your urllib3
import requests
import urllib3
requests.packages.urllib3.util.ssl_.DEFAULT_CIPHERS = 'ALL:#SECLEVEL=1'
I am trying to fill google form using python script. I am getting error because of ssl module.
Here is code that I wrote
def send_attendance(url, data):
"""It takes google form url which is to be submitted and also data which is a list of data to be submitted in the form iteratively."""
user_agent = 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0)'
header={'User-Agent' : user_agent}
for d in data:
try:
requests.get(url,data=d,headers=header),
print("Form Submitted.")
time.sleep(5)
except Exception as err:
print("Error occured {}".format(err))
Error occured HTTPSConnectionPool(host='docs.google.com', port=443): Max retries exceeded with url: /forms/d/1JxbxYl7ZnWTEKtYqVMQJOi_6_cZvTYrDbGsPeNNnGSY/formResponse (Caused by SSLError("Can't connect to HTTPS URL because the SSL module is not available."))
I am trying to a web-scraping. Firstly the code was working but later it does not. The code is
import requests
import hashlib
from bs4 import BeautifulSoup
def sha512(x):
m = hashlib.sha512(x.encode())
return m.hexdigest()
session = requests.Session()
session.cookies["user-agent"] = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.117 Safari/537.36"
r = session.post("https://ringzer0ctf.com/login", data={"username":"myusername","password":"mypass"})
r = session.get("https://ringzeractf.com/challenges/13")
soup = BeautifulSoup(r.text, 'html.parser')
It gives error like
requests.exceptions.ConnectionError: HTTPSConnectionPool(host='ringzeractf.com', port=443): Max retries exceeded
with url: /challenges/13 (Caused by NewConnectionError('<urllib3.connection.VerifiedHTTPSConnection object at 0x04228490>: Failed to establish a new connection: [Errno 11001] getaddrinfo failed'))
Your URL in the GET request is wrong. Change ringzeractf to ringzer0ctf