I have code in a Python desktop application that authorizes users before using the AppHarbor API. I am following the steps described in the knowledge base, and my authentication code looks like this:
def OnAuthenticate(self, event):
    client_id = ""          # My App's client id
    client_secret_key = ""  # My App's secret key
    consumer = oauth2.Consumer(key=client_id, secret=client_secret_key)
    request_token_url = "https://appharbor.com/user/authorizations/new?client_id=" + client_id + "&redirect_uri=http://localhost:8095"
    client = oauth2.Client(consumer)
    resp, content = client.request(request_token_url, "GET")
    ...
However, when the request is sent, it fails with the following error:
client.request(request_token_url, "GET")
TypeError: must be string or buffer, not None
Is there something that I am missing here?
Edit: Here is the stack trace that is thrown:
resp, content = client.request(request_token_url, "GET")
File "C:\Python27\Lib\site-packages\oauth2-1.5.211-py2.7.egg\oauth2\__init__.py", line 682, in request
connection_type=connection_type)
File "C:\Python27\lib\site-packages\httplib2-0.7.4-py2.7.egg\httplib2\__init__.py", line 1544, in request
(response, content) = self._request(conn, authority, uri, request_uri, method, body, headers, redirections, cachekey)
File "C:\Python27\lib\site-packages\httplib2-0.7.4-py2.7.egg\httplib2\__init__.py", line 1342, in _request
(response, content) = self.request(location, redirect_method, body=body, headers = headers, redirections = redirections - 1)
File "C:\Python27\Lib\site-packages\oauth2-1.5.211-py2.7.egg\oauth2\__init__.py", line 662, in request
req.sign_request(self.method, self.consumer, self.token)
File "C:\Python27\Lib\site-packages\oauth2-1.5.211-py2.7.egg\oauth2\__init__.py", line 493, in sign_request
self['oauth_body_hash'] = base64.b64encode(sha(self.body).digest())
TypeError: must be string or buffer, not None
Upon debugging into the call, I reached the httplib2._request function, which issued the request
(response, content) = self._conn_request(conn, request_uri, method, body, headers)
This resulted in a 302 response, with the following page presented in the content object:
<html><head><title>Object moved</title></head><body>
<h2>Object moved to <a href="https://appharbor.com/session/new?returnUrl=%2Fuser%
2Fauthorizations%2Fnew%3Foauth_body_hash%3D2jmj7l5rSw0yVb%252FvlWAYkK%252FYBwk%
253D%26oauth_nonce%3D85804131%26oauth_timestamp%3D1340873274%
26oauth_consumer_key%3D26bacb38-ce5a-4699-9342-8e496c16dc49%26oauth_signature_method%
3DHMAC-SHA1%26oauth_version%3D1.0%26redirect_uri%3Dhttp%253A%252F%252Flocalhost%
253A8095%26client_id%3D26bacb38-ce5a-4699-9342-8e496c16dc49%26oauth_signature%
3DXQtYvWIsvML9ZM6Wfs1Wp%252Fy3No8%253D">here</a>.</h2>
</body></html>
The function then followed the redirect, issuing another request with the body set to None, which resulted in the error that was thrown:
(response, content) = self.request(location, redirect_method, body=body, headers = headers, redirections = redirections - 1)
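(For reference, the automatic redirect handling can be switched off so that the 302 can be inspected directly instead of crashing; this is only a sketch, reusing the consumer and request_token_url defined above:)

client = oauth2.Client(consumer)
client.follow_redirects = False   # stop httplib2 from re-issuing the request itself with body=None
resp, content = client.request(request_token_url, "GET")
print resp.status        # 302
print resp['location']   # the URL the browser would have been sent to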
I'm not familiar with the Python lib, but have you considered whether this is because you need to take the user through the three-legged (Twitter-style) flow and use the URL you mention in your question as the authorize_url? Once you have the code, you retrieve the token by POSTing to this URL: https://appharbor.com/tokens.
You might also want to take a closer look at the desktop OAuth .NET sample to get a better understanding of how this works.
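For what it's worth, the token exchange described above might look roughly like the sketch below. This is only an illustration: the form-encoded POST and the parameter names (client_id, client_secret, code) are assumptions based on the URLs in the question, not taken from the AppHarbor docs.

import urllib
import httplib2

def exchange_code_for_token(client_id, client_secret_key, authorization_code):
    # authorization_code is the value AppHarbor appends to the
    # http://localhost:8095 redirect after the user approves access
    body = urllib.urlencode({
        'client_id': client_id,
        'client_secret': client_secret_key,
        'code': authorization_code,
    })
    h = httplib2.Http()
    resp, content = h.request(
        "https://appharbor.com/tokens",
        "POST",
        body=body,
        headers={'Content-Type': 'application/x-www-form-urlencoded'},
    )
    return resp, content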
I'm using the snippet below to sign the request and get request tokens for the JIRA OAuth process.
import base64
import urlparse
from tlslite.utils import keyfactory
import oauth2 as oauth
consumer_key = 'oauth-sample-consumer'
consumer_secret = 'dont_care'
request_token_url = 'https://localhost:8090/jira/plugins/servlet/oauth/request-token'
access_token_url = 'https://localhost:8090/jira/plugins/servlet/oauth/access-token'
authorize_url = 'https://localhost:8090/jira/plugins/servlet/oauth/authorize'
class SignatureMethod_RSA_SHA1(oauth.SignatureMethod):
    name = 'RSA-SHA1'

    def signing_base(self, request, consumer, token):
        if not hasattr(request, 'normalized_url') or request.normalized_url is None:
            raise ValueError("Base URL for request is not set.")
        sig = (
            oauth.escape(request.method),
            oauth.escape(request.normalized_url),
            oauth.escape(request.get_normalized_parameters()),
        )
        key = '%s&' % oauth.escape(consumer.secret)
        if token:
            key += oauth.escape(token.secret)
        raw = '&'.join(sig)
        return key, raw

    def sign(self, request, consumer, token):
        """Builds the base signature string and signs it with the RSA private key."""
        key, raw = self.signing_base(request, consumer, token)
        with open('../rsa.pem', 'r') as f:
            data = f.read()
        privateKeyString = data.strip()
        privatekey = keyfactory.parsePrivateKey(privateKeyString)
        signature = privatekey.hashAndSign(raw)
        return base64.b64encode(signature)

if __name__ == '__main__':
    consumer = oauth.Consumer(consumer_key, consumer_secret)
    client = oauth.Client(consumer)
    client.set_signature_method(SignatureMethod_RSA_SHA1())
    resp, content = client.request(request_token_url, "POST")
    if resp['status'] != '200':
        raise Exception("Invalid response %s: %s" % (resp['status'], content))
I have added the public key to the JIRA consumer application. Now executing the above snippet always gives me this error:
Traceback (most recent call last):
File "views.py", line 80, in <module>
resp, content = client.request(request_token_url, "GET")
File "/usr/local/lib/python2.7/dist-packages/oauth2/__init__.py", line 682, in request
connection_type=connection_type)
File "/usr/local/lib/python2.7/dist-packages/httplib2/__init__.py", line 1570, in request
(response, content) = self._request(conn, authority, uri, request_uri, method, body, headers, redirections, cachekey)
File "/usr/local/lib/python2.7/dist-packages/httplib2/__init__.py", line 1317, in _request
(response, content) = self._conn_request(conn, request_uri, method, body, headers)
File "/usr/local/lib/python2.7/dist-packages/httplib2/__init__.py", line 1252, in _conn_request
conn.connect()
File "/usr/local/lib/python2.7/dist-packages/httplib2/__init__.py", line 1044, in connect
raise SSLHandshakeError(e)
httplib2.SSLHandshakeError: [Errno 1] _ssl.c:503: error:14090086:SSL routines:SSL3_GET_SERVER_CERTIFICATE:certificate verify failed
I even deleted my public key and re-entered it in the consumer app to make sure there were no stray whitespace characters.
JIRA doesn't give any option to upload a public key file, so it has to be copied and pasted in by hand.
I got it solved using the certifi package:
sudo pip install certifi
In code:
client.ca_certs = certifi.where()
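Putting that together with the snippet from the question, a minimal sketch looks like this (oauth2.Client builds on httplib2.Http, so it accepts the same ca_certs attribute; whether certifi's bundle actually contains the certificate your JIRA instance presents is a separate question):

import certifi
import oauth2 as oauth

consumer = oauth.Consumer(consumer_key, consumer_secret)
client = oauth.Client(consumer)
client.set_signature_method(SignatureMethod_RSA_SHA1())

# tell httplib2 which CA bundle to use when verifying the server certificate
client.ca_certs = certifi.where()

resp, content = client.request(request_token_url, "POST")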
I am trying to create an automated test in Python for a YouTube API request/response flow, with all of the calls happening in quick succession.
What I have been getting are unstable HTTP responses from the server.
I am not using the same object for every connection, since each call is made in a separate method, but when testing I call them all from the same method (i.e. create, edit, and delete consecutively).
Here is the error that I get:
File "/var/lib/jenkins/shiningpanda/jobs/2a430f4f/virtualenvs/d41d8cd9/local/lib/python2.7/site-packages/oauth2client/client.py", line 490, in new_request
redirections, connection_type)
File "/var/lib/jenkins/shiningpanda/jobs/2a430f4f/virtualenvs/d41d8cd9/local/lib/python2.7/site-packages/httplib2/__init__.py", line 1570, in request
(response, content) = self._request(conn, authority, uri, request_uri, method, body, headers, redirections, cachekey)
File "/var/lib/jenkins/shiningpanda/jobs/2a430f4f/virtualenvs/d41d8cd9/local/lib/python2.7/site-packages/httplib2/__init__.py", line 1317, in _request
(response, content) = self._conn_request(conn, request_uri, method, body, headers)
File "/var/lib/jenkins/shiningpanda/jobs/2a430f4f/virtualenvs/d41d8cd9/local/lib/python2.7/site-packages/httplib2/__init__.py", line 1286, in _conn_request
response = conn.getresponse()
File "/usr/lib/python2.7/httplib.py", line 1018, in getresponse
raise ResponseNotReady()
ResponseNotReady
I was thinking that I should add a time.sleep() delay between each HTTP request?
What do you suggest I do in this case, since I am still learning about this?
Thank you for all the suggestions and help ;)
Basic idea of the code I am using:
yt_service = gdata.youtube.service.YouTubeService()
yt_service.email = 'example@gmail.com'
yt_service.password = 'password'

def GetAndPrintUserUploads(username):
    yt_service = gdata.youtube.service.YouTubeService()
    uri = 'http://gdata.youtube.com/feeds/api/users/%s/uploads' % username
    PrintVideoFeed(yt_service.GetYouTubeVideoFeed(uri))
and to test it I authenticate the user and then print the user's uploads a few times consecutively.
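For reference, the delay idea mentioned above would look something like the sketch below. CreateVideo, EditVideo and DeleteVideo are hypothetical stand-ins for whatever the test actually calls, and whether a pause really avoids ResponseNotReady depends on the underlying cause:

import time

def run_crud_sequence(username):
    for action in (CreateVideo, EditVideo, DeleteVideo):
        action(username)
        time.sleep(2)   # let each response be read fully before the next request goes out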
I recently wrote a Python script that uploads local, newline-delimited JSON files to a BigQuery table. It's very similar to the example provided in the official documentation here. The problem I'm having is that non-ASCII characters in the file I'm trying to upload are making my POST request barf.
Here's the relevant part of the script...
def upload(dataFilePath, loadJob, recipeJSON, logger):
    body = '--xxx\n'
    body += 'Content-Type: application/json; charset=UTF-8\n\n'
    body += loadJob
    body += '\n--xxx\n'
    body += 'Content-Type: application/octet-stream\n\n'

    dataFile = io.open(dataFilePath, 'r', encoding='utf-8')
    body += dataFile.read()
    dataFile.close()

    body += '\n--xxx--\n'

    credentials = buildCredentials(recipeJSON['keyPath'], recipeJSON['accountEmail'])
    http = httplib2.Http()
    http = credentials.authorize(http)
    service = build('bigquery', 'v2', http=http)

    projectId = recipeJSON['projectId']
    url = BIGQUERY_URL_BASE + projectId + "/jobs"
    headers = {'Content-Type': 'multipart/related; boundary=xxx'}

    response, content = http.request(url, method="POST", body=body, headers=headers)
...and here's the stack trace I get when it runs...
Traceback (most recent call last):
File "/usr/local/uploader/upload_data.py", line 179, in <module>
main(sys.argv)
File "/usr/local/uploader/upload_data.py", line 170, in main
if (upload(unprocessedFile, loadJob, recipeJSON, logger)):
File "/usr/local/uploader/upload_data.py", line 100, in upload
response, content = http.request(url, method="POST", body=body, headers=headers)
File "/usr/local/lib/python2.7/site-packages/oauth2client/util.py", line 128, in positional_wrapper
return wrapped(*args, **kwargs)
File "/usr/local/lib/python2.7/site-packages/oauth2client/client.py", line 490, in new_request
redirections, connection_type)
File "/usr/local/lib/python2.7/site-packages/httplib2/__init__.py", line 1570, in request
(response, content) = self._request(conn, authority, uri, request_uri, method, body, headers, redirections, cachekey)
File "/usr/local/lib/python2.7/site-packages/httplib2/__init__.py", line 1317, in _request
(response, content) = self._conn_request(conn, request_uri, method, body, headers)
File "/usr/local/lib/python2.7/site-packages/httplib2/__init__.py", line 1253, in _conn_request
conn.request(method, request_uri, body, headers)
File "/usr/local/lib/python2.7/httplib.py", line 973, in request
self._send_request(method, url, body, headers)
File "/usr/local/lib/python2.7/httplib.py", line 1007, in _send_request
self.endheaders(body)
File "/usr/local/lib/python2.7/httplib.py", line 969, in endheaders
self._send_output(message_body)
File "/usr/local/lib/python2.7/httplib.py", line 833, in _send_output
self.send(message_body)
File "/usr/local/lib/python2.7/httplib.py", line 805, in send
self.sock.sendall(data)
File "/usr/local/lib/python2.7/ssl.py", line 229, in sendall
v = self.send(data[count:])
File "/usr/local/lib/python2.7/ssl.py", line 198, in send
v = self._sslobj.write(data)
UnicodeEncodeError: 'ascii' codec can't encode characters in position 4586-4611: ordinal not in range(128)
I'm using Python 2.7 and the following libraries:
distribute (0.6.36)
google-api-python-client (1.1)
httplib2 (0.8)
oauth2client (1.1)
pyOpenSSL (0.13)
python-gflags (2.0)
wsgiref (0.1.2)
Has anyone else had this problem?
It seems like httplib2's request method takes "body" as a string, which means that it later needs to be encoded before being sent over the wire. I've been searching for a way to override the encoding to UTF-8, but no luck so far.
Thanks in advance!
EDIT:
I was able to resolve this by doing two things:
1.) Reading the contents of my file raw, with no decoding. (I could have also just encoded the "body" in my first attempt above...)
2.) Encoding the url and headers to bytes.
The code ended up looking like this:
def upload(dataFilePath, loadJob, recipeJSON, logger):
    part_one = '--xxx\n'
    part_one += 'Content-Type: application/json; charset=UTF-8\n\n'
    part_one += loadJob
    part_one += '\n--xxx\n'
    part_one += 'Content-Type: application/octet-stream\n\n'

    dataFile = io.open(dataFilePath, 'rb')
    part_two = dataFile.read()
    dataFile.close()

    part_three = '\n--xxx--\n'

    body = part_one.encode('utf-8')
    body += part_two
    body += part_three.encode('utf-8')

    credentials = buildCredentials(recipeJSON['keyPath'], recipeJSON['accountEmail'])
    http = httplib2.Http()
    http = credentials.authorize(http)
    service = build('bigquery', 'v2', http=http)

    projectId = recipeJSON['projectId']
    url = BIGQUERY_URL_BASE + projectId + "/jobs"
    headers = {'Content-Type'.encode('utf-8'): 'multipart/related; boundary=xxx'.encode('utf-8')}

    response, content = http.request(url.encode('utf-8'), method="POST", body=body, headers=headers)
io.open() will open the file as unicode text. Either use plain open(), or use binary mode:
dataFile = io.open(dataFilePath, 'rb')
You are sending the file contents straight out over the network, so you need to send bytes, not unicode. As you found out, mixing unicode and bytes leads to painful errors: Python tries to automatically encode back to bytes using the ASCII codec when concatenating the two different types. There is no need to decode to Unicode at all here.
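A small sketch of the difference, assuming a hypothetical data.json file and Python 2 semantics:

import io

# text mode: io.open() decodes the file, so the result is a unicode object
with io.open('data.json', 'r', encoding='utf-8') as f:
    text_body = f.read()
body = '--xxx\n' + text_body        # concatenation coerces everything to unicode...
# ...and ssl.sendall() later has to encode it with the ASCII codec -> UnicodeEncodeError

# binary mode: the bytes pass through untouched
with open('data.json', 'rb') as f:
    byte_body = f.read()
body = b'--xxx\n' + byte_body + b'\n--xxx--\n'   # stays bytes end to end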
I am trying to use the python-rest-client ( http://code.google.com/p/python-rest-client/wiki/Using_Connection ) to perform testing of some RESTful webservices. Since I'm just learning, I've been pointing my tests at the sample services provided at http://www.predic8.com/rest-demo.htm.
I have no problems with creating entries, updating entries, or retrieving entries (POST and GET requests). When I try make a DELETE request, it fails. I can use the Firefox REST Client to perform DELETE requests and they work. I can also make DELETE requests on other services, but I've been driving myself crazy trying to figure out why it doesn't work in this case. I'm using Python 3 with updated Httplib2, but I also tried Python 2.5 so that I could use the python-rest-client with the included version of Httplib2. I see the same problem in either case.
The code is simple, matching the documented use:
from restful_lib import Connection
self.base_url = "http://www.thomas-bayer.com"
self.conn = Connection(self.base_url)
response = self.conn.request_delete('/sqlrest/CUSTOMER/85')
I've looked at the resulting HTTP requests from the browser tool and from my code and I can't see why one works and the other doesn't. This is the trace I receive:
Traceback (most recent call last):
File "/home/fmk/python/rest-client/src/TestExampleService.py", line 68, in test_CRUD
self.Delete()
File "/home/fmk/python/rest-client/src/TestExampleService.py", line 55, in Delete
response = self.conn.request_delete('/sqlrest/CUSTOMER/85')
File "/home/fmk/python/rest-client/src/restful_lib.py", line 64, in request_delete
return self.request(resource, "delete", args, headers=headers)
File "/home/fmk/python/rest-client/src/restful_lib.py", line 138, in request
resp, content = self.h.request("%s://%s%s" % (self.scheme, self.host, '/'.join(request_path)), method.upper(), body=body, headers=headers )
File "/home/fmk/python/rest-client/src/httplib2/__init__.py", line 1175, in request
(response, content) = self._request(conn, authority, uri, request_uri, method, body, headers, redirections, cachekey)
File "/home/fmk/python/rest-client/src/httplib2/__init__.py", line 931, in _request
(response, content) = self._conn_request(conn, request_uri, method, body, headers)
File "/home/fmk/python/rest-client/src/httplib2/__init__.py", line 897, in _conn_request
response = conn.getresponse()
File "/usr/lib/python3.2/http/client.py", line 1046, in getresponse
response.begin()
File "/usr/lib/python3.2/http/client.py", line 346, in begin
version, status, reason = self._read_status()
File "/usr/lib/python3.2/http/client.py", line 316, in _read_status
raise BadStatusLine(line)
http.client.BadStatusLine: ''
What's breaking? What do I do about it? Actually, I'd settle for advice on debugging it. I've changed the domain in my script and pointed it at my own machine so I could view the request. I've viewed/modified the Firefox requests in BurpProxy to make them match my script requests. The modified Burp requests still work and the Python requests still don't.
Apparently the issue is that the server expects DELETE requests to carry a Content-Length header, even when there is no message body. That's an unusual expectation for a DELETE, but by specifying Content-Length: 0 in the headers, I'm able to perform DELETEs successfully.
Somewhere along the way (in python-rest-client or httplib2), the Content-Length header is wiped out if I try to do:
from restful_lib import Connection
self.base_url = "http://www.thomas-bayer.com"
self.conn = Connection(self.base_url)
response = self.conn.request_delete('/sqlrest/CUSTOMER/85', headers={'Content-Length':'0'})
Just to prove the concept, I went to the point in the stack trace where the request was happening:
File "/home/fmk/python/rest-client/src/httplib2/__init__.py", line 897, in _conn_request
response = conn.getresponse()
I printed the headers parameter there to confirm that the content length wasn't there, then I added:
if method == 'DELETE':
    headers['Content-Length'] = '0'
before the request.
I think the real answer is that the service is wonky, but at least I got to know httplib2 a little better. I've seen some other confused people looking for help with REST and Python, so hopefully I'm not the only one who got something out of this.
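Another way to prove the point, without patching httplib2, is to skip python-rest-client and pass the header to httplib2 directly. A sketch, untested against this particular service:

import httplib2

h = httplib2.Http()
# supply the Content-Length header explicitly, since this server refuses
# DELETE requests that arrive without one
resp, content = h.request(
    'http://www.thomas-bayer.com/sqlrest/CUSTOMER/85',
    'DELETE',
    body='',
    headers={'Content-Length': '0'},
)
print(resp.status)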
The following script correctly produces a 404 response from the server:
#!/usr/bin/env python3
import http.client
h = http.client.HTTPConnection('www.thomas-bayer.com', timeout=10)
h.request('DELETE', '/sqlrest/CUSTOMER/85', headers={'Content-Length': 0})
response = h.getresponse()
print(response.status, response.version)
print(response.info())
print(response.read()[:77])
python -V => 3.2
curl -X DELETE http://www.thomas-bayer.com/sqlrest/CUSTOMER/85
curl: (52) Empty reply from server
The Status-Line is not optional; an HTTP server must return it, or at least send a 411 Length Required response.
curl -H 'Content-length: 0' -X DELETE \
http://www.thomas-bayer.com/sqlrest/CUSTOMER/85
This correctly returns 404.
I have this very simple code to check if a site is up or down.
import httplib2
h = httplib2.Http()
response, content = h.request("http://www.folksdhhkjd.com")
if response.status == 200:
    print "Site is Up"
else:
    print "Site is down"
When I enter a valid URL it properly prints "Site is Up", because the status is 200 as expected. But when I enter an invalid URL, should it not print "Site is down"? Instead it raises an exception like this:
Traceback (most recent call last):
File "C:\Documents and Settings\kripya\Desktop\1.py", line 3, in <module>
response, content = h.request("http://www.folksdhhkjd.com")
File "C:\Python27\lib\site-packages\httplib2\__init__.py", line 1436, in request
(response, content) = self._request(conn, authority, uri, request_uri, method, body, headers, redirections, cachekey)
File "C:\Python27\lib\site-packages\httplib2\__init__.py", line 1188, in _request
(response, content) = self._conn_request(conn, request_uri, method, body, headers)
File "C:\Python27\lib\site-packages\httplib2\__init__.py", line 1129, in _conn_request
raise ServerNotFoundError("Unable to find the server at %s" % conn.host)
ServerNotFoundError: Unable to find the server at www.folksdhhkjd.com
How can I catch this exception and print my custom "Site is down" message instead? Any guidance, please?
EDIT
Also one more question... what is the difference between using
h = httplib2.Http('.cache')
and
h = httplib2.Http()
try:
    response, content = h.request("http://www.folksdhhkjd.com")
    if response.status == 200:
        print "Site is Up"
except httplib2.ServerNotFoundError:
    print "Site is Down"
The issue with your code is that if the host doesn't respond, the request doesn't return ANY status code, and so the library throws an error (I think it's a peculiarity of the library itself, doing some sort of DNS resolution before trying to make the request).
h = httplib2.Http('.cache')
Caches the stuff it retrieves in a directory called .cache so if you do the same request twice it might not have to actually get everything twice; a file starting with a dot is hidden in POSIX filesystems (like on Linux).
h = httplib2.Http()
Doesn't cache its results, so every request is fetched over the network each time.
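A quick way to see the difference (example.com is just a placeholder; whether the second request is actually served from the cache depends on the response's caching headers):

import httplib2

# cached: responses are stored in the '.cache' directory
h_cached = httplib2.Http('.cache')
resp1, content1 = h_cached.request("http://www.example.com/")
resp2, content2 = h_cached.request("http://www.example.com/")
print resp2.fromcache    # True if the second response came from the local cache

# uncached: every request goes out over the network
h_plain = httplib2.Http()
resp3, content3 = h_plain.request("http://www.example.com/")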