How to test twisted web resource with trial? - python

I'm developing a twisted.web server - it consists of some resources that, apart from rendering stuff, use adbapi to fetch some data and write some data to a postgresql database. I'm trying to figure out how to write a trial unit test that would test resource rendering without using the network (in other words: one that would initialize a resource, feed it a dummy request, etc.).
Let's assume the View resource is a simple leaf whose render_GET returns NOT_DONE_YET and uses adbapi to produce simple text as a result. Now, I've written this useless code and I can't figure out how to make it actually initialize the resource and produce a sensible response:
from twisted.trial import unittest
from myserv.views import View
from twisted.web.test.test_web import DummyRequest

class ExistingView(unittest.TestCase):

    def test_rendering(self):
        slug = "hello_world"
        view = View(slug)
        request = DummyRequest([''])
        output = view.render_GET(request)
        self.assertEqual(request.responseCode, 200)
The output is... 1. I've also tried another approach, output = request.render(view), but I get the same output = 1. Why? I'd be very grateful for an example of how to write such a unit test!

The 1 you are seeing is just twisted.web.server.NOT_DONE_YET, which happens to be the integer 1. Here's a function that will render a request and convert the result into a Deferred that fires when rendering is complete:
from twisted.internet.defer import succeed
from twisted.web import server

def _render(resource, request):
    result = resource.render(request)
    if isinstance(result, str):
        request.write(result)
        request.finish()
        return succeed(None)
    elif result is server.NOT_DONE_YET:
        if request.finished:
            return succeed(None)
        else:
            return request.notifyFinish()
    else:
        raise ValueError("Unexpected return value: %r" % (result,))
It's actually used in Twisted Web's test suite, but it's private because it has no unit tests itself. ;)
You can use it to write a test like this:
def test_rendering(self):
    slug = "hello_world"
    view = View(slug)
    request = DummyRequest([''])
    d = _render(view, request)

    def rendered(ignored):
        self.assertEquals(request.responseCode, 200)
        self.assertEquals("".join(request.written), "...")
        ...

    d.addCallback(rendered)
    return d

Here is a DummierRequest class that fixes almost all my problems. The only thing left is that it does not set any response code! Why?
from twisted.web.test.test_web import DummyRequest
from twisted.web import server
from twisted.internet.defer import succeed
from twisted.internet import interfaces, reactor, protocol, address
from twisted.web.http_headers import _DictHeaders, Headers

class DummierRequest(DummyRequest):

    def __init__(self, postpath, session=None):
        DummyRequest.__init__(self, postpath, session)
        self.notifications = []
        self.received_cookies = {}
        self.requestHeaders = Headers()
        self.responseHeaders = Headers()
        self.cookies = []  # outgoing cookies

    def setHost(self, host, port, ssl=0):
        self._forceSSL = ssl
        self.requestHeaders.setRawHeaders("host", [host])
        self.host = address.IPv4Address("TCP", host, port)

    def addCookie(self, k, v, expires=None, domain=None, path=None,
                  max_age=None, comment=None, secure=None):
        """
        Set an outgoing HTTP cookie.

        In general, you should consider using sessions instead of cookies, see
        L{twisted.web.server.Request.getSession} and the
        L{twisted.web.server.Session} class for details.
        """
        cookie = '%s=%s' % (k, v)
        if expires is not None:
            cookie = cookie + "; Expires=%s" % expires
        if domain is not None:
            cookie = cookie + "; Domain=%s" % domain
        if path is not None:
            cookie = cookie + "; Path=%s" % path
        if max_age is not None:
            cookie = cookie + "; Max-Age=%s" % max_age
        if comment is not None:
            cookie = cookie + "; Comment=%s" % comment
        if secure:
            cookie = cookie + "; Secure"
        self.cookies.append(cookie)

    def getCookie(self, key):
        """
        Get a cookie that was sent from the network.
        """
        return self.received_cookies.get(key)

    def getClientIP(self):
        """
        Return the IPv4 address of the client which made this request, if there
        is one, otherwise C{None}.
        """
        return "192.168.1.199"

Related

requests - Gateway Timeout

This is a test script to request data from the Rovi API, provided by the API documentation itself.
test.py
import requests
import time
import hashlib
import urllib

class AllMusicGuide(object):

    api_url = 'http://api.rovicorp.com/data/v1.1/descriptor/musicmoods'
    key = 'my key'
    secret = 'secret'

    def _sig(self):
        timestamp = int(time.time())
        m = hashlib.md5()
        m.update(self.key)
        m.update(self.secret)
        m.update(str(timestamp))
        return m.hexdigest()

    def get(self, resource, params=None):
        """Take a dict of params, and return what we get from the api"""
        if not params:
            params = {}
        params = urllib.urlencode(params)
        sig = self._sig()
        url = "%s/%s?apikey=%s&sig=%s&%s" % (self.api_url, resource, self.key, sig, params)
        resp = requests.get(url)
        if resp.status_code != 200:
            # THROW APPROPRIATE ERROR
            print ('unknown err')
        return resp.content
From another script I import the module:
from roviclient.test import AllMusicGuide
and create an instance of the class inside a mood function:
def mood():
    test = AllMusicGuide()
    print (test.get('[moodids=moodids]'))
According to the documentation, the following is the syntax for requests:
descriptor/musicmoods?apikey=apikey&sig=sig [&moodids=moodids] [&format=format] [&country=country] [&language=language]
but running the script I get the following error:
unknown err
<h1>Gateway Timeout</h1>:
what is wrong?
"504, try once more. 502, it went through."
Your code is fine, this is a network issue. "Gateway Timeout" is a 504. The intermediate host handling your request was unable to complete it. It made its own request to another server on your behalf in order to handle yours, but this request took too long and timed out. Usually this is because of network congestion in the backend; if you try a few more times, does it sometimes work?
In any case, I would talk to your network administrator. There could be any number of reasons for this and they should be able to help fix it for you.
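If you want the script to cope with that on its own, a small retry helper is usually enough. This is just a sketch (the retry count, delay, and timeout values are arbitrary, not Rovi-specific advice):

import time
import requests

def get_with_retries(url, retries=3, delay=2):
    """Retry a GET a few times when the gateway returns a transient 5xx."""
    resp = None
    for attempt in range(retries):
        resp = requests.get(url, timeout=30)
        if resp.status_code < 500:
            return resp
        # 502/504 are often transient congestion; wait briefly and try again.
        time.sleep(delay)
    return resp

You could call this from AllMusicGuide.get in place of the bare requests.get.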

Use database to store session instead of Cookie with Flask

I have a Python project using Flask.
I'm using SQLAlchemy (according to this page of the documentation: http://flask.pocoo.org/docs/0.10/patterns/sqlalche) to handle my database actions.
I'm using Flask.session to store the user's information (authentication status, preferences, ...).
Flask's default session behaviour is to store the session in the user's cookie and to sign this cookie with secret_key so users can't alter it, but they can still read it.
I don't like that my users are able to "see" the session's content. Does Flask offer a built-in way to store the session's content in the ORM (SQLAlchemy), or do I have to implement that myself?
Thanks!
This was adapted from http://flask.pocoo.org/snippets/75/.
If you need to store a lot of session data it makes sense to move the data from the cookie to the server. In that case you might want to use redis as the storage backend for the actual session data.
The following code implements a session backend using redis. It allows you to either pass in a redis client or will connect to the redis instance on localhost. All the keys are prefixed with a specified prefix which defaults to session:.
import pickle
from datetime import timedelta
from uuid import uuid4
from redis import Redis
from werkzeug.datastructures import CallbackDict
from flask.sessions import SessionInterface, SessionMixin

class RedisSession(CallbackDict, SessionMixin):

    def __init__(self, initial=None, sid=None, new=False):
        def on_update(self):
            self.modified = True
        CallbackDict.__init__(self, initial, on_update)
        self.sid = sid
        self.new = new
        self.modified = False

class RedisSessionInterface(SessionInterface):
    serializer = pickle
    session_class = RedisSession

    def __init__(self, redis=None, prefix='session:'):
        if redis is None:
            redis = Redis()
        self.redis = redis
        self.prefix = prefix

    def generate_sid(self):
        return str(uuid4())

    def get_redis_expiration_time(self, app, session):
        if session.permanent:
            return app.permanent_session_lifetime
        return timedelta(days=1)

    def open_session(self, app, request):
        sid = request.cookies.get(app.session_cookie_name)
        if not sid:
            sid = self.generate_sid()
            return self.session_class(sid=sid, new=True)
        val = self.redis.get(self.prefix + sid)
        if val is not None:
            data = self.serializer.loads(val)
            return self.session_class(data, sid=sid)
        return self.session_class(sid=sid, new=True)

    def save_session(self, app, session, response):
        domain = self.get_cookie_domain(app)
        if not session:
            self.redis.delete(self.prefix + session.sid)
            if session.modified:
                response.delete_cookie(app.session_cookie_name,
                                       domain=domain)
            return
        redis_exp = self.get_redis_expiration_time(app, session)
        cookie_exp = self.get_expiration_time(app, session)
        val = self.serializer.dumps(dict(session))
        # Note: older redis-py versions take setex(name, value, time), while
        # newer ones take setex(name, time, value) - check your version.
        self.redis.setex(self.prefix + session.sid, val,
                         int(redis_exp.total_seconds()))
        response.set_cookie(app.session_cookie_name, session.sid,
                            expires=cookie_exp, httponly=True,
                            domain=domain)
Here is how to enable it:
app = Flask(__name__)
app.session_interface = RedisSessionInterface()
If you get an attribute error that total_seconds is missing it means you're using a version of Python older than 2.7. In this case you can use this function as a replacement for the total_seconds method:
def total_seconds(td):
    return td.days * 60 * 60 * 24 + td.seconds
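Once the interface is installed, views keep using flask.session exactly as before; only the storage location changes. A quick illustration (the route and keys are made up):

from flask import Flask, session

app = Flask(__name__)
app.session_interface = RedisSessionInterface()

@app.route('/login')
def login():
    # This data now lives in redis under "session:<sid>"; the browser only
    # ever sees the opaque session id in the cookie, not the contents.
    session['authenticated'] = True
    session['preferences'] = {'theme': 'dark'}
    return 'logged in'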

Twisted: Advise on using txredisapi library required

Below is a code example which simply responds to an HTTP GET request with data from Redis:
Request: http://example.com:8888/?auth=zefDWDd5mS7mcbfoDbDDf4eVAKb1nlDmzLwcmhDOeUc
Response: get: u'"True"'
The purpose of this code is to serve as a REST server (that's why I'm using lazyConnectionPool), responding to requests and using data from Redis (read/write).
What I need to do:
1. Run multiple requests to Redis inside render_GET of the IndexHandler (like GET, HMGET, SET, etc.)
2. Run multiple requests in a transaction inside render_GET of the IndexHandler
I've tried multiple ways to do this (including the examples from the txredisapi library), but due to a lack of experience I failed. Could you please advise on questions 1) and 2)?
Thanks in advance.
import txredisapi as redis

from twisted.application import internet
from twisted.application import service
from twisted.web import server
from twisted.web.resource import Resource

class Root(Resource):
    isLeaf = False

class BaseHandler(object):
    isLeaf = True

    def __init__(self, db):
        self.db = db
        Resource.__init__(self)

class IndexHandler(BaseHandler, Resource):

    def _success(self, value, request, message):
        request.write(message % repr(value))
        request.finish()

    def _failure(self, error, request, message):
        request.write(message % str(error))
        request.finish()

    def render_GET(self, request):
        try:
            auth = request.args["auth"][0]
        except:
            request.setResponseCode(404, "not found")
            return ""
        d = self.db.hget(auth, 'user_add')
        d.addCallback(self._success, request, "get: %s\n")
        d.addErrback(self._failure, request, "get failed: %s\n")
        return server.NOT_DONE_YET

# Redis connection parameters
REDIS_HOST = '10.10.0.110'
REDIS_PORT = 6379
REDIS_DB = 1
REDIS_POOL_SIZE = 1
REDIS_RECONNECT = True

# redis connection
_db = redis.lazyConnectionPool(REDIS_HOST, REDIS_PORT, REDIS_DB, REDIS_POOL_SIZE)

# http resources
root = Root()
root.putChild("", IndexHandler(_db))

application = service.Application("web")
srv = internet.TCPServer(8888, server.Site(root), interface="127.0.0.1")
srv.setServiceParent(application)
Regarding the first question:
There are a few ways to make multiple database requests within a single HTTP request.
For example, you can issue several requests at once:
d1 = self.db.hget(auth, 'user_add')
d2 = self.db.get('foo')
Then you can get a callback to trigger when all of these simultaneous requests are finished (see twisted.internet.defer.DeferredList).
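For instance, here is a sketch of collecting both results with DeferredList, reusing the IndexHandler from the question (the keys and error handling are just illustrative):

from twisted.internet.defer import DeferredList
from twisted.web import server

class IndexHandler(BaseHandler, Resource):

    def render_GET(self, request):
        auth = request.args["auth"][0]
        d1 = self.db.hget(auth, 'user_add')
        d2 = self.db.get('foo')
        dl = DeferredList([d1, d2], consumeErrors=True)

        def done(results):
            # results is a list of (success, value) pairs, in the same
            # order as the Deferreds passed to DeferredList.
            (ok1, user_add), (ok2, foo) = results
            if ok1 and ok2:
                request.write("user_add: %r, foo: %r\n" % (user_add, foo))
            else:
                request.setResponseCode(500)
                request.write("redis error\n")
            request.finish()

        dl.addCallback(done)
        return server.NOT_DONE_YET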
Or you can use inlineCallbacks if you need sequential requests. For example:
#inlineCallbacks
def do_redis(self):
foo = yield self.db.get('somekey')
bar = yield self.db.hget(foo, 'bar') # Get 'bar' field of hash foo
But you will need to read more about combining inlineCallbacks with twisted.web (there are SO questions on that topic you should look up).
Regarding question 2:
Transactions are really ugly to do without using inlineCallbacks. There is an example on the txredisapi homepage that shows how to do it with inlineCallbacks.
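I can't test it here, but going by that example (the multi()/commit() calls are lifted from it, so treat the exact API as an assumption about your txredisapi version), a sketch inside a resource could look like this:

from twisted.internet import defer
from twisted.web import server

class CounterHandler(BaseHandler, Resource):

    def render_GET(self, request):
        d = self._bump(request)
        d.addErrback(lambda err: request.write("transaction failed: %s\n" % err))
        d.addBoth(lambda _: request.finish())
        return server.NOT_DONE_YET

    @defer.inlineCallbacks
    def _bump(self, request):
        t = yield self.db.multi()   # MULTI: start queueing commands
        yield t.incr("hits")        # queued, not executed yet
        yield t.incr("misses")
        result = yield t.commit()   # EXEC: returns the replies to all queued commands
        request.write("commit: %r\n" % (result,))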

using cookies with twisted.web.client

I'm trying to make a web client application using twisted but having some trouble with cookies. Does anyone have an example I can look at?
While it's true that getPage doesn't easily allow direct access to the request or response headers (just one example of how getPage isn't a super awesome API), cookies are actually supported.
cookies = {'cookiename': 'cookievalue'}  # cookies you want to send
d = getPage(url, cookies=cookies)

def cbPage(result):
    print 'Look at my cookies:', cookies

d.addCallback(cbPage)
Any cookies in the dictionary when it is passed to getPage will be sent. Any new cookies the server sets in response to the request will be added to the dictionary.
You might have missed this feature when looking at getPage because the getPage signature doesn't have a cookies parameter anywhere in it! However, it does take **kwargs, and this is how cookies is supported: any extra arguments passed to getPage that it doesn't know about itself, it passes on to HTTPClientFactory.__init__. Take a look at that method's signature to see all of the things you can pass to getPage.
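For example (the values are just illustrative), headers and the user agent can be passed the same way, since HTTPClientFactory.__init__ accepts them too:

from twisted.web.client import getPage

cookies = {'session': 'abc123'}
d = getPage('http://example.com/',
            cookies=cookies,                  # forwarded to HTTPClientFactory
            agent='my twisted client',        # likewise
            headers={'Accept': 'text/html'})  # likewise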
It turns out there is no easy way, as far as I can tell.
The headers are stored in twisted.web.client.HTTPClientFactory but are not available from twisted.web.client.getPage(), which is the function designed for pulling back a web page. I ended up rewriting the function:
from twisted.web import client

def getPage(url, contextFactory=None, *args, **kwargs):
    fact = client._makeGetterFactory(
        url,
        client.HTTPClientFactory,
        contextFactory=contextFactory,
        *args, **kwargs)
    return fact.deferred.addCallback(lambda data: (data, fact.response_headers))
from twisted.internet import reactor
from twisted.web import client

def getPage(url, contextFactory=None, *args, **kwargs):
    return client._makeGetterFactory(
        url,
        CustomHTTPClientFactory,
        contextFactory=contextFactory,
        *args, **kwargs).deferred

class CustomHTTPClientFactory(client.HTTPClientFactory):

    def __init__(self, url, method='GET', postdata=None, headers=None,
                 agent="Twisted PageGetter", timeout=0, cookies=None,
                 followRedirect=1, redirectLimit=20):
        client.HTTPClientFactory.__init__(self, url, method, postdata,
                                          headers, agent, timeout, cookies,
                                          followRedirect, redirectLimit)

    def page(self, page):
        if self.waiting:
            self.waiting = 0
            res = {}
            res['page'] = page
            res['headers'] = self.response_headers
            res['cookies'] = self.cookies
            self.deferred.callback(res)

if __name__ == '__main__':
    def cback(result):
        for k in result:
            print k, '==>', result[k]
        reactor.stop()

    def eback(error):
        print error.getTraceback()
        reactor.stop()

    d = getPage('http://example.com', agent='example web client',
                cookies={'some': 'cookie'})
    d.addCallback(cback)
    d.addErrback(eback)
    reactor.run()
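For what it's worth, newer Twisted releases also ship twisted.web.client.CookieAgent, which wraps the Agent API around a standard cookielib jar. A rough sketch, assuming a reasonably recent Twisted:

from cookielib import CookieJar

from twisted.internet import reactor
from twisted.web.client import Agent, CookieAgent, readBody

jar = CookieJar()
agent = CookieAgent(Agent(reactor), jar)
d = agent.request('GET', 'http://example.com/')

def gotResponse(response):
    # Any Set-Cookie headers from the response are now stored in `jar`.
    for cookie in jar:
        print cookie.name, '=', cookie.value
    return readBody(response)

d.addCallback(gotResponse)
d.addBoth(lambda _: reactor.stop())
reactor.run()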

How to "keep-alive" with cookielib and httplib in python?

In Python, I'm using httplib because it keeps the HTTP connection alive (as opposed to urllib(2)). Now I want to use cookielib with httplib, but they seem to hate each other!! (There is no way to interface them together.)
Does anyone know of a solution to that problem?
HTTP handler for urllib2 that supports keep-alive
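That handler comes from the urlgrabber project; assuming its keepalive module is importable (the module and class names below are urlgrabber's, so check your install), it plugs into urllib2 alongside cookielib like this:

import urllib2
import cookielib
from keepalive import HTTPHandler  # urlgrabber's keep-alive handler

jar = cookielib.CookieJar()
opener = urllib2.build_opener(HTTPHandler(), urllib2.HTTPCookieProcessor(jar))

# Requests made through this opener reuse connections and carry cookies.
resp = opener.open('http://example.com/')
print resp.read()[:100]
print [c.name for c in jar]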
You should consider using the Requests library instead at the earliest chance you have to refactor your code. In the meantime:
HACK ALERT! :)
I'd go the other suggested way, but I've done a hack (for different reasons, though) that creates an interface between httplib and cookielib.
What I did was create a fake HTTPRequest with the minimal required set of methods, so that CookieJar would recognize it and process cookies as needed. I used that fake request object, setting all the data needed by cookielib.
Here is the code of the class:
class HTTPRequest( object ):
    """
    Data container for HTTP request (used for cookie processing).
    """
    def __init__( self, host, url, headers={}, secure=False ):
        self._host = host
        self._url = url
        self._secure = secure
        self._headers = {}
        for key, value in headers.items():
            self.add_header(key, value)

    def has_header( self, name ):
        return name in self._headers

    def add_header( self, key, val ):
        self._headers[key.capitalize()] = val

    def add_unredirected_header(self, key, val):
        self._headers[key.capitalize()] = val

    def is_unverifiable( self ):
        return True

    def get_type( self ):
        return 'https' if self._secure else 'http'

    def get_full_url( self ):
        port_str = ""
        port = self._host[1]
        # Only append the port when it is not the default for the scheme.
        if self._secure:
            if port != 443:
                port_str = ":" + str(port)
        else:
            if port != 80:
                port_str = ":" + str(port)
        return self.get_type() + '://' + self._host[0] + port_str + self._url

    def get_header( self, header_name, default=None ):
        return self._headers.get( header_name, default )

    def get_host( self ):
        return self._host[0]

    get_origin_req_host = get_host

    def get_headers( self ):
        return self._headers
Please note, the class has support for HTTPS protocol only (all I needed at the moment).
The code which used this class was as follows (note another hack to make the response compatible with cookielib):
cookies = CookieJar()

headers = {
    # headers that you wish to set
}

# construct fake request
fake_request = HTTPRequest( host, request_url, headers )

# add cookies to fake request
cookies.add_cookie_header(fake_request)

# issue an httplib.HTTPConnection based request using cookies and headers from the fake request
http_connection.request(type, request_url, body, fake_request.get_headers())
response = http_connection.getresponse()

if response.status == httplib.OK:
    # HACK: pretend we're urllib2 response
    response.info = lambda : response.msg

    # read and store cookies from response
    cookies.extract_cookies(response, fake_request)

    # process response...
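And, as suggested further up, if you can move to the Requests library the whole problem goes away: a Session object keeps connections alive and maintains a cookie jar for you. A minimal sketch (the URLs are placeholders):

import requests

s = requests.Session()  # connection pooling (keep-alive) plus a cookie jar
r1 = s.get('http://example.com/login', params={'user': 'me'})
r2 = s.get('http://example.com/profile')  # reuses the connection, sends the cookies back

print(s.cookies.get_dict())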
