Spyne: Why am I getting empty responses for json requests? - python

I have a working application that accepts SOAP requests, processes the requests, forwards the SOAP request to an API, processes the response, and then forwards the response to the client.
I'm trying to change this application so that it will be JSON between my application and the client but still use SOAP between API and my application
Now, it can successfully accept JSON requests from client and send/receive SOAP with API. However, all the responses to client are empty.
The only case that I receive a non-empty response is when there are validation errors with my JSON request.
Here is some code that might be relevant:
# Spyne application: accepts JSON requests and serializes JSON responses.
app = Application([MyServer],
MY_NAMESPACE,
# 'soft' validation reports problems without being as strict as 'lxml'.
in_protocol=JsonDocument(validator='soft'),
out_protocol=JsonDocument())
# Wrap in the Django adapter; csrf_exempt is needed because RPC clients do
# not send Django's CSRF token with their POST bodies.
application_server = csrf_exempt(MyDjangoApplication(app))
definition of MyDjangoApplication
class MyDjangoApplication(DjangoApplication):
# Django view wrapper around spyne's WSGI machinery.
# NOTE(review): indentation was lost when this snippet was pasted; the
# nesting below reproduces the flattened text as-is -- confirm against the
# original project before relying on control flow.
def __call__(self, request, *args, **kwargs):
# Response object that the WSGI start_response callback fills in.
retval = self.HttpResponseObject()
def start_response(status, headers):
# Status is one of spyne.const.http
status, reason = status.split(' ', 1)
retval.status_code = int(status)
for header, value in headers:
retval[header] = value
# Spyne expects a WSGI environ dict; Django keeps one in request.META.
environ = request.META.copy()
if request.method == 'POST':
# RPC call proper: hand the body to spyne for dispatch.
response = self.handle_rpc(environ, start_response)
else:
home_path = reverse('proxy:list_method')
uri = MY_ENDPOINT_URL or request.build_absolute_uri(home_path)
# to generate wsdl content
response = self._WsgiApplication__handle_wsdl_request(environ, start_response, uri)
# Decide whether to render a WSDL, a method detail page, or a listing.
if request.path == home_path and _is_wsdl_request(environ):
fn = None
elif 'method_name' in kwargs:
fn = view_method
else:
fn = list_method
if fn:
return fn(request, app=self, *args, **kwargs)
# Copy spyne's generated payload into the Django response object.
self.set_response(retval, response)
return retval
Definition of MyServer
class MyServer(ServiceBase):
    """Service endpoint exposed through the spyne Application."""

    # NOTE: '#' in the original paste stood for '@' (decorator syntax).
    # The extra check_method() decorator has been dropped: it serialized the
    # return value out-of-band (consuming ctx.out_string), which is why every
    # JSON response came back empty. Let spyne's out_protocol do the work.
    @rpc(MyTestMethodRequest, Sign, _returns=MyTestMethodResponse)
    def TestMethod(ctx, request, signature):
        # BUG FIX: return the declared ComplexModel, not a bare dict, so the
        # out_protocol knows how to serialize the response.
        return MyTestMethodResponse(Data="test")
Definitions of MyTestMethodRequest, MyTestMethodResponse:
class MyTestMethodRequest(ComplexModel):
# Request payload: a single optional string field.
__namespace__ = MY_NAMESPACE
MyString = String(encoding=STR_ENCODING)
class MyTestMethodResponse(ComplexModel):
# Response payload: note the field is named 'Data' (capital D).
__namespace__ = MY_NAMESPACE
Data = String(encoding=STR_ENCODING)
Definition of check_method:
def check_method(error_handler=None):
    """Decorator factory: normalize the ``request``/``signature`` arguments of
    a spyne service method to plain dicts before invoking the method.

    ``error_handler`` is accepted for API compatibility; it is unused here.
    """
    def _check_method(func):
        # functools.wraps preserves __name__, __doc__ and more, replacing the
        # manual attribute copying of the original.
        @functools.wraps(func)
        def __check_method(ctx, request, signature, *args, **kwargs):
            # ComplexModel instances expose their fields via __dict__.
            if hasattr(request, '__dict__'):
                request = request.__dict__
            if hasattr(signature, '__dict__'):
                signature = signature.__dict__
            # BUG FIX: the original additionally called
            # generate_out_string(ctx, [response]) here, serializing the
            # response a second time outside spyne's pipeline; that left
            # ctx.out_string consumed and produced empty JSON responses.
            # Spyne serializes the returned value itself, so just return it.
            return func(ctx, request or {}, signature or {}, *args, **kwargs)
        return __check_method
    return _check_method
Definition of generate_out_string:
def generate_out_string(ctx, objects):
# NOTE(review): overwriting the live context's out_protocol with the
# in_protocol mutates spyne state mid-request; combined with the manual
# serialization below this looks like a cause of the empty responses --
# confirm before keeping.
ctx.out_protocol = ctx.in_protocol
return _generate_out_string(ctx, objects)
def _generate_out_string(ctx, objects):
protocol = ctx.out_protocol
ctx.out_object = objects
protocol.serialize(ctx, protocol.RESPONSE)
protocol.create_out_string(ctx)
out_string = list(ctx.out_string)
return out_string[0] if out_string else ''
Note: Most of these definitions have been simplified (I have removed lines which I think are not relevant)

Looking at the code you posted, I can't say I understand what good all those additional decorators and modifiers around arguments do.
Removing them should fix all of your problems.
So let:
class MyTestMethodRequest(ComplexModel):
# Simplified request model from the answer: plain Unicode, no encoding arg.
__namespace__ = MY_NAMESPACE
MyString = Unicode
class MyTestMethodResponse(ComplexModel):
# Simplified response model; the field name is 'Data' (capital D).
__namespace__ = MY_NAMESPACE
Data = Unicode
Assuming you have the following service:
class MyService(ServiceBase):
    """Service published over both the JSON and the SOAP application."""

    # NOTE: '#' in the original paste stood for '@' (decorator syntax).
    @rpc(MyTestMethodRequest, Sign, _returns=MyTestMethodResponse)
    def TestMethod(ctx, request, signature):
        # BUG FIX: the field on MyTestMethodResponse is named 'Data'
        # (capital D); the original passed data="test", which the model
        # ignores, yielding an empty serialized response.
        return MyTestMethodResponse(Data="test")
You can have:
# The same service published with JSON on both sides.
app_json = Application([MyService],
MY_NAMESPACE,
in_protocol=JsonDocument(validator='soft'),
out_protocol=JsonDocument())
and
# The same service published with SOAP 1.1 on both sides.
app_soap = Application([MyService],
MY_NAMESPACE,
in_protocol=Soap11(validator='lxml'),
out_protocol=Soap11())
which in turn you can pass to DjangoApplication as usual.:
# Wrap each application for Django; both can be mounted on different URLs.
app_json_dja = csrf_exempt(DjangoApplication(app_json))
app_soap_dja = csrf_exempt(DjangoApplication(app_soap))
which in turn you can mount in Django's url router.
I hope this helps!

Related

How Do You Write Files to IPFS via the HTTP API in Python

Could someone demonstrate writing a file to IPFS via the HTTP API (/files/write) and Python?
My code is getting messier every time I modify it.
https://pastebin.com/W9eNz1Pb
def api(*argv, **kwargs):
    """Call the local IPFS HTTP API.

    Positional args become path segments (spaces map to '/'); keyword args
    become query-string parameters. A 'post' kwarg switches to a POST
    request with its value form-encoded as the body. Returns the raw
    response bytes, or an error JSON payload when the daemon is unreachable.
    """
    url = "http://127.0.0.1:5001/api/v0/"
    for arg in argv:
        arg = arg.replace(" ", "/")
        # BUG FIX: the original tested `arg[:-1] != "/"` (everything *but*
        # the last character), so segments rarely got their trailing slash.
        if not arg.endswith("/"):
            arg += "/"
        url += arg
    # Drop the final trailing slash.
    url = url[0:-1]
    if kwargs:
        url += "?"
        for key in kwargs:
            if key != "post":
                url = url + key + "=" + kwargs[key] + "&"
        # Drop the trailing '&' (or the '?' when only 'post' was given).
        url = url[0:-1]
    print(url)
    try:
        if "post" in kwargs:
            print("POST DATA")
            with urllib.request.urlopen(url=url, data=urllib.parse.urlencode(kwargs["post"]).encode("ascii")) as response:
                return response.read()
        else:
            with urllib.request.urlopen(url, timeout=300) as response:
                return response.read()
    # Narrowed from a bare `except:` so Ctrl-C / SystemExit still propagate.
    except Exception:
        return b"""{"ERROR": "CANNOT CONNECT TO IPFS!"}"""
class file(object):
    """Convenience wrapper over the MFS 'files' endpoints for one path."""

    def __init__(self, p):
        # MFS paths are absolute; prepend the leading slash when missing.
        self.p = p if p[0] == "/" else "/" + p

    def read(self):
        """Return the file's contents decoded as text."""
        raw = api("files", "read", arg=self.p)
        return raw.decode()

    def write(self, s, *argv):
        """Write *s* to the path; an optional first vararg is a byte offset
        (otherwise the file is truncated first)."""
        opts = {"arg": self.p, "create": "True", "parents": "True",
                "post": {"Data": s}}
        if argv:
            opts["offset"] = str(argv[0])
        else:
            opts["truncate"] = "True"
        return api("files", "write", **opts)
file.read() works perfectly. But file.write() is being a pain in the rear.
Here's a minimal example to write a file via the /files/write HTTP API in Python:
import requests, urllib
# Local IPFS daemon API endpoint.
NODE = "http://localhost:5001"
FILE_PATH = "./example" # path to file you're trying to add
MFS_PATH = "/example" # mfs path you're trying to write to
# POST the file as multipart/form-data; the MFS target path goes in the
# 'arg' query parameter and must be URL-quoted.
response = requests.post(NODE+"/api/v0/files/write?arg=%s&create=true" % urllib.parse.quote(MFS_PATH), files={FILE_PATH:open(FILE_PATH, 'rb')})
make sure ipfs daemon is running
ipfs init
ipfs daemon
Your URL endpoint is wrong. If you check the documentation, for adding a file the URL should be
url = "http://127.0.0.1:5001/api/v0/add"
Create a function to upload so you can use this logic in other parts of your project:
def add_to_ipfs(filepath):
    """Add a local file to IPFS via the HTTP API and return its gateway URI."""
    from pathlib import Path
    import requests
    # rb means open in binary. read binary
    with Path(filepath).open("rb") as fp:
        image_binary = fp.read()
    # we need to make post request to this endpoint.
    url = "http://127.0.0.1:5001/api/v0/add"
    # check the response object
    response = requests.post(url, files={"file": image_binary})
    ipfs_hash = response.json()["Hash"]
    # "./img/myImage.png" -> "myImage.png": take the last path component.
    # (Simplified from the original `split("/")[-1:][0]`.)
    filename = filepath.split("/")[-1]
    # BUG FIX: the filename query parameter was left as a literal
    # placeholder; it must interpolate the computed filename.
    image_uri = f"https://ipfs.io/ipfs/{ipfs_hash}?filename={filename}"
    print("image uri on ipfs", image_uri)
    return image_uri
this is the Response type from ipfs
{
"Bytes": "<int64>",
"Hash": "<string>",
"Name": "<string>",
"Size": "<string>"
}

How do you send many documents to a Scout Server in Python using the Python Scout Client?

I'm trying to index PDF text with a Python lib called Scout. I have tried doing the same thing with Elasticsearch too. In both cases I can't figure out how to post text to an index in bulk, using Python.
After a lot of research, I believe I need to use async http request. The only problem is, I don't understand async calls nor do I understand what a Scout python 'client' really is. I'm a self-taught programmer and still have many things I don't understand. my thought is the client cant stay open for a loop to keep using the connection. I have seen coding concepts like "await" and "sessions" in many books on programming. However, I don't know how to implement these concepts. Can someone help me write some python code that will successfully post new documents to a running scout server and explain how it's done?
Here is My attempt:
from scout_client import Scout
# import libraries to help read and create PDF
import PyPDF2
from fpdf import FPDF
import base64
import os
from flask import Flask, jsonify, request, render_template, json
# before you start, Run the Server.py file and create a Sqlite DB
# Step one loop though PDF in 'books' folder
# Step one: loop through PDFs in the 'books' folder.
for k in range(14, 15):
    # Open the pdf file.
    read_pdf = PyPDF2.PdfFileReader("books/%s.pdf" % (k))
    # Step two: find out how many pages are in the document.
    num = read_pdf.getNumPages()
    print("PDF pages:", num)
    # Step three: collect the text of every page.
    all_pages = []
    # Step four: create a new index on the Scout server (run once).
    # client.create_index('test3')
    for page in range(num):
        data = read_pdf.getPage(page)
        # page_mode = read_pdf.getPageMode()
        page_text = data.extractText()
        all_pages.append(page_text)
    # Initiate the client from scout_client.py.
    client = Scout('http://localhost:8000')
    # BUG FIX: the original used `i = 1; while i <= num`, which (a) skipped
    # all_pages[0] and (b) raised IndexError on all_pages[num] (lists are
    # 0-based). Iterate the list directly instead.
    for i, page_text in enumerate(all_pages):
        client.create_document(page_text, ['test3'])
        print(i, "....done")
I get an error:
Traceback (most recent call last):
File "test.py", line 37, in <module>
client.create_document(all_pages[i],['test3'])
File "../Searchtest4/scout/scout_client.py", line 149, in create_document
return self.post('/documents/', post_data, attachments)
File "../Searchtest4/scout/scout_client.py", line 53, in post
return self.post_json(url, data)
File "../Searchtest4/scout/scout_client.py", line 63, in post_json
return json.loads(urlopen(request).read().decode('utf8'))
File "../lib/python3.7/urllib/request.py", line 222, in urlopen
return opener.open(url, data, timeout)
File "../lib/python3.7/urllib/request.py", line 531, in open
response = meth(req, response)
File "../lib/python3.7/urllib/request.py", line 641, in http_response
'http', request, response, code, msg, hdrs)
File "../lib/python3.7/urllib/request.py", line 569, in error
return self._call_chain(*args)
File "../lib/python3.7/urllib/request.py", line 503, in _call_chain
result = func(*args)
File "../lib/python3.7/urllib/request.py", line 649, in http_error_default
raise HTTPError(req.full_url, code, msg, hdrs, fp)
**urllib.error.HTTPError: HTTP Error 400: BAD REQUEST**
Here is the server that runs fine (server.py):
import logging
import optparse
import os
import sys
from flask import Flask
from werkzeug.serving import run_simple
from scout.exceptions import InvalidRequestException
from scout.models import database
from scout.models import Attachment
from scout.models import BlobData
from scout.models import Document
from scout.models import Index
from scout.models import IndexDocument
from scout.models import Metadata
from scout.views import register_views
logger = logging.getLogger('scout')
def create_server(config=None, config_file=None):
    """Build and configure the Flask app serving the Scout API.

    Precedence: the config file is applied first, then the ``config`` dict
    (CLI/environment values) overrides it.
    """
    app = Flask(__name__)
    # Configure application using a config file.
    if config_file is not None:
        app.config.from_pyfile(config_file)
    # (Re-)Configure application using command-line switches/environment flags.
    if config is not None:
        app.config.update(config)
    # Initialize the SQLite database.
    initialize_database(app.config.get('DATABASE') or 'scout.db',
                        pragmas=app.config.get('SQLITE_PRAGMAS') or None)
    register_views(app)

    # NOTE: '#' before the decorators in the original paste stood for '@'.
    @app.errorhandler(InvalidRequestException)
    def handle_invalid_request(exc):
        return exc.response()

    @app.before_request
    def connect_database():
        # In-memory databases live for the whole process; only file-backed
        # databases are (re)connected per request.
        if database.database != ':memory:':
            database.connect()

    @app.teardown_request
    def close_database(exc):
        if database.database != ':memory:' and not database.is_closed():
            database.close()

    return app
def initialize_database(database_file, pragmas=None):
# Bind the database object to the SQLite file and create all tables.
database.init(database_file, pragmas=pragmas)
try:
# Older library versions expose an execution_context() manager...
meth = database.execution_context
except AttributeError:
# ...newer versions let the database object itself act as one.
meth = database
with meth:
database.create_tables([
Attachment,
BlobData,
Document,
Index,
IndexDocument,
Metadata])
def run(app):
# Serve with Flask's debug server when DEBUG is set, otherwise with
# werkzeug's threaded run_simple.
if app.config['DEBUG']:
app.run(host=app.config['HOST'], port=app.config['PORT'], debug=True)
else:
run_simple(
hostname=app.config['HOST'],
port=app.config['PORT'],
application=app,
threaded=True)
def panic(s, exit_code=1):
    """Write *s* to stderr in red (ANSI escape codes) and exit the process
    with *exit_code*."""
    message = '\033[91m%s\033[0m\n' % s
    sys.stderr.write(message)
    sys.stderr.flush()
    sys.exit(exit_code)
def get_option_parser():
    """Build the command-line option parser for the Scout server."""
    # Flag/keyword table: one entry per option, registered in order below.
    specs = [
        (('-H', '--host'),
         dict(default='127.0.0.1', dest='host',
              help='The hostname to listen on. Defaults to 127.0.0.1.')),
        (('-p', '--port'),
         dict(default=8000, dest='port',
              help='The port to listen on. Defaults to 8000.', type='int')),
        (('-u', '--url-prefix'),
         dict(dest='url_prefix', help='URL path to prefix Scout API.')),
        (('-s', '--stem'),
         dict(dest='stem', help='Specify stemming algorithm for content.')),
        (('-d', '--debug'),
         dict(action='store_true', dest='debug',
              help='Run Flask app in debug mode.')),
        (('-c', '--config'),
         dict(dest='config', help='Configuration module (python file).')),
        (('--paginate-by',),
         dict(default=50, dest='paginate_by',
              help='Number of documents displayed per page of results, default=50',
              type='int')),
        (('-k', '--api-key'),
         dict(dest='api_key', help='Set the API key required to access Scout.')),
        (('-C', '--cache-size'),
         dict(default=64, dest='cache_size',
              help='SQLite page-cache size (MB). Defaults to 64MB.',
              type='int')),
        (('-f', '--fsync'),
         dict(action='store_true', dest='fsync',
              help='Synchronize database to disk on every write.')),
        (('-j', '--journal-mode'),
         dict(default='wal', dest='journal_mode',
              help='SQLite journal mode. Defaults to WAL (recommended).')),
        (('-l', '--logfile'),
         dict(dest='logfile', help='Log file')),
    ]
    parser = optparse.OptionParser()
    for flags, kwargs in specs:
        parser.add_option(*flags, **kwargs)
    return parser
def parse_options():
    """Parse CLI switches and environment variables, then return the
    configured Flask app via create_server()."""
    opts, positional = get_option_parser().parse_args()

    # Optional log file.
    if opts.logfile:
        logger.addHandler(logging.FileHandler(opts.logfile))

    config_file = os.environ.get('SCOUT_CONFIG') or opts.config
    settings = {'DATABASE': os.environ.get('SCOUT_DATABASE')}

    # Exactly one positional argument (the database path) is accepted; the
    # environment variable is the fallback.
    if len(positional) == 0 and not settings['DATABASE']:
        panic('Error: missing required path to database file.')
    elif len(positional) > 1:
        panic('Error: [%s] only accepts one argument, which is the path '
              'to the database file.' % __file__)
    elif positional:
        settings['DATABASE'] = positional[0]

    # SQLite pragmas derived from the switches.
    pragmas = [('journal_mode', opts.journal_mode)]
    if opts.cache_size:
        pragmas.append(('cache_size', -1024 * opts.cache_size))
    if not opts.fsync:
        pragmas.append(('synchronous', 0))
    settings['SQLITE_PRAGMAS'] = pragmas

    # Handle command-line options. These values will override any values
    # that may have been specified in the config file.
    if opts.api_key:
        settings['AUTHENTICATION'] = opts.api_key
    if opts.debug:
        settings['DEBUG'] = True
    settings['HOST'] = opts.host or '127.0.0.1'
    settings['PORT'] = opts.port or 8000
    settings['URL_PREFIX'] = opts.url_prefix or ''
    if opts.paginate_by:
        if opts.paginate_by < 1 or opts.paginate_by > 1000:
            panic('paginate-by must be between 1 and 1000')
        settings['PAGINATE_BY'] = opts.paginate_by
    if opts.stem:
        if opts.stem not in ('simple', 'porter'):
            panic('Unrecognized stemmer. Must be "porter" or "simple".')
        settings['STEM'] = opts.stem

    return create_server(settings, config_file)
def main():
    """Entry point: build the app from CLI options and serve it."""
    run(parse_options())

if __name__ == '__main__':
    main()
and the so-called client (scout_client.py):
import base64
import json
try:
from email.generator import _make_boundary as choose_boundary
except ImportError:
from mimetools import choose_boundary
import mimetypes
import os
try:
from urllib.parse import urlencode
except ImportError:
from urllib import urlencode
try:
from urllib.request import Request
from urllib.request import urlopen
except ImportError:
from urllib2 import Request
from urllib2 import urlopen
import zlib
ENDPOINT = None
KEY = None
class Scout(object):
# Minimal HTTP client for a Scout search server (written to run on both
# Python 2 and 3 -- see the urllib/mimetools import fallbacks above).
def __init__(self, endpoint=ENDPOINT, key=KEY):
# Normalize the base URL; `key` is sent as an auth header when set.
self.endpoint = endpoint.rstrip('/')
self.key = key
def get_full_url(self, url):
return self.endpoint + url
def get_raw(self, url, **kwargs):
# GET returning raw bytes; kwargs become query-string parameters.
headers = {'Content-Type': 'application/json'}
if self.key:
headers['key'] = self.key
if kwargs:
if '?' not in url:
url += '?'
url += urlencode(kwargs, True)
request = Request(self.get_full_url(url), headers=headers)
fh = urlopen(request)
return fh.read()
def get(self, url, **kwargs):
# GET returning parsed JSON.
return json.loads(self.get_raw(url, **kwargs))
def post(self, url, data=None, files=None):
# Dispatch: multipart upload when files are given, otherwise JSON body.
if files:
return self.post_files(url, data, files)
else:
return self.post_json(url, data)
def post_json(self, url, data=None):
headers = {'Content-Type': 'application/json'}
if self.key:
headers['key'] = self.key
data = json.dumps(data or {})
if not isinstance(data, bytes):
data = data.encode('utf-8')
request = Request(self.get_full_url(url), data=data, headers=headers)
return json.loads(urlopen(request).read().decode('utf8'))
def post_files(self, url, json_data, files=None):
# Hand-rolled multipart/form-data body: one JSON 'data' part plus one
# part per file in `files` (a dict of filename -> file-like object).
if not files or not isinstance(files, dict):
raise ValueError('One or more files is required. Files should be '
'passed as a dictionary of filename: file-like-'
'object.')
boundary = choose_boundary()
form_files = []
for i, (filename, file_obj) in enumerate(files.items()):
try:
data = file_obj.read()
except AttributeError:
# Not file-like: treat the value as raw content.
data = bytes(file_obj)
mimetype = mimetypes.guess_type(filename)[0]
form_files.append((
'file_%s' % i,
filename,
mimetype or 'application/octet-stream',
data))
part_boundary = '--' + boundary
parts = [
part_boundary,
'Content-Disposition: form-data; name="data"',
'',
json.dumps(json_data)]
for field_name, filename, mimetype, data in form_files:
parts.extend((
part_boundary,
'Content-Disposition: file; name="%s"; filename="%s"' % (
field_name, filename),
'Content-Type: %s' % mimetype,
'',
data))
parts.append('--' + boundary + '--')
parts.append('')
headers = {'Content-Type': 'multipart/form-data; boundary=%s' %
boundary}
if self.key:
headers['key'] = self.key
# NOTE(review): on Python 3, `data` read from a binary file is bytes
# while the surrounding parts are str, so this join raises TypeError
# -- confirm and encode/decode consistently if uploads are needed.
data = '\r\n'.join(parts)
if not isinstance(data, bytes):
data = data.encode('utf-8')
request = Request(self.get_full_url(url), data=data, headers=headers)
return json.loads(urlopen(request).read())
def delete(self, url):
headers = {}
if self.key:
headers['key'] = self.key
request = Request(self.get_full_url(url), headers=headers)
# urllib has no native DELETE support; override the method resolver.
request.get_method = lambda: 'DELETE'
fh = urlopen(request)
return json.loads(fh.read())
# --- index operations ---
def get_indexes(self, **kwargs):
return self.get('/', **kwargs)['indexes']
def create_index(self, name):
return self.post('/', {'name': name})
def rename_index(self, old_name, new_name):
return self.post('/%s/' % old_name, {'name': new_name})
def delete_index(self, name):
return self.delete('/%s/' % name)
def get_index(self, name, **kwargs):
return self.get('/%s/' % name, **kwargs)
# --- document operations ---
def get_documents(self, **kwargs):
return self.get('/documents/', **kwargs)
def create_document(self, content, indexes, identifier=None,
attachments=None, **metadata):
# A document may belong to one index (string) or several (list/tuple).
if not isinstance(indexes, (list, tuple)):
indexes = [indexes]
post_data = {
'content': content,
'identifier': identifier,
'indexes': indexes,
'metadata': metadata}
return self.post('/documents/', post_data, attachments)
def update_document(self, document_id=None, content=None, indexes=None,
metadata=None, identifier=None, attachments=None):
if not document_id and not identifier:
raise ValueError('`document_id` must be provided.')
# Only the provided fields are sent, so the server patches selectively.
data = {}
if content is not None:
data['content'] = content
if indexes is not None:
if not isinstance(indexes, (list, tuple)):
indexes = [indexes]
data['indexes'] = indexes
if metadata is not None:
data['metadata'] = metadata
if not data and not attachments:
raise ValueError('Nothing to update.')
return self.post('/documents/%s/' % document_id, data, attachments)
def delete_document(self, document_id=None):
if not document_id:
raise ValueError('`document_id` must be provided.')
return self.delete('/documents/%s/' % document_id)
def get_document(self, document_id=None):
if not document_id:
raise ValueError('`document_id` must be provided.')
return self.get('/documents/%s/' % document_id)
# --- attachment operations ---
def attach_files(self, document_id, attachments):
return self.post_files('/documents/%s/attachments/' % document_id,
{}, attachments)
def detach_file(self, document_id, filename):
return self.delete('/documents/%s/attachments/%s/' %
(document_id, filename))
def update_file(self, document_id, filename, file_object):
return self.post_files('/documents/%s/attachments/%s/' %
(document_id, filename),
{}, {filename: file_object})
def get_attachments(self, document_id, **kwargs):
return self.get('/documents/%s/attachments/' % document_id, **kwargs)
def get_attachment(self, document_id, filename):
return self.get('/documents/%s/attachments/%s/' %
(document_id, filename))
def download_attachment(self, document_id, filename):
return self.get_raw('/documents/%s/attachments/%s/download/' %
(document_id, filename))
def search_attachments(self, **kwargs):
return self.get('/documents/attachments/', **kwargs)
class SearchProvider(object):
# Adapter interface: subclasses describe how to extract searchable data
# from a model instance. content() is always called by SearchSite.store();
# identifier() and metadata() may raise NotImplementedError to opt out.
def content(self, obj):
raise NotImplementedError
def identifier(self, obj):
raise NotImplementedError
def metadata(self, obj):
raise NotImplementedError
class SearchSite(object):
    """Registry mapping model classes to SearchProvider instances, pushing
    their extracted content into one index through *client*."""

    def __init__(self, client, index):
        self.client = client
        self.index = index
        self.registry = {}

    def register(self, model_class, search_provider):
        """Attach an instance of *search_provider* (a class) to *model_class*."""
        providers = self.registry.setdefault(model_class, [])
        providers.append(search_provider())

    def unregister(self, model_class, search_provider=None):
        """Remove all providers for a class, or only those of a given type."""
        if search_provider is None:
            self.registry.pop(model_class, None)
        elif model_class in self.registry:
            kept = [p for p in self.registry[model_class]
                    if not isinstance(p, search_provider)]
            self.registry[model_class] = kept

    def store(self, obj):
        """Index *obj* via every registered provider; False if its type is
        not registered."""
        providers = self.registry.get(type(obj))
        if providers is None:
            return False
        for provider in providers:
            content = provider.content(obj)
            # metadata() and identifier() are optional on providers.
            try:
                metadata = provider.metadata(obj)
            except NotImplementedError:
                metadata = {}
            try:
                metadata['identifier'] = provider.identifier(obj)
            except NotImplementedError:
                pass
            self.client.create_document(content, self.index, **metadata)
        return True

    def remove(self, obj):
        """Delete *obj* from the index via each provider's identifier."""
        if type(obj) not in self.registry:
            return False
        for provider in self.registry[type(obj)]:
            self.client.delete_document(provider.identifier(obj))
        return True
Finally the Documentation for Scout:
https://scout.readthedocs.io/en/latest/server.html#index-detail-index-name
https://charlesleifer.com/blog/meet-scout-a-search-server-powered-by-sqlite/
Any Detailed Help is much appreciated:)
So I found a lib called Scout and... got it to work!
from scout_client import Scout
# import libraries to help read and create PDF
import PyPDF2
from fpdf import FPDF
import base64
import os
from flask import Flask, jsonify, request, render_template, json
client = Scout('http://localhost:8000')
for k in range(7,18):
read_pdf = PyPDF2.PdfFileReader("books/%s.pdf"%(k))
num = read_pdf.getNumPages()
print ("PDF pages:", num)
all_pages = []
for page in range(num):
data = read_pdf.getPage(page)
page_text = data.extractText()
all_pages.append(page_text)
import requests
for z in all_pages:
url = 'http://localhost:8000/documents/'
data = {'content': z, 'indexes': ['test13']}
headers = {
'Content-Type': 'application/json',
}
response = requests.post(url, data=json.dumps(data), headers=headers)
print(response)
I can now loop through as many PDFs as I want locally
Post to the server for indexing
and search for keywords
Now I just need help with Making a basic front end with a search bar that calls data from a JSON response in python and flask.

PyQt: why does sending a PATCH request with JSON data return a null string? - python

I am sending a PATCH request using this code:
# Snippet from inside a Qt widget method (indentation flattened by the paste).
self.sendurl = QtCore.QUrl("http://"+ server + ":" + port + "/path/" + str(i['id']))
self.rdata = {'status': 'online'}
self.rdata = json.dumps(self.rdata)
self.request = QtNetwork.QNetworkRequest()
self.manager = QtNetwork.QNetworkAccessManager()
self.request.setUrl(self.sendurl)
# Header id 0 is QNetworkRequest.ContentTypeHeader.
self.request.setHeader(0, 'application/json')
self.rdata = bytes(self.rdata, 'UTF-8')
self.data = QtCore.QByteArray(self.rdata)
# Custom-verb requests take their body from a QIODevice, hence the buffer.
self.buffer = QtCore.QBuffer()
self.buffer.open(QtCore.QBuffer.ReadWrite)
self.buffer.writeData(self.data)
self.buffer.seek(0)
self.patchbytes = bytes('PATCH', 'UTF-8')
self.patchverb = QtCore.QByteArray(self.patchbytes)
self.response = QtCore.QByteArray()
self.response = self.manager.sendCustomRequest(self.request, self.patchverb, self.buffer)
# NOTE(review): sendCustomRequest is asynchronous; readAll() below runs
# before the reply has arrived, which is why the body is empty -- see the
# accepted fix (wait on manager.finished with a QEventLoop).
self.response = self.response.readAll().data().decode('utf-8')
self.response = str(self.response)
print(self.response)
self.response = json.loads(self.response)
A normal response contains a JSON object like this: {'status':'online', 'request':'ok'}. But I get this:
I think I'm just sending an incorrect request, but I do not understand what's wrong with it. Can anybody help me?
Ok, I found the answer to my question. I just need to wait for the request to complete.
After this line
self.response = self.manager.sendCustomRequest(self.request, self.patchverb, self.buffer)
there should be this code:
# Spin a local event loop until the network reply finishes, so that
# readAll() has data to return.
self.loop = QtCore.QEventLoop()
self.manager.finished.connect(self.loop.exit)
self.loop.exec()

How to get content from tornado future object

I'm really confused by the tornado frame work and the 'future' object.
So I want to get a async response by making a http call
Code is:
class TestAsyncHttp(object):
    """Small wrapper around Tornado's AsyncHTTPClient (question code,
    repaired: '#' in the paste stood for '@', plus syntax/name fixes)."""

    def __init__(self):
        self._http_client = httpclient.AsyncHTTPClient()

    @gen.coroutine
    def get_response(self, params):
        # BUG FIX: the original was missing the ':' after the def line and
        # the comma after endpoint='test'.
        response = yield self._request(
            method='POST',
            endpoint='test',
            data=params,
        )
        raise gen.Return(response)

    @gen.coroutine
    def _request(self, method, endpoint, data):
        url = self._make_url(endpoint)  # this includes the port..
        headers = self._load_headers()
        request = httpclient.HTTPRequest(
            url,
            method=method,
            # BUG FIX: was `headers=header` (undefined name).
            headers=headers,
            # BUG FIX: was json.dump (the file-writing API); json.dumps
            # returns the string needed for the request body.
            body=json.dumps(data),
        )
        response = yield self._http_client.fetch(request)
        raise gen.Return(response)
The thing is, after I finished this one, how can I test it?
I tried to write a scrip which contains...:
import json
# Load a sample request payload from disk.
with open('test/request.json') as json_file:
request_json = json.loads(json_file.read())
def get_response():
x = TestAsyncHttp()
# NOTE(review): `yield` makes this a plain generator, so calling it does
# nothing until iterated -- and the bare name below never even calls it
# (no parentheses). See the accepted answer for the run_sync version.
ret = yield x.get_response(request_json)
body = ret.body
print body['value']
get_response
But then I 'python "path-to-the-script"'
There's nothing output.
If I just stepped into the "python" environment, I got "future" object doesn't have getitem
..How can I get the content from a future..?
Thanks!
Use run_sync to run an async coroutine in a synchronous fashion:
def get_response():
x = TestAsyncHttp()
# run_sync spins the IOLoop until the coroutine completes and returns its
# result; the lambda only exists to forward the request_json argument.
ret = IOLoop.current().run_sync(lambda: x.get_response(request_json))
body = ret.body
print body['value']
The lambda is required here simply to pass the request_json parameter. If get_response took no arguments, you could instead do:
ret = IOLoop.current().run_sync(x.get_response)

Parse Header in Post Call Python

I am using rest client in my mozilla browser to call an auth service.
When i pass my credentials in Body, i get an "auth-token" . I then set this token in the header in the browser HEADERS tab.
I have to parse this header which i am setting in the browser in my python script as a variable. Further, after getting this value in my script i have to authenticate the token for its validity.
However, I am unable to get the token's value in my script. My auth function is ready; I just have to fetch the token.
How should I fetch this token value from the header?
Code:
def check_authentication(auth):
# Validate an auth token against the auth service (Python 2 code).
print "Auth" , auth
chek_auth_url = ("http://10.168.2.161/auth/v/%s" % (auth))
# NOTE(review): this rebinds `auth` from the token string to the HTTP
# response object; returns True on 200, and implicitly None otherwise.
auth = requests.get(chek_auth_url)
if auth.status_code == 200:
return True
I have to pass the token as a paramter in this function and call in this function in main for authentication.
def crossdomain(origin=None, methods=None, headers=None, max_age=21600, attach_to_all=True, automatic_options=True):
# Flask CORS decorator factory (Python 2 code: uses `basestring`).
# Normalize the configuration values up front.
if methods is not None:
methods = ', '.join(sorted(x.upper() for x in methods))
if headers is not None and not isinstance(headers, basestring):
headers = ', '.join(x.upper() for x in headers)
if not isinstance(origin, basestring):
origin = ', '.join(origin)
if isinstance(max_age, timedelta):
max_age = max_age.total_seconds()
def get_methods():
# Advertise either the explicit method list or whatever the default
# OPTIONS response would allow.
if methods is not None:
return methods
options_resp = current_app.make_default_options_response()
return options_resp.headers['allow']
def decorator(f):
def wrapped_function(*args, **kwargs):
if automatic_options and request.method == 'OPTIONS':
resp = current_app.make_default_options_response()
else:
resp = make_response(f(*args, **kwargs))
if not attach_to_all and request.method != 'OPTIONS':
return resp
# Attach the CORS headers to the outgoing response.
h = resp.headers
h['Access-Control-Allow-Origin'] = origin
h['Access-Control-Allow-Methods'] = get_methods()
h['Access-Control-Max-Age'] = str(max_age)
if headers is not None:
h['Access-Control-Allow-Headers'] = headers
#h['Access-Control-Allow-Headers'] = "Content-Type"
return resp
f.provide_automatic_options = False
return update_wrapper(wrapped_function, f)
return decorator
# NOTE(review): the '#' lines below stood for '@' decorators in the paste.
#app.route('/test', methods=['POST', 'OPTIONS'])
#crossdomain(origin='*', headers='Content-Type')
def get_storage():
# The starred lines are the question author's pseudo-code placeholders,
# not runnable Python.
*check_authentication is called here and token is passed as a parameter*
*if token is valid further task i hav to do*
if __name__ == '__main__':
app.run(host='192.168.56.1', port=8080, threaded=True)
Self-Help is the best help..
Finally i found a fix:
The token value is fetched in the variable tokenValue. I can now do my further coding.
# Read the token from the incoming request headers (Python 2 code).
tokenValue = request.headers.get("token")
if tokenValue == None:
return "x-auth-token not passed in header, please pass the token."
else:
print "Token passed is", tokenValue

Categories

Resources