I am implementing a STOMP consumer as a library. By calling this library from another application, I should be able to get the data from ActiveMQ. I am implementing it as below, but I have a problem returning the frame.body: I am not able to retrieve the data from outside the class.
import json
from socket import gethostname
from uuid import uuid1

from twisted.internet import defer

from stompest.async import Stomp
from stompest.async.listener import SubscriptionListener
from stompest.config import StompConfig
from stompest.error import StompConnectionError, StompProtocolError
from stompest.protocol import StompSpec
class Consumer(object):
    """Asynchronous (Twisted/stompest) ActiveMQ consumer.

    Messages are delivered to :meth:`consume` by the SubscriptionListener;
    they are never *returned* from :meth:`run`, which yields a Deferred.
    """

    def __init__(self, amq_uri):
        self.amq_uri = amq_uri
        self.hostname = gethostname()
        self.config = StompConfig(uri=self.amq_uri)

    # FIX: the decorator was commented out, so run() was a plain generator
    # that never executed -- inlineCallbacks is required to drive the yields.
    @defer.inlineCallbacks
    def run(self, in_queue):
        """Connect, subscribe to *in_queue*, and re-subscribe on every drop.

        Runs forever; returns a Deferred (never fires under normal operation).
        """
        client = yield Stomp(self.config)
        headers = {
            StompSpec.ACK_HEADER: StompSpec.ACK_CLIENT_INDIVIDUAL,
            StompSpec.ID_HEADER: self.hostname,
            # Cap how many unacked messages the broker pushes at once.
            'activemq.prefetchSize': '1000',
        }
        yield client.connect(headers=self._return_client_id())
        client.subscribe(
            in_queue,
            headers,
            listener=SubscriptionListener(self.consume)
        )
        # The original duplicated the reconnect logic once before the loop;
        # a single loop covers both the first and every subsequent drop.
        while True:
            try:
                # Block here until the broker connection goes away.
                yield client.disconnected
            except StompProtocolError:
                pass
            except StompConnectionError:
                # Reconnect with a fresh client-id and re-subscribe.
                yield client.connect(headers=self._return_client_id())
                client.subscribe(
                    in_queue,
                    headers,
                    listener=SubscriptionListener(self.consume)
                )

    def _return_client_id(self):
        """Build a unique client-id header value (hostname + uuid1)."""
        client_id = {}
        client_id['client-id'] = gethostname() + '-' + str(uuid1())
        return client_id

    def consume(self, client, frame):
        """Per-frame callback: decode the JSON body.

        To hand `data` to the embedding application, pass a callback or a
        queue into the Consumer -- the value cannot be "returned" from an
        asynchronous callback.
        """
        data = json.loads(frame.body)
        print('Received Message Type {}'.format(type(data)))
        print('Received Message {}'.format(data))
# Call from another application
import Queue
from twisted.internet import reactor

# NOTE(review): Consumer.run is asynchronous -- it returns immediately
# (before any message arrives), so nothing can be read back here directly.
amq_uri = 'tcp://localhost:61613'
in_queue = '/queue/test_queue'
c = Consumer(amq_uri)
c.run(in_queue)
# BUG: `data` is never defined in this scope; it is local to consume().
# Pass a callback or a Queue into Consumer to get the payload out.
print "data is from outside function", data # Should be able to get the data which is returned by consume here
reactor.run()
Can someone please let me know how I can achieve this?
Thanks.
I found a solution to my problem. Instead of using the async stomp library, I used the sync stomp library and implemented it as below:
class Consumer(object):
    """Blocking (synchronous stompest) ActiveMQ consumer."""

    def __init__(self, amq_uri):
        self.amq_uri = amq_uri
        self.hostname = gethostname()
        self.config = StompConfig(uri=self.amq_uri)

    def run(self, in_queue, return_dict):
        """Receive one frame from *in_queue*; store and return its decoded body.

        The decoded payload is placed in ``return_dict['data']`` (so it works
        with a multiprocessing.Manager dict) and also returned.
        """
        client = Stomp(self.config)
        headers = {
            StompSpec.ACK_HEADER: StompSpec.ACK_CLIENT_INDIVIDUAL,
            StompSpec.ID_HEADER: self.hostname
        }
        client.connect()
        client.subscribe(in_queue, headers)
        frame = None
        data = None
        try:
            frame = client.receiveFrame()
            # FIX: the body is an already-serialized JSON string; json.dumps
            # would merely re-quote it. Parse it, as the async consume() does.
            data = json.loads(frame.body)
        except Exception as exc:
            print(exc)
        finally:
            # FIX: original referenced `frame` unconditionally, raising
            # UnboundLocalError (and never disconnecting) when receiveFrame
            # failed. Only ack a frame that was actually received.
            if frame is not None:
                client.ack(frame)
            client.disconnect()
        return_dict['data'] = data
        return data
Related
I am having a Pickle issue with SSL client to server communication using multiprocessing.
I have an SSL client that connects to the server:
SSLClient.py
import socket
import struct
import ssl
import copyreg
from os import path
import socket
import os
from pathlib import Path
from loguru import logger as log
from utils.misc import read_py_config
from datetime import datetime
from cryptography.fernet import Fernet
fernetkey = '1234567'
fernet = Fernet(fernetkey)
class SSLclient:
    """Mutual-TLS client that exchanges length-prefixed JSON with the server."""

    # Class-level defaults; populated per instance in __init__.
    license = None
    licenseencrypted = None
    uuid = None

    def __init__(self):
        try:
            # FIX: joining with the *absolute* segment "/lcl" makes pathlib
            # discard the parent entirely; "lcl" keeps it next to this file.
            path = Path(__file__).parent / "lcl"  # unique license key
            with path.open() as file:
                self.licenseencrypted = file.read().rstrip()
            self.license = fernet.decrypt(str.encode(self.licenseencrypted)).decode('ascii')
            self.host, self.port = "127.0.0.1", 65416
        except Exception as e:
            log.error("Could not decode license key")

    def connect(self):
        """Build the TLS context and connect; return True on success."""
        self.client_crt = os.path.join(os.path.dirname(__file__), 'key/c-.crt')
        self.client_key = os.path.join(os.path.dirname(__file__), 'key/ck-.key')
        self.server_crt = os.path.join(os.path.dirname(__file__), 'key/s-.crt')
        self.sni_hostname = "example.com"
        self._context = ssl.create_default_context(ssl.Purpose.SERVER_AUTH, cafile=self.server_crt)
        self._context.load_cert_chain(certfile=self.client_crt, keyfile=self.client_key)
        self._sock = None
        self._ssock = None
        ## ---- Client Communication Setup ----
        HOST = self.host  # The server's hostname or IP address
        PORT = self.port  # The port used by the server
        try:
            self._sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            self._ssock = self._context.wrap_socket(self._sock, server_side=False, server_hostname=self.sni_hostname)
            self._ssock.connect((HOST, PORT))
            log.info("Socket successfully created")
        except socket.error as err:
            log.error("socket creation failed with error %s" % (err))
            return False
        log.info('Waiting for connection')
        return True

    def closesockconnection(self):
        """Close the TLS socket."""
        self._ssock.close()

    def checkvalidsite(self):
        """Ask the server whether this license/site is active.

        Returns the server-reported `active` flag, or False on EOF/error.
        """
        import json  # FIX: json is used here but was never imported in this module
        # NOTE(review): self.external_ip is never assigned in this class --
        # confirm it is set elsewhere before this method is called.
        jsonobj = {
            "uuid": self.license,
            "ipaddress": self.external_ip,
            "req": "checkvalidsite"
        }
        send_msg(self._ssock, json.dumps(jsonobj).encode('utf-8'))
        active = False
        while True:
            Response = recv_msg(self._ssock)
            if not Response:
                return False
            if Response is not None:
                Response = Response.decode('utf-8')
                Response = json.loads(Response)
                req = Response['req']
                if req == "checkvalidsite":
                    active = Response['active']
                    self.info1 = Response['info1']
                    self.info2 = Response['info2']
                    return active
# ---- To Avoid Message Boundary Problem on top of TCP protocol ----
def send_msg(sock: socket.socket, msg: bytes):  # ---- Use this to send
    """Send *msg* prefixed with its 4-byte big-endian length.

    Framing avoids the TCP message-boundary problem. Errors are logged and
    swallowed (best-effort send). FIX: the annotation previously named the
    `socket` module instead of the `socket.socket` class.
    """
    try:
        # Prefix each message with a 4-byte length (network byte order)
        msg = struct.pack('>I', len(msg)) + msg
        sock.sendall(msg)
    except Exception as e:
        log.error("Sending message " + str(e))
def recv_msg(sock: socket.socket):  # ---- Use this to receive
    """Receive one length-prefixed message.

    Returns the payload (bytearray), None on clean EOF, or False on error;
    callers treat None and False alike via ``if not response``. FIX: the
    annotations previously named the `socket` module, not `socket.socket`.
    """
    try:
        # Read message length and unpack it into an integer
        raw_msglen = recvall(sock, 4)
        if not raw_msglen:
            return None
        msglen = struct.unpack('>I', raw_msglen)[0]
        # Read the message data
        return recvall(sock, msglen)
    except Exception as e:
        log.error("Receiving message " + str(e))
        return False

def recvall(sock: socket.socket, n: int):
    """Receive exactly *n* bytes, or None if EOF arrives first."""
    try:
        data = bytearray()
        while len(data) < n:
            packet = sock.recv(n - len(data))
            if not packet:
                return None
            data.extend(packet)
        return data
    except Exception as e:
        log.error("Receiving all message " + str(e))
        raise Exception(e)
I then have a server that is Multithreaded and accepts the connection and communicates with the client.
Server.py
import socket
import os
from socket import AF_INET, SOCK_STREAM, SO_REUSEADDR, SOL_SOCKET, SHUT_RDWR
import ssl
from os import path
from _thread import *
import struct # Here to convert Python data types into byte streams (in string) and back
import traceback
from threading import Thread
import json
import mysql.connector as mysql
import time
from loguru import logger as log
import threading
from cryptography.fernet import Fernet
fernetkey = '12213423423'
fernet = Fernet(fernetkey)
threadLocal = threading.local()
# ---- To Avoid Message Boundary Problem on top of TCP protocol ----
def send_msg(sock: socket, msg):  # ---- Use this to send
    """Write *msg* framed with a 4-byte big-endian length prefix."""
    try:
        # Length prefix first, so the peer knows where the message ends.
        framed = struct.pack('>I', len(msg)) + msg
        sock.sendall(framed)
    except Exception as e:
        log.error("Error send_msg " + str(e))
def recv_msg(sock: socket):  # ---- Use this to receive
    """Read one length-prefixed message; None on EOF, False on error."""
    try:
        header = recvall(sock, 4)
        if not header:
            return None
        (length,) = struct.unpack('>I', header)
        return recvall(sock, length)
    except Exception as e:
        log.error("Receiving message " + str(e))
        return False

def recvall(sock: socket, n: int):
    """Accumulate exactly *n* bytes from *sock*; None if the peer closes early."""
    try:
        buf = bytearray()
        remaining = n
        while remaining:
            chunk = sock.recv(remaining)
            if not chunk:
                return None
            buf.extend(chunk)
            remaining = n - len(buf)
        return buf
    except Exception as e:
        log.error("Receiving all message " + str(e))
        raise Exception(e)
# ---- Server Communication Setup
class Newclient:
    """Per-connection record: peer address, TLS socket, and client uuid."""

    def __init__(self):
        # All three fields are filled in later by the accept loop.
        self.addr = self.conn = self.uuid = None
class Server:
    """Threaded mutual-TLS server speaking a length-prefixed JSON protocol."""

    def __init__(self):
        self.HOST = '127.0.0.1'  # Standard loopback interface address (localhost)
        self.PORT = 65416        # Port to listen on (non-privileged ports are > 1023)
        self.ThreadCount = 0
        self.threads = []
        self.sock = None

    def checkvalidsite(self, uuid, ipaddress, cursor, db_connection):
        """Look up *uuid* in the license table; return (active, siteid, clientid)."""
        # SECURITY FIX: uuid arrives straight off the network -- use a
        # parameterized query, never string concatenation (SQL injection).
        sql = "select * from myexample where uuid = %s"
        cursor.execute(sql, (uuid,))
        results = cursor.fetchall()
        # FIX: initialize all three so an unknown uuid (zero rows) no longer
        # raises UnboundLocalError at the return.
        active = False
        siteid = None
        clientid = None
        for row in results:
            active = row["active"]
            siteid = row["info1"]
            clientid = row["info2"]
        return active, siteid, clientid

    def Serverthreaded_client(self, newclient):
        """Per-connection loop: read a JSON request, dispatch it, reply."""
        conn = newclient.conn
        try:
            while True:
                data = recv_msg(conn)
                uuid = None
                ipaddress = None
                req = None
                if not data:
                    return False
                if data is not None:
                    data = json.loads(data.decode('utf-8'))
                    uuid = data['uuid']
                    req = data['req']
                if uuid is not None and req is not None:
                    newclient.uuid = uuid
                    # NOTE(review): setupDBConnection/closeDBConnection are
                    # defined elsewhere in this module.
                    cursor, db_connection = setupDBConnection()
                    if req == "checkvalidsite":
                        ipaddress = data['ipaddress']
                        active, info1, info2 = self.checkvalidsite(uuid, ipaddress, cursor, db_connection)
                        data = {
                            "req": "checkvalidsite",
                            "uuid": uuid,
                            "active": active,
                            "info1": info1,
                            "info2": info2
                        }
                    if not data:
                        break
                    send_msg(conn, json.dumps(data).encode('utf-8'))
                    log.info("Server response sent")
                    closeDBConnection(cursor, db_connection)
                else:
                    # Request without uuid/req: ignore it and keep reading.
                    pass
        except Exception as e:
            log.warning(str(e))
            log.warning(traceback.format_exc())
        finally:
            log.info("UUID Closing connection")
            conn.shutdown(socket.SHUT_RDWR)
            conn.close()

    def Serverconnect(self):
        """Build the server-side TLS context, bind, and start listening."""
        try:  # create socket
            self.server_cert = path.join(path.dirname(__file__), "keys/server.crt")
            self.server_key = path.join(path.dirname(__file__), "keys/server.key")
            self.client_cert = path.join(path.dirname(__file__), "keys/client.crt")
            self._context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
            # Require client certificates (mutual TLS).
            self._context.verify_mode = ssl.CERT_REQUIRED
            self._context.load_cert_chain(certfile=self.server_cert, keyfile=self.server_key)
            self._context.load_verify_locations(cafile=self.client_cert)
            self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
            log.info("Socket successfully created")
        except socket.error as err:
            log.warning("socket creation failed with error %s" % (err))
        try:  # bind socket to an address
            self.sock.bind((self.HOST, self.PORT))
        except socket.error as e:
            log.warning(str(e))
        log.info('Waiting for a Connection..')
        self.sock.listen(3)

    def Serverwaitforconnection(self):
        """Accept loop: wrap each connection in TLS and hand it to a thread."""
        while True:
            Client, addr = self.sock.accept()
            conn = self._context.wrap_socket(Client, server_side=True)
            log.info('Connected to: ' + addr[0] + ':' + str(addr[1]))
            log.info("SSL established. Peer: {}".format(conn.getpeercert()))
            newclient = Newclient()
            newclient.addr = addr
            newclient.conn = conn
            thread = Thread(target=self.Serverthreaded_client, args=(newclient,))
            thread.start()
            # NOTE(review): this list holds Newclient records, not Thread objects.
            self.threads.append(newclient)
            self.ThreadCount += 1
            log.info('Thread Number: ' + str(self.ThreadCount))
def startserver():
    # Build the server, bind/listen, then block forever in the accept loop.
    server = Server()
    server.Serverconnect()
    server.Serverwaitforconnection()

# Run the accept loop on a non-daemon thread so the process stays alive
# even after the main thread finishes.
serverthread = Thread(target=startserver)
serverthread.daemon = False
serverthread.start()
The server accepts the connection with SSL then waits for a message. It investigates the message command, executes the respective function and returns the data from the database as a response (checkvalidsite in this example).
All good so far (as far as I can tell).
I also have the main program that calls the SSLClient and connects.
Main program
remoteclient = SSLclient()
successfulconnection = remoteclient.connect()
siteactive = remoteclient.checkvalidsite()
So far all is well. However, the main program also reads in frames from multiple cameras — 20 cameras, for example. To handle the camera load I use multiprocessing: each processor is assigned one or two cameras, depending on the number of cores in the machine.
(code below has been stripped out to simplify reading)
x = range(3, 6)
for n in x:
    # NOTE(review): `processes` is rebound on each iteration, so only the
    # last Process handle is kept (cannot be joined later). Passing
    # `remoteclient` here is what triggers "cannot pickle 'SSLContext'
    # object" -- live sockets and SSL contexts cannot cross a process
    # boundary.
    processes = multiprocessing.Process(target=activateMainProgram, args=(queue1, queue2, queue3, queue4, remoteclient, ))
    processes.start()
When I try pass the remoteclient (SSLClient) as an argument I get the error:
cannot pickle 'SSLContext' object
I then (after reading online) added the code to the SSLClient:
# NOTE(review): this reduces an SSLContext to just its protocol constant
# for pickling, silently dropping loaded certificates, keys and verify
# settings -- and the wrapped SSLSocket itself still cannot be pickled.
def save_sslcontext(obj):
    return obj.__class__, (obj.protocol,)

copyreg.pickle(ssl.SSLContext, save_sslcontext)
but then I get the error:
cannot pickle 'SSLContext' object
There are 2 options I experimented with:
Trying to get the pickle working (which would be ideal) as the processes themselves each need to communicate with the server. So the processes need to call functions from the SSLClient file. But I cannot get over the pickle issue and can't find a solution online
I then placed the remoteclient = SSLClient code outside the main function. Hoping it would run first and then be accessible to the processes. This worked, however what I learnt was that when a process is called (as it does not share memory) it reprocesses the entire file. Meaning if I have 10 processes each with 2 cameras then I would have 10 connections to the server (1 per process). This means on the server side I would also have 10 threads running each connection. Though it works, it seems significantly inefficient.
Being a noob and self taught in Python I am not sure how to resolve the issue and after 3 days, I figured I would reach out for assistance. If I could get assistance with the pickle issue of the SSLClient then I will have one connection that is shared with all processes and 1 thread in the server to deal with them.
P.s. I have cobbled all of the code together myself and being new to Python if you see that I am totally going down the wrong, incorrect, non-professional track, feel free to yell.
Much appreciated.
Update:
If I change the SSLClient code to:
# NOTE(review): rebuilding an SSLContext from only (protocol,) produces a
# context with none of the original certificates or verification state, so
# even when unpickling "works" the connection it backs will not.
def save_sslcontext(obj):
    return obj.__class__, (obj.protocol,)

copyreg.pickle(ssl.SSLContext, save_sslcontext)
Then I get the error:
[WinError 10038] An operation was attempted on something that is not a socket
Not sure what is better..
I tested the sample crawler from 500 Lines. It is a simple crawler with a callback; I replaced the plain socket with SSL and changed the port to 443. The code is below:
class Fetcher:
    # Callback-style non-blocking fetcher driven by a selectors event loop.
    def __init__(self, url):
        self.response = b''  # accumulated raw HTTP response bytes
        self.url = url
        self.sock = None

    def fetch(self):
        global concurrency_achieved
        concurrency_achieved = max(concurrency_achieved, len(urls_todo))
        # NOTE(review): wrapping with SSL *before* connecting and then
        # making the socket non-blocking means the TLS handshake can never
        # complete here -- wrap_socket's implicit handshake needs a
        # connected socket, and a non-blocking one needs explicit
        # do_handshake() retries on SSLWantRead/WriteError. This is the
        # likely reason no response arrives. ssl.wrap_socket is also
        # deprecated in favour of SSLContext.wrap_socket.
        self.sock = ssl.wrap_socket(socket.socket(socket.AF_INET, socket.SOCK_STREAM))
        self.sock.setblocking(False)
        try:
            self.sock.connect(('xkcd.com', 443))
        except BlockingIOError as e:
            # Expected for a non-blocking connect in progress.
            pass
        selector.register(self.sock.fileno(), EVENT_WRITE, self.connected)

    def connected(self, key, mask):
        # Socket became writable: TCP connection established; send request.
        selector.unregister(key.fd)
        get = 'GET {} HTTP/1.0\r\nHost: xkcd.com\r\n\r\n'.format(self.url)
        self.sock.send(get.encode())
        selector.register(self.sock.fileno(), EVENT_READ, self.read_response)

    def read_response(self, key, mask):
        global stopped
        chunk = self.sock.recv(4096)
        if chunk:
            self.response += chunk
        else:
            # Peer closed the connection: the response is complete.
            selector.unregister(key.fd)
            print(self.response)
start = time.time()
fetcher = Fetcher('/')
fetcher.fetch()
# Classic callback event loop: wait for readiness events and dispatch each
# to the callback stored in event_key.data until a handler sets `stopped`.
while not stopped:
    events = selector.select()
    for event_key, event_mask in events:
        callback = event_key.data
        callback(event_key, event_mask)
It cannot get a response, and I don't know why. Maybe I am using SSL in the wrong way. Can anyone help me?
Request from python-qpid-proton client to JMS server through ActiveMQ.
message.body contains a Protobuf.
JMS server can't recognize message.body and raise an exception: "Protocol message contained an invalid tag (zero)."
It works fine with Stomp protocol (but it's another story).
code:
import sys
import socket
import uuid
from time import time
from proton import Message
from proton.handlers import MessagingHandler
from proton.reactor import Container
from proto import common
def make_message(message_body):
    """Wrap a serialized payload in a proton Message carrying the headers
    the JMS consumer expects."""
    msg = Message()
    msg.body = message_body
    msg.correlation_id = 1
    props = {
        'type': 'ru.messages.CommonProtos$LoginRequest',
        'content-length': len(message_body),
        'messageId': str(uuid.uuid4()),
        'errorMsg': '',
        'timestamp': str(int(time())),
        'user': 'vasja'
    }
    msg.properties = props
    return msg
def make_login_message():
    """Build a LoginRequest protobuf and wrap it in an AMQP message."""
    request = common.LoginRequest()
    request.username = "vasja"
    request.password = "123"
    request.clientVersion = "1.1.1"
    request.subscriber = "test"
    request.clientTime = int(time())
    request.pkName = "test"
    request.hostName = "vasja"
    request.ip.append(socket.gethostbyname(socket.gethostname()))
    return make_message(request.SerializeToString())
class AMQPClient(MessagingHandler):
    """Send one login request over AMQP and print the first reply."""

    def __init__(self, server, request, receive=None):
        super(AMQPClient, self).__init__()
        self.server = server    # broker address, e.g. "host:port"
        self.request = request  # address to send the request to
        # NOTE(review): the reply address is request + "receive" (or
        # request + receive) with no separator -- confirm the bare
        # concatenation is intentional.
        self.receive = (
            request + "receive") if receive is None else (request + receive)

    def on_start(self, event):
        # Open the connection and set up both directions before sending.
        conn = event.container.connect(self.server)
        event.container.create_receiver(conn, self.receive)
        event.container.create_sender(conn, self.request)

    def on_sendable(self, event):
        # Credit granted: send the single login message, then close the sender.
        message = make_login_message()
        event.sender.send(message)
        event.sender.close()

    def on_message(self, event):
        # First response ends the session.
        print(event.message.body)
        event.connection.close()
def main():
    # One-shot client: send the login request to "test" and await the reply.
    Container(AMQPClient(server="192.168.77.100:5672", request="test")).run()

if __name__ == "__main__":
    main()
I am using python twisted to get streaming data from twitter streaming api.There are two steps in short. 1) get access_token 2) use access_token to make request for the data.
Step 1 works completely fine, but at step 2 I am getting this error of bad request, status 400. Why is that? I think it's because Twitter uses HTTP/1.1 and Twisted uses HTTP/1.0 by default. If so, how do I upgrade the connection to HTTP/1.1?
EDIT: Here is my error message
HTTP/1.0 400 Bad Request
content-length: 0
date: Sun, 12 Mar 2017 14:57:13 GMT
server: tsa
x-connection-hash: dca361a2b4214ad66203e9912b05cf7f
[Failure instance: Traceback (failure with no frames): <class 'twisted.internet.error.ConnectionDone'>: Connection was closed cleanly.
.
#!/usr/bin/python
import oauth2 as oauth
import urlparse
import time
import webbrowser
from twisted.internet import reactor, protocol, ssl
from twisted.web import http
CONSUMER_KEY = 'xxxx'
CONSUMER_SECRET = 'xxxx'
CONSUMER = oauth.Consumer(CONSUMER_KEY, CONSUMER_SECRET)
ACCESS_TOKEN_FILE = 'OAUTH_ACCESS_TOKEN'
TWITTER_REQUEST_TOKEN_URL = 'https://twitter.com/oauth/request_token'
TWITTER_ACCESS_TOKEN_URL = 'https://twitter.com/oauth/access_token'
TWITTER_AUTHORIZE_URL = 'https://twitter.com/oauth/authorize'
TWITTER_STREAM_API_HOST = 'stream.twitter.com'
TWITTER_STREAM_API_PATH = '/1.1/statuses/sample.json'
class TwitterStreamer(http.HTTPClient):
    # NOTE(review): twisted.web.http.HTTPClient speaks HTTP/1.0, which is the
    # suspected cause of the 400 from Twitter's HTTP/1.1 streaming API (the
    # Agent-based code further down avoids this).
    def connectionMade(self):
        # Emit the GET request with Host, UA, and the signed OAuth header.
        self.sendCommand('GET', self.factory.url)
        self.sendHeader('Host', self.factory.host)
        self.sendHeader('User-Agent', self.factory.agent)
        self.sendHeader('Authorization', self.factory.oauth_header)
        self.endHeaders()

    def handleStatus(self, version, status, message):
        # Anything but 200 is surfaced through the factory's error hook.
        if status != '200':
            self.factory.tweetError(ValueError("bad status"))

    def lineReceived(self, line):
        # Each streamed line is one tweet payload.
        self.factory.tweetReceived(line)

    def connectionLost(self, reason):
        self.factory.tweetError(reason)
class TwitterStreamerFactory(protocol.ClientFactory):
    # Configures TwitterStreamer instances with URL/host/OAuth header and
    # receives their tweet/error callbacks.
    protocol = TwitterStreamer

    def __init__(self, oauth_header):
        self.url = TWITTER_STREAM_API_PATH
        self.agent = 'Twisted/TwitterStreamer'
        self.host = TWITTER_STREAM_API_HOST
        self.oauth_header = oauth_header

    def clientConnectionFailed(self, _, reason):
        self.tweetError(reason)

    def tweetReceived(self, tweet):
        # Placeholder handler: just dump the raw tweet line.
        print tweet

    def tweetError(self, error):
        print error
def save_access_token(key, secret):
    """Write the OAuth key/secret pair to ACCESS_TOKEN_FILE, one per line."""
    with open(ACCESS_TOKEN_FILE, 'w') as f:
        f.writelines(["ACCESS_KEY=%s\n" % key,
                      "ACCESS_SECRET=%s\n" % secret])
def load_access_token():
    """Rebuild an oauth.Token from the pair saved in ACCESS_TOKEN_FILE."""
    with open(ACCESS_TOKEN_FILE) as f:
        lines = f.readlines()
    key_value = lines[0].strip().split('=')[1]
    secret_value = lines[1].strip().split('=')[1]
    return oauth.Token(key=key_value, secret=secret_value)
def fetch_access_token():
    """Return the hard-coded (access_key, access_secret) pair.

    FIX: the original built oauth.Consumer/oauth.Token objects only to read
    the same strings straight back out (and never used `consumer` or the
    CONSUMER_* locals at all); the constants are now returned directly.
    """
    ACCESS_KEY = "xxxxxxx"
    ACCESS_SECRET = "xxxxxxxxx"
    return (ACCESS_KEY, ACCESS_SECRET)
def build_authorization_header(access_token):
    # Sign a GET for the sample-stream URL and return the OAuth1
    # Authorization header value (bytes).
    url = "https://%s%s" % (TWITTER_STREAM_API_HOST, TWITTER_STREAM_API_PATH)
    params = {
        'oauth_version': "1.0",
        'oauth_nonce': oauth.generate_nonce(),
        'oauth_timestamp': str(int(time.time())),
        'oauth_token': access_token.key,
        'oauth_consumer_key': CONSUMER.key
    }
    # Sign the request.
    # For some messed up reason, we need to specify is_form_encoded to prevent
    # the oauth2 library from setting oauth_body_hash which Twitter doesn't like.
    req = oauth.Request(method="GET", url=url, parameters=params, is_form_encoded=True)
    req.sign_request(oauth.SignatureMethod_HMAC_SHA1(), CONSUMER, access_token)
    # Grab the Authorization header
    header = req.to_header()['Authorization'].encode('utf-8')
    print "Authorization header:"
    print " header = %s" % header
    return header
if __name__ == '__main__':
    # Check if we have saved an access token before.
    try:
        f = open(ACCESS_TOKEN_FILE)
        # NOTE(review): the probe handle `f` is never closed on success.
    except IOError:
        # No saved access token. Do the 3-legged OAuth dance and fetch one.
        (access_token_key, access_token_secret) = fetch_access_token()
        # Save the access token for next time.
        save_access_token(access_token_key, access_token_secret)
    # Load access token from disk.
    access_token = load_access_token()
    # Build Authorization header from the access_token.
    auth_header = build_authorization_header(access_token)
    # Twitter stream using the Authorization header.
    twsf = TwitterStreamerFactory(auth_header)
    reactor.connectSSL(TWITTER_STREAM_API_HOST, 443, twsf, ssl.ClientContextFactory())
    reactor.run()
UPDATE: Working code:
import base64, urllib
from twisted.internet import reactor
from twisted.internet.defer import Deferred
from twisted.protocols import basic
from twisted.python.failure import DefaultException
from twisted.web.client import Agent
from twisted.web.http_headers import Headers
import json
import oauth2 as oauth
import time
from twisted.web import server,resource
from twisted.internet import endpoints
from twisted.web.server import Site
CONSUMER_KEY = 'xxxxxxxxxxxx'
CONSUMER_SECRET = 'xxxxxxxxxxxxxx'
TWITTER_STREAM_API_HOST = 'stream.twitter.com'
TWITTER_STREAM_API_PATH = '/1.1/statuses/sample.json'
ACCESS_TOKEN_FILE = 'OAUTH_ACCESS_TOKEN'
CONSUMER = oauth.Consumer(CONSUMER_KEY, CONSUMER_SECRET)
def callback(result):
    # Default success handler: dump each decoded tweet.
    print result

def errback(error):
    # Default failure handler: dump the Failure.
    print error
class StreamingParser(basic.LineReceiver):
    """Feed each streamed line through a fresh Deferred to the user callbacks."""
    delimiter = '\r\n'

    def __init__(self, user_callback, user_errback):
        self.user_callback = user_callback
        self.user_errback = user_errback

    def lineReceived(self, line):
        # One Deferred per line: JSON-decoded tweets go to user_callback,
        # decode failures to user_errback.
        d = Deferred()
        d.addCallback(self.user_callback)
        d.addErrback(self.user_errback)
        line = line.strip()
        print line,'........'
        try:
            d.callback(json.loads(line))
        except ValueError, e:
            # json.loads failed before d.callback fired; route the error.
            if self.user_errback:
                d.errback(e)

    def connectionLost(self, reason):
        # Surface the disconnect reason through the user errback.
        if self.user_errback:
            d = Deferred()
            d.addErrback(self.user_errback)
            d.errback(DefaultException(reason.getErrorMessage()))
def _get_response(response, callback, errback):
    # Stream the response body into a StreamingParser. The returned Deferred
    # never fires, which deliberately keeps the outer callback chain (and
    # the reactor shutdown added via addBoth) from proceeding.
    print 'got response......'
    response.deliverBody(StreamingParser(callback, errback))
    return Deferred()
def _shutdown(reason, errback):
    # Route the terminal result into errback and stop the reactor.
    # NOTE(review): this is attached with addBoth, so `reason` may be a
    # plain success value, not a Failure -- confirm d.errback accepts it.
    d = Deferred()
    d.addErrback(errback)
    d.errback(reason)
    if reactor.running:
        reactor.stop()
def save_access_token(key, secret):
    """Persist the token pair as KEY=VALUE lines in ACCESS_TOKEN_FILE."""
    content = "ACCESS_KEY=%s\nACCESS_SECRET=%s\n" % (key, secret)
    with open(ACCESS_TOKEN_FILE, 'w') as f:
        f.write(content)
def load_access_token():
    """Read the saved ACCESS_KEY/ACCESS_SECRET back into an oauth.Token."""
    with open(ACCESS_TOKEN_FILE) as f:
        saved = f.readlines()
    key_part = saved[0].strip().split('=')[1]
    secret_part = saved[1].strip().split('=')[1]
    return oauth.Token(key=key_part, secret=secret_part)
def fetch_access_token():
    """Return the hard-coded (access_key, access_secret) pair.

    FIX: the oauth.Token round-trip only copied the same two strings back
    out; the constants are now returned directly.
    """
    ACCESS_KEY = "xxxxx-xxxx"
    ACCESS_SECRET = "xxxxxxxxxxxx"
    return (ACCESS_KEY, ACCESS_SECRET)
def make_header(access_token):
    # Build and sign the OAuth1 Authorization header for the sample stream.
    url = "https://%s%s" % (TWITTER_STREAM_API_HOST, TWITTER_STREAM_API_PATH)
    params = {
        # "Authorization": "Oauth %s" % auth,
        "oauth_version": "1.0",
        "oauth_nonce": oauth.generate_nonce(),
        "oauth_timestamp": str(int(time.time())),
        "oauth_token": access_token.key,
        "oauth_consumer_key": CONSUMER.key
    }
    # is_form_encoded stops oauth2 from adding oauth_body_hash, which the
    # Twitter endpoint rejects.
    req = oauth.Request(method="GET", url=url, parameters=params, is_form_encoded=True)
    req.sign_request(oauth.SignatureMethod_HMAC_SHA1(), CONSUMER, access_token)
    header = req.to_header()['Authorization'].encode('utf-8')
    print "Authorization header:"
    print " header = %s" % header
    return header
def start_streaming():
    # Kick off an Agent GET against the sample stream; body lines are parsed
    # by StreamingParser via _get_response.
    print 'streaming started...........'
    try:
        f = open(ACCESS_TOKEN_FILE)
    except IOError:
        access_token_key, access_token_secret = fetch_access_token()
        save_access_token(access_token_key, access_token_secret)
    access_token = load_access_token()
    auth_header = make_header(access_token)
    url = 'https://stream.twitter.com/1.1/statuses/sample.json'
    headers = Headers({
        'User-Agent': ['TwistedSTreamReciever'],
        'Authorization': [auth_header]})
    agent = Agent(reactor)
    d = agent.request('GET', url, headers, None)
    d.addCallback(_get_response, callback, errback)
    # NOTE(review): addBoth(_shutdown, ...) would stop the reactor as soon
    # as the chain settles; it stays pending only because _get_response
    # returns a never-firing Deferred.
    d.addBoth(_shutdown, errback)
    # reactor.run()
class _Stream(resource.Resource):
    # HTTP endpoint that starts the Twitter stream on GET.
    isLeaf = True

    def render_GET(self, request):
        # NOTE(review): render_GET runs in the reactor thread, so
        # time.sleep(8) blocks the whole event loop -- including the stream
        # just started. Use reactor.callLater to schedule the stop instead.
        start_streaming()# Streaming started here.......
        time.sleep(8) # wait for 8 seconds...
        ########.........??? stop streaming here??
        return "<html>streaming started...........%s</html>" % (time.ctime(),)
if __name__ == "__main__":
resource = _Stream()
factory = Site(resource)
endpoint = endpoints.TCP4ServerEndpoint(reactor, 8880)
endpoint.listen(factory)
reactor.run()
To give up on reading a particular streaming response (which it seems may be necessary - I'm guessing these Twitter streams never end on their own) and close the connection associated with that request/response (because HTTP has no other way to give up on a response), use the body delivery protocol's transport.loseConnection method. So, for example:
def _get_response(response, callback, errback):
    # Keep a handle on the body protocol so the stream can be cancelled
    # later via proto.transport.loseConnection().
    print 'got response......'
    proto = StreamingParser(callback, errback)
    # NOTE(review): save_stream_by_name/stream_name come from the caller's
    # context in the full program.
    save_stream_by_name(stream_name, proto)
    response.deliverBody(proto)
    return Deferred()
When you're done with that stream:
pop_stream_by_name(stream_name).transport.loseConnection()
#!/usr/bin/python27
import tornado
import tornado.websocket
import tornado.wsgi
import motor
import json
import jsonpickle
import uuid
from py2neo import neo4j, cypher
from tornado.ioloop import IOLoop
class MainHandler(tornado.web.RequestHandler):
    def get(self):
        # NOTE(review): fetches the motor db handle from settings but
        # renders no response -- presumably a stub.
        db = self.settings['db']
class ChatWebSocket(tornado.websocket.WebSocketHandler):
    """Chat socket mapping user uid -> live handler instance.

    FIX: the original declared this with `Class` (capital C), a SyntaxError,
    and nested `get_current_user` inside `on_message`; recipient lookups were
    also unguarded, producing the reported NoneType/KeyError crashes when a
    recipient had disconnected.
    """

    # Shared across all connections: uid -> handler instance.
    ids = {}
    frd = {}

    def my_callback(result, error):
        # Motor insert callback (unbound in the original as well).
        print('result', repr(result))
        IOLoop.instance().stop()

    def get_current_user(self):
        print(self)

    def on_message(self, message):
        to = db.today
        data = json.loads(message)
        print(data)
        s = str(data['who'])
        if data['st'] == 'me':
            # Register (or replace) this uid's socket.
            if s in ChatWebSocket.ids:
                del ChatWebSocket.ids[s]
                ChatWebSocket.ids[s] = self
                print("sf")
            else:
                ChatWebSocket.ids[s] = self
            print(ChatWebSocket.ids)
        elif data['st'] == 'st':
            # Broadcast a status update to every online friend.
            graph_db = neo4j.GraphDatabaseService()
            query = "START a=node:node_auto_index(uid='" + data['who'] + "') MATCH a-[:frd]-q RETURN q.uid as id"
            data1, metadata = cypher.execute(graph_db, query)
            print(data1)
            print(data)
            for y in data1:
                for j in y:
                    if str(j) in ChatWebSocket.ids:
                        ChatWebSocket.ids[str(j)].write_message({"s": "st", "id": data['id'], "con": data['con'], "time": data['time'], "on": data['on']})
                    else:
                        print("shit")
        elif data['st'] == 'com':
            # Push a comment to every online friend.
            graph_db = neo4j.GraphDatabaseService()
            query = "START a=node:node_auto_index(uid='" + data['who'] + "') MATCH a-[:frd]-q RETURN q.uid as id"
            data1, metadata = cypher.execute(graph_db, query)
            print(data1)
            print(data)
            for y in data1:
                for j in y:
                    if str(j) in ChatWebSocket.ids:
                        ChatWebSocket.ids[str(j)].write_message({"con": data['con'], "id": data['id'], "s": "com"})
                    else:
                        print("shit")
        elif data['st'] == 'cl':
            # Unregister this uid on close.
            print("cl")
            if s in ChatWebSocket.ids:
                del ChatWebSocket.ids[s]
                print(ChatWebSocket.ids)
                print("closed")
        elif data['st'] == 'ty':
            # Typing indicator to a single recipient.
            s = str(data['whom'])
            # FIX: guard the lookup -- the recipient may have disconnected.
            target = ChatWebSocket.ids.get(s)
            if target is not None and target.write_message({"g": str(data['who']), "s": "ty"}):
                print("ok")
        else:
            # Direct message: persist it, then deliver if the peer is online.
            doc = {'who': data['who'], 'whom': data['whom'], 'mes': data['mes'], 'id': data['who'] + data['whom']}
            to.insert(doc)
            s = str(data['whom'])
            target = ChatWebSocket.ids.get(s)
            if target is not None and target.write_message({"g": str(data['who']), "mes": data['mes'], "s": "m"}):
                print("ok")
# FIX: the original fused the db assignment and the Application construction
# onto one line (a SyntaxError); they are separate statements.
db = motor.MotorClient().open_sync().chat
# NOTE(review): the catch-all (r'.*', FallbackHandler) route precedes
# (r'/', MainHandler), so MainHandler is unreachable -- confirm the order.
tornado_app = tornado.web.Application(
    [(r'/websocket', ChatWebSocket),
     (r'.*', tornado.web.FallbackHandler),
     (r'/', MainHandler)],
    db=db)
tornado_app.listen(5528)
tornado.ioloop.IOLoop.instance().start()
Here is what I am doing: when the websocket opens, the client sends its unique id, which Python receives in on_message and stores as {unique_id: websocket_object} in a dict. When somebody sends a message, it includes the recipient's unique id, so Python looks up the websocket object as websocket_object = dict[unique_id] and sends the message to the appropriate client. But sometimes this does not work and raises a NoneType-attribute error, even though I saved the websocket object in the dict.