Maintaining another persistent TCP connection with TCPServer - python

I am connecting to an XMPP server using slixmpp, and I need access to that connection while serving HTTP. I am trying to maintain a persistent connection rather than reconnecting to the XMPP server for each HTTP request. I am using TCPServer to handle the HTTP side. I wrote this code:
import logging
from slixmpp import ClientXMPP
from slixmpp.exceptions import IqError, IqTimeout
import socketserver
from time import sleep


class EchoBot(ClientXMPP):
    def __init__(self, jid, password):
        ClientXMPP.__init__(self, jid, password)
        self.add_event_handler("session_start", self.session_start)
        self.add_event_handler("message", self.message)

    def session_start(self, event):
        self.send_presence()
        self.get_roster()

    def message(self, msg):
        print(msg)
        if msg['type'] in ('chat', 'normal'):
            msg.reply("Thanks for sending\n%(body)s" % msg).send()


class MyTCPHandler(socketserver.BaseRequestHandler):
    xmpp = EchoBot('xxx@fcm.googleapis.com', 'xyz')

    def __init__(self, request, client_address, server):
        super().__init__(request, client_address, server)
        self.xmpp.connect(address=('fcm-xmpp.googleapis.com', 5235), use_ssl=True, disable_starttls=True)
        self.xmpp.process(forever=True)

    def handle(self):
        self.data = self.request.recv(1024).strip()
        print("{} wrote:".format(self.client_address[0]))
        print(self.data)
        # just send back the same data, but upper-cased
        self.request.sendall(self.data.upper())


if __name__ == '__main__':
    logging.basicConfig(level=logging.DEBUG, format='%(levelname)-8s %(message)s')
    HOST, PORT = "localhost", 9999
    server = socketserver.TCPServer((HOST, PORT), MyTCPHandler)
    server.serve_forever()
This works the first time only: MyTCPHandler's handle function runs for the first connection, but the second connection never gets a response. I am using telnet localhost 9999 to test the connection. What might be going wrong here? Is there a better way to achieve what I'm looking for?
If I comment out these three lines, TCPServer works as expected:
    # xmpp = EchoBot('xxx@fcm.googleapis.com', 'xyz')

    def __init__(self, request, client_address, server):
        super().__init__(request, client_address, server)
        # self.xmpp.connect(address=('fcm-xmpp.googleapis.com', 5235), use_ssl=True, disable_starttls=True)
        # self.xmpp.process(forever=True)

I solved the problem using asyncio and aiohttp:
import logging
from slixmpp import ClientXMPP
from slixmpp.exceptions import IqError, IqTimeout

logging.basicConfig(format='%(asctime)s %(message)s', level=logging.INFO)
log = logging.getLogger(__name__)

import asyncio
import base64
import slixmpp
from aiohttp import web

XMPP = None


class EchoBot(ClientXMPP):
    def __init__(self, jid, password):
        ClientXMPP.__init__(self, jid, password)
        self.connected_future = asyncio.Future()
        self.add_event_handler("session_start", self.session_start)
        self.add_event_handler("message", self.message)

    def session_start(self, event):
        self.send_presence()
        self.get_roster()

    def message(self, msg):
        if msg['type'] in ('chat', 'normal'):
            msg.reply("Thanks for sending\n%(body)s" % msg).send()

    def reset_future(self):
        "Reset the future in case of disconnection"
        self.connected_future = asyncio.Future()


async def handle(request):
    "Handle the HTTP request and block until the vcard is fetched"
    err_404 = web.Response(status=404, text='Not found')
    print(await request.json())
    try:
        XMPP.send_raw('<message id="gsgsfssdfds"> <gcm xmlns="google:mobile:data">{ "notification": {"title": "change","body": "body changed","sound":"default"},"to" : "efsfdsf","message_id":"flajlfdjlfdklajflda","priority":"high","delivery_receipt_requested":true}</gcm></message>')
    except Exception as e:
        print(e)
        log.warning("cannot send message")
        return err_404
    return web.Response(text="yes")


async def init(loop, host: str, port: str, avatar_prefix: str):
    "Initialize the HTTP server"
    app = web.Application(loop=loop)
    app.router.add_route('POST', '/', handle)
    srv = await loop.create_server(app.make_handler(), host, port)
    log.info("Server started at http://%s:%s", host, port)
    return srv


def main(namespace):
    "Start the xmpp client and delegate the main loop to asyncio"
    loop = asyncio.get_event_loop()
    global XMPP
    XMPP = EchoBot('xxx@gcm.googleapis.com', 'ysfafdafdsfa')
    XMPP.connect(use_ssl=True, disable_starttls=False)
    # XMPP.connect()
    loop.run_until_complete(init(loop, namespace.host, namespace.port,
                                 namespace.avatar_prefix))
    XMPP.reset_future()
    loop.run_until_complete(XMPP.connected_future)
    try:
        loop.run_forever()
    except KeyboardInterrupt:
        import sys


def parse_args():
    "Parse the command-line arguments"
    from argparse import ArgumentParser
    parser = ArgumentParser()
    parser.add_argument('--jid', '-j', dest='jid', default=JID,
                        help='JID to use for fetching the vcards')
    parser.add_argument('--password', '-p', dest='password', default=PASSWORD,
                        help='Password linked to the JID')
    parser.add_argument('--host', dest='host', default=HOST,
                        help='Host on which the HTTP server will listen')
    parser.add_argument('--port', dest='port', default=PORT,
                        help='Port on which the HTTP server will listen')
    parser.add_argument('--avatar_prefix', dest='avatar_prefix',
                        default=AVATAR_PREFIX,
                        help='Prefix path for the avatar request')
    return parser.parse_args()


HOST = '127.0.0.1'
PORT = 8765
JID = 'changeme@example.com'
PASSWORD = 'changemetoo'
AVATAR_PREFIX = 'avatar/'

if __name__ == "__main__":
    print(parse_args())
    main(parse_args())

Related

Python consume RabbitMQ and run SocketIO server

Setup
I have a Python application which should consume messages from RabbitMQ and act as a Socket.IO server for a Vue2 app. When it receives a message from RabbitMQ, it should send a message over Socket.IO to the Vue2 app. I therefore wrote two classes, RabbitMQHandler and SocketIOHandler. I start the RabbitMQHandler in a separate thread so that the RabbitMQ consumer and the WSGI server can run in parallel.
Code
import random
import threading
import socketio
import eventlet
import sys
import os
import uuid
import pika
from dotenv import load_dotenv
import logging


class RabbitMQHandler():
    def __init__(self, RABBITMQ_USER, RABBITMQ_PW, RABBITMQ_IP):
        self.queue_name = 'myqueue'
        self.exchange_name = 'myqueue'
        credentials = pika.PlainCredentials(RABBITMQ_USER, RABBITMQ_PW)
        self.connection = pika.BlockingConnection(pika.ConnectionParameters(RABBITMQ_IP, 5672, '/', credentials))
        self.channel = self.connection.channel()
        self.channel.queue_declare(queue=self.queue_name)
        self.channel.exchange_declare(exchange=self.exchange_name, exchange_type='fanout')
        self.channel.queue_bind(exchange=self.exchange_name, queue=self.queue_name)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.connection.close()

    def run(self, callback):
        logging.info('start consuming messages...')
        self.channel.basic_consume(queue=self.queue_name, auto_ack=True, on_message_callback=callback)
        self.channel.start_consuming()


class SocketIOHandler():
    def __init__(self):
        self.id = str(uuid.uuid4())
        # create a Socket.IO server
        self.sio = socketio.Server(async_mode='eventlet', cors_allowed_origins='*')
        # wrap with a WSGI application
        self.app = socketio.WSGIApp(self.sio)
        self.sio.on('connect_to_backend', self.handle_connect)
        self.sio.on('random_number', self.handle_random_number)

    def handle_connect(self, sid, msg):
        logging.info('new socket io message')
        self.emit('connect_success', {
            'success': True,
        })

    def handle_random_number(self, sid, msg):
        logging.info('handle_random_number')
        self.emit('response_random_number', {'number': random.randint(0, 10)})

    def emit(self, event, msg):
        logging.info('socket server: {}'.format(self.id))
        logging.info('sending event: "{}"'.format(event))
        self.sio.emit(event, msg)
        logging.info('sent event: "{}"'.format(event))

    def run(self):
        logging.info('start web socket on port 8765...')
        eventlet.wsgi.server(eventlet.listen(('', 8765)), self.app)


def start_rabbitmq_handler(socketio_handler, RABBITMQ_USER, RABBITMQ_PW, RABBITMQ_IP):
    def callback(ch, method, properties, body):
        logging.info('rabbitmq handler')
        socketio_handler.emit('response_random_number', {'number': random.randint(0, 10)})

    with RabbitMQHandler(RABBITMQ_USER, RABBITMQ_PW, RABBITMQ_IP) as rabbitmq_handler:
        rabbitmq_handler.run(callback=callback)


threads = []


def main():
    global threads
    load_dotenv()
    RABBITMQ_USER = os.getenv('RABBITMQ_USER')
    RABBITMQ_PW = os.getenv('RABBITMQ_PW')
    RABBITMQ_IP = os.getenv('RABBITMQ_IP')

    socketio_handler = SocketIOHandler()

    rabbitmq_thread = threading.Thread(target=start_rabbitmq_handler, args=(socketio_handler, RABBITMQ_USER, RABBITMQ_PW, RABBITMQ_IP))
    threads.append(rabbitmq_thread)
    rabbitmq_thread.start()

    socketio_handler.run()


if __name__ == '__main__':
    try:
        logging.basicConfig(level=logging.INFO)
        logging.getLogger("pika").propagate = False
        main()
    except KeyboardInterrupt:
        try:
            for t in threads:
                t.exit()
            sys.exit(0)
        except SystemExit:
            for t in threads:
                t.exit()
            os._exit(0)
Problem
The problem is that when the RabbitMQHandler receives a message, the response_random_number event does not get through to the Vue2 app, even though it is emitted in the callback function. When I send the random_number event from the Vue2 app to the Python application, I do get the response_random_number event back in the Vue2 app.
So all connections work on their own, but not together. My guess would be that there is some sort of threading communication error. I added the id to the SocketIOHandler class to make sure it is the same instantiated object, and the printed ids are the same.
The log lines 'socket server: ...', 'sending event: ...' and 'sent event: ...' tell me that the function is being called correctly.
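If the threading guess is right, one common culprit with python-socketio in eventlet mode is that a plain threading.Thread does not cooperate with eventlet's green-thread hub unless the standard library is monkey patched, or unless the background work is started through the Socket.IO server itself. A minimal sketch of that setup (an assumption about the cause, not a verified fix; consume_rabbitmq is a hypothetical placeholder):

# Sketch: monkey-patch before any other imports, threads, or sockets are
# created, so that pika's sockets and threading cooperate with the
# eventlet hub that serves Socket.IO.
import eventlet
eventlet.monkey_patch()

import socketio

sio = socketio.Server(async_mode='eventlet', cors_allowed_origins='*')
app = socketio.WSGIApp(sio)


def consume_rabbitmq():
    # Hypothetical placeholder: open the pika connection here and call
    # sio.emit(...) from the on_message callback.
    pass


# Alternative to a raw threading.Thread: let the Socket.IO server manage
# the background task so it runs as a green thread.
sio.start_background_task(consume_rabbitmq)

# eventlet.wsgi.server(eventlet.listen(('', 8765)), app) would then serve the app.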

Paramiko. Reverse Forward Tunnel Question- Help Appreciated

So I've been working with the Paramiko library. I have a client and two servers, Server A and Server B. The client connects to Server A and then requests a reverse-forwarded tunnel to Server B. There is a lot more functionality to write into it, but my problem at the moment is very fundamental and likely has a very simple answer I'm somehow overlooking or not understanding.
What I am trying to do at this point is have Server A send some information to Server B every time it connects to it, which, due to a timer on the client, should happen every minute after a connection is closed (reconnecting each time).
I want Server A to send Server B some information every time it connects to it. My question is how to achieve that.
My first thought was to have the client send a command to Server A after the reverse tunnel is connected (here my understanding may be wrong, which is why I'm checking). The command (which is a string) would be forwarded by Server A to Server B, and I am looking for the response to that command to be sent to Server B.
The other option, as I see it, is to have Server A push the data to Server B. But I don't know how to check for when a reverse-forwarded tunnel is created. I could do it for any connection, but that seems inefficient, as the client would get some data and then the data would be sent again when the reverse-forward tunnel is created. (Again, I am likely overlooking something simple here.)
So I'm curious, given my code's present state, what I could do or change that would let me check for when a reverse-forward tunnel is made to Server B, so I can send the data I want to send to it.
Thank you for taking the time to try and help me here, and yes, I understand that hardcoding passwords etc. is a bad idea for application security.
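One hook that may be relevant here (a sketch under assumptions, not a verified fix): paramiko calls ServerInterface.check_port_forward_request on the server side at the moment the client asks for the reverse forward, so overriding it, as the server code below already does to return the port, is one place Server A can notice that a tunnel has just been requested. The on_tunnel_requested callback in the sketch is hypothetical and would hold whatever "push data towards Server B" logic is needed:

import paramiko


class TunnelAwareServerInterface(paramiko.ServerInterface):
    """Sketch: react when the client requests the reverse-forward tunnel."""

    def check_port_forward_request(self, address, port):
        # Called once per request_port_forward() from the client, i.e. at the
        # moment the reverse tunnel is being established.
        self.on_tunnel_requested(address, port)
        return port

    def on_tunnel_requested(self, address, port):
        # Hypothetical hook: trigger whatever data Server A should send
        # towards Server B once the tunnel exists.
        print('reverse forward requested on %s:%d' % (address, port))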
The code below is the client and server code (which, again, needs some work but is getting there).
Client Code
import getpass
import os
import socket
import select
import sys
import threading
import paramiko
from paramiko import Channel
import schedule
import time
import string
import random
from optparse import OptionParser

IP = '127.0.0.1'
USER = 'user'
PASSWORD = 'CrabRave'
PORT = 900
REMOTE_PORT = 443
REMOTE_IP = ...  # Remote IP will go here.


def handler(chan, host, port):
    sock = socket.socket()
    try:
        sock.connect((IP, PORT))
    except Exception as e:
        Timer()


def ssh_client(IP, PORT, USER, PASSWORD):
    client = paramiko.SSHClient()
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    client.connect(IP, PORT, USER, PASSWORD)
    ssh_session = client.get_transport().open_session()


def reverse_forward_tunnel(PORT, REMOTE_IP, REMOTE_PORT, transport):
    transport.request_port_forward("", PORT)
    while True:
        chan = transport.accept(1000)
        if chan is None:
            continue
        thr = threading.Thread(
            target=handler, args=(chan, REMOTE_IP, REMOTE_PORT))
        thr.setDaemon(True)
        thr.start()


def Timer():
    if Channel.is_active():
        schedule.every(1).seconds.do(Timer)
    else:
        schedule.every(1).minutes.do(main)


def main():
    client = ssh_client
    try:
        ssh_client(IP, PORT, USER, PASSWORD)
    except Exception as E:
        Timer()
    try:
        reverse_forward_tunnel(PORT, REMOTE_IP, REMOTE_PORT, client.get_transport())
    except KeyboardInterrupt:
        Timer()
    try:
        Timer()
    except Exception as E:
        Timer


if __name__ == '__main__':
    main()
Server Code
from abc import ABC, abstractmethod
from sys import platform
from io import StringIO
from cmd import Cmd
import base64
import os
import socket
import sys
import paramiko
from paramiko import Channel
import threading
import string
import random

my_key = ''.join(random.SystemRandom().choice(string.ascii_letters + string.digits) for _ in range(100))


class Shell(Cmd):
    use_rawinput = False
    prompt = 'My Shell> '

    def __init__(self, stdin=None, stdout=None):
        super(Shell, self).__init__(completekey='tab', stdin=stdin, stdout=stdout)

    def print(self, value):
        if self.stdout and not self.stdout.closed:
            self.stdout.write(value)
            self.stdout.flush()

    def printline(self, value):
        self.print(value + '\r\n')

    def emptyline(self):
        self.print('\r\n')


class ServerBase(ABC):
    def __init__(self):
        self._is_running = threading.Event()
        self._socket = None
        self.client_shell = None
        self._listen_thread = None

    def start(self, address='127.0.0.1', port=900, timeout=1):
        if not self._is_running.is_set():
            self._is_running.set()
            self._socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            if platform == "linux" or platform == "linux2":
                self._socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, True)
            self._socket.settimeout(timeout)
            self._socket.bind((address, port))
            self._listen_thread = threading.Thread(target=self._listen)
            self._listen_thread.start()

    def stop(self):
        if self._is_running.is_set():
            self._is_running.clear()
            self._listen_thread.join()
            self._socket.close()

    def _listen(self):
        while self._is_running.is_set():
            try:
                self._socket.listen()
                client, addr = self._socket.accept()
                self.connection_function(client)
            except socket.timeout:
                pass

    @abstractmethod
    def connection_function(self, client):
        pass


class SshServerInterface(paramiko.ServerInterface):
    def check_channel_request(self, kind, chanid):
        if kind == "session":
            return paramiko.OPEN_SUCCEEDED

    def check_auth_password(self, username: str, password: str) -> int:
        if (username == "user") and (password == "CrabRave"):
            return paramiko.AUTH_SUCCESSFUL
        return paramiko.AUTH_FAILED

    def check_channel_pty_request(self, channel: Channel, term: bytes, width: int, height: int, pixelwidth: int, pixelheight: int, modes: bytes):
        return True

    def check_channel_shell_request(self, channel: Channel) -> bool:
        return True

    def check_channel_env_request(self, channel: Channel, name: bytes, value: bytes) -> bool:
        return True

    def check_port_forward_request(self, address: str, port: int) -> int:
        return port


class SshServer(ServerBase):
    def __init__(self, host_key_file, host_key_file_password=None):
        super(SshServer, self).__init__()
        self._host_key = paramiko.RSAKey.from_private_key_file(StringIO(my_key))

    def connection_function(self, client):
        try:
            session = paramiko.Transport(client)
            session.add_server_key(self._host_key)
            server = SshServerInterface()
            try:
                session.start_server(server=server)
            except paramiko.SSHException:
                return
            channel = session.accept()
            stdio = channel.makefile('rwU')
            self.client_shell = Shell(stdio, stdio)
            self.client_shell.cmdloop()
            session.close()
        except:
            pass


if __name__ == '__main__':
    server = SshServer(my_key)
    server.start()

Consume RabbitMq using Pika and push using Socket.io

I am building a service that receives messages from RabbitMQ using Pika and pushes messages to clients using Socket.IO.
The Socket.IO server and the Pika consumer both block the main thread.
The same would apply to Celery with Flask or Django.
What is the proper approach to solving this and running both in the same context?
You can use the Pub/Sub model: start the consuming process in another thread, register the users that want to receive from the queue, and send data to the subscribed users.
import json
import threading
import time

import pika
import gevent
from flask import Flask
from flask_sockets import Sockets

connection_url = 'localhost'
channel_queue = 'test'


class PubSubListener(threading.Thread):
    def __init__(self, queue_name):
        threading.Thread.__init__(self)
        self.clients = []
        self.queue_name = queue_name
        connection = pika.BlockingConnection(pika.ConnectionParameters(connection_url))
        self.channel = connection.channel()
        self.channel.queue_declare(queue=self.queue_name)
        threading.Thread(target=self.channel.basic_consume(queue=self.queue_name,
                                                           auto_ack=True,
                                                           on_message_callback=self._callback))

    def run(self):
        self.channel.start_consuming()

    def publish(self, body):
        self.channel.basic_publish(exchange='',
                                   routing_key=self.queue_name,
                                   body=body)

    def subscribe(self, client):
        self.clients.append(client)

    def _callback(self, channel, method, properties, body):
        time.sleep(0.001)
        message = json.loads(body)
        print(message)
        self.send(message)

    def send(self, data):
        for client in self.clients:
            try:
                client.send(data)
            except Exception:
                self.clients.remove(client)


pslistener = PubSubListener(channel_queue)
app = Flask(__name__)
sockets = Sockets(app)


@sockets.route('/echo')
def echo_socket(ws):
    pslistener.subscribe(ws)
    while not ws.closed:
        gevent.sleep(0.1)


@app.route('/')
def hello():
    return 'Hello World!'


if __name__ == "__main__":
    from gevent import pywsgi
    from geventwebsocket.handler import WebSocketHandler

    pslistener.start()
    print("Started")

    server = pywsgi.WSGIServer(('', 5000), app, handler_class=WebSocketHandler)
    server.serve_forever()

How to disable ssl verification for aiohttp test server

I want to write tests for a web client. As recommended by the aiohttp library author, I set up a local test server. This setup (boiled down from this example) works well with plain HTTP on port 80.
import ssl
import socket as skt
import unittest as ut

import aiohttp as ah
import aiohttp.web as ahweb
import aiohttp.test_utils as ahtest

EXPECTED_SERVER_URL = 'https://anywhere.com'


async def function_to_test(client_session):
    async with client_session.get(EXPECTED_SERVER_URL) as response:
        return await response.json()


class ResolverMock(ah.resolver.AbstractResolver):
    def __init__(self, server_port, host, request_port):
        self.__server_port = server_port
        self.__host = host.rstrip('/').rpartition('://')[-1]
        self.__request_port = request_port

    async def resolve(self, host, request_port=0, family=skt.AF_INET):
        if host != self.__host or request_port != self.__request_port:
            raise OSError('No test server known for %s' % host)
        return [dict(hostname=host, host='127.0.0.1', port=self.__server_port,
                     family=skt.AF_INET, proto=0, flags=skt.AI_NUMERICHOST)]

    async def close(self) -> None:
        pass


class ServerMock(ahtest.RawTestServer):
    def __init__(self, **kwargs):
        super().__init__(self.__handle_request, **kwargs)

    async def __handle_request(self, _request):
        return ahweb.json_response({})


class Fetch(ut.IsolatedAsyncioTestCase):
    async def test_request_server(self):
        async with ServerMock(port=443) as server_mock:
            ssl_ctx = False
            resolver = ResolverMock(server_port=server_mock.port, host=EXPECTED_SERVER_URL, request_port=443)
            connector = ah.TCPConnector(resolver=resolver, ssl=ssl_ctx, use_dns_cache=False)
            async with ah.ClientSession(connector=connector, trust_env=True) as session:
                response = await function_to_test(session)
                self.assertEqual(response, {})


if __name__ == '__main__':
    ut.main()
But it fails for HTTPS on port 443 with:
aiohttp.client_exceptions.ClientConnectorSSLError: Cannot connect to host anywhere.com:443 ssl:default [[SSL: WRONG_VERSION_NUMBER] wrong version number (_ssl.c:1108)]
Using a non-validating SSL context did not change anything:
ssl_ctx = ssl.create_default_context()
ssl_ctx.check_hostname = False
ssl_ctx.verify_mode = ssl.CERT_NONE
I have no idea how I could set up a compatible protocol or whatever (I am far from being an SSL expert) on the test server. What am I missing? How can I make this work for HTTPS?
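For what it's worth, the "wrong version number" error usually means the client attempted a TLS handshake against a socket that is only speaking plain HTTP, so relaxing verification on the client side cannot help by itself; the test server has to be given a server-side SSL context with a certificate the client trusts. A minimal sketch of building such a pair of contexts, assuming the trustme package is available (how the server context is then wired into RawTestServer depends on the aiohttp version, e.g. by passing it through to the underlying create_server/TCPSite call):

import ssl
import trustme

ca = trustme.CA()                            # throwaway certificate authority for tests
server_cert = ca.issue_cert('anywhere.com')  # certificate matching EXPECTED_SERVER_URL

server_ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
server_cert.configure_cert(server_ctx)       # context the test server socket would use

client_ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
ca.configure_trust(client_ctx)               # client context that trusts the throwaway CA

# e.g. connector = ah.TCPConnector(resolver=resolver, ssl=client_ctx, use_dns_cache=False)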

Python SocketServer: sending to multiple clients?

Well, I'm trying to build a small Python program with a SocketServer that is supposed to send the messages it receives to all connected clients. I'm stuck: I don't know how to store clients on the server side, and I don't know how to send to multiple clients. Oh, and my program fails every time more than one client connects, and every time a client sends more than one message...
Here's my code until now:
        print str(self.client_address[0]) + ' connected.'

    def handle(self):
        new = 1
        for client in clients:
            if client == self.request:
                new = 0
        if new == 1:
            clients.append(self.request)
        for client in clients:
            data = self.request.recv(1024)
            client.send(data)


class Host:
    def __init__(self):
        self.address = ('localhost', 0)
        self.server = SocketServer.TCPServer(self.address, EchoRequestHandler)
        ip, port = self.server.server_address
        self.t = threading.Thread(target=self.server.serve_forever)
        self.t.setDaemon(True)
        self.t.start()
        print ''
        print 'Hosted with IP: ' + ip + ' and port: ' + str(port) + '. Clients can now connect.'
        print ''

    def close(self):
        self.server.socket.close()


class Client:
    name = ''
    ip = ''
    port = 0

    def __init__(self, ip, port, name):
        self.name = name
        self.hostIp = ip
        self.hostPort = port
        self.s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.s.connect((self.hostIp, self.hostPort))

    def reco(self):
        self.s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.s.connect((self.hostIp, self.hostPort))

    def nick(self, newName):
        self.name = newName

    def send(self, message):
        message = self.name + ' : ' + message
        len_sent = self.s.send(message)
        response = self.s.recv(len_sent)
        print response
        self.reco()

    def close(self):
        self.s.close()
Obviously I have no idea what I'm doing, so any help would be great.
Thanks in advance!
Edit: I'm using Python 2.7 on Windows Vista.
You want to look at asyncore here. The socket operations you're calling on the client side are blocking (they don't return until some data is received or a timeout occurs), which makes it hard to listen for messages sent from the host while also letting the client instances enqueue data to send. asyncore is supposed to abstract the timeout-based polling loop away from you.
Here's a code "sample" -- let me know if anything is unclear:
from __future__ import print_function

import asyncore
import collections
import logging
import socket

MAX_MESSAGE_LENGTH = 1024


class RemoteClient(asyncore.dispatcher):
    """Wraps a remote client socket."""

    def __init__(self, host, socket, address):
        asyncore.dispatcher.__init__(self, socket)
        self.host = host
        self.outbox = collections.deque()

    def say(self, message):
        self.outbox.append(message)

    def handle_read(self):
        client_message = self.recv(MAX_MESSAGE_LENGTH)
        self.host.broadcast(client_message)

    def handle_write(self):
        if not self.outbox:
            return
        message = self.outbox.popleft()
        if len(message) > MAX_MESSAGE_LENGTH:
            raise ValueError('Message too long')
        self.send(message)


class Host(asyncore.dispatcher):

    log = logging.getLogger('Host')

    def __init__(self, address=('localhost', 0)):
        asyncore.dispatcher.__init__(self)
        self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
        self.bind(address)
        self.listen(1)
        self.remote_clients = []

    def handle_accept(self):
        socket, addr = self.accept()  # For the remote client.
        self.log.info('Accepted client at %s', addr)
        self.remote_clients.append(RemoteClient(self, socket, addr))

    def handle_read(self):
        self.log.info('Received message: %s', self.read())

    def broadcast(self, message):
        self.log.info('Broadcasting message: %s', message)
        for remote_client in self.remote_clients:
            remote_client.say(message)


class Client(asyncore.dispatcher):

    def __init__(self, host_address, name):
        asyncore.dispatcher.__init__(self)
        self.log = logging.getLogger('Client (%7s)' % name)
        self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
        self.name = name
        self.log.info('Connecting to host at %s', host_address)
        self.connect(host_address)
        self.outbox = collections.deque()

    def say(self, message):
        self.outbox.append(message)
        self.log.info('Enqueued message: %s', message)

    def handle_write(self):
        if not self.outbox:
            return
        message = self.outbox.popleft()
        if len(message) > MAX_MESSAGE_LENGTH:
            raise ValueError('Message too long')
        self.send(message)

    def handle_read(self):
        message = self.recv(MAX_MESSAGE_LENGTH)
        self.log.info('Received message: %s', message)


if __name__ == '__main__':
    logging.basicConfig(level=logging.INFO)
    logging.info('Creating host')
    host = Host()
    logging.info('Creating clients')
    alice = Client(host.getsockname(), 'Alice')
    bob = Client(host.getsockname(), 'Bob')
    alice.say('Hello, everybody!')
    logging.info('Looping')
    asyncore.loop()
Which results in the following output:
INFO:root:Creating host
INFO:root:Creating clients
INFO:Client ( Alice):Connecting to host at ('127.0.0.1', 51117)
INFO:Client ( Bob):Connecting to host at ('127.0.0.1', 51117)
INFO:Client ( Alice):Enqueued message: Hello, everybody!
INFO:root:Looping
INFO:Host:Accepted client at ('127.0.0.1', 55628)
INFO:Host:Accepted client at ('127.0.0.1', 55629)
INFO:Host:Broadcasting message: Hello, everybody!
INFO:Client ( Alice):Received message: Hello, everybody!
INFO:Client ( Bob):Received message: Hello, everybody!
You can use socketserver to broadcast messages to all connected clients. However, the ability is not built into the code and will need to be implemented by extending some of the classes already provided. In the following example, this is implemented using the ThreadingTCPServer and StreamRequestHandler classes. They provide a foundation on which to build but still require some modifications to allow what you are trying to accomplish. The documentation should help explain what each function, class, and method is trying to do in order to get the job done.
Server
#! /usr/bin/env python3
import argparse
import pickle
import queue
import select
import socket
import socketserver


def main():
    """Start a chat server and serve clients forever."""
    parser = argparse.ArgumentParser(description='Execute a chat server demo.')
    parser.add_argument('port', type=int, help='location where server listens')
    arguments = parser.parse_args()
    server_address = socket.gethostbyname(socket.gethostname()), arguments.port
    server = CustomServer(server_address, CustomHandler)
    server.serve_forever()


class CustomServer(socketserver.ThreadingTCPServer):
    """Provide server support for the management of connected clients."""

    def __init__(self, server_address, request_handler_class):
        """Initialize the server and keep a set of registered clients."""
        super().__init__(server_address, request_handler_class, True)
        self.clients = set()

    def add_client(self, client):
        """Register a client with the internal store of clients."""
        self.clients.add(client)

    def broadcast(self, source, data):
        """Resend data to all clients except for the data's source."""
        for client in tuple(self.clients):
            if client is not source:
                client.schedule((source.name, data))

    def remove_client(self, client):
        """Take a client off the register to disable broadcasts to it."""
        self.clients.remove(client)


class CustomHandler(socketserver.StreamRequestHandler):
    """Allow forwarding of data to all other registered clients."""

    def __init__(self, request, client_address, server):
        """Initialize the handler with a store for future data streams."""
        self.buffer = queue.Queue()
        super().__init__(request, client_address, server)

    def setup(self):
        """Register self with the clients the server has available."""
        super().setup()
        self.server.add_client(self)

    def handle(self):
        """Run a continuous message pump to broadcast all client data."""
        try:
            while True:
                self.empty_buffers()
        except (ConnectionResetError, EOFError):
            pass

    def empty_buffers(self):
        """Transfer data to other clients and write out all waiting data."""
        if self.readable:
            self.server.broadcast(self, pickle.load(self.rfile))
        while not self.buffer.empty():
            pickle.dump(self.buffer.get_nowait(), self.wfile)

    @property
    def readable(self):
        """Check if the client's connection can be read without blocking."""
        return self.connection in select.select(
            (self.connection,), (), (), 0.1)[0]

    @property
    def name(self):
        """Get the client's address to which the server is connected."""
        return self.connection.getpeername()

    def schedule(self, data):
        """Arrange for a data packet to be transmitted to the client."""
        self.buffer.put_nowait(data)

    def finish(self):
        """Remove the client's registration from the server before closing."""
        self.server.remove_client(self)
        super().finish()


if __name__ == '__main__':
    main()
Of course, you also need a client that can communicate with your server and use the same protocol the server speaks. Since this is Python, the decision was made to utilize the pickle module to facilitate data transfer among server and clients. Other data transfer methods could have been used (such as JSON, XML, et cetera), but being able to pickle and unpickle data serves the needs of this program well enough. Documentation is included yet again, so it should not be too difficult to figure out what is going on. Note that server commands can interrupt user data entry.
Client
#! /usr/bin/env python3
import argparse
import cmd
import pickle
import socket
import threading


def main():
    """Connect a chat client to a server and process incoming commands."""
    parser = argparse.ArgumentParser(description='Execute a chat client demo.')
    parser.add_argument('host', type=str, help='name of server on the network')
    parser.add_argument('port', type=int, help='location where server listens')
    arguments = parser.parse_args()
    client = User(socket.create_connection((arguments.host, arguments.port)))
    client.start()


class User(cmd.Cmd, threading.Thread):
    """Provide a command interface for internal and external instructions."""

    prompt = '>>> '

    def __init__(self, connection):
        """Initialize the user interface for communicating with the server."""
        cmd.Cmd.__init__(self)
        threading.Thread.__init__(self)
        self.connection = connection
        self.reader = connection.makefile('rb', -1)
        self.writer = connection.makefile('wb', 0)
        self.handlers = dict(print=print, ping=self.ping)

    def start(self):
        """Begin execution of processor thread and user command loop."""
        super().start()
        super().cmdloop()
        self.cleanup()

    def cleanup(self):
        """Close the connection and wait for the thread to terminate."""
        self.writer.flush()
        self.connection.shutdown(socket.SHUT_RDWR)
        self.connection.close()
        self.join()

    def run(self):
        """Execute an automated message pump for client communications."""
        try:
            while True:
                self.handle_server_command()
        except (BrokenPipeError, ConnectionResetError):
            pass

    def handle_server_command(self):
        """Get an instruction from the server and execute it."""
        source, (function, args, kwargs) = pickle.load(self.reader)
        print('Host: {} Port: {}'.format(*source))
        self.handlers[function](*args, **kwargs)

    def preloop(self):
        """Announce to other clients that we are connecting."""
        self.call('print', socket.gethostname(), 'just entered.')

    def call(self, function, *args, **kwargs):
        """Arrange for a handler to be executed on all other clients."""
        assert function in self.handlers, 'You must create a handler first!'
        pickle.dump((function, args, kwargs), self.writer)

    def do_say(self, arg):
        """Causes a message to appear to all other clients."""
        self.call('print', arg)

    def do_ping(self, arg):
        """Ask all clients to report their presence here."""
        self.call('ping')

    def ping(self):
        """Broadcast to all other clients that we are present."""
        self.call('print', socket.gethostname(), 'is here.')

    def do_exit(self, arg):
        """Disconnect from the server and close the client."""
        return True

    def postloop(self):
        """Make an announcement to other clients that we are leaving."""
        self.call('print', socket.gethostname(), 'just exited.')


if __name__ == '__main__':
    main()
Why use SocketServer? Wouldn't a simple socket like this meet your needs?
import socket

HOST = ''
PORT = 8000

sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.bind((HOST, PORT))
sock.listen(5)

while True:
    conn, addr = sock.accept()
    print 'connecting to', addr
    while True:
        data = conn.recv(1024)
        if not data:
            break
        conn.send(data)
To handle multiple clients simultaneously, you will have to add SocketServer.ForkingMixIn or ThreadingMixIn, as sketched below.
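A minimal sketch of what that could look like (Python 2 module names to match the snippet above; on Python 3 the module is socketserver, which already provides ThreadingTCPServer):

import SocketServer


class ThreadedEchoServer(SocketServer.ThreadingMixIn, SocketServer.TCPServer):
    # Each request is handled in its own thread, so one blocked client no
    # longer stalls the accept loop for the others.
    daemon_threads = True
    allow_reuse_address = True


class EchoHandler(SocketServer.BaseRequestHandler):
    def handle(self):
        # Keep reading until the client disconnects instead of serving a
        # single recv() and returning.
        while True:
            data = self.request.recv(1024)
            if not data:
                break
            self.request.sendall(data)


if __name__ == '__main__':
    ThreadedEchoServer(('', 8000), EchoHandler).serve_forever()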
