I'm working on a project that uses a Python asyncio socket server. The problem is that the server implementation doesn't call .close() on the transports when the server stops. This seems to leave clients connected and causes crashes in other parts of the code.
The Python documentation says that transports need to be closed explicitly, but in this project I don't know where I can close them, because no reference is kept to the transports that are created for each client.
https://docs.python.org/3/library/asyncio-dev.html#close-transports-and-event-loops
Here is the code:
"""
Socket server forwarding request to internal server
"""
import logging
try:
# we prefer to use bundles asyncio version, otherwise fallback to trollius
import asyncio
except ImportError:
import trollius as asyncio
from opcua import ua
from opcua.server.uaprocessor import UaProcessor
logger = logging.getLogger(__name__)
class BinaryServer(object):
def __init__(self, internal_server, hostname, port):
self.logger = logging.getLogger(__name__)
self.hostname = hostname
self.port = port
self.iserver = internal_server
self.loop = internal_server.loop
self._server = None
self._policies = []
def set_policies(self, policies):
self._policies = policies
def start(self):
class OPCUAProtocol(asyncio.Protocol):
"""
instanciated for every connection
defined as internal class since it needs access
to the internal server object
FIXME: find another solution
"""
iserver = self.iserver
loop = self.loop
logger = self.logger
policies = self._policies
def connection_made(self, transport):
self.peername = transport.get_extra_info('peername')
self.logger.info('New connection from %s', self.peername)
self.transport = transport
self.processor = UaProcessor(self.iserver, self.transport)
self.processor.set_policies(self.policies)
self.data = b""
def connection_lost(self, ex):
self.logger.info('Lost connection from %s, %s', self.peername, ex)
self.transport.close()
self.processor.close()
def data_received(self, data):
logger.debug("received %s bytes from socket", len(data))
if self.data:
data = self.data + data
self.data = b""
self._process_data(data)
def _process_data(self, data):
buf = ua.utils.Buffer(data)
while True:
try:
backup_buf = buf.copy()
try:
hdr = ua.Header.from_string(buf)
except ua.utils.NotEnoughData:
logger.info("We did not receive enough data from client, waiting for more")
self.data = backup_buf.read(len(backup_buf))
return
if len(buf) < hdr.body_size:
logger.info("We did not receive enough data from client, waiting for more")
self.data = backup_buf.read(len(backup_buf))
return
ret = self.processor.process(hdr, buf)
if not ret:
logger.info("processor returned False, we close connection from %s", self.peername)
self.transport.close()
return
if len(buf) == 0:
return
except Exception:
logger.exception("Exception raised while parsing message from client, closing")
self.transport.close()
break
coro = self.loop.create_server(OPCUAProtocol, self.hostname, self.port)
self._server = self.loop.run_coro_and_wait(coro)
print('Listening on {}:{}'.format(self.hostname, self.port))
def stop(self):
self.logger.info("Closing asyncio socket server")
self.loop.call_soon(self._server.close)
self.loop.run_coro_and_wait(self._server.wait_closed())
As you can see, when we call stop() on this server class, the asyncio server's close method is called. However, if clients are connected, the transports created for them never get closed.
The project repository is here https://github.com/FreeOpcUa/python-opcua/ , you can take a look at Issue 137.
What is the correct way to close the transport object?
I solved this by applying this approach:
#self.OPCUAServer - this is my opcua server
nodes = []
nodes.append(self.OPCUAServer.get_node("ns=0; s=Measurements")) #Adding two root nodes
nodes.append(self.OPCUAServer.get_node("ns=1; s=Calibrations")) #to the list
self.OPCUAServer.delete_nodes(nodes, True) # Recursively call delete_nodes with this list
self.OPCUAServer.stop()
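Another way to make sure the transports themselves get closed is to have each protocol instance register itself with the server object, so that stop() can close whatever is still open. Below is a minimal, self-contained sketch of that idea, not the python-opcua API: the TrackingServer/TrackedProtocol names and the clients list are invented for the example.

import asyncio


class TrackingServer:
    """Keeps a reference to every live protocol so stop() can close the transports."""

    def __init__(self, host='127.0.0.1', port=8840):
        self.host = host
        self.port = port
        # the real project runs the loop in its own thread; kept simple here
        self.loop = asyncio.get_event_loop()
        self.clients = []          # live protocol instances
        self._server = None

    def start(self):
        server = self              # captured by the nested protocol class

        class TrackedProtocol(asyncio.Protocol):

            def connection_made(self, transport):
                self.transport = transport
                server.clients.append(self)

            def connection_lost(self, exc):
                if self in server.clients:
                    server.clients.remove(self)

            def data_received(self, data):
                self.transport.write(data)   # echo, just to have something to do

        coro = self.loop.create_server(TrackedProtocol, self.host, self.port)
        self._server = self.loop.run_until_complete(coro)

    def stop(self):
        # close the transport of every client that is still connected,
        # then close the listening server itself
        for client in list(self.clients):
            client.transport.close()
        self._server.close()
        self.loop.run_until_complete(self._server.wait_closed())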
Related
I am trying to make a socket server with asyncio that can have multiple clients connected at once and can easily switch which client it communicates with while keeping all the clients connected. I thought there would be some kind of file descriptor for each client, like there is with plain sockets, but I looked through the docs and did not find anything, or I missed it.
Here is my server code:
import socket
import asyncio

host = "localhost"
port = 9998

list_of_auths = ['desktop-llpeu0p\\tomiss', 'desktop-llpeu0p\\tomisss',
                 'desktop-llpeu0p\\tomissss', 'desktop-llpeu0p\\tomisssss']

sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
print('socket initiated.')

confirmed = 'CONFIRMED'
deny = 'denied'

# (so i dont forget) to get recv in async do: var = (await reader.read(4096)).decode('utf-8') if -1 then it will read all
# (so i dont forget) to write sendall do: writer.write(var.encode('utf-8')) should be used with await writer.drain()


async def handle_client(reader, writer):
    idrecv = (await reader.read(255)).decode('utf-8')
    if idrecv in list_of_auths:
        writer.write(confirmed.encode('utf-8'))
    else:
        writer.write(deny.encode('utf-8'))
        writer.close()
    request = None
    while request != 'quit':
        print("second checkpoint")
    writer.close()


async def run_server():
    print("first checkpoint")
    server = await asyncio.start_server(handle_client, host, port)
    async with server:
        await server.serve_forever()


asyncio.run(run_server())
This code allows multiple clients to connect at once; however, it only lets me communicate with the last one that connected.
I would suggest implementing it like so:
import asyncio


class SocketHandler(asyncio.Protocol):

    def __init__(self):
        asyncio.Protocol.__init__(self)
        self.transport = None
        self.peername = None
        # your other code

    def connection_made(self, transport):
        """ incoming connection """
        global ALL_CONNECTIONS
        self.transport = transport
        self.peername = transport.get_extra_info('peername')
        ALL_CONNECTIONS.append(self)
        # your other code

    def connection_lost(self, exception):
        self.close()
        # your other code

    def data_received(self, data):
        # your code handling incoming data
        pass

    def close(self):
        try:
            self.transport.close()
        except AttributeError:
            pass


# global list to store all connections
ALL_CONNECTIONS = []


def send_to_all(message):
    """ sending a message to all connected clients """
    global ALL_CONNECTIONS
    for sh in ALL_CONNECTIONS:
        # here you can also check sh.peername to know which client it is
        if sh.transport is not None:
            sh.transport.write(message)


port = 5060
loop = asyncio.get_event_loop()
coro = loop.create_server(SocketHandler, '', port)
server = loop.run_until_complete(coro)
loop.run_forever()
This way, each connection to the server is represented by an instance of SocketHandler. Whenever you process some data inside this instance, you know which client connection it is.
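If you would rather stay with the streams API from the question, the same bookkeeping works there too: keep every StreamWriter in a container keyed by its peername and pick the one you want to talk to. A rough sketch, with names (CONNECTED, send_to) invented for the example:

import asyncio

CONNECTED = {}   # peername -> StreamWriter, so you can choose which client to address


async def handle_client(reader, writer):
    peername = writer.get_extra_info('peername')
    CONNECTED[peername] = writer
    try:
        while True:
            data = await reader.read(4096)
            if not data:
                break                    # client closed the connection
            print('from', peername, ':', data.decode('utf-8'))
    finally:
        del CONNECTED[peername]
        writer.close()


async def send_to(peername, message):
    """Write to one specific client chosen by its peername."""
    writer = CONNECTED[peername]
    writer.write(message.encode('utf-8'))
    await writer.drain()


async def main():
    server = await asyncio.start_server(handle_client, 'localhost', 9998)
    async with server:
        await server.serve_forever()


asyncio.run(main())

send_to() can then be awaited from any other coroutine to address one specific connected client, while the rest stay connected.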
I have a problem concerning threading in Python (2.7.8). The problem is in a Python "chat" program I wrote (I'll include the code of my threading class and my client class, which live in different files, since I think the problem is in one of those two and not in the server code). When I run the Client.py file, I am able to communicate with another client (running the same code) through a server, but I have to re-issue the .send_payload(msg) command in order to receive the message the other client has sent (or simply press Enter in the chat, which sends "" as a message). I want to know if it is possible to receive messages without "refreshing" the chat, somehow through threading.
import json
import socket
from threading import Thread


class MessageReceiver(Thread):

    def __init__(self, client, connection):
        Thread.__init__(self)
        self.daemon = True
        self.client = client
        self.connection = connection
        self.stop = False

    def run(self):
        while not self.stop:
            data = self.connection.recv(8192)
            if not data:
                break
            else:
                self.client.receive_message(data)


class Client:

    def __init__(self, host, server_port):
        self.host = host
        self.server_port = server_port
        self.connection = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.run()

    def run(self):
        self.connection.connect((self.host, self.server_port))
        self.thread = MessageReceiver(self, self.connection)
        self.thread.start()
        while True:
            text = raw_input()
            if(text.find("login") == 0):
                data = {"request": "login", "content": text[text.find("login")+6:]}
                self.send_payload(data)
            if(text.find("names") == 0):
                data = {"request": "names", "content": ""}
                self.send_payload(data)
            else:
                data = {"request": "msg", "content": text}
                self.send_payload(data)

    def disconnect(self):
        self.thread.stop = True
        self.connection.close()
        print("Disconnected")

    def receive_message(self, message):
        print("Msg: " + message)

    def send_payload(self, data):
        self.connection.send(json.dumps(data))
        print("Payload sent!")
I started using Python a little while ago and I'm currently writing a socket server. I already have the server working with multiple threads and multiple clients (hurray!), but I'm looking for a feature I don't know the name of (I don't even know if it exists): I would like to create a kind of channel where clients can send different types of messages.
For example, I create a channel INFO, and if the server receives this type of message it just does a print.
I create another channel DEBUG where I can send a custom command which the server will execute.
And so on.
In pseudocode it would look like this:
def socketDebug(command):
    run command

def socketInfo(input):
    print input

if socket == socketDebug:
    socketDebug(socket.rcv)
else:
    if socket == socketInfo:
        socketInfo(socket.rcv)
I hope I'm clear.
Here is a quite simple implementation of a Channel class. It creates a socket to accept connections from clients and to send messages. It is also a client itself, receiving messages from other Channel instances (in separate processes, for example).
The communication is done in two threads, which is pretty bad (I would use async IO). When a message is received, the registered callback is called in the receiving thread, which can cause some threading issues.
Each Channel instance creates its own sockets, but it would be much more scalable to have channel "topics" multiplexed by a single instance. Some existing libraries provide "channel" functionality, like nanomsg.
The code here is for educational purposes, if it can help...
import socket
import threading


class ChannelThread(threading.Thread):

    def __init__(self):
        threading.Thread.__init__(self)
        self.clients = []
        self.chan_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.chan_sock.bind(('', 0))
        _, self.port = self.chan_sock.getsockname()
        self.chan_sock.listen(5)
        self.daemon = True
        self.start()

    def run(self):
        while True:
            new_client = self.chan_sock.accept()
            if not new_client:
                break
            self.clients.append(new_client)

    def sendall(self, msg):
        for client in self.clients:
            client[0].sendall(msg)


class Channel(threading.Thread):

    def __init__(self):
        threading.Thread.__init__(self)
        self.daemon = True
        self.channel_thread = ChannelThread()

    def public_address(self):
        return "tcp://%s:%d" % (socket.gethostname(), self.channel_thread.port)

    def register(self, channel_address, update_callback):
        host, s_port = channel_address.split("//")[-1].split(":")
        port = int(s_port)
        self.peer_chan_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.peer_chan_sock.connect((host, port))
        self._callback = update_callback
        self.start()

    def deal_with_message(self, msg):
        self._callback(msg)

    def run(self):
        data = ""
        while True:
            new_data = self.peer_chan_sock.recv(1024)
            if not new_data:
                # connection reset by peer
                break
            data += new_data
            msgs = data.split("\n\n")
            # the last element is either an incomplete message or an empty
            # string; keep it as the remainder for the next iteration
            data = msgs.pop()
            for msg in msgs:
                self.deal_with_message(msg)

    def send_value(self, channel_value):
        self.channel_thread.sendall("%s\n\n" % channel_value)
Usage:
In process A:
c = Channel()
c.public_address()
In process B:
def msg_received(msg):
    print "received:", msg
c = Channel()
c.register("public_address_string_returned_in_process_A", msg_received)
In process A:
c.send_value("HELLO")
In process B:
received: HELLO
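To get the INFO/DEBUG "channels" from the original question on top of this, one option is to prefix each message with its type and dispatch inside the registered callback. A small sketch, assuming a "TYPE|payload" framing that is my own invention here:

import subprocess


def socket_info(payload):
    # INFO channel: just print whatever arrives
    print(payload)


def socket_debug(payload):
    # DEBUG channel: run the received command (demo only, do not trust remote input)
    subprocess.call(payload, shell=True)


HANDLERS = {"INFO": socket_info, "DEBUG": socket_debug}


def msg_received(msg):
    msg_type, _, payload = msg.partition("|")
    handler = HANDLERS.get(msg_type, socket_info)   # unknown types fall back to INFO
    handler(payload)

# c.register("tcp://host:port", msg_received)
# other side: c.send_value("INFO|server started") or c.send_value("DEBUG|ls -l")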
The Pika library supports a Tornado adapter, and there is an example of how to publish messages using the asynchronous adapter.
I want to use Pika in a Tornado application. As an example, I want to put Tornado request data into RabbitMQ, but I don't know how to do it.
There are two questions I don't know how to solve.
1. Pika's Tornado adapter has its own ioloop:
self._connection = pika.SelectConnection(pika.URLParameters(self._url),
                                         self.on_connection_open)
self._connection.ioloop.start()
while the Tornado application has its own ioloop:
tornado.ioloop.IOLoop.instance().start()
How can I combine those two ioloops?
2. The Pika example publishes the same message again and again, but I want to publish request data. How do I pass the request data to the publish method?
While searching for exactly the same thing, I found this blog post by Kevin Jing Qiu.
I went down the RabbitMQ rabbit hole a bit further and gave every websocket its own set of channels and queues.
The extract from my project can be found below. A Tornado application bound to RabbitMQ consists of these parts:
The Tornado application that handles web requests. I only use long-lived websockets here, but you can do the same with short-lived HTTP requests as well.
A single RabbitMQ connection held by the PikaClient instance.
A websocket connection that defines its channels, queues and exchanges when its open method is triggered.
A websocket connection can then receive data from Tornado (data from the browser) via on_message and send it to RabbitMQ.
The websocket connection will receive data from RabbitMQ via basic_consume.
This is not fully functional, but you should get the idea.
class PikaClient(object):

    def __init__(self, io_loop):
        logger.info('PikaClient: __init__')
        self.io_loop = io_loop
        self.connected = False
        self.connecting = False
        self.connection = None
        self.channel = None
        self.message_count = 0

    """
    Pika-Tornado connection setup
    The setup process is a series of callback methods.
    connect: connect to rabbitmq and build connection to tornado io loop ->
    on_connected: create a channel to rabbitmq ->
    on_channel_open: declare queue tornado, bind that queue to exchange
    chatserver_out and start consuming messages.
    """

    def connect(self):
        if self.connecting:
            #logger.info('PikaClient: Already connecting to RabbitMQ')
            return
        #logger.info('PikaClient: Connecting to RabbitMQ')
        self.connecting = True
        cred = pika.PlainCredentials('guest', 'guest')
        param = pika.ConnectionParameters(
            host='localhost',
            port=5672,
            virtual_host='/',
            credentials=cred
        )
        self.connection = TornadoConnection(param,
            on_open_callback=self.on_connected, stop_ioloop_on_close=False)
        self.connection.add_on_close_callback(self.on_closed)

    def on_connected(self, connection):
        logger.info('PikaClient: connected to RabbitMQ')
        self.connected = True
        self.connection = connection
        # now you are able to call the pika api to do things
        # this could be exchange setup for websocket connections to
        # basic_publish to later.
        self.connection.channel(self.on_channel_open)

    def on_channel_open(self, channel):
        logger.info('PikaClient: Channel %s open, Declaring exchange' % channel)
        self.channel = channel

    def on_closed(self, connection):
        logger.info('PikaClient: rabbit connection closed')
        self.io_loop.stop()


class MyWebSocketHandler(websocket.WebSocketHandler):

    def __init__(self):
        self.status = 'not connected yet'

    def open(self, *args, **kwargs):
        self.status = "ws open"
        self.rabbit_connect()  # connect this websocket object to rabbitmq

    def rabbit_connect(self):
        self.application.pc.connection.channel(self.rabbit_channel_in_ok)

    def rabbit_channel_in_ok(self, channel):
        self.channel_in = channel
        self.channel_in.queue_declare(self.rabbit_declare_ok,
                                      exclusive=True, auto_delete=True)

    # and so on...


handlers = [ your_definitions_here_like_websockets_or_such ]
settings = { your_settings_here }
application = tornado.web.Application(handlers, **settings)


def main():
    io_loop = tornado.ioloop.IOLoop.instance()
    # PikaClient is our rabbitmq consumer
    pc = PikaClient(io_loop)
    application.pc = pc
    application.pc.connect()
    application.listen(config.tornadoport)
    try:
        io_loop.start()
    except KeyboardInterrupt:
        io_loop.stop()


if __name__ == '__main__':
    main()
Finally, I figured it out!
The previous solution is outdated for the newest Pika release.
1. My Pika version is 1.0.1.
Warning: the TornadoConnection class has been moved to a new package by a recent pull request:
from pika.adapters import tornado_connection
2. Here is an example; log() and config() are my own helpers and can be ignored (or deleted).
import tornado.web
import tornado.ioloop
import tornado.httpserver
from handlers.notify import NotifyHandler
from function.function import config
from utils.utils import log
import pika
from pika.adapters import tornado_connection

HANDLERS = [(r'/notify', NotifyHandler)]


class PikaClient():

    def __init__(self, io_loop):
        self.io_loop = io_loop
        self.connected = False
        self.connecting = False
        self.connection = None
        self.channel = None
        self.message_count = 9

    def connect(self):
        if self.connecting:
            return
        self.connecting = True
        cred = pika.PlainCredentials('guest', 'guest')
        param = pika.ConnectionParameters(host="10.xxx.xxx.75", credentials=cred)
        self.connection = tornado_connection.TornadoConnection(
            param, custom_ioloop=self.io_loop, on_open_callback=self.on_connected)
        self.connection.add_on_open_error_callback(self.err)
        self.connection.add_on_close_callback(self.on_closed)

    def err(self, conn):
        log('socket error', conn)

    def on_connected(self, conn):
        log('connected')
        self.connected = True
        self.connection = conn
        self.connection.channel(channel_number=1, on_open_callback=self.on_channel_open)

    def on_message(self, channel, method, properties, body):
        log(body)
        print('body : ', body)

    def on_channel_open(self, channel):
        self.channel = channel
        channel.basic_consume(on_message_callback=self.on_message, queue='hello', auto_ack=True)
        return

    def on_closed(self, conn, c):
        log('pika close!')
        self.io_loop.stop()


def main():
    port = 3002
    is_debug = config('sys', 'debug')
    print('DEBUG', is_debug)
    app = tornado.web.Application(
        HANDLERS,
        debug=is_debug,
    )
    io_loop = tornado.ioloop.IOLoop.instance()
    app.pc = PikaClient(io_loop)
    app.pc.connect()
    http_server = tornado.httpserver.HTTPServer(app)
    app.listen(port)
    io_loop.start()
    print('listen {}'.format(port))


if __name__ == '__main__':
    main()
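For the second part of the original question (getting Tornado request data into RabbitMQ), a handler can reach the shared PikaClient through self.application and publish the request body on the already-open channel. NotifyHandler and the 'hello' queue come from the listing above; the handler body itself is only a sketch:

import tornado.web


class NotifyHandler(tornado.web.RequestHandler):

    def post(self):
        pc = self.application.pc
        if pc.channel is None:
            # the RabbitMQ channel has not been opened yet
            self.set_status(503)
            self.finish('RabbitMQ channel not ready')
            return
        # publish on the default exchange; routing_key selects the queue
        pc.channel.basic_publish(exchange='',
                                 routing_key='hello',
                                 body=self.request.body)
        self.finish('queued')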
Well, I'm trying to build a small Python program with a SocketServer that is supposed to send the messages it receives to all connected clients. I'm stuck: I don't know how to store clients on the server side, and I don't know how to send to multiple clients. Oh, and my program fails every time more than one client connects, and every time a client sends more than one message...
Here's my code so far:
import socket
import threading
import SocketServer

# shared list of connected client sockets, used by the handler below
clients = []


class EchoRequestHandler(SocketServer.BaseRequestHandler):

    def setup(self):
        print str(self.client_address[0])+' connected.'

    def handle(self):
        new = 1
        for client in clients:
            if client == self.request:
                new = 0
        if new == 1:
            clients.append(self.request)
        for client in clients:
            data = self.request.recv(1024)
            client.send(data)


class Host:

    def __init__(self):
        self.address = ('localhost', 0)
        self.server = SocketServer.TCPServer(self.address, EchoRequestHandler)
        ip, port = self.server.server_address
        self.t = threading.Thread(target=self.server.serve_forever)
        self.t.setDaemon(True)
        self.t.start()
        print ''
        print 'Hosted with IP: '+ip+' and port: '+str(port)+'. Clients can now connect.'
        print ''

    def close(self):
        self.server.socket.close()


class Client:
    name = ''
    ip = ''
    port = 0

    def __init__(self, ip, port, name):
        self.name = name
        self.hostIp = ip
        self.hostPort = port
        self.s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.s.connect((self.hostIp, self.hostPort))

    def reco(self):
        self.s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.s.connect((self.hostIp, self.hostPort))

    def nick(self, newName):
        self.name = newName

    def send(self, message):
        message = self.name+' : '+message
        len_sent = self.s.send(message)
        response = self.s.recv(len_sent)
        print response
        self.reco()

    def close(self):
        self.s.close()
Obviously I have no idea what I'm doing, so any help would be great.
Thanks in advance!
Edit: I'm using Python 2.7 on Windows Vista.
You want to look at asyncore here. The socket operations you're calling on the client side are blocking (don't return until some data is received or a timeout occurs) which makes it hard to listen for messages sent from the host and let the client instances enqueue data to send at the same time. asyncore is supposed to abstract the timeout-based polling loop away from you.
Here's a code "sample" -- let me know if anything is unclear:
from __future__ import print_function
import asyncore
import collections
import logging
import socket

MAX_MESSAGE_LENGTH = 1024


class RemoteClient(asyncore.dispatcher):
    """Wraps a remote client socket."""

    def __init__(self, host, socket, address):
        asyncore.dispatcher.__init__(self, socket)
        self.host = host
        self.outbox = collections.deque()

    def say(self, message):
        self.outbox.append(message)

    def handle_read(self):
        client_message = self.recv(MAX_MESSAGE_LENGTH)
        self.host.broadcast(client_message)

    def handle_write(self):
        if not self.outbox:
            return
        message = self.outbox.popleft()
        if len(message) > MAX_MESSAGE_LENGTH:
            raise ValueError('Message too long')
        self.send(message)


class Host(asyncore.dispatcher):

    log = logging.getLogger('Host')

    def __init__(self, address=('localhost', 0)):
        asyncore.dispatcher.__init__(self)
        self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
        self.bind(address)
        self.listen(1)
        self.remote_clients = []

    def handle_accept(self):
        socket, addr = self.accept()  # For the remote client.
        self.log.info('Accepted client at %s', addr)
        self.remote_clients.append(RemoteClient(self, socket, addr))

    def handle_read(self):
        self.log.info('Received message: %s', self.read())

    def broadcast(self, message):
        self.log.info('Broadcasting message: %s', message)
        for remote_client in self.remote_clients:
            remote_client.say(message)


class Client(asyncore.dispatcher):

    def __init__(self, host_address, name):
        asyncore.dispatcher.__init__(self)
        self.log = logging.getLogger('Client (%7s)' % name)
        self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
        self.name = name
        self.log.info('Connecting to host at %s', host_address)
        self.connect(host_address)
        self.outbox = collections.deque()

    def say(self, message):
        self.outbox.append(message)
        self.log.info('Enqueued message: %s', message)

    def handle_write(self):
        if not self.outbox:
            return
        message = self.outbox.popleft()
        if len(message) > MAX_MESSAGE_LENGTH:
            raise ValueError('Message too long')
        self.send(message)

    def handle_read(self):
        message = self.recv(MAX_MESSAGE_LENGTH)
        self.log.info('Received message: %s', message)


if __name__ == '__main__':
    logging.basicConfig(level=logging.INFO)
    logging.info('Creating host')
    host = Host()
    logging.info('Creating clients')
    alice = Client(host.getsockname(), 'Alice')
    bob = Client(host.getsockname(), 'Bob')
    alice.say('Hello, everybody!')
    logging.info('Looping')
    asyncore.loop()
Which results in the following output:
INFO:root:Creating host
INFO:root:Creating clients
INFO:Client ( Alice):Connecting to host at ('127.0.0.1', 51117)
INFO:Client ( Bob):Connecting to host at ('127.0.0.1', 51117)
INFO:Client ( Alice):Enqueued message: Hello, everybody!
INFO:root:Looping
INFO:Host:Accepted client at ('127.0.0.1', 55628)
INFO:Host:Accepted client at ('127.0.0.1', 55629)
INFO:Host:Broadcasting message: Hello, everybody!
INFO:Client ( Alice):Received message: Hello, everybody!
INFO:Client ( Bob):Received message: Hello, everybody!
You can use socketserver to broadcast messages to all connected clients. However, the ability is not built in and needs to be implemented by extending some of the classes the module already provides. In the following example, this is implemented using the ThreadingTCPServer and StreamRequestHandler classes. They provide a foundation on which to build but still require some modifications to allow what you are trying to accomplish. The docstrings should help explain what each function, class, and method is trying to do in order to get the job done.
Server
#! /usr/bin/env python3
import argparse
import pickle
import queue
import select
import socket
import socketserver


def main():
    """Start a chat server and serve clients forever."""
    parser = argparse.ArgumentParser(description='Execute a chat server demo.')
    parser.add_argument('port', type=int, help='location where server listens')
    arguments = parser.parse_args()
    server_address = socket.gethostbyname(socket.gethostname()), arguments.port
    server = CustomServer(server_address, CustomHandler)
    server.serve_forever()


class CustomServer(socketserver.ThreadingTCPServer):
    """Provide server support for the management of connected clients."""

    def __init__(self, server_address, request_handler_class):
        """Initialize the server and keep a set of registered clients."""
        super().__init__(server_address, request_handler_class, True)
        self.clients = set()

    def add_client(self, client):
        """Register a client with the internal store of clients."""
        self.clients.add(client)

    def broadcast(self, source, data):
        """Resend data to all clients except for the data's source."""
        for client in tuple(self.clients):
            if client is not source:
                client.schedule((source.name, data))

    def remove_client(self, client):
        """Take a client off the register to disable broadcasts to it."""
        self.clients.remove(client)


class CustomHandler(socketserver.StreamRequestHandler):
    """Allow forwarding of data to all other registered clients."""

    def __init__(self, request, client_address, server):
        """Initialize the handler with a store for future data streams."""
        self.buffer = queue.Queue()
        super().__init__(request, client_address, server)

    def setup(self):
        """Register self with the clients the server has available."""
        super().setup()
        self.server.add_client(self)

    def handle(self):
        """Run a continuous message pump to broadcast all client data."""
        try:
            while True:
                self.empty_buffers()
        except (ConnectionResetError, EOFError):
            pass

    def empty_buffers(self):
        """Transfer data to other clients and write out all waiting data."""
        if self.readable:
            self.server.broadcast(self, pickle.load(self.rfile))
        while not self.buffer.empty():
            pickle.dump(self.buffer.get_nowait(), self.wfile)

    @property
    def readable(self):
        """Check if the client's connection can be read without blocking."""
        return self.connection in select.select(
            (self.connection,), (), (), 0.1)[0]

    @property
    def name(self):
        """Get the client's address to which the server is connected."""
        return self.connection.getpeername()

    def schedule(self, data):
        """Arrange for a data packet to be transmitted to the client."""
        self.buffer.put_nowait(data)

    def finish(self):
        """Remove the client's registration from the server before closing."""
        self.server.remove_client(self)
        super().finish()


if __name__ == '__main__':
    main()
Of course, you also need a client that can communicate with your server and use the same protocol the server speaks. Since this is Python, the decision was made to utilize the pickle module to facilitate data transfer among server and clients. Other data transfer methods could have been used (such as JSON, XML, et cetera), but being able to pickle and unpickle data serves the needs of this program well enough. Documentation is included yet again, so it should not be too difficult to figure out what is going on. Note that server commands can interrupt user data entry.
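For illustration, this is roughly what travels over the wire between the two programs: the client pickles (function, args, kwargs) tuples, and the server re-wraps each one as (source_name, payload) before broadcasting it, which is why handle_server_command in the client below unpacks two levels. A tiny standalone demo of that framing (no sockets involved):

import io
import pickle

wire = io.BytesIO()
# client -> server: a handler name plus its arguments
pickle.dump(('print', ('hello from', 'Alice'), {}), wire)
# server -> other clients: the sender's address wrapped around the same payload
pickle.dump((('192.168.0.2', 54321), ('print', ('hello from', 'Alice'), {})), wire)

wire.seek(0)
print(pickle.load(wire))                              # what the server reads from a client
source, (function, args, kwargs) = pickle.load(wire)  # what a client reads from the server
print(source, function, args, kwargs)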
Client
#! /usr/bin/env python3
import argparse
import cmd
import pickle
import socket
import threading


def main():
    """Connect a chat client to a server and process incoming commands."""
    parser = argparse.ArgumentParser(description='Execute a chat client demo.')
    parser.add_argument('host', type=str, help='name of server on the network')
    parser.add_argument('port', type=int, help='location where server listens')
    arguments = parser.parse_args()
    client = User(socket.create_connection((arguments.host, arguments.port)))
    client.start()


class User(cmd.Cmd, threading.Thread):
    """Provide a command interface for internal and external instructions."""

    prompt = '>>> '

    def __init__(self, connection):
        """Initialize the user interface for communicating with the server."""
        cmd.Cmd.__init__(self)
        threading.Thread.__init__(self)
        self.connection = connection
        self.reader = connection.makefile('rb', -1)
        self.writer = connection.makefile('wb', 0)
        self.handlers = dict(print=print, ping=self.ping)

    def start(self):
        """Begin execution of processor thread and user command loop."""
        super().start()
        super().cmdloop()
        self.cleanup()

    def cleanup(self):
        """Close the connection and wait for the thread to terminate."""
        self.writer.flush()
        self.connection.shutdown(socket.SHUT_RDWR)
        self.connection.close()
        self.join()

    def run(self):
        """Execute an automated message pump for client communications."""
        try:
            while True:
                self.handle_server_command()
        except (BrokenPipeError, ConnectionResetError):
            pass

    def handle_server_command(self):
        """Get an instruction from the server and execute it."""
        source, (function, args, kwargs) = pickle.load(self.reader)
        print('Host: {} Port: {}'.format(*source))
        self.handlers[function](*args, **kwargs)

    def preloop(self):
        """Announce to other clients that we are connecting."""
        self.call('print', socket.gethostname(), 'just entered.')

    def call(self, function, *args, **kwargs):
        """Arrange for a handler to be executed on all other clients."""
        assert function in self.handlers, 'You must create a handler first!'
        pickle.dump((function, args, kwargs), self.writer)

    def do_say(self, arg):
        """Causes a message to appear to all other clients."""
        self.call('print', arg)

    def do_ping(self, arg):
        """Ask all clients to report their presence here."""
        self.call('ping')

    def ping(self):
        """Broadcast to all other clients that we are present."""
        self.call('print', socket.gethostname(), 'is here.')

    def do_exit(self, arg):
        """Disconnect from the server and close the client."""
        return True

    def postloop(self):
        """Make an announcement to other clients that we are leaving."""
        self.call('print', socket.gethostname(), 'just exited.')


if __name__ == '__main__':
    main()
Why use SocketServer? Doesn't a simple socket-based server meet your needs?
import socket

HOST = ''
PORT = 8000

sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.bind((HOST, PORT))
sock.listen(5)
while True:
    conn, addr = sock.accept()
    print 'connecting to', addr
    while True:
        data = conn.recv(1024)
        if not data:
            break
        conn.send(data)
To handle multiple clients simultaneously, you will have to use SocketServer with ForkingMixIn or ThreadingMixIn.
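For completeness, here is a minimal sketch of the ThreadingMixIn variant, in the same Python 2 style as the snippet above; each accepted connection is handled in its own thread, so several clients can be served at once:

import SocketServer


class EchoHandler(SocketServer.BaseRequestHandler):

    def handle(self):
        # echo everything this one client sends until it disconnects
        while True:
            data = self.request.recv(1024)
            if not data:
                break
            self.request.send(data)


class ThreadedTCPServer(SocketServer.ThreadingMixIn, SocketServer.TCPServer):
    allow_reuse_address = True


if __name__ == '__main__':
    server = ThreadedTCPServer(('', 8000), EchoHandler)
    server.serve_forever()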