I need to trigger a Python script every time an email is received on a specific account on my server. I also need the email to be passed to the script as an argument. The server is running Dovecot. How do I go about doing this?
I recommend you have a look at Twisted and specifically its IMAP and POP protocols and client modules.
As an example:
#!/usr/bin/env python
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Simple IMAP4 client which displays the subjects of all messages in a
particular mailbox.
"""
import sys
from twisted.internet import protocol
from twisted.internet import ssl
from twisted.internet import defer
from twisted.internet import stdio
from twisted.mail import imap4
from twisted.protocols import basic
from twisted.python import util
from twisted.python import log
class TrivialPrompter(basic.LineReceiver):
from os import linesep as delimiter
promptDeferred = None
def prompt(self, msg):
assert self.promptDeferred is None
self.display(msg)
self.promptDeferred = defer.Deferred()
return self.promptDeferred
def display(self, msg):
self.transport.write(msg)
def lineReceived(self, line):
if self.promptDeferred is None:
return
d, self.promptDeferred = self.promptDeferred, None
d.callback(line)
class SimpleIMAP4Client(imap4.IMAP4Client):
"""
A client with callbacks for greeting messages from an IMAP server.
"""
greetDeferred = None
def serverGreeting(self, caps):
self.serverCapabilities = caps
if self.greetDeferred is not None:
d, self.greetDeferred = self.greetDeferred, None
d.callback(self)
class SimpleIMAP4ClientFactory(protocol.ClientFactory):
usedUp = False
protocol = SimpleIMAP4Client
def __init__(self, username, onConn):
self.ctx = ssl.ClientContextFactory()
self.username = username
self.onConn = onConn
def buildProtocol(self, addr):
"""
Initiate the protocol instance. Since we are building a simple IMAP
client, we don't bother checking what capabilities the server has. We
just add all the authenticators twisted.mail has. Note: Gmail no
longer uses any of the methods below, it's been using XOAUTH since
2010.
"""
assert not self.usedUp
self.usedUp = True
p = self.protocol(self.ctx)
p.factory = self
p.greetDeferred = self.onConn
p.registerAuthenticator(imap4.PLAINAuthenticator(self.username))
p.registerAuthenticator(imap4.LOGINAuthenticator(self.username))
p.registerAuthenticator(
imap4.CramMD5ClientAuthenticator(self.username))
return p
def clientConnectionFailed(self, connector, reason):
d, self.onConn = self.onConn, None
d.errback(reason)
def cbServerGreeting(proto, username, password):
"""
Initial callback - invoked after the server sends us its greet message.
"""
# Hook up stdio
tp = TrivialPrompter()
stdio.StandardIO(tp)
# And make it easily accessible
proto.prompt = tp.prompt
proto.display = tp.display
# Try to authenticate securely
return proto.authenticate(password
).addCallback(cbAuthentication, proto
).addErrback(ebAuthentication, proto, username, password
)
def ebConnection(reason):
"""
Fallback error-handler. If anything goes wrong, log it and quit.
"""
log.startLogging(sys.stdout)
log.err(reason)
return reason
def cbAuthentication(result, proto):
"""
Callback after authentication has succeeded.
Lists a bunch of mailboxes.
"""
return proto.list("", "*"
).addCallback(cbMailboxList, proto
)
def ebAuthentication(failure, proto, username, password):
"""
Errback invoked when authentication fails.
If it failed because no SASL mechanisms match, offer the user the choice
of logging in insecurely.
If you are trying to connect to your Gmail account, you will be here!
"""
failure.trap(imap4.NoSupportedAuthentication)
return proto.prompt(
"No secure authentication available. Login insecurely? (y/N) "
).addCallback(cbInsecureLogin, proto, username, password
)
def cbInsecureLogin(result, proto, username, password):
"""
Callback for "insecure-login" prompt.
"""
if result.lower() == "y":
# If they said yes, do it.
return proto.login(username, password
).addCallback(cbAuthentication, proto
)
return defer.fail(Exception("Login failed for security reasons."))
def cbMailboxList(result, proto):
"""
Callback invoked when a list of mailboxes has been retrieved.
"""
result = [e[2] for e in result]
s = '\n'.join(['%d. %s' % (n + 1, m) for (n, m) in zip(range(len(result)), result)])
if not s:
return defer.fail(Exception("No mailboxes exist on server!"))
return proto.prompt(s + "\nWhich mailbox? [1] "
).addCallback(cbPickMailbox, proto, result
)
def cbPickMailbox(result, proto, mboxes):
"""
When the user selects a mailbox, "examine" it.
"""
mbox = mboxes[int(result or '1') - 1]
return proto.examine(mbox
).addCallback(cbExamineMbox, proto
)
def cbExamineMbox(result, proto):
"""
Callback invoked when examine command completes.
Retrieve the subject header of every message in the mailbox.
"""
return proto.fetchSpecific('1:*',
headerType='HEADER.FIELDS',
headerArgs=['SUBJECT'],
).addCallback(cbFetch, proto
)
def cbFetch(result, proto):
"""
Finally, display headers.
"""
if result:
keys = result.keys()
keys.sort()
for k in keys:
proto.display('%s %s' % (k, result[k][0][2]))
else:
print "Hey, an empty mailbox!"
return proto.logout()
def cbClose(result):
"""
Close the connection when we finish everything.
"""
from twisted.internet import reactor
reactor.stop()
def main():
hostname = raw_input('IMAP4 Server Hostname: ')
port = raw_input('IMAP4 Server Port (the default is 143, 993 uses SSL): ')
username = raw_input('IMAP4 Username: ')
password = util.getPassword('IMAP4 Password: ')
onConn = defer.Deferred(
).addCallback(cbServerGreeting, username, password
).addErrback(ebConnection
).addBoth(cbClose)
factory = SimpleIMAP4ClientFactory(username, onConn)
from twisted.internet import reactor
if port == '993':
reactor.connectSSL(hostname, int(port), factory, ssl.ClientContextFactory())
else:
if not port:
port = 143
reactor.connectTCP(hostname, int(port), factory)
reactor.run()
if __name__ == '__main__':
main()
From here you just need to call your Python script in a subprocess whenever you detect a new email in the IMAP folder you're checking. You could use Twisted's process handling for this, as documented here: https://twistedmatrix.com/documents/13.0.0/core/howto/process.html
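A minimal sketch of that last step (the script path and function name are placeholders, not part of the example above): run your handler script as a child process and pass the raw message along as an argument.
import sys
from twisted.internet import utils
from twisted.python import log

def run_handler_script(raw_message):
    # For large messages you may prefer writing to the child's stdin instead
    # of an argv entry, which has OS-level length limits.
    d = utils.getProcessOutput(sys.executable,
                               ["/path/to/handle_mail.py", raw_message])
    d.addErrback(log.err)
    return d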
Good luck!
The IMAP server may notify you about new messages if it supports the IDLE command. If it doesn't support it, you could poll the inbox periodically using imaplib from the stdlib (for example, to retrieve messages from a given time period).
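For the polling route, here is a minimal sketch using the stdlib imaplib (host and credentials are placeholders):
import imaplib

def fetch_unseen(host, user, password):
    conn = imaplib.IMAP4_SSL(host)
    conn.login(user, password)
    conn.select('INBOX')
    typ, data = conn.search(None, 'UNSEEN')    # message numbers of unread mail
    messages = []
    for num in data[0].split():
        typ, msg_data = conn.fetch(num, '(RFC822)')
        messages.append(msg_data[0][1])        # raw RFC 822 message bytes
    conn.logout()
    return messages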
imaplib doesn't support the IDLE command, but it can easily be added, e.g. with imapidle:
from imapidle import imaplib
m = imaplib.IMAP4_SSL('imap.gmail.com')
m.login('robert', 'pa55w0rd')
m.select()
for uid, msg in m.idle(): # yield new messages
print msg
See also How do I enable push-notification for IMAP (Gmail) using Python imaplib?
So I've been working with the Paramiko library. I have a client and two servers, Server A and Server B. The client connects to Server A and then requests a reverse-forwarded tunnel to Server B. There is a lot more functionality to write into it, but my problem at the moment is very fundamental and likely has a very simple answer that I'm somehow overlooking or not understanding.
What I am trying to do at this point is have Server A send some information to Server B every time it connects to it, which, because of a timer on the client, should happen about once a minute after a connection is closed (reconnecting each time).
So I want Server A to send Server B some information each time it connects to it. My question is how best to achieve that.
My first thought was to have the client send a command to Server A after the reverse tunnel is connected; the command (which is a string) would be forwarded by Server A to Server B, and the response to that command would then be sent to Server B. My understanding here may be wrong, which is why I'm checking.
The other option as I see it is to have Server A push the data to Server B. But I don't know how to check for when a reverse-forwarded tunnel is created. I could do it for any connection, but that seems inefficient, since the client would get some data and then the data would be sent again when the reverse-forward tunnel is created. (Again, I'm likely overlooking something simple here.)
So I'm curious: given my code's present state, what could I do or change that would let me check for when a reverse-forward tunnel is made to Server B, so I can send the data I want to send to it?
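One possible hook, sketched here only as an illustration (not from the post): paramiko calls ServerInterface.check_port_forward_request on Server A when the client asks for the reverse forward, so Server A can react at that moment; on the side that requested the forward, each transport.accept() in reverse_forward_tunnel already marks a new tunneled connection.
import paramiko

class NotifyingServerInterface(paramiko.ServerInterface):
    # Same idea as the SshServerInterface below, plus a callback that fires
    # when the reverse forward is requested (on_tunnel is hypothetical).
    def __init__(self, on_tunnel):
        self._on_tunnel = on_tunnel

    def check_port_forward_request(self, address, port):
        self._on_tunnel(address, port)   # e.g. push data towards Server B here
        return port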
Thank you for taking the time to try and help me here, and yes, I understand that hardcoding passwords etc. is a bad idea for application security.
The code below is the client and server code (which, again, needs some work but is getting there).
Client Code
import getpass
import os
import socket
import select
import sys
import threading
import paramiko
from paramiko import Channel
import schedule
import time
import string
import random
from optparse import OptionParser
IP = '127.0.0.1'
USER = 'user'
PASSWORD = 'CrabRave'
PORT = 900
REMOTE_PORT = 443
REMOTE_IP = ... ###Remote IP will go here.
def handler(chan, host, port):
sock = socket.socket()
try:
sock.connect((IP, PORT))
except Exception as e:
Timer()
def ssh_client(IP, PORT, USER, PASSWORD):
client = paramiko.SSHClient()
client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
client.connect(IP, PORT, USER, PASSWORD)
ssh_session = client.get_transport().open_session()
def reverse_forward_tunnel(PORT, REMOTE_IP, REMOTE_PORT, transport):
transport.request_port_forward("", PORT)
while True:
chan = transport.accept(1000)
if chan is None:
continue
thr = threading.Thread(
target=handler, args=(chan, REMOTE_IP, REMOTE_PORT))
thr.setDaemon(True)
thr.start()
def Timer():
if Channel.is_active():
schedule.every(1).seconds.do(Timer)
else: schedule.every(1).minutes.do(main)
def main():
client = ssh_client
try:
ssh_client(IP, PORT, USER, PASSWORD)
except Exception as E:
Timer()
try:
reverse_forward_tunnel(PORT, REMOTE_IP, REMOTE_PORT, client.get_transport())
except KeyboardInterrupt:
Timer()
try: Timer()
except Exception as E:
        Timer()
if __name__ == '__main__':
main()
Server Code
from abc import ABC, abstractmethod
from sys import platform
from Shell import Shell
from cmd import Cmd
from src.server_base import ServerBase
from src.ssh_server_interface import SshServerInterface
from src.shell import Shell
from src.ssh_server import SshServer
import base64
import os
import socket
import sys
import paramiko
from paramiko import Channel
import threading
import string
import random
my_key = ''.join(random.SystemRandom().choice(string.ascii_letters + string.digits) for _ in range(100))
class Shell(Cmd):
use_rawinput=False
    prompt='My Shell> '
def __init__(self, stdin=None, stdout=None):
        super(Shell, self).__init__(completekey='tab', stdin=stdin, stdout=stdout)
def print(self, value):
if self.stdout and not self.stdout.closed:
self.stdout.write(value)
self.stdout.flush()
def printline(self, value):
self.print(value + '\r\n')
def emptyline(self):
self.print('\r\n')
class ServerBase(ABC):
def __init__(self):
self._is_running = threading.Event()
self._socket = None
self.client_shell = None
self._listen_thread = None
def start(self, address='127.0.0.1', port=900, timeout=1):
if not self._is_running.is_set():
self._is_running.set()
            self._socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
if platform == "linux" or platform == "linux2":
                self._socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, True)
self._socket.settimeout(timeout)
self._socket.bind((address, port))
self._listen_thread = threading.Thread(target=self._listen)
self._listen_thread.start()
def stop(self):
if self._is_running.is_set():
self._is_running.clear()
self._listen_thread.join()
self._socket.close()
    def _listen(self):
while self._is_running.is_set():
try:
self._socket.listen()
client, addr = self._socket.accept()
self.connection_function(client)
except socket.timeout:
pass
    @abstractmethod
def connection_function(self, client):
pass
class SshServerInterface(paramiko.ServerInterface):
def check_channel_request(self, kind, chanid):
if kind == "session":
return paramiko.OPEN_SUCCEEDED
def check_auth_password(self, username: str, password: str) -> int:
if (username == "user") and (password == "CrabRave"):
return paramiko.AUTH_SUCCESSFUL
return paramiko.AUTH_FAILED
def check_channel_pty_request(self, channel: Channel, term: bytes, width: int, height: int, pixelwidth: int, pixelheight: int, modes: bytes):
return True
def check_channel_shell_request(self, channel: Channel) -> bool:
return True
def check_channel_env_request(self, channel: Channel, name: bytes, value: bytes) -> bool:
return True
def check_port_forward_request(self, address: str, port: int) -> int:
return port
class SshServer(ServerBase):
def __init__(self, host_key_file, host_key_file_password=None):
super(SshServer, self).__init__()
self._host_key = paramiko.RSAKey.from_private_key_file(StringIO.StringIO(my_key))
def connection_function(self, client):
try:
session = paramiko.Transport(client)
session.add_server_key(self._host_key)
server = SshServerInterface()
try:
session.start_server(server=server)
except paramiko.SSHException:
return
channel = session.accept()
stdio = channel.makefile('rwU')
            self.client_shell = Shell(stdio, stdio)
self.client_shell.cmdloop()
session.close()
except:
pass
if __name__ == '__main__':
server = SshServer(my_key)
server.start()
I have a Python Twisted server application interfacing with a legacy client application, and each client has been assigned a specific port on which to connect to the server. So I have set up listeners on all those ports on the server, and it is working beautifully, but I need to build in some safeguards to disallow more than one client connecting on the same server port. Too many things in the client application break when another client is connected to the same port, and I can't update that application right now; I have to live with how it has been operating.
I know that I could build some logic into the connectionMade() function to see if someone already exists on that port and, if so, close the new connection. But I'd rather there be a way to reject it to begin with, so the client is not even allowed to connect. Then the client will know they made a mistake and can change which port they are trying to connect on.
Here is a stripped down version of my server code if that helps.
from twisted.internet.protocol import Factory
from twisted.internet.protocol import Protocol
from twisted.internet import reactor
from twisted.internet import task
import time
class MyServerTasks():
    def someFunction(msg):
        # Do stuff
        pass

    def someOtherFunction(msg):
        # Do other stuff
        pass
class MyServer(Protocol):
def __init__(self, users):
self.users = users
self.name = None
    def connectionMade(self):
        # Depending on which port is connected, go do stuff
        pass

    def connectionLost(self, reason):
        # Update dictionaries and other global info
        pass
def dataReceived(self, line):
t = time.strftime('%Y-%m-%d %H:%M:%S')
d = self.transport.getHost()
print("{} Received message from {}:{}...{}".format(t, d.host, d.port, line)) #debug
self.handle_GOTDATA(line)
def handle_GOTDATA(self, msg):
#Parse the received data string and do stuff based on the message.
#For example:
if "99" in msg:
MyServerTasks.someFunction(msg)
class MyServerFactory(Factory):
def __init__(self):
self.users = {} # maps user names to Chat instances
def buildProtocol(self, *args, **kwargs):
protocol = MyServer(self.users)
protocol.factory = self
protocol.factory.clients = []
return protocol
reactor.listenTCP(50010, MyServerFactory())
reactor.listenTCP(50011, MyServerFactory())
reactor.listenTCP(50012, MyServerFactory())
reactor.listenTCP(50013, MyServerFactory())
reactor.run()
When a client connects to the server, Twisted uses the factory to create a protocol instance (by calling its buildProtocol method) to handle the client request. Therefore you can maintain a counter of connected clients in your MyServerFactory; if the counter has reached the maximum allowed number of connected clients, you can return None instead of creating a new protocol for that client. Twisted will close the client connection if the factory doesn't return a protocol from its buildProtocol method. You can see it here:
class MyServer(Protocol):
def __init__(self, users):
self.users = users
self.name = None
    def connectionMade(self):
        # Depending on which port is connected, go do stuff
        pass
def connectionLost(self, reason):
#Update dictionaries and other global info
self.factory.counter -= 1
def dataReceived(self, line):
t = time.strftime('%Y-%m-%d %H:%M:%S')
d = self.transport.getHost()
print("{} Received message from {}:{}...{}".format(t, d.host, d.port, line)) #debug
self.handle_GOTDATA(line)
def handle_GOTDATA(self, msg):
#Parse the received data string and do stuff based on the message.
#For example:
if "99" in msg:
MyServerTasks.someFunction(msg)
class MyServerFactory(Factory):
MAX_CLIENT = 2
def __init__(self):
self.users = {} # maps user names to Chat instances
self.counter = 0
def buildProtocol(self, *args, **kwargs):
if self.counter == self.MAX_CLIENT:
return None
self.counter += 1
protocol = MyServer(self.users)
protocol.factory = self
protocol.factory.clients = []
return protocol
I'm trying to integrate an aiohttp web server into a Crossbar+Autobahn system architecture.
More specifically, when the aiohttp server receives a certain API call, it has to publish a message to a Crossbar router.
I've seen this example in the official repos, but I have no clue how to integrate it into my application.
Ideally, I would like to be able to do this:
# class SampleTaskController(object):
async def handle_get_request(self, request: web.Request) -> web.Response:
self.publisher.publish('com.myapp.topic1', 'Hello World!')
return web.HTTPOk()
where self is an instance of SampleTaskController, which defines all the route handlers of the web server.
def main(argv):
cfg_path = "./task_cfg.json"
if len(argv) > 1:
cfg_path = argv[0]
logging.basicConfig(level=logging.DEBUG,
format=LOG_FORMAT)
loop = zmq.asyncio.ZMQEventLoop()
asyncio.set_event_loop(loop)
app = web.Application(loop=loop)
with open(cfg_path, 'r') as f:
task_cfg = json.load(f)
task_cfg['__cfg_path'] = cfg_path
controller = SampleTaskController(task_cfg)
controller.restore()
app['controller'] = controller
controller.setup_routes(app)
app.on_startup.append(controller.on_startup)
app.on_cleanup.append(controller.on_cleanup)
web.run_app(app,
host=task_cfg['webserver_address'],
port=task_cfg['webserver_port'])
Notice that I'm using a zmq.asyncio.ZMQEventLoop because the server is also listening on a ZMQ socket, which is configured inside the controller.on_startup method.
Instead of using autobahn, I've also tried publishing the message to Crossbar using wampy, and it works, but the autobahn subscribers couldn't correctly parse the message.
# autobahn subscriber
class ClientSession(ApplicationSession):
async def onJoin(self, details):
self.log.info("Client session joined {details}", details=details)
self.log.info("Connected: {details}", details=details)
self._ident = details.authid
self._type = u'Python'
self.log.info("Component ID is {ident}", ident=self._ident)
self.log.info("Component type is {type}", type=self._type)
# SUBSCRIBE
def gen_on_something(thing):
def on_something(counter, id, type):
print('----------------------------')
self.log.info("'on_{something}' event, counter value: {message}",something=thing, message=counter)
self.log.info("from component {id} ({type})", id=id, type=type)
return on_something
await self.subscribe(gen_on_something('landscape'), 'landscape')
await self.subscribe(gen_on_something('nature'), 'nature')
# wampy publisher
async def publish():
router = Crossbar(config_path='./crossbar.json')
logging.getLogger().debug(router.realm)
logging.getLogger().debug(router.url)
logging.getLogger().debug(router.port)
client = Client(router=router)
client.start()
result = client.publish(topic="nature", message=0)
logging.getLogger().debug(result)
With this configuration the subscriber receives the published message, but it gets an exception while parsing it:
TypeError: on_something() got an unexpected keyword argument 'message'
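For what it's worth, the traceback suggests the payload arrives as a message keyword argument (matching wampy's client.publish(topic="nature", message=0)), which the positional on_something(counter, id, type) signature cannot accept. A tolerant handler, sketched here only as an illustration, could look like this:
def gen_on_something(thing):
    def on_something(*args, **kwargs):
        # wampy-style publishes arrive as kwargs, e.g. {'message': 0}
        print('on_%s event, payload: %r %r' % (thing, args, kwargs))
    return on_something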
Recently I tried to use aiohttp and autobahn simultaneously. I reworked the example from the Crossbar documentation (originally using Twisted) and got the following code:
import asyncio
import logging
from aiohttp import web
from aiohttp.web_exceptions import HTTPOk, HTTPInternalServerError
from autobahn.asyncio.component import Component
# Setup logging
logger = logging.getLogger(__name__)
class WebApplication(object):
"""
A simple Web application that publishes an event every time the
url "/" is visited.
"""
count = 0
def __init__(self, app, wamp_comp):
self._app = app
self._wamp = wamp_comp
self._session = None # "None" while we're disconnected from WAMP router
# associate ourselves with WAMP session lifecycle
self._wamp.on('join', self._initialize)
self._wamp.on('leave', self._uninitialize)
self._app.router.add_get('/', self._render_slash)
def _initialize(self, session, details):
logger.info("Connected to WAMP router (session: %s, details: %s)", session, details)
self._session = session
def _uninitialize(self, session, reason):
logger.warning("Lost WAMP connection (session: %s, reason: %s)", session, reason)
self._session = None
async def _render_slash(self, request):
if self._session is None:
return HTTPInternalServerError(reason="No WAMP session")
self.count += 1
self._session.publish(u"com.myapp.request_served", self.count, count=self.count)
return HTTPOk(text="Published to 'com.myapp.request_served'")
def main():
REALM = "crossbardemo"
BROKER_URI = "ws://wamp_broker:9091/ws"
BIND_ADDR = "0.0.0.0"
BIND_PORT = 8080
logging.basicConfig(
level='DEBUG',
format='[%(asctime)s %(levelname)s %(name)s:%(lineno)d]: %(message)s')
logger.info("Starting aiohttp backend at %s:%s...", BIND_ADDR, BIND_PORT)
loop = asyncio.get_event_loop()
component = Component(
transports=BROKER_URI,
realm=REALM,
)
component.start(loop=loop)
# When not using run() we also must start logging ourselves.
import txaio
txaio.start_logging(level='info')
app = web.Application(
loop=loop)
_ = WebApplication(app, component)
web.run_app(app, host=BIND_ADDR, port=BIND_PORT)
if __name__ == '__main__':
main()
I'm trying to make a simple distributed job client/server system in Twisted. Basically the steps are:
Start up the JobServer with a few jobs and associated files
Start up JobClient instances, they connect to JobServer and ask for Jobs
Server gives JobClient job and sends serialized JSON over TCP
After perhaps a lot of computation, the JobClient sends back a result and waits for new job
Rinse and repeat
But I'm having trouble debugging my protocol on a local machine.
JobServer.py
from twisted.application import internet, service
from twisted.internet import reactor, protocol, defer
from twisted.protocols import basic
from twisted.protocols.basic import Int32StringReceiver
from twisted.web import client
import random
import json
import base64
from logger import JobLogger
class JobServerProtocol(Int32StringReceiver):
log = JobLogger("server.log")
def connectionMade(self):
self.log.write("Connected to client")
self.sendJob(None)
def stringReceived(self, msg):
self.log.write("Recieved job from client: %s" % msg)
self.sendJob(msg)
def sendJob(self, msg):
d = self.factory.getJob(msg)
def onError(err):
self.transport.write("Internal server error")
d.addErrback(onError)
def sendString(newjob_dict):
encoded_str = json.dumps(newjob_dict)
self.transport.write(encoded_str)
self.log.write("Sending job to client: %s" % encoded_str)
d.addCallback(sendString)
def lengthLimitExceeded(self, msg):
self.transport.loseConnection()
class JobServerFactory(protocol.ServerFactory):
protocol = JobServerProtocol
def __init__(self, jobs, files):
assert len(jobs) == len(files)
self.jobs = jobs
self.files = files
self.results = []
def getJob(self, msg):
# on startup the client will not have a message to send
if msg:
# recreate pickled msg
msg_dict = json.loads(msg)
self.results.append((msg_dict['result'], msg_dict['jidx']))
# if we're all done, let the client know
if len(self.jobs) == 0:
job = None
jidx = -1
encoded = ""
else:
# get new job for client to process
jidx = random.randint(0, len(self.jobs) - 1)
job = self.jobs[jidx]
del self.jobs[jidx]
# get file
with open(self.files[jidx], 'r') as f:
filecontents = f.read()
encoded = base64.b64encode(filecontents)
# create dict object to send to client
response_msg = {
"job" : job,
"index" : jidx,
"file" : encoded
}
return defer.succeed(response_msg)
# args for factory
files = ['test.txt', 'test.txt', 'test.txt']
jobs = ["4*4-5", "2**2-5", "2/9*2/3"]
application = service.Application('jobservice')
factory = JobServerFactory(jobs=jobs, files=files)
internet.TCPServer(12345, factory).setServiceParent(
service.IServiceCollection(application))
JobClient.py
from twisted.internet import reactor, protocol
from twisted.protocols.basic import Int32StringReceiver
import json
import time
from logger import JobLogger
class JobClientProtocol(Int32StringReceiver):
log = JobLogger("client.log")
def stringReceived(self, msg):
# unpack job from server
server_msg_dict = json.loads(msg)
job = server_msg_dict["job"]
index = server_msg_dict["index"]
filestring = server_msg_dict["file"]
if index == -1:
# we're done with all tasks
self.transport.loseConnection()
self.log.write("Recieved job %d from server with file '%s'" % (index, filestring))
# do something with file
# job from the server...
time.sleep(5)
result = { "a" : 1, "b" : 2, "c" : 3}
result_msg = { "result" : result, "jidx" : index }
self.log.write("Completed job %d from server with result '%s'" % (index, result))
# serialize and tell server
result_str = json.dumps(result_msg)
        self.transport.write(result_str)
def lengthLimitExceeded(self, msg):
self.transport.loseConnection()
class JobClientFactory(protocol.ClientFactory):
def buildProtocol(self, addr):
p = JobClientProtocol()
p.factory = self
return p
reactor.connectTCP("127.0.0.1", 12345, JobClientFactory())
reactor.run()
logger.py
class JobLogger(object):
def __init__(self, filename):
self.log = open(filename, 'a')
def write(self, string):
self.log.write("%s\n" % string)
def close(self):
self.log.close()
Running, testing locally with only one client:
$ twistd -y JobServer.py -l ./jobserver.log --pidfile=./jobserver.pid
$ python JobClient.py
Problems I'm having:
The client and server .log files don't get written to reliably - sometimes not until after I kill the process.
The protocol gets stuck after the client connects and the server sends back a message. The message seemingly never gets to the client.
In general, I hope these protocols ensure that operations on either side can take any amount of time, but perhaps I didn't design that correctly.
The client and server .log files don't get written to reliably - sometimes not until after I kill the process.
If you want bytes to appear on disk in a timely manner, you may need to call flush on your file object.
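For example, the JobLogger from the question could flush after each write:
class JobLogger(object):
    def __init__(self, filename):
        self.log = open(filename, 'a')

    def write(self, string):
        self.log.write("%s\n" % string)
        self.log.flush()   # push the line to disk right away

    def close(self):
        self.log.close()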
The protocol gets stuck after the client connects and the server sends back a message. The message seemingly never gets to the client.
The server doesn't send int32 strings to the client: it calls transport.write directly. The client gets confused because these end up looking like extremely long int32 strings. For example, the first four bytes of "Internal server error" decode as the integer 1702129225 so if there is an error on the server and these bytes are sent to the client, the client will wait for roughly 2GB of data before proceeding.
Use Int32StringReceiver.sendString instead.
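Applied to the question's server, the sendJob callbacks could be reworked roughly like this (a sketch based on the code above):
def sendJob(self, msg):
    d = self.factory.getJob(msg)

    def onError(err):
        self.sendString("Internal server error")   # framed, not a raw write
    d.addErrback(onError)

    def cbSendJob(newjob_dict):
        encoded = json.dumps(newjob_dict)
        self.sendString(encoded)   # prefixes the 4-byte length for the client
        self.log.write("Sending job to client: %s" % encoded)
    d.addCallback(cbSendJob)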
I have this simple XMPP bot code written in Python with http://xmpppy.sourceforge.net/:
#!/usr/bin/python
# -*- coding: utf-8 -*-
import xmpp
import urllib2
import ConfigParser
config = ConfigParser.ConfigParser()
config.read('ipbot.conf')
##########################
user= (config.get('account', 'login'))
password=(config.get('account', 'password'))
presence=(config.get('presence','presence'))
##########################
jid=xmpp.protocol.JID(user)
client=xmpp.Client(jid.getDomain())
client.connect()
client.auth(jid.getNode(),password)
################Parse IP##################
strURL='http://api.wipmania.com/'
f = urllib2.urlopen(urllib2.Request(strURL))
response = f.read()
ipget= response.split("<br>")
f.close()
#############################################
def status(xstatus):
status=xmpp.Presence(status=xstatus,show=presence,priority='1')
    client.send(status)
def message(conn,mess):
global client
if ( mess.getBody() == "ip" ):
client.send(xmpp.protocol.Message(mess.getFrom(),ipget[1]+" => "+ipget[0]))#Send IP
client.RegisterHandler('message',message)
client.sendInitPresence()
while True:
client.Process(1)
Please tell me how to translate this code to use http://wokkel.ik.nu/ and twistedmatrix.com/.
Thanks a lot.
The following code should do it. A few notes:
Wokkel uses so-called subprotocol handlers to cover support for specific
subprotocols, usually split up by conceptual feature, by namespace or per
XEP.
XMPPClient is a so-called stream manager that establishes connections,
and takes care of authentication with the server. It works with the
hooked-up subprotocol handlers to process traffic with the XML stream it
manages. It automatically reconnects if the connection has been lost.
This example defines one new handler to process incoming messages.
Unlike the original code, here the request to retrieve the IP address is
done for each incoming message with ip in the body.
In the original code, status was never called. I now use the PresenceProtocol subprotocol handler to send out presence each time a connection has been established and authentication has taken place.
The example is a so-called Twisted Application, to be started using twistd, as mentioned in the docstring. This will daemonize the process and logs go to twisted.log. If you specify -n (before -y), it will not detach and log to the console instead.
#!/usr/bin/python
"""
XMPP example client that replies with its IP address upon request.
Usage:
twistd -y ipbot.tac
"""
import ConfigParser
from twisted.application import service
from twisted.python import log
from twisted.web.client import getPage
from twisted.words.protocols.jabber.jid import JID
from twisted.words.protocols.jabber.xmlstream import toResponse
from wokkel.client import XMPPClient
from wokkel.xmppim import PresenceProtocol, MessageProtocol
class IPHandler(MessageProtocol):
"""
Message handler that sends presence and returns its IP upon request.
    @ivar presenceHandler: Presence subprotocol handler.
    @type presenceHandler: L{PresenceProtocol}
    @ivar show: Presence show value to send upon connecting.
    @type show: C{unicode} or C{NoneType}
"""
def __init__(self, presenceHandler, show=None):
self.presenceHandler = presenceHandler
self.show = show
def connectionInitialized(self):
"""
Connection established and authenticated.
Use the given presence handler to send presence.
"""
MessageProtocol.connectionInitialized(self)
self.presenceHandler.available(show=self.show, priority=1)
def onMessage(self, message):
"""
A message has been received.
If the body of the incoming message equals C{"ip"}, retrieve our
IP address and format the response message in the callback.
"""
def onPage(page):
address, location = page.split(u"<br>")
body = u"%s => %s" % (location, address)
response = toResponse(message, stanzaType=message['type'])
response.addElement("body", content=body)
self.send(response)
if unicode(message.body) != u"ip":
return
d = getPage("http://api.wipmania.com")
d.addCallback(onPage)
d.addErrback(log.err)
# Read the configuration file
config = ConfigParser.ConfigParser()
config.read('ipbot.conf')
user = config.get('account', 'login')
password = config.get('account', 'password')
presence = config.get('presence','presence')
# Set up a Twisted application.
application = service.Application('XMPP client')
# Set up an XMPP Client.
jid = JID(user)
client = XMPPClient(jid, password)
client.logTraffic = True
client.setServiceParent(application)
# Add a presence handler.
presenceHandler = PresenceProtocol()
presenceHandler.setHandlerParent(client)
# Add our custom handler
ipHandler = IPHandler(presenceHandler, presence)
ipHandler.setHandlerParent(client)