Can someone help me figure out why I am getting this exception? Even though I have the subscribe key configured, it seems like it is not being picked up.
Here is the exception: "Exception in subscribe loop: Subscribe key not configured
reconnection policy is disabled, please handle reconnection manually."
import time
from pubnub.pubnub import PubNub
from pubnub.pnconfiguration import PNConfiguration
from pubnub.callbacks import SubscribeCallback
from backend.blockchain.block import Block
pnconfig = PNConfiguration()
pnconfig.suscribe_key = 'sub-c-6d0fe192-dee4-11ea-9b19-...'
pnconfig.publish_key = 'pub-c-c3553c68-bf24-463c-ae43-...'
CHANNELS = {
'TEST': 'TEST',
'BLOCK': 'BLOCK'
}
class Listener(SubscribeCallback):
def __init__(self, blockchain):
self.blockchain = blockchain
def message(self, pubnub, message_object):
        print(f'\n-- Channel: {message_object.channel} | Message: {message_object.message}')
if message_object.channel == CHANNELS['BLOCK']:
block = Block.from_json(message_object.message)
potential_chain = self.blockchain.chain[:]
potential_chain.append(block)
try:
self.blockchain.replace_chain(potential_chain)
print('\n -- Successfully replaced the local chain')
except Exception as e:
                print(f'\n -- Did not replace chain: {e}')
class PubSub():
"""
Handles the publish/subscribe layer of the application.
Provides communication between the nodes of the blockchain network.
"""
def __init__(self, blockchain):
self.pubnub = PubNub(pnconfig)
self.pubnub.subscribe().channels(CHANNELS.values()).execute()
self.pubnub.add_listener(Listener(blockchain))
def publish(self, channel, message):
"""
Publish the message object to the channel.
"""
self.pubnub.publish().channel(channel).message(message).sync()
def broadcast_block(self, block):
"""
Broadcast a block object to all nodes.
"""
self.publish(CHANNELS['BLOCK'], block.to_json())
def main():
pubsub = PubSub()
time.sleep(1)
pubsub.publish(CHANNELS['TEST'], { 'foo': 'bar' })
if __name__ == '__main__':
main()
I wasn't able to reproduce the error "Exception in subscribe loop: Subscribe key not configured reconnection policy is disabled, please handle reconnection manually.".
I used the configuration block from the PubNub Python configuration docs (https://www.pubnub.com/docs/python/api-reference-configuration):
pnconfig = PNConfiguration()
pnconfig.subscribe_key = "my_subkey"
pnconfig.publish_key = "my_pubkey"
pnconfig.ssl = True
pnconfig.uuid = "my_custom_uuid"
pnconfig.reconnect_policy = PNReconnectionPolicy.LINEAR
pubnub = PubNub(pnconfig)
and also tried setting the policy to PNReconnectionPolicy.NONE from the same page.
Note that the working configuration sets pnconfig.subscribe_key, while the code in the question sets pnconfig.suscribe_key (missing a "b"), which leaves the real subscribe key unset and matches the reported exception.
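For completeness, a minimal sketch of that configuration with the imports spelled out (PNReconnectionPolicy comes from pubnub.enums; the key values are the docs placeholders):
from pubnub.enums import PNReconnectionPolicy
from pubnub.pnconfiguration import PNConfiguration
from pubnub.pubnub import PubNub

pnconfig = PNConfiguration()
pnconfig.subscribe_key = "my_subkey"   # note the spelling: subscribe_key
pnconfig.publish_key = "my_pubkey"
pnconfig.uuid = "my_custom_uuid"
# NONE disables automatic reconnection; LINEAR or EXPONENTIAL let the SDK
# retry the subscribe loop on its own.
pnconfig.reconnect_policy = PNReconnectionPolicy.LINEAR
pubnub = PubNub(pnconfig)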
I'm using the Python Azure ServiceBus package and have the following snippet of code:
# receiver.py
import logging
from azure.servicebus import ServiceBusClient, ServiceBusReceiveMode
logger = logging.getLogger("test")
def receive_message(
connection_str,
queue_name
) -> None:
"""
Call Azure ServiceBus API to retrieve message from a queue
"""
with ServiceBusClient.from_connection_string(
connection_str, logging_enable=True
) as servicebus_client:
with servicebus_client.get_queue_receiver(
queue_name=queue_name,
max_wait_time=20,
receive_mode=ServiceBusReceiveMode.PEEK_LOCK,
) as receiver:
for message in receiver:
logger.debug(f"Received message {message}")
I'm attempting to write unit tests for this function and want to be able to mock out the receiver. Here is my attempt at writing the unit test, which fails because I can't figure out how to get the test to enter the for message in receiver block.
# test_receiver.py
from unittest.mock import patch
from receiver import receive_message

@patch("receiver.ServiceBusClient")
@patch("receiver.logger")
def test_receive_message(mock_logger, mock_svcbus_client):
# Figure out how to mock
mock_svcbus_client.from_connection_string.return_value.get_queue_receiver.return_value = iter(["message"])
receive_message("mock_connection_str", "mock_q_name")
# Assertion fails
mock_logger.return_value.debug.assert_called_once()
You can try the MockReceivedMessage and MockReceiver helpers from the azure-sdk-for-python test suite (from mocks import MockReceivedMessage, MockReceiver) to mock the receiver.
Example 1:
class MockReceivedMessage(ServiceBusReceivedMessage):
def __init__(self, prevent_renew_lock=False, exception_on_renew_lock=False, **kwargs):
self._lock_duration = kwargs.get("lock_duration", 2)
self._raw_amqp_message = None
self._received_timestamp_utc = utc_now()
self.locked_until_utc = self._received_timestamp_utc + timedelta(seconds=self._lock_duration)
self._settled = False
self._receiver = MockReceiver()
self._prevent_renew_lock = prevent_renew_lock
self._exception_on_renew_lock = exception_on_renew_lock
Example 2:
def test_queue_message_receive_and_delete(self, servicebus_namespace_connection_string, servicebus_queue, **kwargs):
with ServiceBusClient.from_connection_string(
servicebus_namespace_connection_string, logging_enable=False) as sb_client:
with sb_client.get_queue_sender(servicebus_queue.name) as sender:
message = ServiceBusMessage("Receive and delete test")
sender.send_messages(message)
with sb_client.get_queue_receiver(servicebus_queue.name,
receive_mode=ServiceBusReceiveMode.RECEIVE_AND_DELETE) as receiver:
messages = receiver.receive_messages(max_wait_time=10)
assert len(messages) == 1
message = messages[0]
print_message(_logger, message)
with pytest.raises(ValueError):
receiver.complete_message(message)
with pytest.raises(ValueError):
receiver.abandon_message(message)
with pytest.raises(ValueError):
receiver.defer_message(message)
with pytest.raises(ValueError):
receiver.dead_letter_message(message)
with pytest.raises(ValueError):
receiver.renew_message_lock(message)
time.sleep(10)
with sb_client.get_queue_receiver(servicebus_queue.name) as receiver:
messages = receiver.receive_messages(max_wait_time=10)
for m in messages:
print_message(_logger, m)
assert len(messages) == 0
You can refer to mocks.py and test_queues.py in the azure-sdk-for-python repository.
If you still have doubts, you can open an issue on GitHub: azure-sdk-for-python
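If you would rather not pull in the SDK's test helpers, here is a minimal sketch using only unittest.mock (assuming receiver.py from the question is importable as receiver) that makes the mocked context managers and the receiver iteration work:
from unittest.mock import patch
from receiver import receive_message

@patch("receiver.ServiceBusClient")
@patch("receiver.logger")
def test_receive_message(mock_logger, mock_svcbus_client):
    # from_connection_string(...) and get_queue_receiver(...) are both used as
    # context managers, so the objects the code sees come from __enter__.
    mock_client = mock_svcbus_client.from_connection_string.return_value.__enter__.return_value
    mock_receiver = mock_client.get_queue_receiver.return_value.__enter__.return_value
    # Make the receiver iterable so `for message in receiver` yields one message.
    mock_receiver.__iter__.return_value = iter(["message"])

    receive_message("mock_connection_str", "mock_q_name")

    # logger is patched directly, so debug() is recorded on the mock itself.
    mock_logger.debug.assert_called_once()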
I am receiving None when calling poll() in this program, but I do get the messages when running kafka-console-consumer.bat from cmd, so I can't figure out what exactly the problem is.
The execution starts from main.py
from queue import Queue
from concurrent.futures import ThreadPoolExecutor
import time
import json
from kafka_message_consumer import KafkaMessageConsumer
from kafka_discovery_executor import KafkaDiscoveryExecutor
with open('kafka_properties.json') as f:
kafka_properties = json.loads(f.read())
message_queue = Queue()
kafka_message_consumer = KafkaMessageConsumer(kafka_properties, message_queue)
kafka_discovery_executor = KafkaDiscoveryExecutor(message_queue, kafka_properties)
with ThreadPoolExecutor(max_workers=5) as executor:
executor.submit(kafka_message_consumer.run())
time.sleep(1)
executor.submit(kafka_discovery_executor.run())
time.sleep(1)
The KafkaDiscoveryExecutor class consumes messages from the shared queue and processes them.
This is kafka_message_consumer.py
import logging
from confluent_kafka import Consumer
class KafkaMessageConsumer:
def __init__(self, kafka_properties, message_queue):
self.message_queue = message_queue
self.logger = logging.getLogger('KafkaMessageConsumer')
self.kafka_stream_consumer = None
self.create_consumer(kafka_properties)
def create_consumer(self, kafka_properties):
"""
Create an instance of Kafka Consumer with the consumer configuration properties
and subscribes to the defined topic(s).
"""
consumer_config = dict()
# Consumer configuration properties.
consumer_config['bootstrap.servers'] = kafka_properties.get('bootstrap.servers')
consumer_config['group.id'] = kafka_properties.get('group.id')
consumer_config['enable.auto.commit'] = True
consumer_config['auto.offset.reset'] = 'earliest'
# For SSL Security
# consumer_config['security.protocol'] = 'SASL_SSL'
# consumer_config['sasl.mechanisms'] = 'PLAIN'
# consumer_config['sasl.username'] = ''
# consumer_config['sasl.password'] = ''
# Create the consumer using consumer_config.
self.kafka_stream_consumer = Consumer(consumer_config)
# Subscribe to the specified topic(s).
self.kafka_stream_consumer.subscribe(['mytopic'])
def run(self):
while True:
msg = self.kafka_stream_consumer.poll(1.0)
if msg is None:
# No message available within timeout.
print("Waiting for message or event/error in poll()")
continue
elif msg.error():
print("Error: {}".format(msg.error()))
else:
# Consume the record.
# Push the message into message_queue
try:
self.message_queue.put(msg)
except Exception as e:
                    self.logger.critical("Error occurred in kafka Consumer: {}".format(e))
The specified topic has events but I am getting None here and the print statement inside 'if msg is None:' is executing.
I am still not sure why the above code is not working as it should. (One thing worth noting: executor.submit(kafka_message_consumer.run()) calls run() immediately, which never returns, instead of submitting the callable run, so the second task is never even submitted.)
Here are the changes I made to make this code work:
I used the threading module instead of concurrent.futures
I used daemon threads
I call threading.Thread.__init__() inside the constructors of the classes [KafkaMessageConsumer, KafkaDiscoveryExecutor]
Here's main.py
from queue import Queue
import threading
import time
import json
from kafka_message_consumer import KafkaMessageConsumer
from kafka_discovery_executor import KafkaDiscoveryExecutor
def main():
with open('kafka_properties.json') as f:
kafka_properties = json.loads(f.read())
message_queue = Queue()
threads = [
KafkaMessageConsumer(kafka_properties, message_queue),
KafkaDiscoveryExecutor(message_queue, kafka_properties)
]
for thread in threads:
thread.start()
time.sleep(1)
for thread in threads:
thread.join()
time.sleep(1)
if __name__ == "__main__":
main()
and kafka_message_consumer.py
import logging
from confluent_kafka import Consumer
import threading
class KafkaMessageConsumer(threading.Thread):
daemon = True
def __init__(self, kafka_properties, message_queue):
threading.Thread.__init__(self)
self.message_queue = message_queue
self.logger = logging.getLogger('KafkaMessageConsumer')
self.kafka_stream_consumer = None
self.create_consumer(kafka_properties)
def create_consumer(self, kafka_properties):
"""
Create an instance of Kafka Consumer with the consumer configuration properties
and subscribes to the defined topic(s).
"""
consumer_config = dict()
# Consumer configuration properties.
consumer_config['bootstrap.servers'] = kafka_properties.get('bootstrap.servers')
consumer_config['group.id'] = kafka_properties.get('group.id')
consumer_config['enable.auto.commit'] = True
consumer_config['auto.offset.reset'] = 'earliest'
# Create the consumer using consumer_config.
self.kafka_stream_consumer = Consumer(consumer_config)
# Subscribe to the specified topic(s).
self.kafka_stream_consumer.subscribe(['mytopic'])
def run(self):
while True:
msg = self.kafka_stream_consumer.poll(1.0)
if msg is None:
# No message available within timeout.
print("Waiting for message or event/error in poll()")
continue
elif msg.error():
print("Error: {}".format(msg.error()))
else:
# Consume the record.
# Push the message into message_queue
try:
self.message_queue.put(msg)
except Exception as e:
                    self.logger.critical("Error occurred in kafka Consumer: {}".format(e))
self.kafka_stream_consumer.close()
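For reference, kafka_discovery_executor.py is never shown in the question or the answer. Under the same threading pattern it could look roughly like the sketch below (hypothetical; the discovery/processing step is just a placeholder):
import logging
import threading
from queue import Empty

class KafkaDiscoveryExecutor(threading.Thread):
    daemon = True

    def __init__(self, message_queue, kafka_properties):
        threading.Thread.__init__(self)
        self.message_queue = message_queue
        self.kafka_properties = kafka_properties
        self.logger = logging.getLogger('KafkaDiscoveryExecutor')

    def run(self):
        while True:
            try:
                # Wait briefly for a record pushed by KafkaMessageConsumer.
                msg = self.message_queue.get(timeout=1.0)
            except Empty:
                continue
            # Placeholder for the real discovery/processing logic.
            self.logger.info("Processing message: %s", msg.value())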
I have a RabbitMQ server running in Docker and two Python clients that connect to the server and send messages to each other using a headers exchange. The message rate is about 10/s. After some time (usually after 300-500 messages have been exchanged) one of the exchanges becomes unresponsive: the channel.basic_publish call passes without any exception, but the receiver doesn't receive any messages, and the RabbitMQ dashboard shows no activity on this exchange (see the RabbitMQ dashboard screenshot).
Here is the code example:
import pika
import threading
import time
import sys
class Test:
def __init__(
self,
p_username,
p_password,
p_host,
p_port,
p_virtualHost,
p_outgoingExchange,
p_incomingExchange
):
self.__outgoingExch = p_outgoingExchange
self.__incomingExch = p_incomingExchange
self.__headers = {'topic': 'test'}
self.__queueName = ''
self.__channelConsumer = None
self.__channelProducer = None
self.__isRun = False
l_credentials = pika.PlainCredentials(p_username, p_password)
l_parameters = pika.ConnectionParameters(
host=p_host,
port=p_port,
virtual_host=p_virtualHost,
credentials=l_credentials,
socket_timeout=30,
connection_attempts=5,
)
self.__connection = pika.SelectConnection(
parameters=l_parameters,
on_open_callback=self.__on_connection_open,
on_open_error_callback=self.__on_connection_open_error,
on_close_callback=self.__on_connection_closed
)
def __on_connection_open(self, _conn):
print("Connection opened")
self.__connection.channel(on_open_callback=self.__on_consume_channel_open)
self.__connection.channel(on_open_callback=self.__on_produce_channel_open)
def __on_connection_open_error(self, _conn, _exception):
print("Failed to open connection")
def __on_connection_closed(self, _conn, p_exception):
print("Connection closed: {}".format(p_exception))
def __on_consume_channel_open(self, p_ch):
print("Consumer channel opened")
self.__channelConsumer = p_ch
self.__channelConsumer.exchange_declare(
exchange=self.__incomingExch,
exchange_type="headers",
callback=self.__on_consume_exchange_declared
)
def __on_consume_exchange_declared(self, p_method):
print("Consumer exchange declared")
self.__channelConsumer.queue_declare(
queue='',
callback=self.__on_queue_declare
)
def __on_queue_declare(self, p_method):
print("Consumer queue declared")
self.__queueName = p_method.method.queue
self.__channelConsumer.queue_bind(
queue=self.__queueName,
exchange=self.__incomingExch,
arguments=self.__headers,
)
self.__channelConsumer.basic_consume(self.__queueName, self.__onMessageReceived)
def __on_produce_channel_open(self, p_ch):
print("Producer channel opened")
self.__channelProducer = p_ch
self.__channelProducer.exchange_declare(
exchange=self.__outgoingExch,
exchange_type="headers",
callback=self.__on_produce_exchange_declared
)
def __on_produce_exchange_declared(self, p_method):
print("Producer exchange declared")
l_publisher = threading.Thread(target=self.__publishProcedure)
l_publisher.start()
def __onMessageReceived(self, p_channel, p_method, p_properties, p_body):
p_channel.basic_ack(p_method.delivery_tag)
print("Message received: {}".format(p_body))
def __publishProcedure(self):
print("Start publishing")
l_msgCounter = 0
while self.__isRun:
l_msgCounter += 1
self.__publish(l_msgCounter)
time.sleep(0.1)
def __publish(self, p_msgCounter):
self.__channelProducer.basic_publish(
exchange=self.__outgoingExch,
routing_key="#",
body=str(p_msgCounter),
properties=pika.BasicProperties(headers=self.__headers)
)
def run(self):
self.__isRun = True
try:
self.__connection.ioloop.start()
except KeyboardInterrupt:
self.__isRun = False
self.__connection.close()
print("Exit...")
if __name__ == '__main__':
if len(sys.argv) < 2:
print("Provide node name [node1 | node2]")
exit(-1)
l_outgoingExch = ''
l_incomingExch = ''
if sys.argv[1] == 'node1':
l_outgoingExch = 'node2.headers'
l_incomingExch = 'node1.headers'
elif sys.argv[1] == 'node2':
l_outgoingExch = 'node1.headers'
l_incomingExch = 'node2.headers'
else:
print("Wrong node name")
exit(-1)
l_testInstance = Test(
p_username='admin',
p_password='admin',
p_host='localhost',
p_port=5672,
p_virtualHost='/',
p_incomingExchange=l_incomingExch,
p_outgoingExchange=l_outgoingExch
)
l_testInstance.run()
I run two instances as two nodes (node1 and node2) so they should communicate with each other.
I also sometimes run into this issue:
Stream connection lost: AssertionError(('_AsyncTransportBase._produce() tx buffer size underflow', -275, 1),)
I found that I was misusing pika. As the pika documentation states, it is not safe to share a connection across multiple threads; the only way to interact with a connection from other threads is through the add_callback_threadsafe function. In my example it should look like this (note that functools must be imported at the top of the file):
def __publishProcedure(self):
print("Start publishing")
l_msgCounter = 0
while self.__isRun:
l_msgCounter += 1
l_cb = functools.partial(self.__publish, l_msgCounter)
self.__connection.ioloop.add_callback_threadsafe(l_cb)
time.sleep(0.1)
def __publish(self, p_msgCounter):
self.__channelProducer.basic_publish(
exchange=self.__outgoingExch,
routing_key="#",
body=str(p_msgCounter),
properties=pika.BasicProperties(headers=self.__headers)
)
I am implementing a STOMP consumer as a library. By calling this library from another application I should be able to get the data from ActiveMQ. I am implementing it as below, but I have a problem returning the frame.body: I am not able to retrieve the data from outside the class.
from twisted.internet import defer
from stompest.async import Stomp
from stompest.async.listener import SubscriptionListener
from stompest.config import StompConfig
from stompest.error import StompConnectionError, StompProtocolError
from stompest.protocol import StompSpec
from socket import gethostname
from uuid import uuid1
import json
class Consumer(object):
def __init__(self, amq_uri):
self.amq_uri = amq_uri
self.hostname = gethostname()
self.config = StompConfig(uri=self.amq_uri)
    @defer.inlineCallbacks
def run(self, in_queue):
client = yield Stomp(self.config)
headers = {
StompSpec.ACK_HEADER: StompSpec.ACK_CLIENT_INDIVIDUAL,
StompSpec.ID_HEADER: self.hostname,
'activemq.prefetchSize': '1000',
}
yield client.connect(headers=self._return_client_id())
client.subscribe(
in_queue,
headers,
listener=SubscriptionListener(self.consume)
)
try:
client = yield client.disconnected
except StompConnectionError:
yield client.connect(headers=self._return_client_id())
client.subscribe(
in_queue,
headers,
listener=SubscriptionListener(self.consume)
)
while True:
try:
yield client.disconnected
except StompProtocolError:
pass
except StompConnectionError:
yield client.connect(headers=self._return_client_id())
client.subscribe(
in_queue,
headers,
listener=SubscriptionListener(self.consume)
)
def _return_client_id(self):
client_id = {}
client_id['client-id'] = gethostname() + '-' + str(uuid1())
return client_id
def consume(self, client, frame):
data = json.loads(frame.body)
print 'Received Message Type {}'.format(type(data))
print 'Received Message {}'.format(data)
## I want to return data here. I am able to print the frame.body here.
# Call from another application
import Queue
from twisted.internet import reactor
amq_uri = 'tcp://localhost:61613'
in_queue = '/queue/test_queue'
c = Consumer(amq_uri)
c.run(in_queue)
print "data is from outside function", data # Should be able to get the data which is returned by consume here
reactor.run()
Can someone please let me know how I can achieve this?
Thanks
I found a solution to my problem. Instead of using the async stomp library, I used the sync stomp library and implemented it as below:
import json
from socket import gethostname
from stompest.config import StompConfig
from stompest.protocol import StompSpec
from stompest.sync import Stomp

class Consumer(object):
    def __init__(self, amq_uri):
        self.amq_uri = amq_uri
        self.hostname = gethostname()
        self.config = StompConfig(uri=self.amq_uri)
    def run(self, in_queue, return_dict):
        client = Stomp(self.config)
        headers = {
            StompSpec.ACK_HEADER: StompSpec.ACK_CLIENT_INDIVIDUAL,
            StompSpec.ID_HEADER: self.hostname
        }
        client.connect()
        client.subscribe(in_queue, headers)
        data = None
        try:
            frame = client.receiveFrame()
            data = json.loads(frame.body)
            client.ack(frame)
        except Exception as exc:
            print exc
        client.disconnect()
        return_dict['data'] = data
        return data
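For reference, the calling code from the question then becomes something like this (a sketch; return_dict is a plain dict here, and the URI and queue name are the ones from the question):
amq_uri = 'tcp://localhost:61613'
in_queue = '/queue/test_queue'

return_dict = {}
c = Consumer(amq_uri)
data = c.run(in_queue, return_dict)
# Both the return value and return_dict['data'] now hold the message body.
print "data is from outside function", return_dict['data']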
Background:
I would like to integrate yowsup into my home automation project. I have seen a simple sample of how to receive messages, and after some minor changes it is working fine.
Issue:
My problem starts when it comes to integrating the send message feature. These are the two files I am using:
run.py
from layer import EchoLayer
from yowsup.layers.auth import YowAuthenticationProtocolLayer
from yowsup.layers.protocol_messages import YowMessagesProtocolLayer
from yowsup.layers.protocol_receipts import YowReceiptProtocolLayer
from yowsup.layers.protocol_acks import YowAckProtocolLayer
from yowsup.layers.protocol_presence import YowPresenceProtocolLayer
from yowsup.layers.network import YowNetworkLayer
from yowsup.layers.coder import YowCoderLayer
from yowsup.common import YowConstants
from yowsup.layers import YowLayerEvent
from yowsup.stacks import YowStack, YOWSUP_CORE_LAYERS
from yowsup import env
CREDENTIALS = ("phone", "pwd")
if __name__ == "__main__":
layers = (
EchoLayer,
(YowAuthenticationProtocolLayer, YowMessagesProtocolLayer, YowReceiptProtocolLayer, YowAckProtocolLayer, YowPresenceProtocolLayer)
) + YOWSUP_CORE_LAYERS
stack = YowStack(layers)
# Setting credentials
stack.setProp(YowAuthenticationProtocolLayer.PROP_CREDENTIALS, CREDENTIALS)
# WhatsApp server address
stack.setProp(YowNetworkLayer.PROP_ENDPOINT, YowConstants.ENDPOINTS[0])
stack.setProp(YowCoderLayer.PROP_DOMAIN, YowConstants.DOMAIN)
stack.setProp(YowCoderLayer.PROP_RESOURCE, env.CURRENT_ENV.getResource())
# Sending connecting signal
stack.broadcastEvent(YowLayerEvent(YowNetworkLayer.EVENT_STATE_CONNECT))
# Program main loop
stack.loop()
layer.py
from yowsup.layers.interface import YowInterfaceLayer, ProtocolEntityCallback
from yowsup.layers.protocol_messages.protocolentities import TextMessageProtocolEntity
from yowsup.layers.protocol_receipts.protocolentities import OutgoingReceiptProtocolEntity
from yowsup.layers.protocol_acks.protocolentities import OutgoingAckProtocolEntity
from yowsup.layers.protocol_presence.protocolentities import PresenceProtocolEntity
import threading
import logging
logger = logging.getLogger(__name__)
class EchoLayer(YowInterfaceLayer):
#ProtocolEntityCallback("message")
def onMessage(self, messageProtocolEntity):
#send receipt otherwise we keep receiving the same message over and over
print str(messageProtocolEntity.getFrom()) + ' - ' + str(messageProtocolEntity.getBody())
receipt = OutgoingReceiptProtocolEntity(messageProtocolEntity.getId(), messageProtocolEntity.getFrom())
self.toLower(receipt)
#ProtocolEntityCallback("send_message")
def sendMessage(self, destination, message, messageProtocolEntity):
outgoingMessageProtocolEntity = TextMessageProtocolEntity(
message,
            to = destination + "@s.whatsapp.net")
self.toLower(outgoingMessageProtocolEntity)
#ProtocolEntityCallback("receipt")
def onReceipt(self, entity):
ack = OutgoingAckProtocolEntity(entity.getId(), "receipt", "delivery")
self.toLower(ack)
# List of (jid, message) tuples
PROP_MESSAGES = "org.openwhatsapp.yowsup.prop.sendclient.queue"
def __init__(self):
super(EchoLayer, self).__init__()
self.ackQueue = []
self.lock = threading.Condition()
#ProtocolEntityCallback("success")
def onSuccess(self, successProtocolEntity):
self.lock.acquire()
for target in self.getProp(self.__class__.PROP_MESSAGES, []):
phone, message = target
            if '@' in phone:
messageEntity = TextMessageProtocolEntity(message, to = phone)
elif '-' in phone:
                messageEntity = TextMessageProtocolEntity(message, to = "%s@g.us" % phone)
else:
                messageEntity = TextMessageProtocolEntity(message, to = "%s@s.whatsapp.net" % phone)
self.ackQueue.append(messageEntity.getId())
self.toLower(messageEntity)
self.lock.release()
#ProtocolEntityCallback("ack")
def onAck(self, entity):
self.lock.acquire()
if entity.getId() in self.ackQueue:
self.ackQueue.pop(self.ackQueue.index(entity.getId()))
if not len(self.ackQueue):
logger.info("Message sent")
#raise KeyboardInterrupt()
self.lock.release()
Questions:
Where am I supposed to call the send_message method, so I can send messages wherever I need it?
Is there a regular event (triggering every second or something) which I could use to send my messages?
#ProtocolEntityCallback("send_message")
def sendMessage(self, destination, message, messageProtocolEntity):
outgoingMessageProtocolEntity = TextMessageProtocolEntity(
message,
to = destination + "#s.whatsapp.net")
self.toLower(outgoingMessageProtocolEntity)
In the above code, for sendMessage to be called, protocolEntity.getTag() == "send_message" has to be True. You don't need it to send messages.
layer.py
from yowsup.layers.interface import YowInterfaceLayer, ProtocolEntityCallback
from yowsup.layers.protocol_messages.protocolentities import TextMessageProtocolEntity
from yowsup.layers.protocol_receipts.protocolentities import OutgoingReceiptProtocolEntity
from yowsup.layers.protocol_acks.protocolentities import OutgoingAckProtocolEntity
from yowsup.layers.protocol_presence.protocolentities import PresenceProtocolEntity
import threading
import logging
logger = logging.getLogger(__name__)
recv_msg = []
class EchoLayer(YowInterfaceLayer):
def __init__(self):
super(EchoLayer, self).__init__()
self.ackQueue = []
self.lock = threading.Condition()
#ProtocolEntityCallback("message")
def onMessage(self, messageProtocolEntity):
if messageProtocolEntity.getType() == 'text':
recv_msg.append((messageProtocolEntity.getFrom(),messageProtocolEntity.getBody()))
#send receipt otherwise we keep receiving the same message over and over
receipt = OutgoingReceiptProtocolEntity(messageProtocolEntity.getId(), messageProtocolEntity.getFrom())
self.toLower(receipt)
#ProtocolEntityCallback("receipt")
def onReceipt(self, entity):
ack = OutgoingAckProtocolEntity(entity.getId(), "receipt", "delivery")
self.toLower(ack)
# List of (jid, message) tuples
PROP_MESSAGES = "org.openwhatsapp.yowsup.prop.sendclient.queue"
#ProtocolEntityCallback("success")
def onSuccess(self, successProtocolEntity):
self.lock.acquire()
for target in self.getProp(self.__class__.PROP_MESSAGES, []):
phone, message = target
            if '@' in phone:
messageEntity = TextMessageProtocolEntity(message, to = phone)
elif '-' in phone:
                messageEntity = TextMessageProtocolEntity(message, to = "%s@g.us" % phone)
else:
                messageEntity = TextMessageProtocolEntity(message, to = "%s@s.whatsapp.net" % phone)
self.ackQueue.append(messageEntity.getId())
self.toLower(messageEntity)
self.lock.release()
#ProtocolEntityCallback("ack")
def onAck(self, entity):
self.lock.acquire()
if entity.getId() in self.ackQueue:
self.ackQueue.pop(self.ackQueue.index(entity.getId()))
if not len(self.ackQueue):
self.lock.release()
logger.info("Message sent")
raise KeyboardInterrupt()
self.lock.release()
To send a message, define a send_message function in the stack script run.py. You can also import run.py and use its functions from another script.
import sys
from yowsup.layers.auth import YowAuthenticationProtocolLayer, AuthError
from yowsup.layers.protocol_messages import YowMessagesProtocolLayer
from yowsup.layers.protocol_receipts import YowReceiptProtocolLayer
from yowsup.layers.protocol_acks import YowAckProtocolLayer
from yowsup.layers.protocol_presence import YowPresenceProtocolLayer
from yowsup.layers.network import YowNetworkLayer
from yowsup.layers.coder import YowCoderLayer
from yowsup.common import YowConstants
from yowsup.layers import YowLayerEvent
from yowsup.stacks import YowStack, YOWSUP_CORE_LAYERS
from yowsup import env
from layer import EchoLayer, recv_msg
CREDENTIALS = ("phone", "pwd")
def send_message(destination, message):
'''
destination is <phone number> without '+'
and with country code of type string,
message is string
e.g send_message('11133434343','hello')
'''
messages = [(destination, message)]
layers = (EchoLayer,
(YowAuthenticationProtocolLayer,
YowMessagesProtocolLayer,
YowReceiptProtocolLayer,
YowAckProtocolLayer,
YowPresenceProtocolLayer)
) + YOWSUP_CORE_LAYERS
stack = YowStack(layers)
stack.setProp(EchoLayer.PROP_MESSAGES, messages)
stack.setProp(YowAuthenticationProtocolLayer.PROP_PASSIVE, True)
# Setting credentials
stack.setProp(YowAuthenticationProtocolLayer.PROP_CREDENTIALS, CREDENTIALS)
# WhatsApp server address
stack.setProp(YowNetworkLayer.PROP_ENDPOINT, YowConstants.ENDPOINTS[0])
stack.setProp(YowCoderLayer.PROP_DOMAIN, YowConstants.DOMAIN)
stack.setProp(YowCoderLayer.PROP_RESOURCE, env.CURRENT_ENV.getResource())
# Sending connecting signal
stack.broadcastEvent(YowLayerEvent(YowNetworkLayer.EVENT_STATE_CONNECT))
try:
# Program main loop
stack.loop()
except AuthError as e:
print('Authentication error %s' % e.message)
sys.exit(1)
def recv_message():
layers = ( EchoLayer,
(YowAuthenticationProtocolLayer, YowMessagesProtocolLayer,
YowReceiptProtocolLayer, YowAckProtocolLayer,
YowPresenceProtocolLayer)
) + YOWSUP_CORE_LAYERS
stack = YowStack(layers)
# Setting credentials
stack.setProp(YowAuthenticationProtocolLayer.PROP_CREDENTIALS, CREDENTIALS)
# WhatsApp server address
stack.setProp(YowNetworkLayer.PROP_ENDPOINT, YowConstants.ENDPOINTS[0])
stack.setProp(YowCoderLayer.PROP_DOMAIN, YowConstants.DOMAIN)
stack.setProp(YowCoderLayer.PROP_RESOURCE, env.CURRENT_ENV.getResource())
# Sending connecting signal
stack.broadcastEvent(YowLayerEvent(YowNetworkLayer.EVENT_STATE_CONNECT))
try:
# Program main loop
stack.loop()
except AuthError as e:
print('Authentication error %s' % e.message)
sys.exit(1)
if __name__ == '__main__':
if len(sys.argv) == 1:
print('%s send number message\nrecv\n' % sys.argv[0])
sys.exit(1)
if sys.argv[1] == 'send':
try:
send_message(sys.argv[2],sys.argv[3])
except KeyboardInterrupt:
print('closing')
sys.exit(0)
if sys.argv[1] == 'recv':
try:
recv_message()
except KeyboardInterrupt:
print('closing')
sys.exit(0)
for m in recv_msg:
print('From %s:\n%s\n' % m)
Now you can send a message by calling send_message('1234567890', 'Howdy') and receive messages by calling recv_message().
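For example, from another script (a sketch; run.py must be importable and the phone number is a placeholder):
import run

try:
    # send_message blocks in stack.loop(); layer.py raises KeyboardInterrupt
    # once the outgoing message has been acknowledged.
    run.send_message('1234567890', 'Howdy')
except KeyboardInterrupt:
    pass

try:
    # recv_message also blocks in stack.loop(); stop it with Ctrl-C when done.
    run.recv_message()
except KeyboardInterrupt:
    pass

for sender, body in run.recv_msg:
    print('From %s:\n%s\n' % (sender, body))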