How to do scheduled sending of email with django-mailer - python

I'm making a Django app that needs to be able to compose emails and then send them out at a given time. I was thinking I could use django-mailer to put things in a queue and then send them off. But even though their sample use-case list mentions this as a feature, I can't seem to find out how.
What I need is to be able to set a 'when_to_send' field in the message model of django-mailer, and when the cron job fires the send_mail function it needs to filter out the ones that have a 'when_to_send' date that is greater than the current time...
def send_all():
    """
    Send all eligible messages in the queue.
    """
    lock = FileLock("send_mail")
    logging.debug("acquiring lock...")
    try:
        lock.acquire(LOCK_WAIT_TIMEOUT)
    except AlreadyLocked:
        logging.debug("lock already in place. quitting.")
        return
    except LockTimeout:
        logging.debug("waiting for the lock timed out. quitting.")
        return
    logging.debug("acquired.")
    start_time = time.time()
    dont_send = 0
    deferred = 0
    sent = 0
    try:
        for message in prioritize():
            if DontSendEntry.objects.has_address(message.to_address):
                logging.info("skipping email to %s as on don't send list " % message.to_address)
                MessageLog.objects.log(message, 2)  # ### avoid using literal result code
                message.delete()
                dont_send += 1
            else:
                try:
                    logging.info("sending message '%s' to %s" % (message.subject.encode("utf-8"), message.to_address.encode("utf-8")))
                    core_send_mail(message.subject, message.message_body, message.from_address, [message.to_address])
                    MessageLog.objects.log(message, 1)  # ### avoid using literal result code
                    message.delete()
                    sent += 1
                except (socket_error, smtplib.SMTPSenderRefused, smtplib.SMTPRecipientsRefused, smtplib.SMTPAuthenticationError), err:
                    message.defer()
                    logging.info("message deferred due to failure: %s" % err)
                    MessageLog.objects.log(message, 3, log_message=str(err))  # ### avoid using literal result code
                    deferred += 1
    finally:
        logging.debug("releasing lock...")
        lock.release()
        logging.debug("released.")
    logging.info("")
    logging.info("%s sent; %s deferred; %s don't send" % (sent, deferred, dont_send))
    logging.info("done in %.2f seconds" % (time.time() - start_time))
Does anyone see how to customize this function so it doesn't send emails where the message.when_to_send field is greater than the current time?

You need to implement the cron job for django-mailer:
* * * * * (cd $PINAX; /usr/local/bin/python2.5 manage.py send_mail >> $PINAX/cron_mail.log 2>&1)
And then in engine.py line 96:
# Get rid of "while True:"
while not Message.objects.all():
# Get rid of logging.debug("sleeping for %s seconds before checking queue again" % EMPTY_QUEUE_SLEEP)
# Get rid of sleep
send_all()

You can just add another clause to the conditionals under your message processing loop (you will also need to import datetime at the top of your file):
for message in prioritize():
    if DontSendEntry.objects.has_address(message.to_address):
        logging.info("skipping email to %s as on don't send list " % message.to_address)
        MessageLog.objects.log(message, 2)  # ### avoid using literal result code
        message.delete()
        dont_send += 1
    elif message.when_to_send > datetime.datetime.now():
        continue
    else:
        try:
            ... the rest of your code ...
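For this to work the queued message model also needs a when_to_send column, which stock django-mailer does not ship. A minimal sketch of what adding it might look like (hypothetical field definition, assuming you patch or subclass django-mailer's Message model yourself):

from django.db import models

class Message(models.Model):
    # ... the existing django-mailer fields (to_address, subject, ...) ...
    # Hypothetical addition: don't send before this time; null means "send whenever".
    when_to_send = models.DateTimeField(null=True, blank=True)

With a nullable field, messages queued without a send time keep the current behaviour; the elif above would then need a "message.when_to_send and ..." guard so it doesn't compare None against datetime.datetime.now().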

Related

Tweepy returning 420 Unknown when trying to DM anyone? Twitter API Python

I'm trying to create a script in Python that DMs followers of a certain account. Initially I was able to get this to work, but at some point it started returning a 420 Unknown error. I presumed this was because I had hit a DM limit (though I have not crossed 250 within the span of a day), but I'm still receiving this error many hours later.
I then thought maybe this was because the user I was attempting to message did not have DMs open. But it's happening for users with DMs open as well. Any help would be greatly appreciated!
In the code below, I keep hitting the "Rate limit reached. Sleeping for 15 minutes." line despite waiting for many hours.
def dm_users_following_this_account(api: tweepy.API, target_username: str):
    user = api.get_user(screen_name=target_username)
    dm_content = 'Hi'
    messages_sent = 0
    for follower in tweepy.Cursor(api.get_followers, screen_name=target_username, count=200).items():
        if messages_sent < 200:
            if (open('WO_already_messaged_or_cant_message.txt', 'r').read().find(follower.screen_name) < 0 and
                    open('WO_do_not_message_list.txt', 'r').read().find(follower.screen_name) < 0):
                file = open("WO_already_messaged_or_cant_message.txt", "a")  # append mode
                file.write(follower.screen_name + "\n")
                file.close()
                # Attempt to send out the Twitter DM. We add them to the list regardless but only count the DM
                # (because of the API DM limit) if it's successful.
                follower_first_name = follower.name.partition(" ")[0]
                try:
                    api.send_direct_message(follower.id, dm_content)
                    messages_sent += 1
                    print(f"Successfully messaged #{follower.screen_name}, added to "
                          f"WO_already_messaged_or_cant_message.txt and incremented counter.")
                except Exception as e:
                    exception_string = str(e)
                    print(f"Exception: {exception_string}")
                    if exception_string.find("420") > -1:
                        print("Rate limit reached. Sleeping for 15 minutes.")
                    else:
                        print(f"Could not message #{follower.screen_name}. Added to "
                              f"WO_already_messaged_or_cant_message.txt and did not increment counter.")
            else:
                print(f"Skipped #{follower.screen_name} because they have already been messaged or are on the do not "
                      f"message list.")
        else:
            print(f"Total messages sent: {messages_sent}. Threshold reached. Exiting.")
            sys.exit()
    print(f"Total messages successfully sent: {messages_sent}")

variable referenced before assignment: Python

I'm trying to get data from a Raspberry Pi 3 to Azure.
The script reads data from the Raspberry Pi 3, which is connected via Bluetooth to sensors and takes several values.
Unfortunately I am getting an error when I run it:
"local variable 'temperature' referenced before assignment"
def iothub_client_sample_run():
    msgs = []
    for address, name in list(devices.items()):
        try:
            client = iothub_client_init()
            if client.protocol == IoTHubTransportProvider.MQTT & (name == "Flower care"):
                msg_txt_formatted = MSG_TXT % (
                    temperature,
                    sunlight,
                    moisture,
                    fertility)
                message = IoTHubMessage(msg_txt_formatted)
                # optional: assign ids
                message.temperature_id = "%d" % temperature
                client.send_event_async(message, send_confirmation_callback, devices.items())
                print ( "IoTHubClient.send_event_async accepted message {} for transmission to IoT Hub.".format(devices.items()) )
                return msgs
            while 1:
                msgs = iothub_client_sample_run()
                for msg in msgs:
                    print msg['topic']
                    print msg['payload']
                    (result, mid) = mqttc.publish(msg['topic'], msg['payload'])
                print ( "Send status: %s" % status )
                time.sleep(10)
            mqttc.disconnect()
        except IoTHubError as iothub_error:
            print ( "Unexpected error %s from IoTHub" % iothub_error )
            return
        except KeyboardInterrupt:
            print ( "IoTHubClient sample stopped" )
        print_last_message_time(client)
The error message here is pretty clear.
Remember that Python executes code one line at a time, so a variable must be assigned a value before the line that reads it. In the function above, temperature (along with sunlight, moisture and fertility) is used to build msg_txt_formatted but is never assigned earlier in iothub_client_sample_run. Rearrange your code so every variable is assigned before it is used and you shouldn't hit this error again.
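A stripped-down illustration of the same failure and the fix, with hypothetical stand-in names rather than the Azure/Bluetooth code above:

MSG_TXT = "temperature: %f"


def read_sensor():
    return 21.5  # stand-in for the real Bluetooth read


def broken():
    # UnboundLocalError: local variable 'temperature' referenced before assignment.
    # The assignment further down makes 'temperature' local to the whole function,
    # so this earlier read fails.
    msg = MSG_TXT % (temperature,)
    temperature = read_sensor()
    return msg


def fixed():
    # Assign first, then use.
    temperature = read_sensor()
    return MSG_TXT % (temperature,)


print(fixed())  # -> temperature: 21.500000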

why are my python threads blocking each other

My current predicament is that I attempted to make a blocking web-serving script non-blocking, to allow more than one download to take place at any one time, but currently it will hang and wait for the first download to complete before starting the second. Before you go out of your way to downvote this because the answer is obvious, please know that this is my first ever Python script and I am self-teaching.
In the example below I only post a single "ConnectionProcessor", because they all contain the same code.
If you need more code, please just ask.
The script has 3 dependencies:
import socket # Networking support
import signal # Signal support (server shutdown on signal receive)
import threading #to make the thing run more than one at a time
Please note that the script has been edited and quite a bit of the code is missing but I believe that it is unrelated to the problem.
def ConnectionProcessorC(self):
    connC, AddressC = self.socket.accept()
    print("C Got connection from:", AddressC)
    DataRecivedC = connC.recv(1024)  # receive data from client
    DataRecivedC = bytes.decode(DataRecivedC)  # decode it to string
    print(DataRecivedC)
    RequestMethod = DataRecivedC.split(' ')[0]
    print("C Method: ", RequestMethod)
    if (RequestMethod == 'GET') | (RequestMethod == 'HEAD'):
        Response_Headers = 'HTTP/1.1 200 OK\n'
        # Current_Date = time.strftime("%a, %d %b %Y %H:%M:%S", time.localtime())
        # Response_Headers += 'Date: ' + current_date + '\n'
        Response_Headers += 'Server: Moes-Python-Server\n'
        Response_Headers += 'Connection: close\n\n'  # signal that the connection will be closed after completing the request
        Server_Response = Response_Headers.encode()  # return headers for GET and HEAD
        file_handler = open('/usr/share/nginx/html/100mb.dump', 'rb')
        Response_Content = file_handler.read()  # read file content
        file_handler.close()
        URL = DataRecivedC.split(' ')
        URL = URL[1]  # get 2nd element
        # Response_Content = "<html><body><p>Charlie TEStin this stuff yehURL:" + URL + "</p></body></html>"
        Server_Response += Response_Content
        connC.send(Server_Response)
        print("C Closing connection with client")
    else:
        print("C Unknown HTTP request method:", RequestMethod)
    connC.close()
    return
def Distrabuteconnections(self):
    A = 0
    """ Main loop awaiting connections """
    while True:
        print("Awaiting New connection")
        self.socket.listen(10)  # maximum number of queued connections # changed to 1 from 3 to try and prevent waiting after closing for the queue to clean up
        if (A == 0):
            ConnectionProcessorA = threading.Thread(target=self.ConnectionProcessorA())
            ConnectionProcessorA.start()
            A = 1
        elif (A == 1):
            ConnectionProcessorB = threading.Thread(target=self.ConnectionProcessorB())
            ConnectionProcessorB.start()
            A = 2
        else:
            ConnectionProcessorC = threading.Thread(target=self.ConnectionProcessorC())
            ConnectionProcessorC.start()
            A = 0
I think that the problem could be solved by changing "while True" to something that loops 3 times instead of once.
You should pass a reference to the method you wish to start in a thread. Instead, you are calling the method, and passing the value it returns to the threading.Thread() call.
In short your code should become:
if (A == 0):
    ConnectionProcessorA = threading.Thread(target=self.ConnectionProcessorA)
    ConnectionProcessorA.start()
    A = 1
elif (A == 1):
    ConnectionProcessorB = threading.Thread(target=self.ConnectionProcessorB)
    ConnectionProcessorB.start()
    A = 2
else:
    ConnectionProcessorC = threading.Thread(target=self.ConnectionProcessorC)
    ConnectionProcessorC.start()
    A = 0
Note the removal of the brackets after self.ConnectionProcessorA etc. This passes a reference to the method to start in the thread, which the threading module will call itself.
Note, it is recommended to store a reference to the thread you create so that it doesn't get garbage collected. I would thus recommend your code becomes:
if (A == 0):
    self.cpa_thread = threading.Thread(target=self.ConnectionProcessorA)
    self.cpa_thread.start()
    A = 1
elif (A == 1):
    self.cpb_thread = threading.Thread(target=self.ConnectionProcessorB)
    self.cpb_thread.start()
    A = 2
else:
    self.cpc_thread = threading.Thread(target=self.ConnectionProcessorC)
    self.cpc_thread.start()
    A = 0
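A tiny standalone sketch (hypothetical function, not the server code above) of why the brackets matter: with target=work(...) the function runs in the caller's thread before the Thread object is even constructed, while target=work hands the callable to the new thread to run.

import threading
import time


def work(label):
    # Stand-in for one of the ConnectionProcessor methods.
    time.sleep(2)
    print(label, "done")


# Blocks the caller for 2 seconds *here*, then passes work's return value (None) as the target:
# t = threading.Thread(target=work("blocking"))

# Returns immediately; the 2-second sleep happens in the new thread:
t = threading.Thread(target=work, args=("non-blocking",))
t.start()
print("main thread keeps going while the worker sleeps")
t.join()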

How to get complete output from remote command execution using paramiko

I am trying to learn how to execute a command on a remote system using Python's paramiko library.
I am receiving partial output of length 12519 characters.
How can I get the complete result of length 12550 or more?
Here is the code; it was taken from another site on the internet.
def _run_poll(self, session, timeout, input_data):
    '''
    Poll until the command completes.

    #param session The session.
    #param timeout The timeout in seconds.
    #param input_data The input data.
    #returns the output
    '''
    interval = 0.1
    maxseconds = timeout
    maxcount = maxseconds / interval
    # Poll until completion or timeout
    # Note that we cannot directly use the stdout file descriptor
    # because it stalls at 64K bytes (65536).
    input_idx = 0
    timeout_flag = False
    self.info('polling (%d, %d)' % (maxseconds, maxcount))
    start = datetime.datetime.now()
    start_secs = time.mktime(start.timetuple())
    output = ''
    session.setblocking(0)
    while True:
        if session.recv_ready():
            data = session.recv(self.bufsize)
            output += data
            self.info('read %d bytes, total %d' % (len(data), len(output)))
            print('[{}]'.format(output))
            # this prints partial output of 12519 characters
        if session.send_ready():
            # We received a potential prompt.
            # In the future this could be made to work more like
            # pexpect with pattern matching.
            if input_idx < len(input_data):
                data = input_data[input_idx] + '\n'
                input_idx += 1
                self.info('sending input data %d' % (len(data)))
                session.send(data)
        self.info('session.exit_status_ready() = %s' % (str(session.exit_status_ready())))
        if session.exit_status_ready():
            print('here')  # this line is also printed
            break
        # Timeout check
        now = datetime.datetime.now()
        now_secs = time.mktime(now.timetuple())
        et_secs = now_secs - start_secs
        self.info('timeout check %d %d' % (et_secs, maxseconds))
        if et_secs > maxseconds:
            self.info('polling finished - timeout')
            timeout_flag = True
            break
        time.sleep(0.200)
    self.info('polling loop ended')
    if session.recv_ready():
        data = session.recv(self.bufsize)
        output += data
        self.info('read %d bytes, total %d' % (len(data), len(output)))
    self.info('polling finished - %d output bytes' % (len(output)))
    if timeout_flag:
        self.info('appending timeout message')
        output += '\nERROR: timeout after %d seconds\n' % (timeout)
    session.close()
    return output
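One likely cause of the missing tail: the loop breaks as soon as exit_status_ready() is true and afterwards does only a single recv(), so anything still buffered beyond one bufsize chunk (or arriving a moment later) is dropped. A hedged sketch of draining the channel completely once the command has exited, assuming session is a paramiko Channel as above (Python 2 string handling to match the question; use bytes and decode on Python 3):

import time


def drain_after_exit(session, bufsize):
    # Hypothetical helper, not part of the original class: read until the
    # command has exited AND nothing remains buffered on the channel.
    output = ''
    while not session.exit_status_ready():
        if session.recv_ready():
            output += session.recv(bufsize)
        else:
            time.sleep(0.1)
    # The exit status can be set while data is still in flight,
    # so keep draining until the channel reports nothing pending.
    while session.recv_ready():
        output += session.recv(bufsize)
    return output

In _run_poll above, the equivalent change is to turn the single post-loop "if session.recv_ready():" read into a "while session.recv_ready():" loop.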

Python, Catch timeout during stream request

I'm reading XML events with the requests library, as shown in the code below. How do I raise a connection-lost error once the request has started? The server is emulating HTTP push / long polling (http://en.wikipedia.org/wiki/Push_technology#Long_polling) and will not end by default.
If there is no new message for 10 minutes, the while loop should be exited.
import requests
from time import time

if __name__ == '__main__':
    #: Set a default content-length
    content_length = 512
    try:
        requests_stream = requests.get('http://agent.mtconnect.org:80/sample?interval=0', stream=True, timeout=2)
        while True:
            start_time = time()
            #: Read three lines to determine the content-length
            for line in requests_stream.iter_lines(3, decode_unicode=None):
                if line.startswith('Content-length'):
                    content_length = int(''.join(x for x in line if x.isdigit()))
                    #: pause the generator
                    break
            #: Continue the generator and read the exact amount of the body.
            for xml in requests_stream.iter_content(content_length):
                print "Received XML document with content length of %s in %s seconds" % (len(xml), time() - start_time)
                break
    except requests.exceptions.RequestException as e:
        print('error: ', e)
The server push could be tested with curl via command line:
curl http://agent.mtconnect.org:80/sample\?interval\=0
This might not be the best method, but you can use multiprocessing to run the requests in a separate process.
Something like this should work:
import multiprocessing
import requests
import time


class RequestClient(multiprocessing.Process):
    def run(self):
        # Write all your code to process the requests here
        content_length = 512
        try:
            requests_stream = requests.get('http://agent.mtconnect.org:80/sample?interval=0', stream=True, timeout=2)
            start_time = time.time()
            for line in requests_stream.iter_lines(3, decode_unicode=None):
                if line.startswith('Content-length'):
                    content_length = int(''.join(x for x in line if x.isdigit()))
                    break
            for xml in requests_stream.iter_content(content_length):
                print "Received XML document with content length of %s in %s seconds" % (len(xml), time.time() - start_time)
                break
        except requests.exceptions.RequestException as e:
            print('error: ', e)


while True:
    childProcess = RequestClient()
    childProcess.start()
    # Wait for 10 mins
    start_time = time.time()
    while time.time() - start_time <= 600:
        # Check if the process is still active
        if not childProcess.is_alive():
            # Request completed
            break
        time.sleep(5)  # Give the system some breathing time
    # Check if the process is still active after 10 mins.
    if childProcess.is_alive():
        # Shutdown the process
        childProcess.terminate()
        raise RuntimeError("Connection Timed-out")
Not the perfect code for your problem, but you get the idea.
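An alternative worth mentioning, assuming a per-read timeout is acceptable and a reasonably recent requests (the tuple form of timeout needs requests 2.4+): you can pass a (connect, read) tuple, and on a streamed response the read timeout applies to each wait for new data, so a server that goes silent raises an exception you can catch instead of hanging forever. A minimal sketch:

import requests

URL = 'http://agent.mtconnect.org:80/sample?interval=0'

try:
    # 2 s to connect, then up to 600 s of silence allowed between chunks of data.
    requests_stream = requests.get(URL, stream=True, timeout=(2, 600))
    for line in requests_stream.iter_lines():
        if not line:
            continue
        # ... parse Content-length and the XML body exactly as in the question ...
        print(line[:60])
except requests.exceptions.RequestException as e:
    # Raised when the connection fails or the server stays silent for too long.
    print('stream ended or timed out:', e)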
