Python watchdog not processing all files in Windows?

I've got this watchdog watching a folder and using a handler to LPR all newly created files to a specific printer (defined in a command-prompt batch file). The problem is that when you submit a lot of files, the watchdog will only process 8, 9, 10 or 11 of them...
What am I doing wrong? I'm pretty sure there's something wrong with my print queue (maybe it's getting corrupted) or with a Windows processing timeout...
The script is:
import os
import os.path
import subprocess
from subprocess import *
import sys
import time

from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler


class Watcher:
    DIRECTORY_TO_WATCH = r"C:\Users\50544342\Desktop\Newfolder3\Files"

    def __init__(self):
        self.observer = Observer()

    def run(self):
        event_handler = Handler()
        self.observer.schedule(event_handler, self.DIRECTORY_TO_WATCH, recursive=True)
        self.observer.start()
        try:
            while True:
                time.sleep(5)
        except:
            self.observer.stop()
            print("Error")
        self.observer.join()


class Handler(FileSystemEventHandler):
    @staticmethod
    def on_any_event(event):
        if event.is_directory:
            # LPR print from batch on any event.
            p = subprocess.Popen(['LPR.bat', event.src_path], stdout=PIPE, stderr=PIPE)
            output, errors = p.communicate()
            p.wait()  # redundant: communicate() already waits for the process
        elif event.event_type == 'created':
            # LPR print from batch when a file is first created.
            p = subprocess.Popen(['LPR.bat', event.src_path], stdout=PIPE, stderr=PIPE)
            output, errors = p.communicate()
            p.wait()  # redundant: communicate() already waits for the process


if __name__ == '__main__':
    w = Watcher()
    w.run()
The LPR.bat reads:
lpr.exe -S 127.0.0.1 -P Queue %1
Thanks in advance for any help or tips you may provide.

You should try changing the buffer size watchdog uses on Windows. Under the hood it relies on ReadDirectoryChangesW, and when many files land in the folder at once the events can overflow that buffer and get dropped, which would explain why only 8-11 of your files are processed. Try a bigger buffer size.
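A minimal sketch of that change, assuming a watchdog version that keeps the Windows buffer size as a module-level constant in watchdog/observers/winapi.py (check your installed copy; the 64 KB value is just an example):

import watchdog.observers.winapi as winapi

# Assumption: this constant exists in your watchdog version and is read
# each time the ReadDirectoryChangesW buffer is allocated.
winapi.BUFFER_SIZE = 64 * 1024  # default is 2048 bytes

from watchdog.observers import Observer
observer = Observer()  # observers created after the patch use the larger buffer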

Related

Communicating with Program by command line using Python

I am trying to create a Python script that will allow some interface with Cadence Skill (a command-line interface). I want any output to be directed to the shell. I feel like this should be simple, but I haven't been able to get it working yet. With Popen, however, I can't see any output on the command line, and I'm not sure that communicate() is properly sending the command. Here is what I have so far:
import re, array
import sys
from subprocess import call
from subprocess import Popen, PIPE, STDOUT
from threading import Thread
import os

# SET THESE VARIABLES
LibraryPath = 'path_to_library'
skillPath = 'path_to_cadence'
# Cadence Environment path
cadence_env = 'source /mscad/apps/bin/mscad_bash/cadtools --env cadence'


class cd:
    """Context manager for changing the current working directory"""
    def __init__(self, newPath):
        self.newPath = os.path.expanduser(newPath)

    def __enter__(self):
        self.savedPath = os.getcwd()
        os.chdir(self.newPath)

    def __exit__(self, etype, value, traceback):
        os.chdir(self.savedPath)

# Change to proper Cadence Directory
# Debugging Variables
etype = 0; value = 0; traceback = 0
NewPath = cd(LibraryPath)
NewPath.__enter__()

# Open Cadence Virtuoso in Shell Mode
try:
    from Queue import Queue, Empty
except ImportError:
    from queue import Queue, Empty  # python 3.x

ON_POSIX = 'posix' in sys.builtin_module_names

def enqueue_output(out, queue):
    for line in iter(out.readline, b''):
        queue.put(line)
    out.close()

p = Popen(['/bin/bash', '-i', '-c', 'cadence_env cmos12s0; virtuoso -nograph'],
          stdout=PIPE, bufsize=1, close_fds=ON_POSIX)
q = Queue()
t = Thread(target=enqueue_output, args=(p.stdout, q))
t.daemon = True  # thread dies with the program
t.start()

# read line without blocking
try:
    line = q.get_nowait()  # or q.get(timeout=.1)
except Empty:
    print('no output yet')
else:  # got line
    print(line)

load_command = "load(\"" + skillPath + "\")"
print load_command
p.communicate(input=load_command)
print "Command Sent ..."
NewPath.__exit__(etype, value, traceback)
call(["ls -l"], shell=True)
Thanks in advance for the help.
References
Non-blocking read on a subprocess.PIPE in python
I was making it more complicated than it needed to be for my application. The main problem was that I was missing the \n when sending my command to the program. Also, removing stdout=PIPE allowed all of the program's output to go directly to the console.
from subprocess import Popen, PIPE, STDOUT

proc = Popen(['/bin/bash', '-i', '-c', 'Command_To_Open_Program'], stdin=PIPE, stderr=STDOUT)
command = "command_to_program\n"  # the trailing \n was the missing piece
proc.stdin.write(command)

Partial read from "stdout" on Python using Popen [duplicate]

This question already has answers here:
Stop reading process output in Python without hang?
I'm trying to build a Python script which opens a subprocess (a bash script) and reads stdout into a variable for 10 seconds. After 10 seconds I need to transfer the data to a server via a POST request.
I know how to make the POST request, but how do I collect stdout for 10 seconds at a time?
I've found a lot of examples of how to use Popen, launch a bash script and read stderr instantly without buffering, but how do I collect the output over some period and release it in chunks?
I think this solution with two threads with simple responsibilities is clean and elegant.
import os
import subprocess
import threading
from time import sleep


class OutputMonitor(threading.Thread):
    """ Start the subprocess in a separate thread and append its output to a buffer. """

    def __init__(self, cmd):
        super(OutputMonitor, self).__init__()
        self.daemon = True
        self.cmd = cmd
        self.buffer = ''
        self.buflock = threading.Lock()

    def run(self):
        popen = subprocess.Popen(self.cmd, stdout=subprocess.PIPE,
                                 stderr=subprocess.STDOUT)
        while popen.poll() is None:
            data = popen.stdout.read(4)
            if data != "":
                with self.buflock:
                    self.buffer += data

    def get_current_output(self):
        with self.buflock:
            buf = self.buffer
            self.buffer = ""
            return buf


class OutputHandler(threading.Thread):
    """
    Start a thread responsible for tracking subprocess output, and periodically
    check if it has produced new output. If so, call a handler to process this data.
    """

    def __init__(self, cmd, interval, filepath):
        super(OutputHandler, self).__init__()
        self.om = OutputMonitor(cmd)
        self.interval = interval
        # Replace this with your handler init...
        self.filepath = filepath
        if os.path.exists(self.filepath):
            os.unlink(self.filepath)

    def run(self):
        self.om.start()
        while self.om.is_alive():
            sleep(self.interval)
            data = self.om.get_current_output()
            self._handle_data_chunk(data)

    def _handle_data_chunk(self, data):
        # Replace this with your handling.
        with open(self.filepath, 'a') as f:
            f.write(data)


if __name__ == '__main__':
    logfile_path = "C:\\log.txt"
    interval = 5
    cmd = ['ping', '-n', '10', '127.0.0.1']  # Windows ping; use '-c' on Linux
    oh = OutputHandler(cmd, interval, logfile_path)
    oh.start()
    oh.join()
You could do something similar to what is below:
point the subprocess output to the console
catch the output in a variable that is shared by the posting and capturing functions
set up a timer thread to post the logs every 10 seconds
import threading, sys, subprocess

out = ""

def postLogs():
    print out
    # do your posting here
    threading.Timer(10.0, postLogs).start()  # execute this function every 10 seconds

proc = subprocess.Popen("ping google.com", shell=True,
                        stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
while proc.poll() is None:
    out = proc.stdout.readline()
    sys.stdout.flush()
    if out != "":
        postLogs()
Okay, let's continue with mrad's script.
I edited it just a little. I added a write-to-file function and it works perfectly with
ping google.com
BUT it does not work with the command I need... I need to launch ffmpeg. The command I need is
ffmpeg -i "my rtsp link" -vcodec copy -loglevel verbose -an -f flv "my RTMP link"
When I put my command inside this code: 1. I see the output instantly. 2. Nothing is saved in the file. :(
import subprocess
import threading
from time import sleep
from Queue import Queue


class Monitor(threading.Thread):

    def __init__(self, queue, cmd):
        super(Monitor, self).__init__()
        self.queue = queue
        self.cmd = cmd

    def run(self):
        popen = subprocess.Popen(self.cmd, stdout=subprocess.PIPE, shell=True)
        while popen.poll() is None:
            line = popen.stdout.readline()
            self.queue.put(line)


def foo(cmd, interval):
    q = Queue()
    m = Monitor(q, cmd)
    m.start()
    while m.is_alive():
        sleep(interval)
        current_queue_length = q.qsize()
        chunk = ''
        for i in xrange(current_queue_length):
            chunk += q.get()
        print chunk
        f = open("/home/pi/raf/log.txt", "a")  # trying to write to log
        f.write(chunk)
        f.close()


if __name__ == '__main__':
    cmd = 'ping google.com'
    interval = 3
    foo(cmd, interval)

How do I get data from a subprocess PIPE while the subprocess is running in Python?

I've got a program on Windows that calls a bunch of subprocesses, and displays the results in a GUI. I'm using PyQt for the GUI, and the subprocess module to run the programs.
I've got the following WorkerThread, that spawns a subthread for each shell command devoted to reading the process stdout and printing the results (later I'll wire it up to the GUI).
This all works. Except proc.stdout.read(1) never returns until after the subprocess has completed. This is a big problem, since some of these subprocesses can take 15-20 minutes to run, and I need to display results as they're running.
What do I need to do to get the pipe working while the subprocess is running?
class WorkerThread(QtCore.QThread):
    def run(self):
        def sh(cmd, cwd=None):
            proc = subprocess.Popen(cmd,
                                    shell=True,
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.STDOUT,
                                    stdin=subprocess.PIPE,
                                    cwd=cwd,
                                    env=os.environ)
            proc.stdin.close()

            class ReadStdOutThread(QtCore.QThread):
                def run(_self):
                    s = ''
                    while True:
                        if self.request_exit: return
                        b = proc.stdout.read(1)
                        if b == '\n':
                            print s
                            s = ''
                            continue
                        if b:
                            s += b
                            continue
                        if s: print s
                        return

            thread = ReadStdOutThread()
            thread.start()

            retcode = proc.wait()
            if retcode:
                raise subprocess.CalledProcessError(retcode, cmd)
            return 0
FWIW: I rewrote the whole thing using QProcess, and I see the exact same problem. The stdout receives no data, until the underlying process has returned. Then I get everything all at once.
If you know how long the lines of the command's output will be, you can poll on the stdout PIPE of the process.
An example of what I mean:
import select
import subprocess
import threading
import os

# Some time-consuming command.
command = 'while [ 1 ]; do sleep 1; echo "Testing"; done'


# A worker thread, not as complex as yours, just to show my point.
class Worker(threading.Thread):

    def __init__(self):
        super(Worker, self).__init__()
        self.proc = subprocess.Popen(
            command, shell=True,
            stdout=subprocess.PIPE,
            stdin=subprocess.PIPE, stderr=subprocess.STDOUT
        )

    def run(self):
        self.proc.communicate()

    def get_proc(self):
        # The proc is needed to ask it for its
        # output file descriptor later.
        return self.proc


if __name__ == '__main__':
    w = Worker()
    w.start()

    proc = w.get_proc()
    pollin = select.poll()
    pollin.register(proc.stdout, select.POLLIN)

    while (1):
        events = pollin.poll()
        for fd, event in events:
            if event == select.POLLIN:
                # This is the main issue with my idea:
                # if you don't know the length of the lines
                # the process outputs, this is a problem.
                # I use 7 since I know the word "Testing" has
                # 7 characters.
                print os.read(fd, 7)
Maybe this is not exactly what you're looking for, but I think it gives you a pretty good idea of what to do to solve your problem.
EDIT: I think I've just found what you need Streaming stdout from a Python subprocess in Python.
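For reference, a minimal sketch of the streaming approach from that link (the command here is a placeholder): read stdout line by line while the child is still running, instead of one byte at a time.

import subprocess

# 'some_long_running_command' is a placeholder for the real program.
proc = subprocess.Popen(['some_long_running_command'],
                        stdout=subprocess.PIPE,
                        stderr=subprocess.STDOUT,
                        bufsize=1,               # line-buffered in text mode
                        universal_newlines=True)
for line in iter(proc.stdout.readline, ''):
    print(line.rstrip())  # handle each line as soon as it arrives
proc.stdout.close()
proc.wait()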

Python program to manage python script as child

I am looking for a Python equivalent of the following:
until python program.py; do
echo "Crashed...Restarting..." >&2
sleep 1
done
Also, I need to kill program.py when the parent program is killed. Any suggestions?
The subprocess and psutil modules should provide most (if not all) of what you need.
import sys, subprocess

while True:
    retCode = subprocess.call(["python", "program.py"])
    if retCode == 0:
        break
    print('Crashed...Restarting...', file=sys.stderr)
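The psutil half of that suggestion is not shown above; here is a hedged sketch of how it could cover the "kill program.py when the parent is killed" part (this assumes psutil is installed; note that atexit does not fire on SIGKILL):

import atexit
import psutil

def kill_children():
    # On exit, kill every child this process started (program.py included).
    for child in psutil.Process().children(recursive=True):
        child.kill()

atexit.register(kill_children)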
#!/usr/bin/python
import os
import signal
import subprocess
import sys
import time

Proc = None


def signal_handler(sig, frame):
    ''' Kill the program '''
    os.kill(Proc.pid, sig)
    sys.exit(0)


def main():
    global Proc

    ''' Handle signal on parent program '''
    signal.signal(signal.SIGTERM, signal_handler)
    signal.signal(signal.SIGINT, signal_handler)

    Proc = subprocess.Popen(['python', 'program.py'],
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    while True:
        try:
            wait = Proc.wait()
            if wait:
                if wait != (-1 * signal.SIGKILL):
                    print "Restarting ....(return code %d)" % wait
                    time.sleep(1)
                    Proc = subprocess.Popen(['python', 'program.py'],
                                            stdout=subprocess.PIPE,
                                            stderr=subprocess.PIPE)
            else:
                ''' kill yourself '''
                sys.exit(0)
        except KeyboardInterrupt:
            # If python >= 2.6
            # Proc.kill()
            sys.exit(0)


if __name__ == '__main__':
    main()
If you can change program.py, I would modify it so that you can call it directly rather than in a subprocess. If your program follows the convention of containing only definitions plus a final executable section that runs only when it is called directly, i.e. it looks like
def do_something():
    pass

def do_something_else():
    pass

if __name__ == '__main__':
    do_something()
    do_something_else()
then it is sufficient to wrap the last block in a function, such as
def main():
    do_something()
    do_something_else()

if __name__ == '__main__':
    main()
At this point you can just import your program.py module and call program.main(), making your code much simpler.
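A minimal sketch of that supervisor, assuming program.py has been refactored to expose main() as above (the restart message mirrors the shell loop in the question):

import sys
import time

import program  # assumes the refactored program.py is importable

while True:
    try:
        program.main()
        break  # main() returned normally, so we are done
    except KeyboardInterrupt:
        raise  # killing the parent now kills the work too; no child to clean up
    except Exception as exc:
        sys.stderr.write('Crashed...Restarting... (%s)\n' % exc)
        time.sleep(1)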

pycurl subprocess in a separate Thread

I need help getting the output from pycurl that I'm trying to run in a subprocess. I'm trying to put this output into a queue and then pull the queue out in a different class.
Unfortunately, right now I have no output. =(
import threading
import random
import time
import Queue
import urllib2
import sys
import subprocess
import simplejson, pycurl
import signal

queue = Queue.Queue()
keep_running = True
user = "username"
pswd = "pass"


class MyThread(threading.Thread):
    def __init__(self, queue):
        threading.Thread.__init__(self)
        self.queue = queue

    def run(self):
        curl_path = '/usr/bin/curl'
        curl_list = [curl_path]
        args = ('curl', 'https://stream.twitter.com/1/statuses/filter.json?track=java', '-u', 'user:pass')
        for arg in args:
            curl_list.append(arg)
        child = subprocess.Popen(
            curl_list,
            shell=False,
            #stdout=subprocess.PIPE)
            stderr=subprocess.PIPE)
        try:
            out += child.communicate()
            c_out.write(out)
            self.queue.put(c_out)
            self.queue.task_done()
        except KeyboardInterrupt:
            child.kill()


class Starter():
    def __init__(self):
        self.queue = queue
        t = MyThread(self.queue)
        t.daemon = True
        t.start()
        self.next()

    def next(self):
        while True:
            time.sleep(0.5)
            if not self.queue.empty():
                line = self.queue.get(timeout=0.2)
                print '\n\nIM IN STARTER %s' % line
            else:
                print 'waiting for queue'


def main():
    try:
        Starter()
    except KeyboardInterrupt, e:
        print 'Stopping'
        raise

main()
You seem to be confusing your arguments to subprocess quite a bit... the args list should contain all of the different pieces of the command you would use for curl; you are currently putting them together in a fashion that is not going to work with subprocess. Your curl_list should look more like this...
curl_path = '/usr/bin/curl'
curl_list = [curl_path, 'https://stream.twitter.com/1/statuses/filter.json?track=java', '-u', 'user:pass']
You are also using an unnecessary for loop at the moment... you don't want to loop over that list, you just want to pass it to subprocess, which will handle it appropriately. And you are also going to want stdout to get the results, so you need to include that pipe as well.
I.e., the entire thing should be...
def run(self):
    curl_path = '/usr/bin/curl'
    curl_list = [curl_path, 'https://stream.twitter.com/1/statuses/filter.json?track=java', '-u', 'user:pass']
    child = subprocess.Popen(curl_list,
                             shell=False,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE)
    try:
        out += child.communicate()[0]
        c_out.write(out)
        self.queue.put(c_out)
        self.queue.task_done()
    except KeyboardInterrupt:
        child.kill()
You might want to take another look at the subprocess documentation to better understand the changes above. I haven't actually run this through an interpreter, so it may not be perfect, but it should get you going in the right direction... good luck!
