I've been looking into a way to directly change variables in a running module.
What I want to achieve is that a load test is being run and that I can manually adjust the call pace or whatsoever.
Below is some code that I just created (not tested, etc.), just to give you an idea.
class A():
    """Holds a mutable value and prints it in a loop.

    Sketch: value is meant to be changed from another thread while
    runForever() keeps printing it.
    """

    def __init__(self):
        self.value = 1  # initial value to publish

    def runForever(self):
        """Print the current value forever (blocks the calling thread)."""
        while(1):
            # FIX: the original used the Python 2 print statement
            # (`print self.value`), a syntax error on Python 3;
            # the call form works on both interpreters.
            print(self.value)

    def setValue(self, value):
        """Replace the published value."""
        self.value = value
if __name__ == '__main__':
    # Create the A object and directly apply the value from a human's input.
    a = A()
    # NOTE(review): some parallelism has to be applied here --
    # runForever() blocks forever, so the setValue() call below
    # is never reached (acknowledged by the author in Edit #1).
    a.runForever()
    a.setValue(raw_input("New value: "))
Edit #1: Yes, I know that now I will never hit the a.setValue() :-)
Here is a multi-threaded example. This code will work with the python interpreter but not with the Python Shell of IDLE, because the raw_input function is not handled the same way.
from threading import Thread
from time import sleep
class A(Thread):
    """Worker thread that prints its current value once per second.

    The value can be replaced from another thread with set_value();
    stop() asks the printing loop to finish.
    """

    def __init__(self):
        Thread.__init__(self)
        self.value = 1          # value printed on every tick
        self.stop_flag = False  # set True to end run()

    def run(self):
        """Tick once a second until stop() is called."""
        while True:
            if self.stop_flag:
                break
            sleep(1)
            print(self.value)

    def set_value(self, value):
        """Publish a new value for the printing loop."""
        self.value = value

    def stop(self):
        """Request that run() terminate after the current tick."""
        self.stop_flag = True
if __name__ == '__main__':
    a = A()
    a.start()
    try:
        while 1:
            r = raw_input()
            a.set_value(int(r))
    # FIX: the original bare `except:` silently swallowed every error,
    # including real bugs; catch only the "user is done" signals.
    except (KeyboardInterrupt, EOFError):
        pass  # Ctrl-C / end of input: fall through and stop the thread
    finally:
        a.stop()
        a.join()  # FIX: wait for the worker's final tick before exiting
The pseudo code you wrote is quite similar to the way Threading / Multiprocessing works in python. You will want to start a (for example) thread that "runs forever" and then instead of modifying the internal rate value directly, you will probably just send a message through a Queue that gives the new value.
Check out this question.
Here is a demonstration of doing what you asked about. I prefer to use Queues to directly making calls on threads / processes.
import Queue # !!warning. if you use multiprocessing, use multiprocessing.Queue
import threading
import time
def main():
    """Read new periods from the console and feed them to a Tester thread.

    Typing 'q' ends the loop and asks the worker to stop.
    """
    commands = Queue.Queue()
    worker = Tester(commands)
    worker.start()
    while True:
        entry = raw_input("New period in seconds or (q)uit: ")
        if entry.lower() == 'q':
            break
        try:
            period = float(entry)
        except ValueError:
            continue  # ignore junk input
        commands.put(period)
    commands.put(Tester.STOP_TOKEN)
class Tester(threading.Thread):
    """Worker that repeatedly runs main_code(), reading speed updates
    (and a stop request) from a Queue instead of being called directly.
    """

    STOP_TOKEN = '<<stop>>'  # sentinel put on the queue to end the thread

    def __init__(self, q):
        threading.Thread.__init__(self)
        self.q = q      # command queue: floats (new speed) or STOP_TOKEN
        self.speed = 1  # seconds slept per iteration of main_code()

    def run(self):
        while True:
            # Drain at most one command from the queue per iteration.
            try:
                item = self.q.get(block=False)  # don't hang
            except Queue.Empty:
                item = None  # no command pending
            # FIX: the original tested `if item:`, which silently ignored
            # a legitimate new speed of 0.0 (falsy).
            if item is not None:
                # stop when requested
                if item == self.STOP_TOKEN:
                    break  # stop this thread loop
                # otherwise check for a new speed
                try:
                    self.speed = float(item)
                except ValueError:
                    pass  # whatever you like with unknown input
            # do your thing
            self.main_code()

    def main_code(self):
        time.sleep(self.speed)  # or whatever you want to do
if __name__ == '__main__':
    main()  # run the interactive demo only when executed as a script
Related
I have a short script (below) where one thread is continuously checking the state of variable (if it is None or not). Is this approach safe without using a Lock, or is there any possibility that the checking thread could read "something strange between None and the reference on the new value"?
Thank you for answer, Martin
import time
import threading
class A(object):
    """Spawns a background thread that busy-polls self.data and reports
    whether it has been filled in yet.
    """

    def __init__(self):
        self.data = None             # filled in later by prepare_data()
        self.e = threading.Event()   # cleared by quit() to stop the poller
        self.e.set()
        self.checking_thread = threading.Thread(target=self.__check_if_not_none)
        self.checking_thread.start()

    def __check_if_not_none(self):
        # Busy loop: reports "Ready" once data is bound, "None" before that.
        while self.e.is_set():
            print("Ready" if self.data is not None else "None")

    def prepare_data(self):
        """Build the list locally, then publish it via one attribute bind."""
        prepared = []
        prepared.extend([1, 2, 3, 4, 5])  # Data preparing...
        self.data = prepared

    def quit(self):
        """Stop the polling thread and wait for it to exit."""
        self.e.clear()
        self.checking_thread.join()
        print("Done")
if __name__ == "__main__":
    a = A()            # starts the checking thread immediately
    time.sleep(0.001)  # let the poller observe data is None
    a.prepare_data()
    time.sleep(0.001)  # let the poller observe the new list
    a.quit()
I've been looking into the multiprocessing module to figure this out but I'm not entirely sure of all the components I need or how to structure them. The basic structure/logic that I'm trying to get goes something like this though:
import datetime
import time
import multiprocessing
class So_Classy():
    """Owns a counter that a child process increments forever.

    NOTE(review): multiprocessing gives the child its own copy of this
    object, so increments made in the child are NOT visible to the
    parent's self.value -- share state with a multiprocessing
    Value/Queue/Pipe instead. TODO confirm against caller expectations.
    """

    def __init__(self):
        self.value = 0       # counter incremented in the child process
        self.process = None  # handle of the spawned process, set by run()

    def update_values(self):
        """Increment self.value ten times per second, forever."""
        print('In the process doing stuff!')
        while True:
            self.value = self.value + 1
            time.sleep(0.1)
            # FIX: the original string lacked the `f` prefix and printed
            # the literal text "{self.value}".
            print(f"Self.value = {self.value}")

    def run(self):
        # Constantly update old value to a new one in a child process.
        # FIX: the original wrapped this in `if __name__ == '__main__':`
        # *inside* the method, so the process was never started when
        # run() was called from an imported module; the guard belongs at
        # module level, not here.
        try:
            self.process = multiprocessing.Process(target=self.update_values)
            self.process.start()
            print("Process started!")
        except Exception as e:
            print(str(e))

    def get_result(self, arg):
        """Return the current (parent-side) counter scaled by arg."""
        return self.value * arg
##### MAIN PROGRAM #####
# Initialize process given certain parameters
sc = So_Classy()
# This spawns a process running an infinite while loop
sc.run()
one_second = datetime.datetime.now() + datetime.timedelta(seconds=1)
# Busy-wait loop: once per second, read the counter and print a calc.
# NOTE(review): get_result() runs in the parent, whose copy of sc.value
# is never updated by the child process -- presumably this always
# prints 0; verify with a shared Value/Queue.
while True:
    if datetime.datetime.now() > one_second:
        # Pass an arg to sc and do a calc with it
        print(sc.get_result(5))
        one_second = datetime.datetime.now() + datetime.timedelta(seconds=1)
The run() function is making it through to the end without causing an exception but it doesn't appear to actually be entering the process. No idea why. :\
The real process I will be using will be computationally intensive so it has to run as a separate process.
Hi, I need to create 2 threads: one which repeatedly writes the time of day as an
HH:MM:SS string into a global variable, 100 times per second. The second thread will repeatedly read the time-of-day
string from that variable twice per second and try to display it on screen, but the code in that thread should ensure the same
string is never displayed twice in a row. The result is that the second thread really displays to the screen only once per second. I have tried the following code, but it's not working.
import threading
import time
c = threading.Condition()  # coordinates turn-taking between the two threads
flag = 0  # 0: Thread_A's turn to write, 1: Thread_B's turn to read; shared between Thread_A and Thread_B
val = ''  # HH:MM:SS time string shared between the threads
class Thread_A(threading.Thread):
    """Writer: repeatedly stores the current HH:MM:SS string into the
    global ``val``, alternating turns with Thread_B via ``flag``.
    """

    def __init__(self, name):
        threading.Thread.__init__(self)
        self.name = name

    def run(self):
        global flag
        global val  # made global here
        while True:
            c.acquire()
            if flag == 0:
                time.sleep(0)  # yield the CPU without actually sleeping
                flag = 1       # hand the turn to Thread_B
                a=range(1,101)
                # Write the timestamp 100 times per turn; the value is
                # recomputed each iteration, so only the last write matters.
                for i in a:
                    val=time.strftime("%H:%M:%S", time.localtime(time.time()))
                c.notify_all()  # wake the reader waiting on the condition
            else:
                c.wait()  # not our turn: block until notified
            c.release()
class Thread_B(threading.Thread):
    """Reader: prints the shared ``val`` when it is its turn (flag == 1).

    NOTE: ``print str(val)`` is Python 2 syntax; this snippet will not
    parse on Python 3.
    """

    def __init__(self, name):
        threading.Thread.__init__(self)
        self.name = name

    def run(self):
        global flag
        global val  # made global here
        while True:
            c.acquire()
            if flag == 1:
                #time.sleep(1)
                flag = 0  # hand the turn back to Thread_A
                a=range(0,2)
                for i in a:  # prints the same value twice per turn
                    print str(val)
                    #val = 20
                c.notify_all()
            else:
                c.wait()  # not our turn: block until notified
            c.release()
# Create and start both threads; start order does not matter because
# each waits for its own turn on the shared condition variable.
a = Thread_A("myThread_name_A")
b = Thread_B("myThread_name_B")
b.start()
a.start()
a.join()  # both run() loops are infinite, so these joins never return
b.join()
You're making this more complicated than it needs to be. You can use a simple Lock object to make sure that only one thread can access val at a time.
The code below will run on Python 2 or Python 3. To stop it, hit Enter
import time
from threading import Thread, Lock
# Rename Python 2's raw_input to input so the same code runs on 2 and 3.
try:
    input = raw_input
except NameError:
    pass  # Python 3: input() already behaves like Python 2's raw_input

val = ''       # shared HH:MM:SS string, protected by lock
lock = Lock()  # guards every read and write of val
def set_time(delay=0.01):
    ''' Write the current time to val, `delay` seconds apart, forever. '''
    global val
    while True:
        # FIX: use the context manager so the lock is released even if
        # strftime raises; a bare acquire/release pair would leave the
        # lock held forever and deadlock the reader thread.
        with lock:
            val = time.strftime("%H:%M:%S")
        time.sleep(delay)
def get_time(delay=0.5):
    ''' Read the current time from val and print
    it if it hasn't been printed already
    '''
    oldval = ''
    while True:
        # FIX: the context manager guarantees release even if print()
        # raises; a bare acquire/release pair would deadlock the writer.
        with lock:
            if val != oldval:
                print(val)
                oldval = val
        time.sleep(delay)
# Start the two worker threads.
for func in (set_time, get_time):
    t = Thread(target=func)
    # NOTE: setDaemon() is a deprecated alias; `t.daemon = True` is the
    # modern spelling. Daemon threads die with the main thread.
    t.setDaemon(True)
    t.start()

# Wait until we get some input (Enter); the daemon threads then exit.
s = input()
some typical output
02:22:04
02:22:05
02:22:06
02:22:07
02:22:08
In this easy example how can I access the value of a counter object while the process is still running?
import multiprocessing
import time
class Counter(object):
    """Minimal counter: update() bumps value by one."""

    def __init__(self):
        self.value = 0  # number of update() calls so far

    def update(self):
        """Increment the counter by a single step."""
        self.value = self.value + 1
def job(counter):
    """Increment `counter` forever (intended to run in the child process).

    FIX: the parameter was named `Counter` (shadowing the class) while
    the body referenced `counter`, which raised NameError (or silently
    resolved to an unrelated global).
    """
    while True:
        counter.update()
if __name__ == '__main__':
    counter = Counter()
    # FIX: the original line was missing the closing parenthesis, which
    # made the whole script a syntax error.
    p = multiprocessing.Process(target=job, args=(counter,))
    p.start()
    time.sleep(10)
    # I want to check the value of the counter object here
    # NOTE(review): the child process gets its own copy of `counter`,
    # so the parent's counter.value is still 0 here -- use a
    # Pipe/Queue/Value to get the child's count (see answers below).
    p.terminate()
You have to use multiprocessing.Queue() or multiprocessing.Pipe() to communicate between processes.
multiprocessing.Pipe() creates two endpoints conn_1, conn_2 and you have to use one of them in main process and second in subprocess.
Use poll() to check if there is something in pipe and then you can use recv() to receive data. (if you use directly recv() then it will block program till you send something to pipe).
Now you can use send() to send message with result.
Here I use conn_2 in job()
import multiprocessing
import time
class Counter(object):
    """Simple counter whose value grows by one per update() call."""

    def __init__(self):
        self.value = 0  # current count

    def update(self):
        """Advance the count by one."""
        self.value = self.value + 1
def job(counter, conn):
    """Increment `counter` forever; whenever a request arrives on the
    pipe endpoint `conn`, echo it and reply with the current count.
    """
    while True:
        counter.update()
        if not conn.poll():
            continue  # nothing asked for; keep counting
        request = conn.recv()
        print('job:', request)
        conn.send(counter.value)
if __name__ == '__main__':
    # conn_1 stays in the parent; conn_2 is handed to the child process.
    conn_1, conn_2 = multiprocessing.Pipe()
    counter = Counter()
    p = multiprocessing.Process(target=job, args=(counter, conn_2))
    p.start()
    time.sleep(2)
    # I want to check the value of the counter object here
    conn_1.send('give me result')    # ask the child for its count
    print('result:', conn_1.recv())  # block until the child answers
    p.terminate()
Here I use conn_2 directly in class
import multiprocessing
import time
class Counter(object):
    """Counter that owns a pipe endpoint and answers value queries
    inline from update().
    """

    def __init__(self, conn):
        self.conn = conn  # child-side pipe endpoint
        self.value = 0    # current count

    def update(self):
        """Bump the count, then service any pending query on the pipe."""
        self.value += 1
        has_request = self.conn.poll()  # message waiting from main process?
        if has_request:
            print('Counter:', self.conn.recv())
            self.conn.send(self.value)
def job(counter):
    """Drive the counter forever; all pipe traffic happens inside
    counter.update() itself.
    """
    while True:
        counter.update()
if __name__ == '__main__':
    conn_1, conn_2 = multiprocessing.Pipe()
    counter = Counter(conn_2)  # child-side endpoint lives inside the counter
    p = multiprocessing.Process(target=job, args=(counter,))
    p.start()
    time.sleep(2)
    conn_1.send('give me result')    # ask the child for its count
    print('result:', conn_1.recv())  # block until the child answers
    p.terminate()
You could consider attaching a debugger (such as the PyDev debugger, GDB or others) to the running process. You can then freeze the process with a breakpoint and inspect state.
How can I implement a conditional lock in a threaded application? For instance, I have
30 threads that are calling a function, and most of the time all threads can access it simultaneously, but depending on the function input there can be a condition when only one thread can do that one thing. (If the value of the input is repeated and some thread is still working, then I need a lock.)
I know that there is the threading module with RLock(), but I don't know how to use it in the way I described in the first part.
Edit: The question is actually about how to prevent any two threads from running the same function with the same argument at the same time. (Thanks to David for helping me formulate my question :) )
Try this: have a lock in the module where your function is, and if the input to the function is such that locking is required, acquire the lock inside the function. Otherwise don't.
l = threading.RLock()  # module-level lock for the special-cased argument

def fn(arg):
    """Run the critical section under the module lock only for the
    special argument; every other argument proceeds without locking.
    """
    # `arg_that_needs_lock` is a placeholder: bind it to the real
    # sentinel value before using this sketch.
    if arg == arg_that_needs_lock:
        l.acquire()
        try:
            # FIX: a suite containing only a comment is a syntax error;
            # `pass` keeps the placeholder valid Python.
            pass  # do stuff
        finally:
            l.release()
    else:
        pass  # do other stuff
EDIT:
As far as I can tell now, the question is actually about how to prevent any two threads from running the same function with the same argument at the same time. There's no problem with two threads running the same function with different arguments at the same time, though. The simple method to do this, if all valid arguments to the function can be dictionary keys, is to create a dictionary of arguments to locks:
import threading
dict_lock = threading.RLock()  # protects the locks dict itself
locks = {}                     # per-argument re-entrant locks

def fn_dict(arg):
    """Serialize calls that share the same `arg`; calls with different
    args may run concurrently, each under its own per-arg lock.
    """
    dict_lock.acquire()
    try:
        # FIX: the original tested `arg not in dict` -- the builtin type,
        # which raises TypeError; the lookup table is `locks`.
        if arg not in locks:
            locks[arg] = threading.RLock()
        l = locks[arg]
    finally:
        dict_lock.release()
    l.acquire()
    try:
        # FIX: a comment-only suite is a syntax error; `pass` keeps the
        # placeholder valid.
        pass  # do stuff
    finally:
        l.release()
If your function can be called with many different arguments, though, that amounts to a lot of locks. Probably a better way is to have a set of all arguments with which the function is currently executing, and have the contents of that set protected by a lock. I think this should work:
set_condition = threading.Condition()  # guards current_args
current_args = set()                   # args currently being executed

def fn_set(arg):
    """Block while another thread is already running with the same arg,
    then execute, and finally wake any threads waiting on this arg.
    """
    set_condition.acquire()
    try:
        while arg in current_args:
            set_condition.wait()
        current_args.add(arg)  # reserve this arg for the current thread
    finally:
        set_condition.release()
    # do stuff
    set_condition.acquire()
    try:
        current_args.remove(arg)
        # FIX: notifyAll() is a deprecated camelCase alias from Python 2;
        # the supported spelling is notify_all().
        set_condition.notify_all()
    finally:
        set_condition.release()
It sounds like you want something similar to a Readers-Writer lock.
This is probably not what you want, but might be a clue:
from __future__ import with_statement
import threading
def RWLock(readers = 1, writers = 1):
    """Build a reader/writer lock pair sharing a single monitor.

    Returns a tuple (read_lock, write_lock).
    """
    monitor = _Monitor(readers, writers)
    read_lock = _RWLock(monitor.r_increment, monitor.r_decrement)
    write_lock = _RWLock(monitor.w_increment, monitor.w_decrement)
    return (read_lock, write_lock)
class _RWLock(object):
def __init__(self, inc, dec):
self.inc = inc
self.dec = dec
def acquire(self):
self.inc()
def release(self):
self.dec()
def __enter__(self):
self.inc()
def __exit__(self):
self.dec()
class _Monitor(object):
    """Shared state for the RWLock pair: counts active readers/writers
    and blocks threads on a single condition variable.
    """

    def __init__(self, max_readers, max_writers):
        self.max_readers = max_readers
        self.max_writers = max_writers
        self.readers = 0  # readers currently holding the read lock
        self.writers = 0  # writers currently holding the write lock
        self.monitor = threading.Condition()

    def r_increment(self):
        # NOTE(review): this guard looks suspect -- it waits only while a
        # writer is active AND reader capacity remains; one would expect
        # to wait while writers > 0 OR readers >= max_readers. The author
        # explicitly marked this code as untested; confirm before use.
        with self.monitor:
            while self.writers > 0 and self.readers < self.max_readers:
                self.monitor.wait()
            self.readers += 1
            self.monitor.notify()

    def r_decrement(self):
        # NOTE(review): waiting for writers to drain before *releasing*
        # a read lock is unusual -- confirm intended semantics.
        with self.monitor:
            while self.writers > 0:
                self.monitor.wait()
            assert(self.readers > 0)
            self.readers -= 1
            self.monitor.notify()

    def w_increment(self):
        # Mirror image of r_increment; the same reservation applies to
        # the AND in this wait condition.
        with self.monitor:
            while self.readers > 0 and self.writers < self.max_writers:
                self.monitor.wait()
            self.writers += 1
            self.monitor.notify()

    def w_decrement(self):
        with self.monitor:
            assert(self.writers > 0)
            self.writers -= 1
            self.monitor.notify()
if __name__ == '__main__':
    # Smoke test: one uncontended acquire/release of each lock.
    rl, wl = RWLock()
    wl.acquire()
    wl.release()
    rl.acquire()
    rl.release()
(Unfortunately not tested)