python update variable in loop and use it in another process - python

Why is the while loop ignored in work1? I would like to update the value in a loop and print that value from the work2 process. I have also tried a Queue, but my problem is that I have only one variable, which I would like to update in work1 and read in work2.
from multiprocessing import Process, Manager, Value
from ctypes import c_char_p
import time


def work1(string):
    """Write a single value into the shared proxy, then return."""
    i = 2
    string.value = i
    # while True:
    #     print("work1")
    #     string.value = i + 1
    #     time.sleep(2)


def work2(string):
    """Poll the shared value forever, printing it every 2 seconds."""
    while True:
        print("Value set in work1 " + str(string.value))
        time.sleep(2)


if __name__ == '__main__':
    manager = Manager()
    string = manager.Value(int, 0)
    p1 = Process(target=work1, args=(string,))
    p1.start()
    p1.join()
    p2 = Process(target=work2, args=(string,))
    p2.start()
    p2.join()

That is because you didn't make your program parallel with two processes; instead, the two processes run one after the other. What you need to do is start both processes before joining either of them, as in my modification below:
from multiprocessing import Process, Manager, Value
from ctypes import c_char_p
import time


def work1(string):
    """Increment the shared value every 2 seconds, forever."""
    i = 2
    string.value = i
    while True:
        i = i + 1
        string.value = i
        print("work1 set value to " + str(string.value))
        time.sleep(2)


def work2(string):
    """Print the shared value every 2 seconds, forever."""
    while True:
        print("Value set in work1 " + str(string.value))
        time.sleep(2)


if __name__ == '__main__':
    manager = Manager()
    string = manager.Value(int, 0, lock=False)
    # Start BOTH processes before joining either, so they run in parallel.
    p1 = Process(target=work1, args=(string,))
    p2 = Process(target=work2, args=(string,))
    p1.start()
    p2.start()
    p2.join()
    p1.join()
Indeed, if you write the code this way, the joins never return because of the infinite while loops.

Related

Update and Read the same variable using python Ray

I'm just studying about multiprocessing in Python. I have a code that updates the value of a variable in a process, and other processes read the value of this variable. This is working as I expected.
Now I just want to know if there is some way to do the same using the Ray library to improve the speed of execution if I need to run lots of processes reading it
from multiprocessing import Process, Manager


def write_to_dict(d, value):
    """Producer: keep incrementing a counter and publishing it under 'key'."""
    while True:
        value = value + 1
        d['key'] = value


def read_from_dict(d):
    """Consumer: keep reading and printing the latest published value."""
    while True:
        read = d['key']
        print(read)


if __name__ == '__main__':
    manager = Manager()
    shared_dict = manager.dict()
    p1 = Process(target=write_to_dict, args=(shared_dict, 0))
    p2 = Process(target=read_from_dict, args=(shared_dict,))
    p1.start()
    p2.start()
    p1.join()
    p2.join()

multiprocessing in python, can i use variables for global?

Hello,
I need some help, please.
I want to share variables while using a repeating statement (a loop).
In my actual code there are many variables, and functions that handle those variables.
I have to use multiprocessing for other reasons, but it doesn't do what I want.
Below is a simplified version of the code;
please help me.
from multiprocessing import Process, Manager
import time


def a(final_list):
    """Append an increasing counter to the shared list once per second."""
    c = 0
    while True:
        c += 1
        final_list.append(c)
        time.sleep(1)
        print(final_list)


def b(final_list):
    """Print the last element of the shared list once per second."""
    while True:
        print(final_list[-1])
        time.sleep(1)


if __name__ == '__main__':
    manager = Manager()
    final_list = []
    final_list = manager.list()
    #print(a)
    p1 = Process(target=a, args=(final_list,))
    p2 = Process(target=b, args=(final_list,))
    p1.start()
    time.sleep(3)
    p2.start()
I think you forgot to use join() for the processes. try this:
from multiprocessing import Process, Manager
import time


def a(final_list):
    """Append an increasing counter to the shared list once per second."""
    c = 0
    while True:
        c += 1
        final_list.append(c)
        time.sleep(1)
        print(final_list)


def b(final_list):
    """Print the last element of the shared list once per second."""
    while True:
        print(final_list[-1])
        time.sleep(1)


if __name__ == '__main__':
    # The context manager keeps the Manager server alive while we wait
    # on the workers; join() keeps the parent from exiting early.
    with Manager() as manager:
        final_list = manager.list()
        p1 = Process(target=a, args=(final_list,))
        p2 = Process(target=b, args=(final_list,))
        p1.start()
        time.sleep(3)
        p2.start()
        p1.join()
        p2.join()

Start while loop in one multiprocessing function, from another multiprocessing function

So I want to start a nested while loop in one multiprocessing function from another multiprocessing function. In one function, I'm changing a variable (action) to "fn2", and in the other function there is a nested while loop whose condition is while action == "fn2":.
See code:
from multiprocessing import Process

running = True
action = None


def func1():
    """Set the (process-local) global `action` and print it."""
    global action
    if 1 + 1 == 2:
        action = "fn2"
    print(action)


def func2():
    """Spin until `action` becomes "fn2" — but see the answer: it never
    does here, because each process has its own copy of the global."""
    while running:
        while action == "fn2":
            print("fn2")


if __name__ == '__main__':
    p1 = Process(target=func1)
    p1.start()
    p2 = Process(target=func2)
    p2.start()
    p1.join()
    p2.join()
However, when I run it, the code just prints "fn2" once (confirming that action is equal to "fn2"). But the nested loop inside func2() does not execute. Sorry if the answer is obvious, I'm new to multiprocessing.
I added two comments (with print statements) to highlight the error.
Basically, `action` is still None inside func2(), and that is why the inner loop never runs:
from multiprocessing import Process

running = True
action = None


def func1():
    """Set the (process-local) global `action` and print it."""
    global action
    if 1 + 1 == 2:
        action = "fn2"
    print(action)


def func2():
    while running:
        print('got here')  # <--- loops infinitely here
        print(action)      # <--- this is None: globals are not shared
        while action == "fn2":
            print("fn2")


if __name__ == '__main__':
    p1 = Process(target=func1)
    p1.start()
    p2 = Process(target=func2)
    p2.start()
    p1.join()
    p2.join()
In order to share values when multiprocessing — which the documentation calls "sharing state between processes" — you need to use Value or Array for shared memory on a single machine, or alternatively a Manager, which also works across a network of servers.
Here is a link:
https://docs.python.org/3/library/multiprocessing.html
The basic format looks like this:
from multiprocessing import Process, Value, Array


def f(n, a):
    """Mutate shared state in place: set the Value, negate each Array slot."""
    n.value = 3.1415927
    for i, item in enumerate(a):
        a[i] = -item


if __name__ == '__main__':
    num = Value('d', 0.0)           # 'd': double-precision float
    arr = Array('i', range(10))     # 'i': signed int
    p = Process(target=f, args=(num, arr))
    p.start()
    p.join()
    print(num.value)
    print(arr[:])
So in the case of the question, the variable action is equivalent to n (a single value) or a (an array), and objects like these can be shared across processes.
Also note that one can pass arguments into multiprocessing functions with the args keyword: args=(num, arr, ...).

How to update a dictionary in a while loop?

I am using two loops and cannot figure out how to properly update the dictionary in one loop and use it in the other loop.
In the first loop I am adding a new pair to the dictionary, but in the second loop I don't see any changes, how do I do it correctly?
import time
from multiprocessing import Process

dict_1 = {1: '1'}


def func_1():
    """Add a new pair to the (process-local) dict every 5 seconds."""
    while True:
        dict_1.update({2: '2'})
        print('Result func_1-', dict_1)
        time.sleep(5)


def func_2():
    """Print the (process-local) dict every 5 seconds."""
    while True:
        print('Result func_2-', dict_1)
        time.sleep(5)


if __name__ == '__main__':
    p1 = Process(target=func_1)
    p2 = Process(target=func_2)
    p1.start()
    p2.start()
    p1.join()
    p2.join()
Result func_1- {1: '1', 2: '2'} Result func_2- {1: '1'}
In the first cycle I see a new pair, but I do not see it in the second cycle.
You can solve this by using multiprocessing.Manager to create a managed dictionary for your purpose this way:
import time
from multiprocessing import Process, Manager

# NOTE(review): the Manager and the managed dict are created at module
# level, outside the __main__ guard — this appears to rely on the "fork"
# start method so the children inherit dict_1; confirm on platforms that
# default to "spawn".
manager = Manager()
dict_1 = manager.dict({1: '1'})


def func_1():
    """Add a new pair to the shared managed dict every 5 seconds."""
    while True:
        dict_1.update({2: '2'})
        print('Result func_1-', dict_1)
        time.sleep(5)


def func_2():
    """Print the shared managed dict every 5 seconds."""
    while True:
        print('Result func_2-', dict_1)
        time.sleep(5)


if __name__ == '__main__':
    p1 = Process(target=func_1)
    p2 = Process(target=func_2)
    p1.start()
    p2.start()
    p1.join()
    p2.join()

Something strange happen with python multiprocess

I've just tested python multiprocessing for reading file or a global variable, but there is something strange happen.
for example:
import multiprocessing
a = 0
def test(lock, name):
global a
with lock:
for i in range(10):
a = a + 1
print "in process %d : %d" % (name, a)
def main():
lock = multiprocessing.Lock()
p1 = multiprocessing.Process(target=test, args=(lock, 1))
p2 = multiprocessing.Process(target=test, args=(lock, 2))
p1.start()
p2.start()
p1.join()
p2.join()
print "in main process : %d" % a
if __name__=='__main__':
main()
The program read a global variable, but the output is:
in process 1 : 10
in process 2 : 10
in main process : 0
It seems that the sub-process cannot get and edit the global variable properly. Also, if I change the program to read the file, each sub-process will read the file completely, ignoring the lock.
So how does these happen? And how to solve this problem?
Global variables are not shared between processes. When you create and start a new Process(), that process runs inside a separate "cloned" copy of the current Python interpreter. Updating the variable from within a Process() will only update the variable locally to the particular process it is updated in.
To share data between Python processes, we need a multiprocessing.Pipe(), a multiprocessing.Queue(), a multiprocessing.Value(), a multiprocessing.Array() or one of the other multiprocessing-safe containers.
Here's an example based on your code:
import multiprocessing
def worker(lock, counter, name):
with lock:
for i in range(10):
counter.value += 1
print "In process {}: {}".format(name, counter.value)
def main():
lock = multiprocessing.Lock()
counter = multiprocessing.Value('i', 0)
p1 = multiprocessing.Process(target=worker, args=(lock, counter, 1))
p2 = multiprocessing.Process(target=worker, args=(lock, counter, 2))
p1.start()
p2.start()
p1.join()
p2.join()
print "In main process: {}".format(counter.value)
if __name__=='__main__':
main()
This gives me:
In process 1: 10
In process 2: 20
In main process: 20
Now, if you really want to use a global variable, you can use a multiprocessing.Manager(), but I think the first method is preferable, and this is a "heavier" solution. Here's an example:
import multiprocessing
manager = multiprocessing.Manager()
counter = manager.Value('i', 0);
def worker(lock, name):
global counter
with lock:
for i in range(10):
counter.value += 1
print "In process {}: {}".format(name, counter.value)
def main():
global counter
lock = multiprocessing.Lock()
p1 = multiprocessing.Process(target=worker, args=(lock, 1))
p2 = multiprocessing.Process(target=worker, args=(lock, 2))
p1.start()
p2.start()
p1.join()
p2.join()
print "In main process: {}".format(counter.value)
if __name__=='__main__':
main()

Categories

Resources