I have an app.py in which I do the following:

trader = Trader(settings)

while True:
    try:
        trader.analyse_buys()
Now I have the following in trader.py:

def __init__(self, settings):
    self.since = self.calculate_since()
    ...

def analyse_buys(self):
    dosomething()
So analyse_buys() runs in the loop without ever recalculating the value of since.
What would be a possible way to recalculate the variables set in __init__ before each run of the function?
If you still need to keep some state in Trader, i.e. instantiating a new one with
trader = Trader()
isn't an option, consider moving the bits that need to be reinitialized into another function, and calling that both within __init__() and from elsewhere:
class Trader:
    def __init__(self):
        self.state_that_shouldnt_be_re_prepared = ...
        self.prepare()  # (or whatever is a sensible name)

    def prepare(self):
        # do things
        # ...

trader = Trader()
while ...:
    if something:
        trader.prepare()
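For completeness, here is a minimal runnable sketch of that pattern; calculate_since() and the loop body are hypothetical stand-ins for the real trading logic:

import time

class Trader:
    def __init__(self, settings):
        self.settings = settings  # state that should survive re-preparation
        self.prepare()

    def prepare(self):
        # recompute everything that must be fresh for each pass
        self.since = self.calculate_since()

    def calculate_since(self):
        # hypothetical stand-in: treat "since" as a plain timestamp
        return time.time()

    def analyse_buys(self):
        print("analysing buys since", self.since)

trader = Trader(settings={})
for _ in range(3):    # stands in for the real `while True` loop
    trader.prepare()  # refresh self.since before each pass
    trader.analyse_buys()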
Assume I have two classes that use threads:

class foo(threading.Thread):
    def __init__(self):
        threading.Thread.__init__(self, name="foo=>bar")
        self.var1 = {}

    def run(self):
        while True:
            value, name = getvalue()  # name is a string
            self.var1[name] = value
            bar(self)

class bar(threading.Thread):
    def __init__(self, fooInstance):
        threading.Thread.__init__(self, name="bar")

    def run(self):
        while True:
            arg = myfunction()  # some function (not shown for simplicity)
            val = myOtherfunction(fooInstance.var1[arg])  # other function
            print(val)

f = foo()
f.start()
The variable var1 in foo will change over time and bar needs to be aware of these changes. It makes sense to me, but I wonder if there is something fundamental here that could eventually fail. Is this correct in Python?
The actual sharing part is the same question as "how do I share a value with another object?" without threads, and all the same solutions will work.
For example, you're already passing the foo instance into the bar initializer, so just get it from there:
class bar(threading.Thread):
    def __init__(self, fooInstance):
        threading.Thread.__init__(self, name="bar")
        self.var1 = fooInstance.var1
But is this thread-safe?
Well, yes, but only because you never actually start the background thread. But I assume in your real code, you're going to have two threads running at the same time, both accessing that var1 value. In which case it's not thread-safe without some kind of synchronization. For example:
class foo(threading.Thread):
    def __init__(self):
        threading.Thread.__init__(self, name="foo=>bar")
        self.var1 = {}
        self.var1lock = threading.Lock()

class bar(threading.Thread):
    def __init__(self, fooInstance):
        threading.Thread.__init__(self, name="bar")
        self.var1 = fooInstance.var1
        self.var1lock = fooInstance.var1lock
And now, instead of this:
self.var1[name] = value
… you do this:
with self.var1lock:
    self.var1[name] = value
And likewise, instead of this:
val = myOtherfunction(fooInstance.var1[arg])  # other function
… you do this:
with self.var1lock:
    var1arg = self.var1[arg]
val = myOtherfunction(var1arg)
Or… as it turns out, in CPython, updating a value for a single key in a dict (only a builtin dict, not a subclass or custom mapping class!) has always been atomic, and probably always will be. If you want to rely on that fact, you can. But I'd only do that if the lock turned out to be a significant performance issue. And I'd comment every use of it to make it clear, too.
If you'd rather pass values instead of share them, the usual answer is queue.Queue or one of its relatives.
But this requires a redesign of your program. For example, maybe you want to pass each new/changed key-value pair over the queue. That would go something like this:
class foo(threading.Thread):
    def __init__(self):
        threading.Thread.__init__(self, name="foo=>bar")
        self.var1 = {}
        self.q = queue.Queue()

    def run(self):
        b = bar(self)
        b.start()
        while True:
            value, name = getvalue()  # name is a string
            self.var1[name] = value
            self.q.put((name, value))

class bar(threading.Thread):
    def __init__(self, fooInstance):
        threading.Thread.__init__(self, name="bar")
        self.var1 = copy.deepcopy(fooInstance.var1)  # needs `import copy`
        self.q = fooInstance.q

    def _checkq(self):
        while True:
            try:
                key, val = self.q.get_nowait()
            except queue.Empty:
                break
            else:
                self.var1[key] = val

    def run(self):
        while True:
            self._checkq()
            arg = myfunction()  # some function (not shown for simplicity)
            val = myOtherfunction(self.var1[arg])  # use bar's own copy, kept fresh via the queue
            print(val)
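To make the queue-based pattern concrete, here is a minimal self-contained sketch that can be run as-is; getvalue/myfunction/myOtherfunction are replaced with trivial stand-ins:

import copy
import queue
import threading
import time

class Producer(threading.Thread):
    def __init__(self):
        threading.Thread.__init__(self, name="producer", daemon=True)
        self.var1 = {}
        self.q = queue.Queue()

    def run(self):
        for i in range(5):  # stands in for `while True: value, name = getvalue()`
            name, value = "key%d" % i, i * i
            self.var1[name] = value
            self.q.put((name, value))
            time.sleep(0.1)

class Consumer(threading.Thread):
    def __init__(self, producer):
        threading.Thread.__init__(self, name="consumer", daemon=True)
        self.var1 = copy.deepcopy(producer.var1)
        self.q = producer.q

    def _checkq(self):
        while True:
            try:
                key, val = self.q.get_nowait()
            except queue.Empty:
                break
            self.var1[key] = val

    def run(self):
        for _ in range(10):
            self._checkq()
            print("consumer sees:", self.var1)
            time.sleep(0.1)

p = Producer()
c = Consumer(p)
p.start()
c.start()
p.join()
c.join()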
I am having trouble implementing the following scheme:
class A:
    def __init__(self):
        self.content = []
        self.current_len = 0

    def __len__(self):
        return self.current_len

    def update(self, new_content):
        self.content.append(new_content)
        self.current_len += 1

class B:
    def __init__(self, id):
        self.id = id
And I also have these 2 functions that will be called later in the main:
async def do_stuff(first_var, second_var):
    """ this function is ideally called from the main in another
    process. Also, first_var and second_var are not modified so it
    would be nice if they could be given by reference without
    having to copy them """
    ### used for first call
    yield None
    while len(first_var) < CERTAIN_NUMBER:
        time.sleep(10)
    while True:
        ## do stuff
        if condition_met:
            yield new_second_var  ## which is a new instance of B
        ## continue doing stuff

def do_other_stuff(first_var, second_var):
    while True:
        queue = multiprocessing.JoinableQueue()
        results = multiprocessing.Queue()
        ### do stuff
        first_var.update(results)
The main looks like this at the moment:
first_var = A()
second_var = B()

while True:
    async for new_second_var in do_stuff(first_var, second_var):
        if new_second_var:
            ## stop the do_other_stuff that is currently running
            ## to re-launch it with the updated new_var
            do_other_stuff(first_var, new_second_var)
        else:  ## used for the first call
            do_other_stuff(first_var, second_var)
Here are my questions:
Is there a better solution to make this scheme work?
How can I implement the "stopping" part since there is a while True loop that fills first_var by reference?
Will the instance of A (first_var) be passed by reference to do_stuff if first_var doesn't get modified inside it?
Is it even possible to have an asynchronous generator in another process?
Is it even possible at all?
This is using Python 3.6 for the async generators.
I hope this is somewhat clear! Thanks a lot!
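As a side note on the last two questions: async generators do work in Python 3.6 (PEP 525), but time.sleep() inside an async def blocks the whole event loop; await asyncio.sleep() is the non-blocking equivalent. A minimal self-contained sketch of the async for consumption pattern, with trivial stand-in values and no multiprocessing:

import asyncio

async def do_stuff():
    # prefer `await asyncio.sleep(...)` over time.sleep(...) inside coroutines,
    # since the latter blocks the whole event loop
    yield None  # used for the first call, as in the question
    for i in range(3):
        await asyncio.sleep(0.1)
        yield i  # stands in for `yield new_second_var`

async def main():
    async for item in do_stuff():
        print("got:", item)

loop = asyncio.get_event_loop()  # asyncio.run() only exists from 3.7 on
loop.run_until_complete(main())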
class Time:
    def __init__(self, x, y, z):
        self.hour = x
        self.minute = y
        self.second = z

    def __str__(self):
        return "({:02d}:{:02d}:{:02d})".format(self.hour, self.minute, self.second)

    def time_to_int(time):
        minutes = time.hour * 60 + time.minute
        seconds = minutes * 60 + time.second
        return seconds

    def int_to_time(seconds):
        time = Time()
        minutes, time.second = divmod(seconds, 60)
        time.hour, time.minute = divmod(minutes, 60)
        return time

    def add_time(t1, t2):
        seconds = time_to_int(t1) + time_to_int(t2)
        return int_to_time(seconds)

start = Time(9, 45, 00)
running = Time(1, 35, 00)
done = add_time(start, running)
print(done)
I am new to Python and I've been doing some practice lately. I came across a question and wrote the code for it, but I repeatedly get the error "add_time is not defined". I tried defining a main() method, but then it doesn't print anything. Please help.
You haven't created an object of the above class.
Any function/method inside a class can only be accessed through an object of that class. For more information on the fundamentals of object-oriented programming, please check this page.
Meanwhile, for this to work, define your class in the following way:
class Time:
    def __init__(self, x=None, y=None, z=None):
        self.hour = x
        self.minute = y
        self.second = z

    def __str__(self):
        return "({:02d}:{:02d}:{:02d})".format(self.hour, self.minute, self.second)

    def time_to_int(self, time):
        minutes = time.hour * 60 + time.minute
        seconds = minutes * 60 + time.second
        return seconds

    def int_to_time(self, seconds):
        time = Time()
        minutes, time.second = divmod(seconds, 60)
        time.hour, time.minute = divmod(minutes, 60)
        return time

    def add_time(self, t1, t2):
        seconds = self.time_to_int(t1) + self.time_to_int(t2)
        return self.int_to_time(seconds)
and outside the class block, write the following lines:
TimeObject = Time()
start = Time(9, 45, 00)
running = Time(1, 35, 00)
done = TimeObject.add_time(start, running)
print(done)
However, I suggest you write the add_time function outside the class, because passing objects of the class as parameters to a method of that same class is considered bad design in object-oriented programming.
Hope it helps. Cheers!
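A sketch of that suggestion, with the helpers as plain module-level functions; this is essentially the asker's original code, just dedented out of the class and given default constructor arguments so int_to_time can build an empty Time:

class Time:
    def __init__(self, x=0, y=0, z=0):
        self.hour = x
        self.minute = y
        self.second = z

    def __str__(self):
        return "({:02d}:{:02d}:{:02d})".format(self.hour, self.minute, self.second)

def time_to_int(time):
    minutes = time.hour * 60 + time.minute
    return minutes * 60 + time.second

def int_to_time(seconds):
    time = Time()
    minutes, time.second = divmod(seconds, 60)
    time.hour, time.minute = divmod(minutes, 60)
    return time

def add_time(t1, t2):
    return int_to_time(time_to_int(t1) + time_to_int(t2))

start = Time(9, 45, 0)
running = Time(1, 35, 0)
print(add_time(start, running))  # prints (11:20:00)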
This works fine for me, as long as you specify the 3 positional args your constructor expects:
def int_to_time(seconds):
    time = Time(0, 0, 0)  # just set your 3 positional args here
    minutes, time.second = divmod(seconds, 60)
    time.hour, time.minute = divmod(minutes, 60)
    return time
Another way to avoid it could be:
class Time:
    def __init__(self, x=0, y=0, z=0):
        self.hour = x
        self.minute = y
        self.second = z
If you want to add your functions to your class (such as time_to_int, int_to_time or even add_time), you will need to indent them one more level (4 spaces) and add self to your method parameters.
Hi Mathers25,
To solve your problem, try the code below:
class TimeClass:
    def __init__(self, x, y, z):
        self.hour = x
        self.minute = y
        self.second = z

    def __str__(self):
        return "({:02d}:{:02d}:{:02d})".format(self.hour, self.minute, self.second)

    def time_to_int(self, time):
        minutes = (time.hour * 60) + time.minute
        seconds = (minutes * 60) + time.second
        return seconds

    def int_to_time(self, seconds):
        time = TimeClass(0, 0, 0)
        minutes, time.second = divmod(seconds, 60)
        time.hour, time.minute = divmod(minutes, 60)
        return time

    def add_time(self, t1, t2):
        seconds = self.time_to_int(t1) + self.time_to_int(t2)
        # Call method int_to_time() using the self keyword.
        return self.int_to_time(seconds)

# First create a time object whose hour, minute and second are all 0
TimeObject = TimeClass(0, 0, 0)

# Then create the second object
start = TimeClass(9, 45, 00)

# Then create the third object
running = TimeClass(1, 35, 00)

# Store the value returned by add_time()
done = TimeObject.add_time(start, running)

# Display the value of the done variable
print(done)
class Employee:
    def __init__(self):
        self.wage = 0
        self.hours_worked = 0

    def calculate_pay(self):
        return self.wage * self.hours_worked

alice = Employee()
alice.wage = 9.25
alice.hours_worked = 35
print('Alice:\n Net pay: {:.2f}'.format(alice.calculate_pay()))

barbara = Employee()
barbara.wage = 11.50
barbara.hours_worked = 20
print('Barbara:\n Net pay: {:.2f}'.format(barbara.calculate_pay()))
Works for me:
class C:
    def f(a, b):
        return a + b
    x = f(1, 2)

print(C.x)
But you should not do such things. Code at class level executes while the class is being created; usually you want static methods or class methods (decorated with @staticmethod or @classmethod), and you execute code in some function or instantiated class. You can also execute code at the top (module) level if this is a simple script. Your snippet is bad practice: the class level (I'm talking about indentation) is for declarations, not for executing things. What is normal to execute at class level is the analogue of C macros: for example, calling a decorator, or transforming some method/attribute/etc; static things which are "pure" functions!
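For contrast, a small sketch of the decorated variants mentioned above (class and method names are illustrative):

class C:
    @staticmethod
    def add(a, b):
        # no implicit self/cls; just a plain function namespaced under C
        return a + b

    @classmethod
    def make_default(cls):
        # receives the class itself; handy for alternate constructors
        return cls()

print(C.add(1, 2))      # 3
obj = C.make_default()  # an instance of C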
I have a very long function func which takes a browser handle and performs a bunch of requests and reads a bunch of responses in a specific order:
def func(browser):
    # make sure we are logged in, otherwise log in
    # make request to /search and check that the page has loaded
    # fill form in /search and submit it
    # read table of response and return the result as a list of objects
Each operation requires a large amount of code due to the complexity of the DOM, and they tend to grow really fast.
What would be the best way to refactor this function into smaller components so that the following properties still hold:
the execution flow of the operations and/or their preconditions is guaranteed, just like in the current version
the preconditions are not checked with asserts against the state, as this is a very costly operation
func can be called multiple times on the browser?
Just wrap the three helper methods in a class, and track which methods are allowed to run in an instance.
class Helper(object):
    def __init__(self):
        self.a = True
        self.b = False
        self.c = False

    def funcA(self):
        if not self.a:
            raise RuntimeError("Cannot run funcA now")
        # do stuff here
        self.a = False
        self.b = True
        return whatever

    def funcB(self):
        if not self.b:
            raise RuntimeError("Cannot run funcB now")
        # do stuff here
        self.b = False
        self.c = True
        return whatever

    def funcC(self):
        if not self.c:
            raise RuntimeError("Cannot run funcC now")
        # do stuff here
        self.c = False
        self.a = True
        return whatever

def func(...):
    h = Helper()
    h.funcA()
    h.funcB()
    h.funcC()
    # etc
The only way to call a method is if its flag is true, and each method clears its own flag and sets the next method's flag before exiting. As long as you don't touch h.a et al. directly, this ensures that each method can only be called in the proper order.
Alternately, you can use a single flag that is a reference to the function currently allowed to run.
class Helper(object):
    def __init__(self):
        self.allowed = self.funcA

    def funcA(self):
        if self.allowed is not self.funcA:
            raise RuntimeError("Cannot run funcA now")
        # do stuff
        self.allowed = self.funcB
        return whatever

    # etc
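A possible completion of the remaining methods under that scheme (the "do stuff" bodies and the whatever return values are placeholders, as above):

    def funcB(self):
        if self.allowed is not self.funcB:
            raise RuntimeError("Cannot run funcB now")
        # do stuff
        self.allowed = self.funcC
        return whatever

    def funcC(self):
        if self.allowed is not self.funcC:
            raise RuntimeError("Cannot run funcC now")
        # do stuff
        self.allowed = self.funcA  # cycle back so the sequence can start again
        return whatever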
Here's the solution I came up with. I used a decorator (closely related to the one in this blog post) which only allows for a function to be called once.
def call_only_once(func):
    def new_func(*args, **kwargs):
        if not new_func._called:
            try:
                return func(*args, **kwargs)
            finally:
                new_func._called = True
        else:
            raise Exception("Already called this once.")
    new_func._called = False
    return new_func
@call_only_once
def stateA():
    print('Calling stateA only this time')

@call_only_once
def stateB():
    print('Calling stateB only this time')

@call_only_once
def stateC():
    print('Calling stateC only this time')

def state():
    stateA()
    stateB()
    stateC()

if __name__ == "__main__":
    state()
You'll see that if you re-call any of the functions, it will throw an Exception stating that it has already been called.
The problem with this is that if you ever need to call state() again, you're hosed. Unless you implement these functions as private functions, I don't think you can do exactly what you want due to the nature of Python's scoping rules.
Edit
You can also remove the else in the decorator and your function will always return None.
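If state() does need to be callable again, one workaround (my own suggestion, not part of the original answer) is to attach a reset hook to the wrapper:

def call_only_once(func):
    def new_func(*args, **kwargs):
        if new_func._called:
            raise Exception("Already called this once.")
        try:
            return func(*args, **kwargs)
        finally:
            new_func._called = True
    def reset():
        new_func._called = False
    new_func._called = False
    new_func.reset = reset
    return new_func

@call_only_once
def stateA():
    print('Calling stateA only this time')

stateA()
stateA.reset()  # explicitly re-arm the function
stateA()        # runs again without raising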
Here is a snippet I once used for my state machine:
class StateMachine(object):
    def __init__(self):
        self.handlers = {}
        self.start_state = None
        self.end_states = []

    def add_state(self, name, handler, end_state=0):
        name = name.upper()
        self.handlers[name] = handler
        if end_state:
            self.end_states.append(name)

    def set_start(self, name):
        # startup state
        self.start_state = name

    def run(self, **kw):
        """
        Run the machine.
        :param kw: keywords handed to the first handler
        :return:
        """
        # the first .run() call invokes the start handler with the kw keywords;
        # each registered handler should return the next handler and the kw it needs
        try:
            handler = self.handlers[self.start_state]
        except KeyError:
            raise InitializationError("must call .set_start() before .run()")
        while True:
            (new_state, kw) = handler(**kw)
            if isinstance(new_state, str):
                if new_state in self.end_states:
                    print("reached ", new_state)
                    break
                else:
                    handler = self.handlers[new_state]
            elif hasattr(new_state, "__call__"):
                handler = new_state
            else:
                return
Usage:
class MyParser(StateMachine):
    def __init__(self):
        super().__init__()
        # register handlers; we can register as many handlers as we want
        self.handlers["begin_parse"] = self.begin_parse
        # define the startup handler
        self.set_start("begin_parse")

    def end(self, **kw):
        logging.info("End of parsing")
        # no callable handler => end
        return None, None

    def second(self, **kw):
        logging.info("second")
        # do something
        # when the condition is reached, call the `self.end` handler
        if ...:
            return self.end, {}

    def begin_parse(self, **kw):
        logging.info("start of parsing")
        # long process; once the condition is reached, call the `self.second`
        # handler with new kw keywords
        while True:
            kw = {}
            if ...:
                return self.second, kw
            # elif other_cond:
            #     return self.other_handler, kw
            # elif other_cond_2:
            #     return self.other_handler_2, kw
            else:
                return self.end, kw

# start the state machine
MyParser().run()
This will print:
INFO:root:start of parsing
INFO:root:second
INFO:root:End of parsing
You could use local functions in your func function. OK, they are still declared inside one single global function, but Python is nice enough to still give you access to them for tests.
Here is an example of one function declaring and executing 3 (supposedly heavy) subfunctions. It takes one optional parameter test that, when set to 'TEST', prevents actual execution and instead gives external access to the individual sub-functions and to a local variable:
def func(test=None):
    glob = []

    def partA():
        glob.append('A')

    def partB():
        glob.append('B')

    def partC():
        glob.append('C')

    if test == 'TEST':
        global testA, testB, testC, testCR
        testA, testB, testC, testCR = partA, partB, partC, glob
        return None

    partA()
    partB()
    partC()
    return glob
When you call func, the 3 parts are executed in sequence. But if you first call func('TEST'), you can then access the local glob variable as testCR, and the 3 subfunctions as testA, testB and testC. This way you can still test the 3 parts individually with well-defined input and control their output.
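A quick usage sketch of that test hook, assuming the func above has been defined:

# normal run: all three parts execute in sequence
assert func() == ['A', 'B', 'C']

# test mode: nothing runs, but the parts become reachable
func('TEST')
testA()                 # exercise one sub-function in isolation
assert testCR == ['A']  # testCR is the very list object func used internally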
I would second the suggestion given by @user3159253 in his comment on the original question:
If the sole purpose is readability, I would split func into three "private" or "protected" ones (i.e. _func1 or __func1) and a private or protected property which keeps the state shared between the functions.
This makes a lot of sense to me and seems more usual in object-oriented programming than the other options. Consider this example as an alternative:
Your class (teste.py):
class Test:
    def __init__(self):
        self.__environment = {}  # Protected information to be shared
        self.public_stuff = 'public info'  # Accessible to outside callers

    def func(self):
        print("Main function")
        self.__func_a()
        self.__func_b()
        self.__func_c()
        print(self.__environment)

    def __func_a(self):
        self.__environment['function a says'] = 'hi'

    def __func_b(self):
        self.__environment['function b says'] = 'hello'

    def __func_c(self):
        self.__environment['function c says'] = 'hey'
Other file:
from teste import Test

t = Test()
t.func()
This will output:
Main function
{'function a says': 'hi', 'function b says': 'hello', 'function c says': 'hey'}
If you try to call one of the protected functions, an error occurs:
Traceback (most recent call last):
  File "C:/Users/Lucas/PycharmProjects/testes/other.py", line 6, in <module>
    t.__func_a()
AttributeError: 'Test' object has no attribute '__func_a'
Same thing if you try to access the protected environment variable:
Traceback (most recent call last):
  File "C:/Users/Lucas/PycharmProjects/testes/other.py", line 5, in <module>
    print(t.__environment)
AttributeError: 'Test' object has no attribute '__environment'
In my view this is the most elegant, simple and readable way to solve your problem; let me know if it fits your needs :)
So the situation is that I have multiple methods which might be threaded simultaneously, but each needs its own lock against being re-threaded until it has run. They are established by initialising a class with some data-processing options:
class InfrequentDataDaemon(object): pass
class FrequentDataDaemon(object): pass

def addMethod(name):
    def wrapper(f):
        setattr(name, f.__name__, staticmethod(f))  # attach f to the class passed in as `name`
        return f
    return wrapper

class DataProcessors(object):
    lock = threading.Lock()

    def __init__(self, options):
        self.common_settings = options['common_settings']
        self.data_processing_configurations = options['data_processing_configurations']  # configs for each processing method
        self.data_processing_types = options['data_processing_types']
        self.Data_Processing_Functions = {}
        # I __init__ each processing method as a separate function so that it can be locked
        for type in options['data_processing_types']:
            def bindFunction1(name):
                def func1(self, data=None, lock=None):
                    config = self.data_processing_configurations[data['type']]  # get the right config for the datatype
                    with lock:
                        FetchDataBaseStuff(data['type'])
                        # I don't want this to run more than once at a time per DataProcessing type,
                        # but it's fine if multiple DoSomethings run at once, as long as each DataType is different!
                        DoSomething(data, config)
                        WriteToDataBase(data['type'])
                func1.__name__ = "Processing_for_{}".format(type)
                self.Data_Processing_Functions[func1.__name__] = func1  # add this function to the dictionary
            bindFunction1(type)

        # Then I add some methods to a daemon that are going to check if our DataProcessors need to be called
        def fast_process_types(data):
            if not example_condition is True: return
            if not data['type'] in self.data_processing_types: return  # check that we are doing something with this type of data
            threading.Thread(target=self.Data_Processing_Functions["Processing_for_{}".format(data['type'])], args=(self, data, lock)).start()

        def slow_process_types(data):
            if not some_other_condition is True: return
            if not data['type'] in self.data_processing_types: return  # check that we are doing something with this type of data
            threading.Thread(target=self.Data_Processing_Functions["Processing_for_{}".format(data['type'])], args=(self, data, lock)).start()

        addMethod(InfrequentDataDaemon)(slow_process_types)
        addMethod(FrequentDataDaemon)(fast_process_types)
The idea is to lock each method in DataProcessors.Data_Processing_Functions, so that each method is only accessed by one thread at a time (and the rest of the threads for the same method are queued). How does locking need to be set up to achieve this effect?
I'm not sure I completely follow what you're trying to do here, but could you just create a separate threading.Lock object for each type?
class DataProcessors(object):
    def __init__(self, options):
        self.common_settings = options['common_settings']
        self.data_processing_configurations = options['data_processing_configurations']  # configs for each processing method
        self.data_processing_types = options['data_processing_types']
        self.Data_Processing_Functions = {}
        self.locks = {}
        # __init__ each processing method as a separate function so that it can be locked
        for type in options['data_processing_types']:
            self.locks[type] = threading.Lock()

            def bindFunction1(name):
                def func1(self, data=None):
                    config = self.data_processing_configurations[data['type']]  # get the right config for the datatype
                    with self.locks[data['type']]:
                        FetchDataBaseStuff(data['type'])
                        DoSomething(data, config)
                        WriteToDataBase(data['type'])
                func1.__name__ = "Processing_for_{}".format(type)
                self.Data_Processing_Functions[func1.__name__] = func1  # add this function to the dictionary
            bindFunction1(type)

        # Then add some methods to a daemon that are going to check if our DataProcessors need to be called
        def fast_process_types(data):
            if not example_condition is True: return
            if not data['type'] in self.data_processing_types: return  # check that we are doing something with this type of data
            threading.Thread(target=self.Data_Processing_Functions["Processing_for_{}".format(data['type'])], args=(self, data)).start()

        def slow_process_types(data):
            if not some_other_condition is True: return
            if not data['type'] in self.data_processing_types: return  # check that we are doing something with this type of data
            threading.Thread(target=self.Data_Processing_Functions["Processing_for_{}".format(data['type'])], args=(self, data)).start()

        addMethod(InfrequentDataDaemon)(slow_process_types)
        addMethod(FrequentDataDaemon)(fast_process_types)
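Stripped of the surrounding machinery, the core idea of this answer is just a dict that maps each data type to its own threading.Lock. A minimal runnable sketch of that idea (names are illustrative, not from the question):

import threading
import time

locks = {t: threading.Lock() for t in ("fast", "slow")}

def process(data_type, worker_id):
    # only one thread per type may hold this block at a time;
    # threads for different types proceed in parallel
    with locks[data_type]:
        print(data_type, "handled by worker", worker_id)
        time.sleep(0.1)

threads = [
    threading.Thread(target=process, args=(t, i))
    for i, t in enumerate(["fast", "fast", "slow", "slow"])
]
for th in threads:
    th.start()
for th in threads:
    th.join()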