Every time I run `python run_p2pool.py` I hit an error that I never got before. I tried passing `-n` and typing in the argument, as the internet suggested, but that doesn't fix it. My configuration files are already in place, and I even reinstalled the Python Twisted package, to no avail. There is no error in the code I am trying to run, and I have yet to figure out how to get this fixed.
# Twisted doesn't have abortConnection! Upgrade to a newer version of Twisted to avoid memory leaks!
> Pausing for 3 seconds...
> 2017-11-02 01:07:47.958817 > Traceback (most recent call last):
> 2017-11-02 01:07:47.958986 > File "run_p2pool.py", line 5, in <module>
> 2017-11-02 01:07:47.959116 > main.run()
> 2017-11-02 01:07:47.959191 > File "/var/www/html/DashPool/DPI-MASTER/Dash-Pool-Integration/p2pool/main.py", line 687, in run
> 2017-11-02 01:07:47.959422 > deferral.RobustLoopingCall(logfile.reopen).start(5)
> 2017-11-02 01:07:47.959490 > File "/var/www/html/DashPool/DPI-MASTER/Dash-Pool-Integration/p2pool/util/deferral.py", line 277, in start
> 2017-11-02 01:07:47.959605 > self._df = self._worker(period).addErrback(lambda fail: fail.trap(defer.CancelledError))
> 2017-11-02 01:07:47.959686 > File "/var/www/html/DashPool/DPI-MASTER/Dash-Pool-Integration/p2pool/util/deferral.py", line 225, in _
> 2017-11-02 01:07:47.960104 > df = defer.Deferred(cancelled)
> 2017-11-02 01:07:47.960195 > TypeError: __init__() takes exactly 1 argument (2 given)
I never had this problem before and was able to run the same program previously, but just in case, here is the deferral.py file:
from __future__ import division
import itertools
import random
import sys
from twisted.internet import defer, reactor
from twisted.python import failure, log
def sleep(t):
    """Return a Deferred that fires with None after *t* seconds.

    Cancelling the returned Deferred cancels the underlying reactor timer.
    """
    def _cancel(_ignored):
        # 'delayed' is late-bound: it is assigned right after the Deferred
        # is constructed, before anyone can possibly cancel it.
        delayed.cancel()
    result = defer.Deferred(canceller=_cancel)
    delayed = reactor.callLater(t, result.callback, None)
    return result
def run_repeatedly(f, *args, **kwargs):
    """Call f(*args, **kwargs) now and then repeatedly.

    *f* must return the delay, in seconds, until its next invocation.
    Returns a zero-argument function that stops the cycle.
    """
    pending = [None]  # holds the currently scheduled DelayedCall
    def tick():
        next_delay = f(*args, **kwargs)
        pending[0] = reactor.callLater(next_delay, tick)
    tick()
    def stop():
        pending[0].cancel()
    return stop
class RetrySilentlyException(Exception):
    # Raised by wrapped functions to request a retry without any logging;
    # the retry() decorator below checks for this type before reporting.
    pass
def retry(message='Error:', delay=3, max_retries=None, traceback=True):
    '''
    Decorator factory: retry a Deferred-returning function on error.

    Usage (decorators shown with '@'; the paste mangled '@' into '#'):

        @retry('Error getting block:', 1)
        @defer.inlineCallbacks
        def get_block(hash):
            ...
    '''
    # NOTE(review): the '#defer.inlineCallbacks' comment below was the
    # decorator '@defer.inlineCallbacks' before the paste mangled it.
    def retry2(func):
        #defer.inlineCallbacks
        def f(*args, **kwargs):
            for i in itertools.count():
                try:
                    # Run the wrapped coroutine; success exits via else below.
                    result = yield func(*args, **kwargs)
                except Exception as e:
                    if i == max_retries:
                        raise  # attempts exhausted: propagate the last error
                    if not isinstance(e, RetrySilentlyException):
                        if traceback:
                            log.err(None, message)
                        else:
                            # Python 2 print-to-stderr statement.
                            print >>sys.stderr, message, e
                    yield sleep(delay)  # back off before the next attempt
                else:
                    defer.returnValue(result)
        return f
    return retry2
class ReplyMatcher(object):
    '''
    Converts request/got response interface to deferred interface
    '''
    
    def __init__(self, func, timeout=5):
        self.func = func        # called with an id to send the request
        self.timeout = timeout  # seconds before a pending Deferred errbacks
        self.map = {}           # id -> set of (Deferred, DelayedCall) pairs
    
    def __call__(self, id):
        # Only send the request once per id; later callers for the same id
        # just get another Deferred attached to the same pending request.
        if id not in self.map:
            self.func(id)
        df = defer.Deferred()
        def timeout():
            # No response in time: drop this waiter and fail its Deferred.
            self.map[id].remove((df, timer))
            if not self.map[id]:
                del self.map[id]
            df.errback(failure.Failure(defer.TimeoutError('in ReplyMatcher')))
        timer = reactor.callLater(self.timeout, timeout)
        self.map.setdefault(id, set()).add((df, timer))
        return df
    
    def got_response(self, id, resp):
        # Fire every Deferred waiting on this id and cancel their timers.
        if id not in self.map:
            return
        for df, timer in self.map.pop(id):
            df.callback(resp)
            timer.cancel()
class GenericDeferrer(object):
    '''
    Converts query with identifier/got response interface to deferred interface
    '''
    
    def __init__(self, max_id, func, timeout=5, on_timeout=lambda: None):
        self.max_id = max_id          # exclusive upper bound for request ids
        self.func = func              # called as func(id, *args, **kwargs)
        self.timeout = timeout        # seconds before a request errbacks
        self.on_timeout = on_timeout  # extra hook invoked after a timeout
        self.map = {}                 # id -> (Deferred, DelayedCall)
    
    def __call__(self, *args, **kwargs):
        # Pick a random id that is not currently in flight.
        while True:
            id = random.randrange(self.max_id)
            if id not in self.map:
                break
        def cancel(df):
            # Canceller: forget the request and stop its timeout timer.
            df, timer = self.map.pop(id)
            timer.cancel()
        try:
            df = defer.Deferred(cancel)
        except TypeError:
            # Old Twisted: Deferred.__init__ accepts no canceller argument,
            # so cancellation is simply unsupported there.
            df = defer.Deferred() # handle older versions of Twisted
        def timeout():
            # No response in time: fail the Deferred and notify the hook.
            self.map.pop(id)
            df.errback(failure.Failure(defer.TimeoutError('in GenericDeferrer')))
            self.on_timeout()
        timer = reactor.callLater(self.timeout, timeout)
        self.map[id] = df, timer
        self.func(id, *args, **kwargs)
        return df
    
    def got_response(self, id, resp):
        # Deliver a response: fire the matching Deferred, stop its timer.
        if id not in self.map:
            return
        df, timer = self.map.pop(id)
        timer.cancel()
        df.callback(resp)
    
    def respond_all(self, resp):
        # Fail every outstanding request with *resp* (e.g. on disconnect).
        while self.map:
            id, (df, timer) = self.map.popitem()
            timer.cancel()
            df.errback(resp)
class NotNowError(Exception):
    # Raised by DeferredCacher.call_now when the value is not cached yet
    # and no default was supplied.
    pass
class DeferredCacher(object):
    '''
    like memoize, but for functions that return Deferreds
    
    Usage (decorators shown with '@'; the paste mangled '@' into '#'):
    
    @DeferredCacher
    def f(x):
        ...
        return df
    
    @DeferredCacher.with_backing(bsddb.hashopen(...))
    def f(x):
        ...
        return df
    '''
    
    # NOTE(review): the '#classmethod' and '#defer.inlineCallbacks' comments
    # below were decorators ('@...') before the paste mangled them.
    
    #classmethod
    def with_backing(cls, backing):
        # Alternate constructor: use *backing* (any dict-like) as the cache.
        return lambda func: cls(func, backing)
    
    def __init__(self, func, backing=None):
        if backing is None:
            backing = {}
        
        self.func = func        # the wrapped Deferred-returning function
        self.backing = backing  # key -> cached value
        self.waiting = {}       # key -> Deferred fired when a fetch finishes
    
    #defer.inlineCallbacks
    def __call__(self, key):
        # If another caller is already fetching this key, wait for it first.
        if key in self.waiting:
            yield self.waiting[key]
        
        if key in self.backing:
            defer.returnValue(self.backing[key])
        else:
            self.waiting[key] = defer.Deferred()
            try:
                value = yield self.func(key)
            finally:
                # Wake any callers queued behind this fetch, success or not.
                self.waiting.pop(key).callback(None)
        
        self.backing[key] = value
        defer.returnValue(value)
    
    _nothing = object()  # sentinel: distinguishes "no default given" from None
    def call_now(self, key, default=_nothing):
        # Synchronous lookup: return the cached value if present; otherwise
        # start a background fetch and return *default* or raise NotNowError.
        if key in self.backing:
            return self.backing[key]
        if key not in self.waiting:
            self.waiting[key] = defer.Deferred()
            def cb(value):
                self.backing[key] = value
                self.waiting.pop(key).callback(None)
            def eb(fail):
                self.waiting.pop(key).callback(None)
                if fail.check(RetrySilentlyException):
                    return
                # Python 2 print statements.
                print
                print 'Error when requesting noncached value:'
                fail.printTraceback()
                print
            self.func(key).addCallback(cb).addErrback(eb)
        if default is not self._nothing:
            return default
        raise NotNowError(key)
def deferred_has_been_called(df):
    """Report whether Deferred *df* has already fired.

    Returns (True, result) when *df* fired synchronously during this call,
    else (False, None).  The probe callback becomes a transparent
    pass-through once setup is over, so a later firing is unaffected.
    """
    probing = [True]
    captured = []
    def probe(result):
        if probing[0]:
            # Fired during setup: remember the result (and swallow it).
            captured[:] = [result]
        else:
            # Fired later: behave as a pass-through callback.
            return result
    df.addBoth(probe)
    probing[0] = False
    if captured:
        return True, captured[0]
    return False, None
def inlineCallbacks(f):
    """Decorator: drive generator *f* as a coroutine, returning a Deferred.

    Works like twisted.internet.defer.inlineCallbacks with two differences:
    already-fired Deferreds are consumed synchronously (no reactor
    round-trip), and the returned Deferred supports cancellation, which
    stops the generator and cancels whatever Deferred it is waiting on.
    """
    from functools import wraps
    @wraps(f)  # was '#wraps(f)' -- decorator '@' mangled to '#' in the paste
    def _(*args, **kwargs):
        gen = f(*args, **kwargs)
        stop_running = [False]
        def cancelled(df_):
            assert df_ is df
            stop_running[0] = True
            if currently_waiting_on:
                currently_waiting_on[0].cancel()
        try:
            df = defer.Deferred(cancelled)
        except TypeError:
            # Older Twisted's Deferred.__init__ takes no canceller argument
            # (the "__init__() takes exactly 1 argument (2 given)" failure
            # from the question's traceback); fall back to an uncancellable
            # Deferred, matching the workaround GenericDeferrer.__call__
            # already uses.  Upgrading Twisted restores cancellation.
            df = defer.Deferred()
        currently_waiting_on = []
        def it(cur):
            while True:
                try:
                    if isinstance(cur, failure.Failure):
                        res = cur.throwExceptionIntoGenerator(gen) # external code is run here
                    else:
                        res = gen.send(cur) # external code is run here
                    if stop_running[0]:
                        return
                except StopIteration:
                    df.callback(None)
                except defer._DefGen_Return as e:
                    # XXX should make sure direct child threw
                    df.callback(e.value)
                except:
                    df.errback()
                else:
                    if isinstance(res, defer.Deferred):
                        called, res2 = deferred_has_been_called(res)
                        if called:
                            # Already fired: resume immediately, no round-trip.
                            cur = res2
                            continue
                        else:
                            currently_waiting_on[:] = [res]
                            def gotResult(res2):
                                assert currently_waiting_on[0] is res
                                currently_waiting_on[:] = []
                                if stop_running[0]:
                                    return
                                it(res2)
                            res.addBoth(gotResult) # external code is run between this and gotResult
                    else:
                        # Plain value yielded: feed it straight back in.
                        cur = res
                        continue
                break
        it(None)
        return df
    return _
class RobustLoopingCall(object):
    # Repeatedly call func(*args, **kwargs) every *period* seconds, logging
    # (instead of propagating) any exception func raises.
    # NOTE(review): the '#inlineCallbacks' comment below was
    # '@inlineCallbacks' (the local reimplementation above) before the
    # paste mangled '@' into '#'.  The question's traceback originates in
    # inlineCallbacks calling defer.Deferred(cancelled) on an old Twisted
    # whose Deferred takes no canceller argument -- upgrade Twisted.
    def __init__(self, func, *args, **kwargs):
        self.func, self.args, self.kwargs = func, args, kwargs
        self.running = False
    
    def start(self, period):
        assert not self.running
        self.running = True
        # CancelledError produced by stop() is expected; swallow only that.
        self._df = self._worker(period).addErrback(lambda fail: fail.trap(defer.CancelledError))
    
    #inlineCallbacks
    def _worker(self, period):
        assert self.running
        while self.running:
            try:
                self.func(*self.args, **self.kwargs)
            except:
                log.err()  # keep looping even when func fails
            yield sleep(period)
    
    def stop(self):
        assert self.running
        self.running = False
        self._df.cancel()
        return self._df
Related
I was trying to make a looping function that uses a class, and to make it easier I decided to implement it as a decorator, but I ran into a few problems with my code.
When I tried this code, it said that I'm missing 1 required positional argument: 'self'.
I also tried another approach, but realized that it used __call__, which I don't want, because the function will be executed at the time it is decorated. (I don't know if this is true.)
And this is how i used it, and https://hastebin.com/onozarogac.py (for the full code):
#looper(seconds=3) # this code is from other class
def print(self):
print(len(self.cache.get_available_guilds_view()))
self.print.start() # and by using this to trigger it
def looper(seconds):
    """Decorator factory: wrap the decorated function in a Looper.

    *seconds* is scaled by 1000 on the way in because Looper's constructor
    divides it back down.
    """
    return lambda func: Looper(func, seconds * 1000)
class Looper:
    """Calls *fn* roughly once per interval, driven by repeated loop() calls.

    NOTE: despite the *_ms names, the interval is stored in seconds -- the
    constructor divides by 1000 to undo the *1000 applied by looper().
    """
    def __init__(self, fn, seconds):
        self.fn = fn
        self.interval_ms = seconds / 1000  # actually seconds; see class note
        self.time_last = time()
        self.started = False

    def loop(self):
        """Run fn once if the interval has elapsed; report whether it ran."""
        now = time()
        if now < self.time_last + self.interval_ms:
            return False
        self.fn()
        self.time_last = now
        return True

    def set_interval_ms(self, interval_ms):
        self.interval_ms = interval_ms / 1000

    def set_interval_s(self, set_interval):
        # Stored unscaled, mirroring the original implementation's quirk.
        self.interval_ms = set_interval

    def get_interval_ms(self):
        return self.interval_ms

    def get_interval_s(self):
        return self.interval_ms * 1000

    def start(self):
        """Busy-loop, ticking until stop() clears the flag."""
        self.started = True
        while self.started:
            self.loop()

    def stop(self):
        self.started = False
It's giving me the following TypeError:
TypeError: print() missing 1 required positional argument: 'self'
convert looper function to this
from functools import wraps
def looper(seconds):
    """Decorator factory; calling the wrapped function returns a Looper.

    Note: the Looper is constructed when the decorated function is
    *invoked*, not at decoration time.
    """
    def decorate(func):
        def build_looper(*args, **kwargs):
            return Looper(func, seconds * 1000)
        return build_looper
    return decorate
I'm trying to build my first decorator and implement it in a class.
# decorator class
class Cache(object):
    """File-backed cache decorator.

    Cache('nodes.json', minutes=1) caches the decorated function's string
    result in nodes.json and serves it from that file until the file is
    older than the given timedelta.
    """
    def __init__(self, filename, **kwargs):
        # Anything with a timestamp earlier than this moment is stale.
        self.time_ago = datetime.now() - timedelta(**kwargs)
        self.filename = filename

    def __call__(self, fn):
        # BUG FIX: the original ran fn at decoration time and returned its
        # (string) result, so calling the "decorated" name raised
        # "TypeError: 'str' object is not callable".  A decorator's
        # __call__ must return a wrapper function instead.
        def wrapper(*args, **kwargs):
            if not os.path.isfile(self.filename):
                # No cache file yet: compute and store.
                return self.cache(fn(*args, **kwargs))
            c_age = datetime.fromtimestamp(os.path.getctime(self.filename))
            m_age = datetime.fromtimestamp(os.path.getmtime(self.filename))
            if c_age < self.time_ago or m_age < self.time_ago:
                # Cache file is stale: recompute and rewrite it.
                return self.cache(fn(*args, **kwargs))
            return self.read()
        return wrapper

    def cache(self, data):
        # BUG FIX: 'r+' fails when the file does not exist yet, and reading
        # immediately after writing (without seeking) returned ''.  Write
        # the data out and return it directly.
        with open(self.filename, 'w') as ef:
            ef.write(data)
        return data

    def read(self):
        with open(self.filename, 'r') as f:
            return f.read()
I'm trying to call the decorator in the below class:
class Zabb(object):
    """Example consumer of the Cache decorator."""
    #Cache('nodes.json',minutes=1)  -- decorator '@' mangled to '#' in paste
    def getNodes(self):
        # Stand-in for an expensive lookup.
        return "Get Nodes"
I call it like this:
z = Zabb()
nodes = z.getNodes()
I get the following error:
Traceback (most recent call last):
File "./deco.py", line 52, in <module>
nodes = z.getNodes()
TypeError: 'str' object is not callable
I'm close to making this work. What am I doing wrong?
You need to return a higher-order-function (wrapped) from the __call__ method. Add an inner method and return it.
def __call__(self, fn):
    # Replacement Cache.__call__: returns a wrapper so the decorated name
    # stays callable (the original returned the cached string itself).
    def wrapper(*args, **kwargs): # <-- Add this wrapper
        if not os.path.isfile(self.filename):
            # No cache file yet: compute and store.
            # NOTE(review): self.cache() opens the file with 'r+', which
            # fails when the file does not exist -- confirm/fix separately.
            return self.cache(fn(*args, **kwargs))
        time_ago = self.time_ago
        filename = self.filename
        c_age = datetime.fromtimestamp(os.path.getctime(filename))
        m_age = datetime.fromtimestamp(os.path.getmtime(filename))
        # Debug output left in by the author.
        print (c_age)
        print (m_age)
        print (time_ago)
        if c_age < time_ago or m_age < time_ago:
            # Cache file is stale: recompute.
            return self.cache(fn(*args, **kwargs))
        else:
            return self.read()
    return wrapper # <-- Return the wrapper
Decorators are easier to understand if you forget the #deco syntactic sugar and reason on what it really do. In your example,
#Cache('nodes.json',minutes=1)
def getNodes(self):
return "Get Nodes"
really means :
def getNodes(self):
return "Get Nodes"
getNodes = Cache('nodes.json',minutes=1)(getNodes)
which actually rebinds Zabb.getNodes to the result of Cache('nodes.json',minutes=1).__call__(getNodes) - which is a string, not a function.
What you want is to make Cache.__call__ return a function that will wrap the call the decorated function, ie:
def __call__(self,fn):
    # Alternative fix: __call__ returns a wrapper closure instead of the
    # cached data, so the decorated name stays callable.
    def wrapper(*args, **kw):
        # NOTE(review): this keeps the question's convention of calling
        # fn(self) -- the Cache instance is passed where the receiver would
        # normally go, and *args/**kw are ignored; verify against callers.
        if not os.path.isfile(self.filename):
            return self.cache(fn(self))
        time_ago = self.time_ago
        filename = self.filename
        c_age = datetime.fromtimestamp(os.path.getctime(filename))
        m_age = datetime.fromtimestamp(os.path.getmtime(filename))
        # Author's debug output.
        print (c_age)
        print (m_age)
        print (time_ago)
        if c_age < time_ago or m_age < time_ago:
            return self.cache(fn(self))
        else:
            return self.read()
    return wrapper
from CodernityDB.database import Database
from CodernityDB.hash_index import HashIndex
class WithXIndex(HashIndex):
    # CodernityDB hash index keyed on the md5 digest of each document's
    # 'username' field.
    # NOTE(review): 'md5' is not imported anywhere in this snippet -- it
    # needs e.g. 'from hashlib import md5' (and bytes input on Python 3).
    def __init__(self, *args, **kwargs):
        kwargs['key_format'] = '16s'  # 16-byte raw md5 digest
        super(WithXIndex, self).__init__(*args, **kwargs)
    
    def make_key_value(self, data):
        # Index key derived from the document; no extra value stored (None).
        username = data['username']
        # if not isinstance(login, basestring):
        # login = str(login)
        return md5(username).digest(), None
    
    def make_key(self, key):
        # Lookups hash the query key the same way the stored keys were.
        return md5(key).digest()
def main():
    # Demo: open (or create) the database with the username index, insert
    # one record, then read it back.  Python 2 print statements below.
    db = Database('l1.db')
    if db.exists():
        db.open()
    else:
        db.create()
        x_ind = WithXIndex(db.path, 'username')
        db.add_index(x_ind)
    db.insert(dict( username='lamar', age='33', frm='new jersey'))
    for dt in db.all('username',with_doc=True):
        print dt
    # NOTE(review): the question reports this get() raising -- presumably
    # an index/key mismatch; verify against CodernityDB's get() semantics.
    print db.get('username', 'lamar') # throws an exception
I'm writing a very simple decorator to give me some basic debug information about a function.
from functools import wraps
from time import perf_counter
class debug(object):
    """Decorator that prints call diagnostics around the wrapped function.

    Flags (all off by default): Time -- wall-clock duration, Parameters --
    the call's arguments, Doc -- the wrapped function's docstring.
    """
    def __init__(self, Time=False, Parameters=False, Doc=False):
        self.t = Time
        self.p = Parameters
        self.d = Doc

    def __call__(self, func):
        def run(*args, **kwargs):
            params = ""
            if self.p:
                pieces = ["{}".format(a) for a in args]
                pieces += ["{}={}".format(name, value) for name, value in kwargs.items()]
                params = ", ".join(pieces)
            print("\n\tDebug output for '{}({})'".format(func.__name__, params))
            if self.d:
                print('\tDocstring: "{}"'.format(func.__doc__))
            started = perf_counter() if self.t else None
            val = func(*args, **kwargs)
            if self.t:
                print("\tTime Taken: {:.3e} seconds".format(perf_counter() - started))
            print("\tReturn Type: '{}'\n".format(type(val).__name__))
            return val
        return run
This is all well and good for normal functions.
#debug(Parameters=True, Time=True, Doc=True)
def foo(i, j=5):
    """Raises i to 2j"""
    # Squaring j times computes i ** (2 ** j).
    result = i
    for _ in range(j):
        result **= 2
    return result
i = foo(5, j=3)
# Output:
"""
Debug output for 'foo(5, j=3)'
Docstring: "Raises i to 2j"
Time Taken: 1.067e-05 seconds
Return Type: 'int'
"""
However, generators are a different story.
#debug(Parameters=True, Time=True, Doc=True)
def bar(i, j=2):
    """Infinite iterator of increment j"""
    current = i
    while True:
        yield current
        current += j
b = bar() # Output occurs here
next(b) # No output
Now, from what I have coded, that is completely expected, but I'm wondering how I can hook the .__next__() method or what the best way of going about this is.
You can simply change your __call__ method and return a generator if a generator is given as an input (add import types at the top of your file):
def __call__(self, f):
    """Wrap *f* in a debugging proxy; generators get a generator proxy.

    BUG FIX: both inner calls used the undefined name 'argw' (a NameError
    at call time); corrected to 'args'.
    NOTE(review): isinstance(f, types.GeneratorType) matches a generator
    *object*; a generator *function* never matches, so the first branch is
    effectively dead -- inspect.isgeneratorfunction(f) is likely what was
    intended.  Left as-is to keep the published interface.
    """
    if isinstance(f, types.GeneratorType):
        def run_gen(*args, **kwargs):
            # do pre stuff...
            for item in f(*args, **kwargs):
                yield item
            # do post stuff...
        return run_gen
    else:
        def run(*args, **kwargs):
            # do pre stuff...
            r = f(*args, **kwargs)
            # do post stuff...
            return r
        return run
You can't replace function.next as it is a read only value. But you can do something like this (see debug_generator function):
from functools import wraps
import inspect
class debug(object):
    # Variant of the debug decorator that also instruments generators.
    def __init__(self, Time=False, Parameters=False, Doc=False):
        self.t = Time        # NOTE(review): never read in this version
        self.p = Parameters
        self.d = Doc
    
    def __call__(self, func):
        #wraps(func)
        def debug_generator(func):
            # Wrap an already-created generator object, reporting each step.
            # NOTE(review): list(func) exhausts the generator eagerly, so
            # laziness is lost and an infinite generator would hang here.
            # (Python 2 print statement below.)
            for i, x in enumerate(list(func)):
                # here you add your debug statements
                print "What you want: step %s" % i
                yield x
        
        #wraps(func)
        def run(*args, **kwargs):
            params = ""
            if self.p:
                params = ", ".join(["{}".format(arg) for arg in args] + ["{}={}".format(k, v) for k, v in kwargs.items()])
            print("\n\tDebug output for '{}({})'".format(func.__name__, params))
            if self.d:
                print('\tDocstring: "{}"'.format(func.__doc__))
            val = func(*args, **kwargs)
            print("\tReturn Type: '{}'\n".format(type(val).__name__))
            # If the call produced a generator, hand back the instrumented one.
            if inspect.isgenerator(val):
                return debug_generator(val)
            return val
        return run
Basically you just get all the values from the generator you want to debug, and then you yield them again, adding debug statements in the loop.
I am writing my first python package and I want to write unit tests for the following decorator:
class MaxTriesExceededError(Exception):
    """Raised when the decorated function fails on every allowed attempt."""
    pass

def tries(max_tries=3, error_message=os.strerror(errno.ETIME)):
    """Decorator factory: retry the wrapped function up to *max_tries* times.

    Exceptions from earlier attempts are swallowed; once every attempt has
    failed, MaxTriesExceededError(error_message) is raised.

    Fixes over the original:
    - try_count was assigned in wrapper() without being declared nonlocal,
      so the very first call died with UnboundLocalError;
    - the recursive retry discarded the recursive call's return value;
    - exhaustion raised a bare Exception instead of MaxTriesExceededError;
    - the attempt counter was shared across independent call sequences.
    """
    def decorator(func):
        def wrapper(*args, **kwargs):
            for _ in range(max_tries):
                try:
                    return func(*args, **kwargs)
                except Exception:
                    pass  # swallow and retry
            raise MaxTriesExceededError(error_message)
        return wraps(func)(wrapper)
    return decorator
The purpose of the decorator is to throw an error if the function fails more than max_tries, but to eat the error and try again if the max try count has not been exceeded. To be honest, I'm not sure that the code doesn't have bugs. My question is therefore twofold, is the code correct, and how do I write unit tests for it using unittest?
Here is a corrected version, with unittests:
class MaxTriesExceededError(Exception):
    """Signals that every permitted attempt has failed."""
    pass

def tries(max_tries=3, error_message="failure"):
    """Retry decorator: swallow failures and re-invoke up to *max_tries* times."""
    def decorator(func):
        def wrapper(*args, **kwargs):
            attempts_left = max_tries
            while attempts_left > 0:
                attempts_left -= 1
                try:
                    return func(*args, **kwargs)
                except:
                    pass  # swallow and retry (bare except kept from original)
            raise MaxTriesExceededError(error_message)
        return wrapper
    return decorator
import unittest
class TestDecorator(unittest.TestCase):
    # Unit tests for the tries() retry decorator, counting invocations via
    # self.count.
    # NOTE(review): every '#tries(n)' line below was '@tries(n)' before the
    # paste mangled '@' into '#'; as plain comments the local functions are
    # NOT decorated, so restore the '@' for these tests to work as written.
    def setUp(self):
        self.count = 0  # number of times the decorated function has run
    
    def test_success_single_try(self):
        #tries(1)
        def a():
            self.count += 1
            return "expected_result"
        self.assertEqual(a(), "expected_result")
        self.assertEqual(self.count, 1)
    
    def test_success_two_tries(self):
        # Succeeds immediately, so only one of the two attempts is used.
        #tries(2)
        def a():
            self.count += 1
            return "expected_result"
        self.assertEqual(a(), "expected_result")
        self.assertEqual(self.count, 1)
    
    def test_failure_two_tries(self):
        #tries(2)
        def a():
            self.count += 1
            raise Exception()
        try:
            a()
            self.fail()
        except MaxTriesExceededError:
            # Both attempts must have run before the decorator gave up.
            self.assertEqual(self.count,2)
    
    def test_success_after_third_try(self):
        # Fails twice, succeeds on the third of five allowed attempts.
        #tries(5)
        def a():
            self.count += 1
            if self.count==3:
                return "expected_result"
            else:
                raise Exception()
        self.assertEqual(a(), "expected_result")
        self.assertEqual(self.count, 3)
# Run the test suite when this module is executed directly.
if __name__ == '__main__':
    unittest.main()