I have a system that allows importing a student attendance file. However, the import does not succeed. While debugging I found that execution reaches importer.py but never enters "def _do_save(self, row)", which is why nothing is saved to the database.
importer.py
class AttendanceImporter(CsvImporter):
    field_names = ["username", "mark"]

    # it does not go to the method.
    def _handle_row(self, row):
        print("this")
        if not self._is_row_valid(row):
            return self._FAILED
        username = row["username"]
        if self._is_username_exist(username):
            if self._is_mark_exist(username):
                if self._do_update(row):
                    return self._UPDATED
                else:
                    return self._FAILED
            else:
                if self._do_save(row):
                    return self._CREATED
                else:
                    return self._FAILED
        else:
            return self._FAILED

    def _is_row_valid(self, row):
        for item in self.field_names:
            if len(row[item]) == 0:
                return False
        return True

    def _is_username_exist(self, username):
        return len(User.objects.filter(username=username)) > 0
        print(username)  # unreachable: placed after the return

    def _is_mark_exist(self, username):
        user = User.objects.get(username=username)
        return len(Attendance.objects.filter(user=user)) > 0
        print(username)  # unreachable: placed after the return

    def _do_save(self, row):
        # create attendance mark
        try:
            attendance = Attendance()
            user = User.objects.get(username=row["username"])
            attendance = Attendance.objects.create(user=user, mark=row["mark"])
            print("save?")
            attendance.save()
        except:
            return False
        return True

    def _do_update(self, row):
        # update attendance mark
        try:
            user = User.objects.get(username=row["username"])
            attendance = Attendance.objects.get(user=user)
            attendance.mark = row["mark"]
            attendance.save()
        except Exception as e:
            print(e)
            return False
        return True
Views.py
@transaction.atomic
@csrf_exempt
def data_import(request, file_type):
    # TODO: currently only support importing users, later can support importing groups
    fields_required = None
    if file_type == "user":
        fields_required = "username, password, email, matric_number, fullname, groups"
    elif file_type == "attendance":
        fields_required = "username, mark"
    if request.FILES:
        successful = False
        try:
            im = None
            if file_type == "user":
                upload_file = request.FILES['user_file']
                file_path = save_uploaded_file(request.FILES['file'],
                                               filename=generate_unique_file_name(extension="csv"),
                                               filedir=USER_DATA_UPLOAD_PATH)
                im = StudentImporter(source=open(file_path))
            elif file_type == "attendance":
                upload_file = request.FILES['attendance_file']
                file_path = save_uploaded_file(upload_file,
                                               filename=generate_unique_file_name(extension="csv"),
                                               filedir=USER_DATA_UPLOAD_PATH)
                im = AttendanceImporter(source=open(file_path))
            successful, result = im.import_to_database()
        except Exception:
            pass
        if successful:
            messages.info(request, "The import is successful!\n" + result)
        else:
            messages.warning(request, "The import is NOT successful, no data is imported!")
        return HttpResponseRedirect(reverse("student_user_profile_list"))
    return render(request,
                  "app-appglobal/import-data.html",
                  {'type': file_type, 'fields_required': fields_required})
Model.py:
class Attendance(models.Model):
    user = models.OneToOneField(User, on_delete=models.CASCADE)
    mark = models.IntegerField(default=0)

    class Meta:
        ordering = ['user']

    def get_mark(self):
        return self.mark
Urls
url(r'^student/attendance/$', views.data_import, {'file_type': 'attendance'}, name='attendance_import'),
CSVIMPORTER
class CsvImporter(object, metaclass=ABCMeta):
    def __init__(self, source):
        """
        :param source: a file object
        :return:
        """
        self.source = source

    def import_to_database(self):
        """
        :return: (successful:boolean, result:string)
        """
        pass
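For reference, the import_to_database shown above is just pass, so as posted nothing ever iterates the CSV rows or calls _handle_row, and the view's "successful, result = im.import_to_database()" would try to unpack None inside the bare "except Exception: pass", which could explain why _do_save is never reached. Below is only a minimal sketch of how a concrete base class might drive _handle_row; the _CREATED/_UPDATED/_FAILED markers and a header-less CSV are assumptions, not the project's real code.

import csv
from abc import ABCMeta

class CsvImporter(object, metaclass=ABCMeta):
    _CREATED, _UPDATED, _FAILED = "created", "updated", "failed"  # assumed result markers

    def __init__(self, source):
        self.source = source  # a file object

    def import_to_database(self):
        counts = {self._CREATED: 0, self._UPDATED: 0, self._FAILED: 0}
        # field_names is defined by the subclass; assumes the CSV has no header row
        reader = csv.DictReader(self.source, fieldnames=self.field_names)
        for row in reader:
            counts[self._handle_row(row)] += 1  # this call is what eventually reaches _do_save
        successful = counts[self._FAILED] == 0
        result = "created: %d, updated: %d, failed: %d" % (
            counts[self._CREATED], counts[self._UPDATED], counts[self._FAILED])
        return successful, result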
Intro: I have a small piece of code that takes any image that is added, makes it smaller, and saves it. I am using an external library called FilePond for this.
The issue: If two users upload different images with the same name, the second user's image replaces the first user's, and both users end up seeing the same image.
What I want: unique image names. My attempt is below the present code. I'm looking for a solution that keeps the names short but unique.
Present Code:
fields.py:
class FilePondField(forms.FileField):
    widget = forms.TextInput(attrs={'class': 'fileid'})

    def __init__(self, name, *args, **kwargs):
        super(FilePondField, self).__init__(*args, **kwargs)
        self.name = name

    def prepare_value(self, data):
        if not data:
            return None
        if isinstance(data, str):
            try:
                tu = TU.objects.get(upload_id=data)
            except TU.DoesNotExist:
                return None
            return tu.upload_id
        name = data.name
        base = os.path.basename(name)
        file_id = "%s_%s" % (self.name, data.instance.pk)
        try:
            tu = TU.objects.get(file_id=file_id)
        except TU.DoesNotExist:
            upload_id = uuid()
            tu = TU(upload_id=upload_id, file_id=file_id,  # uuid(),
                    upload_name=base, upload_type=TU.FILE_DATA)
            try:
                with data.storage.open(name, 'rb') as f:
                    rd_data = File(f)
                    tu.file.save(tu.file_id, rd_data, True)
                tu.save()
            except:
                pass
        return tu.upload_id

    def clean(self, data, initial=None):
        self.initial = initial
        if not data:
            if self.required:
                raise ValidationError(self.error_messages['required'], code='required')
            return None
        return data

    def save_cb(self, instance, modfld, tu):
        prename = os.path.join(modfld.upload_to, tu.upload_name)
        ffile = ImageFieldFile(instance, modfld, prename)
        try:
            with open(tu.get_file_path(), 'rb') as f:
                data = File(f)
                ffile.save(tu.upload_name, data, False)
        except:
            pass
        return ffile

    def do_tmp(self, instance, modfld, value, cb):
        try:
            tu = TU.objects.get(upload_id=value)
            ffile = cb(instance, modfld, tu) if cb else None
        except TU.DoesNotExist:
            ffile = None
        else:
            tu.delete()
        file_id = "%s_%s" % (self.name, instance.pk)
        try:
            ogtu = TU.objects.get(file_id=file_id)
        except TU.DoesNotExist:
            pass
        else:
            ogtu.delete()
        return ffile

    def save(self, instance, modfld, value):
        return self.do_tmp(instance, modfld, value, self.save_cb)

    def del_tmp(self, instance, modfld, value):
        self.do_tmp(instance, modfld, value, None)

    def bound_data(self, data, initial):
        return data

    def has_changed(self, initial, data):
        if not initial:
            return data
        return initial != data
forms.py
class ImageForm(forms.ModelForm):
    img_fields = []

    def __init__(self, *args, **kwargs):
        super(ImageForm, self).__init__(*args, **kwargs)
        for (fld, fargs) in self.img_fields:
            self.fields[fld] = FilePondField(fld, **fargs)

    def save(self, *args, **kwargs):
        commit = kwargs.get('commit', True)
        for (fld_nm, fargs) in self.img_fields:
            fld = dict([(f.name, f) for f in self._meta.model._meta.fields])[fld_nm]
            if isinstance(self.fields[fld_nm], FilePondField):
                self.fields[fld_nm] = self.fields[fld_nm].save(self.instance, fld, self.cleaned_data[fld_nm])
        return super(ImageForm, self).save(*args, **kwargs)

    def del_tmp(self):
        for (fld_nm, fargs) in self.img_fields:
            fld = dict([(f.name, f) for f in self._meta.model._meta.fields])[fld_nm]
            if isinstance(self.fields[fld_nm], FilePondField):
                self.fields[fld_nm].del_tmp(self.instance, fld, self.cleaned_data[fld_nm])
My Approach:
In fields.py I import datetime.
In prepare_value(self, data) and do_tmp(self, instance, modfld, value, cb) I make the change below:
...
file_id = "%s_%s_%s" % (self.name, data.instance.pk, datetime.datetime.now().strftime("%Y_%m_%d_%H_%M_%S_%f"))
...
Can someone comment on this or suggest a better alternative?
Just use the datetime.now() value for the file name, as below:
from datetime import datetime
open(str(datetime.now()) + ".txt", "w+")
Result: It creates a file named 2019-04-22 00:21:31.862001.txt
Make the name a variable like this:
name = "Your-General-Name-{}".format((int(time.time())))
Then put it in your loop so that the time.time() value changes each time. You obviously don't have to use time.time(); you could use datetime.datetime.now() etc., in which case you'd just replace the time function.
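If the goal is names that stay short but are effectively guaranteed unique, one alternative sketch (not from the answers above; unique_upload_name is a hypothetical helper) is to reuse the uuid module that the field code already calls:

import os
import uuid

def unique_upload_name(original_name):
    """Return a short, collision-resistant file name that keeps the original extension."""
    _, ext = os.path.splitext(original_name)
    # 12 hex characters keeps the name compact; the value differs on every call,
    # e.g. unique_upload_name("photo.png") -> something like "3f9c2a1b7d4e.png"
    return "%s%s" % (uuid.uuid4().hex[:12], ext)

Truncating the hex digest keeps names compact at the cost of a little collision resistance; using the full uuid4().hex is the safer default.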
I keep running into an error I never had before every time I run python run_p2pool.py. I tried passing -n and typing in the argument, as suggested online, and that doesn't fix it. My configuration is already in place, and I even tried reinstalling python-twisted to no avail. There is no error in the code I am trying to run, and I have yet to figure out how to fix this.
# Twisted doesn't have abortConnection! Upgrade to a newer version of Twisted to avoid memory leaks!
> Pausing for 3 seconds...
> 2017-11-02 01:07:47.958817 > Traceback (most recent call last):
> 2017-11-02 01:07:47.958986 > File "run_p2pool.py", line 5, in <module>
> 2017-11-02 01:07:47.959116 > main.run()
> 2017-11-02 01:07:47.959191 > File "/var/www/html/DashPool/DPI-MASTER/Dash-Pool-Integration/p2pool/main.py", line 687, in run
> 2017-11-02 01:07:47.959422 > deferral.RobustLoopingCall(logfile.reopen).start(5)
> 2017-11-02 01:07:47.959490 > File "/var/www/html/DashPool/DPI-MASTER/Dash-Pool-Integration/p2pool/util/deferral.py", line 277, in start
> 2017-11-02 01:07:47.959605 > self._df = self._worker(period).addErrback(lambda fail: fail.trap(defer.CancelledError))
> 2017-11-02 01:07:47.959686 > File "/var/www/html/DashPool/DPI-MASTER/Dash-Pool-Integration/p2pool/util/deferral.py", line 225, in _
> 2017-11-02 01:07:47.960104 > df = defer.Deferred(cancelled)
> 2017-11-02 01:07:47.960195 > TypeError: __init__() takes exactly 1 argument (2 given)
I never had this problem before and was able to run the same program before, but just in case, here is the deferral.py file:
from __future__ import division

import itertools
import random
import sys

from twisted.internet import defer, reactor
from twisted.python import failure, log

def sleep(t):
    d = defer.Deferred(canceller=lambda d_: dc.cancel())
    dc = reactor.callLater(t, d.callback, None)
    return d

def run_repeatedly(f, *args, **kwargs):
    current_dc = [None]
    def step():
        delay = f(*args, **kwargs)
        current_dc[0] = reactor.callLater(delay, step)
    step()
    def stop():
        current_dc[0].cancel()
    return stop

class RetrySilentlyException(Exception):
    pass

def retry(message='Error:', delay=3, max_retries=None, traceback=True):
    '''
    @retry('Error getting block:', 1)
    @defer.inlineCallbacks
    def get_block(hash):
        ...
    '''
    def retry2(func):
        @defer.inlineCallbacks
        def f(*args, **kwargs):
            for i in itertools.count():
                try:
                    result = yield func(*args, **kwargs)
                except Exception as e:
                    if i == max_retries:
                        raise
                    if not isinstance(e, RetrySilentlyException):
                        if traceback:
                            log.err(None, message)
                        else:
                            print >>sys.stderr, message, e
                    yield sleep(delay)
                else:
                    defer.returnValue(result)
        return f
    return retry2

class ReplyMatcher(object):
    '''
    Converts request/got response interface to deferred interface
    '''

    def __init__(self, func, timeout=5):
        self.func = func
        self.timeout = timeout
        self.map = {}

    def __call__(self, id):
        if id not in self.map:
            self.func(id)
        df = defer.Deferred()
        def timeout():
            self.map[id].remove((df, timer))
            if not self.map[id]:
                del self.map[id]
            df.errback(failure.Failure(defer.TimeoutError('in ReplyMatcher')))
        timer = reactor.callLater(self.timeout, timeout)
        self.map.setdefault(id, set()).add((df, timer))
        return df

    def got_response(self, id, resp):
        if id not in self.map:
            return
        for df, timer in self.map.pop(id):
            df.callback(resp)
            timer.cancel()

class GenericDeferrer(object):
    '''
    Converts query with identifier/got response interface to deferred interface
    '''

    def __init__(self, max_id, func, timeout=5, on_timeout=lambda: None):
        self.max_id = max_id
        self.func = func
        self.timeout = timeout
        self.on_timeout = on_timeout
        self.map = {}

    def __call__(self, *args, **kwargs):
        while True:
            id = random.randrange(self.max_id)
            if id not in self.map:
                break
        def cancel(df):
            df, timer = self.map.pop(id)
            timer.cancel()
        try:
            df = defer.Deferred(cancel)
        except TypeError:
            df = defer.Deferred()  # handle older versions of Twisted
        def timeout():
            self.map.pop(id)
            df.errback(failure.Failure(defer.TimeoutError('in GenericDeferrer')))
            self.on_timeout()
        timer = reactor.callLater(self.timeout, timeout)
        self.map[id] = df, timer
        self.func(id, *args, **kwargs)
        return df

    def got_response(self, id, resp):
        if id not in self.map:
            return
        df, timer = self.map.pop(id)
        timer.cancel()
        df.callback(resp)

    def respond_all(self, resp):
        while self.map:
            id, (df, timer) = self.map.popitem()
            timer.cancel()
            df.errback(resp)

class NotNowError(Exception):
    pass

class DeferredCacher(object):
    '''
    like memoize, but for functions that return Deferreds

    @DeferredCacher
    def f(x):
        ...
        return df

    @DeferredCacher.with_backing(bsddb.hashopen(...))
    def f(x):
        ...
        return df
    '''

    @classmethod
    def with_backing(cls, backing):
        return lambda func: cls(func, backing)

    def __init__(self, func, backing=None):
        if backing is None:
            backing = {}
        self.func = func
        self.backing = backing
        self.waiting = {}

    @defer.inlineCallbacks
    def __call__(self, key):
        if key in self.waiting:
            yield self.waiting[key]
        if key in self.backing:
            defer.returnValue(self.backing[key])
        else:
            self.waiting[key] = defer.Deferred()
            try:
                value = yield self.func(key)
            finally:
                self.waiting.pop(key).callback(None)
            self.backing[key] = value
            defer.returnValue(value)

    _nothing = object()
    def call_now(self, key, default=_nothing):
        if key in self.backing:
            return self.backing[key]
        if key not in self.waiting:
            self.waiting[key] = defer.Deferred()
            def cb(value):
                self.backing[key] = value
                self.waiting.pop(key).callback(None)
            def eb(fail):
                self.waiting.pop(key).callback(None)
                if fail.check(RetrySilentlyException):
                    return
                print
                print 'Error when requesting noncached value:'
                fail.printTraceback()
                print
            self.func(key).addCallback(cb).addErrback(eb)
        if default is not self._nothing:
            return default
        raise NotNowError(key)

def deferred_has_been_called(df):
    still_running = True
    res2 = []
    def cb(res):
        if still_running:
            res2[:] = [res]
        else:
            return res
    df.addBoth(cb)
    still_running = False
    if res2:
        return True, res2[0]
    return False, None

def inlineCallbacks(f):
    from functools import wraps
    @wraps(f)
    def _(*args, **kwargs):
        gen = f(*args, **kwargs)
        stop_running = [False]
        def cancelled(df_):
            assert df_ is df
            stop_running[0] = True
            if currently_waiting_on:
                currently_waiting_on[0].cancel()
        df = defer.Deferred(cancelled)
        currently_waiting_on = []
        def it(cur):
            while True:
                try:
                    if isinstance(cur, failure.Failure):
                        res = cur.throwExceptionIntoGenerator(gen)  # external code is run here
                    else:
                        res = gen.send(cur)  # external code is run here
                    if stop_running[0]:
                        return
                except StopIteration:
                    df.callback(None)
                except defer._DefGen_Return as e:
                    # XXX should make sure direct child threw
                    df.callback(e.value)
                except:
                    df.errback()
                else:
                    if isinstance(res, defer.Deferred):
                        called, res2 = deferred_has_been_called(res)
                        if called:
                            cur = res2
                            continue
                        else:
                            currently_waiting_on[:] = [res]
                            def gotResult(res2):
                                assert currently_waiting_on[0] is res
                                currently_waiting_on[:] = []
                                if stop_running[0]:
                                    return
                                it(res2)
                            res.addBoth(gotResult)  # external code is run between this and gotResult
                    else:
                        cur = res
                        continue
                break
        it(None)
        return df
    return _

class RobustLoopingCall(object):
    def __init__(self, func, *args, **kwargs):
        self.func, self.args, self.kwargs = func, args, kwargs
        self.running = False

    def start(self, period):
        assert not self.running
        self.running = True
        self._df = self._worker(period).addErrback(lambda fail: fail.trap(defer.CancelledError))

    @inlineCallbacks
    def _worker(self, period):
        assert self.running
        while self.running:
            try:
                self.func(*self.args, **self.kwargs)
            except:
                log.err()
            yield sleep(period)

    def stop(self):
        assert self.running
        self.running = False
        self._df.cancel()
        return self._df
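The traceback ends at df = defer.Deferred(cancelled) inside inlineCallbacks: very old Twisted releases do not accept a canceller argument to Deferred(), which is the same situation GenericDeferrer.__call__ above already guards against with its try/except TypeError. A minimal sketch (an assumption, not part of the original file) of applying that same guard at the failing line:

# inside inlineCallbacks(), replacing the single line shown in the traceback
try:
    df = defer.Deferred(cancelled)
except TypeError:  # older Twisted: Deferred() takes no canceller argument
    df = defer.Deferred()  # cancellation simply won't propagate on this Twisted version

That said, the "Twisted doesn't have abortConnection! Upgrade to a newer version of Twisted" warning at the top of the output suggests the cleaner fix is simply upgrading Twisted.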
I have a simple 'to do' app in Python 2.7 and I wrote some unit tests for it. This is my first time with Python unit tests and I just want to understand the idea behind them.
Can someone tell me whether I'm going in the right direction?
How can I improve these tests?
How can I check that the message in the IndexError is correct? (for IndexError('Note doesn\'t exist') or IndexError('Returned more then one entry'))
App:
# coding: utf-8
from __future__ import unicode_literals
from shutil import copyfile
import json
import os

DATABASE = 'notes_data/notes.json'
BOARDS = ['to do', 'in progress', 'done']

class NotesManagerMixin(object):
    def count(self):
        return len(self.notes)

    def filter(self, *args, **kwargs):
        result = self.notes
        for key, value in kwargs.iteritems():
            result = [
                note for note in result
                if getattr(note, key, None) == value or
                note.message.startswith(str(value)) or
                note.message.endswith(str(value))
            ]
        return NotesQueryset(result)

    def get(self, *args, **kwargs):
        notes = self.filter(*args, **kwargs)
        if notes.count() == 0:
            raise IndexError('Note doesn\'t exist')
        elif notes.count() == 1:
            return notes[0]
        else:
            raise IndexError('Returned more then one entry')

    def first(self):
        return self.notes[0]

    def last(self):
        return self.notes[-1]

class NotesQueryset(NotesManagerMixin):
    def __init__(self, notes):
        self.notes = [note for note in notes]

    def update(self, *args, **kwargs):
        for note in self.notes:
            for key, value in kwargs.items():
                setattr(note, key, value)
            note.save()
        return self

    def delete(self):
        for note in self.notes:
            note.delete()
        return self

    def __getitem__(self, idx):
        return self.notes[idx]

    def __str__(self):
        return str(self.notes)

    def __repr__(self):
        return self.__str__()

class NotesManager(NotesManagerMixin):
    def __init__(self):
        self.notes = []

    def __iter__(self):
        return self.next()

    def __generate_id(self):
        """
        Helper function for getting a free index value.
        """
        try:
            return max(note.id for note in self.notes) + 1
        except ValueError:
            return 1

    def all(self):
        return NotesQueryset(self.notes)

    def add(self, idx, board, message):
        self.notes.append(Note(idx=idx, board=board, message=message))

    def create(self, board, message):
        note = Note(
            idx=self.__generate_id(),
            board=board,
            message=message
        )
        note.clean()
        self.notes.append(note)
        note.save()
        return note

    def next(self):
        for note in self.notes:
            yield note

    def to_dict(self):
        return [note.to_dict() for note in self.notes]

class Note(object):
    objects = NotesManager()

    def __init__(self, idx, board, message):
        self.id = idx
        self.board = board
        self.message = message

    def __str__(self):
        return 'ID: {}, Board: {}, Message: {}'.format(
            self.id,
            self.board,
            self.message
        )

    def __repr__(self):
        return self.__str__()

    def clean(self):
        if not self.message:
            raise ValueError('Message is required')
        if self.board not in BOARDS:
            raise ValueError('Board "{}" doesn\'t exists'.format(self.board))
        if type(self.id) != int:
            raise ValueError('Note id "{}" is invalid'.format(self.id))

    def save(self):
        for key, note in enumerate(self.objects):
            if note.id == self.id:
                self.objects.notes[key] = self
                break
        with open(DATABASE, 'w') as database_file:
            json.dump(self.objects.to_dict(), database_file, indent=4)
        return True

    def delete(self):
        for key, note in enumerate(self.objects.notes):
            if note.id == self.id:
                self.objects.notes.pop(key)
        with open(DATABASE, 'w') as database_file:
            json.dump(self.objects.to_dict(), database_file, indent=4)

    def to_dict(self):
        return {
            'id': self.id,
            'message': self.message,
            'board': self.board
        }

def load_initial_data():
    with open(DATABASE, 'r') as database_file:
        json_data = json.load(database_file, encoding='utf-8')
        for item in json_data:
            Note.objects.add(
                idx=item['id'],
                board=item['board'],
                message=item['message'],
            )

load_initial_data()
unit tests:
import unittest

from notes_manager_v2 import NotesQueryset, Note, load_initial_data, NotesManagerMixin

class TestNotesQueryset(unittest.TestCase):
    def test_filter(self):
        actual = Note.objects.all().filter(board='in progress')  # get all notes with board 'in progress'
        expected = []
        for note in Note.objects.all():
            if note.board == 'in progress':
                expected.append(note)
        self.assertItemsEqual(actual, expected)

    def test_get(self):
        actual = [Note.objects.all().get(id=1)]  # get note with method get
        expected = []
        for note in Note.objects.all():  # search note with index 1
            if note.id == 1:
                expected.append(note)
        self.assertEqual(actual, expected)

    def test_get_fail_1(self):
        self.assertRaises(IndexError, lambda: Note.objects.all().get(id=9868976))  # this note doesn't exist, so it should raise IndexError

    def test_update(self):
        from_board = 'to do'
        to_board = 'done'
        before_change = Note.objects.filter(board=from_board)  # use filter method to get all notes with board 'to do'
        actual = Note.objects.filter(board=from_board).update(board=to_board)  # update all boards
        self.assertNotEqual(before_change, actual)  # check for difference
        notes = Note.objects.all()
        for note in actual:
            self.assertIn(note, notes)  # check notes are updated

    def test_delete(self):
        to_delete = Note.objects.filter(id=2).delete()  # find note with filter method and delete it
        notes = Note.objects.all()
        self.assertNotIn(to_delete, notes)

    def test_create(self):
        new_note = Note.objects.create(message='lorem ipsum', board='in progress')  # create new note
        notes = Note.objects.all()
        self.assertIn(new_note, notes)

if __name__ == '__main__':
    unittest.main()
Have you looked at the documentation? See doctest. They are an easy way to integrate unit tests into python code. Another option is the unittest framework.
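To make that concrete (a sketch only: the add function is hypothetical, and the Note import assumes the notes_data/notes.json file is present, as in the tests above): a doctest lives in a docstring and is executed by doctest.testmod(), and the exact IndexError message asked about can be checked with assertRaises used as a context manager, which is available in Python 2.7:

import doctest
import unittest

from notes_manager_v2 import Note

def add(a, b):
    """Return the sum of a and b.

    >>> add(2, 3)
    5
    """
    return a + b

class TestGetErrors(unittest.TestCase):
    def test_get_missing_note_message(self):
        # assertRaises as a context manager exposes the raised exception object
        with self.assertRaises(IndexError) as ctx:
            Note.objects.all().get(id=9868976)
        self.assertEqual(str(ctx.exception), "Note doesn't exist")

if __name__ == '__main__':
    doctest.testmod()  # runs the doctest embedded in add()
    unittest.main()    # runs TestGetErrors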
Guys, I'm having a random error in my project when trying to register users.
This is the information returned from the browser:
{Gender: "M", ReEmail: "heyguys#whatup.com", Birthday: "1/19/1980", Role: "Athlete",…}
Birthday: "1/19/1980"
Country: "United States"
Email: "heyguys#whatup.com"
Gender: "M"
Password:"doesntwork123"
ReEmail:"heyguys#whatup.com"
RePassword:"doesntwork123"
Role:"Athlete"
kwargs:{}
method:"POST"
results:{error: "tuple indices must be integers, not str"}
error: "tuple indices must be integers, not str"
Using the PyCharm debugger, I have narrowed the problem down to this specific part:
def addUser(self, session, *args, **kwargs):
    try:
        #existingPerson = session.query(person_models.Player).filter(person_models.Player.Email==args['Email']).first()
        print person_models.Player
        print person_models.Player.Email
        existingPerson = session.query(person_models.Player).filter(person_models.Player.Email == args['Email']).first()
        print existingPerson
        if existingPerson is not None:
            return {'error': "User already exists"}
        person_model = getattr(person_models)
        person = person_model(*args)
        session.add(person)
        session.commit()
        return {"response": "User added"}
    except Exception as e:
        return {'error': str(e)}
Once it gets to existingPerson, it jumps directly to the exception; that is when it gives me "tuple indices must be integers, not str".
Here is some additional code:
webapi.py
import cherrypy
from api.restBase import jsonDbRest
from person.controllers import PersonController

class PersonAPI:
    exposed = True

    @jsonDbRest
    def GET(self, email=False, **kwargs):
        session = cherrypy.request.db
        pc = PersonController()
        if email in kwargs:
            profile = pc.get(session, **kwargs)
            return {"results": [profile], "count": '1'}
        profile = pc.search(session, **kwargs)
        return {"results": profile.to_dict(), "count": '1'}

    @jsonDbRest
    def POST(self, *args, **kwargs):
        session = cherrypy.request.db
        pc = PersonController()
        response = pc.addUser(session, *args, **kwargs)
        return {"results": response}
It's complaining about line 15.
Here is another file that uses the code:
restbase.py
import cherrypy
import json
from engine import create_session

def jsonDbRest(func):
    def wrapper(self, *args, **kwargs):
        cherrypy.request.db = create_session()
        if 'Content-Length' in cherrypy.request.headers:
            length = cherrypy.request.headers['Content-Length']
            bodytext = cherrypy.request.body.fp.read(int(length))
        else:
            bodytext = ""
        if bodytext != "":
            jsonData = json.loads(bodytext)
        else:
            jsonData = None
        cherrypy.request.json = jsonData
        func_data = func(self, *args, **kwargs)
        data = {"method": func.__name__, "input": jsonData, "kwargs": kwargs}
        if func_data is not None:
            data.update(func_data)
        cherrypy.response.headers['Content-Type'] = 'application/json'
        if "error" in data:
            cherrypy.response.status = "400"
        else:
            cherrypy.response.status = "200"
        cherrypy.request.db.close()
        return json.dumps(data, indent=4)
    return wrapper
Another piece of code:
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.ext.declarative import AbstractConcreteBase
import datetime
import decimal
import netaddr
import sqlalchemy
from time import strftime

class Base(object):
    def to_dict(self):
        dict = {}
        dict = self.__dict__
        remove = '_sa_instance_state'
        dict = {key: value for key, value in dict.items() if key is not remove}
        dict['DateOfBirth'] = dict['DateOfBirth'].strftime('%m/%d/%Yi')
        return dict

    def from_dict(self, dict):
        for col in dict:
            setattr(self, col, dict[col])

    def to_array(self, columns=[]):
        if len(columns) < 1:
            columns = self.__mapper__.columns.keys()
        data = []
        for col in columns:
            data.append(getattr(self, col))
        return data

    def get_columns(self):
        return self.__mapper__.columns.keys()

JsonBase = declarative_base(cls=Base)
I think your problem is in the first sample:
existingPerson = session.query(person_models.Player).filter(person_models.Player.Email == args['Email']).first()
Try with kwargs instead of args:
existingPerson = session.query(person_models.Player).filter(person_models.Player.Email == kwargs['Email']).first()
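For context, a sketch (not part of the answer) of why that change matters: in POST(self, *args, **kwargs) the positional args is a tuple, so indexing it with a string produces exactly the reported error, while the form fields arrive as keyword arguments in kwargs:

def demo(*args, **kwargs):
    print(kwargs['Email'])    # works: kwargs is a dict keyed by field name
    try:
        print(args['Email'])  # args is a tuple; string indices are not allowed
    except TypeError as e:
        print(e)              # prints: tuple indices must be integers, not str

# roughly how the form fields reach addUser via POST(self, *args, **kwargs):
demo(Email="heyguys@whatup.com", Role="Athlete")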