I'm working with PostgreSQL and I have used MagicMock in my tests, but I'm not sure that I have understood the mocking concepts. This is my example code (I have dbname=test, table=py_test and user=simone):
import psycopg2
import sys
import unittest
import doctest
from collections import Counter
from mock import Mock, MagicMock, patch


class db(object):
    def __init__(self, database, user):
        self.con = None
        self.database = database
        self.user = user

    def test_connection(self):
        """Connection DB"""
        try:
            self.con = psycopg2.connect(database=self.database, user=self.user)
            return True
        except psycopg2.DatabaseError, e:
            print 'Error %s' % e
            return False

    def test_empty_table(self, table):
        """empty table?"""
        try:
            cur = self.con.cursor()
            cur.execute('SELECT * from ' + table)
            ver = cur.fetchone()
            return ver
        except psycopg2.DatabaseError, e:
            print 'Error %s' % e

    def test_data_type(self, table, column):
        """data type"""
        try:
            cur = self.con.cursor()
            cur.execute("SELECT data_type from information_schema.columns "
                        "where table_name = '" + table + "' and column_name = '" + column + "'")
            ver = cur.fetchone()
            return ver
        except psycopg2.DatabaseError, e:
            print 'Error %s' % e

    def __del__(self):
        if self.con:
            self.con.close()


class test_db(unittest.TestCase):
    def testing(self):
        tdb = db('test', 'simone')
        self.assertTrue(tdb.test_connection(), 1)
        self.assertTrue(tdb.test_empty_table('py_test'), 1)
        self.assertTrue(tdb.test_data_type('py_test', 'id'), int)


class test_mock(object):
    def __init__(self, db):
        self.db = db

    def execute(self, nomedb, user, table, field):
        self.db(nomedb, user)
        self.db.test_connection()
        self.db.test_empty_table(table)
        self.db.test_data_type(table, field)


if __name__ == "__main__":
    c = MagicMock()
    d = test_mock(c)
    d.execute('test', 'simone', 'py_test', 'id')
    method_count = Counter([str(method) for method in c.method_calls])
    print c.method_calls
    print method_count
    print c.mock_calls
Maybe I'll give you another example of mocking, this time using the Mockito package:
import sphinxsearch
import unittest
from mockito import mock, when, unstub, verify


class SearchManagerTest(unittest.TestCase):
    def setUp(self):
        self.sphinx_client = mock()
        when(sphinxsearch).SphinxClient().thenReturn(self.sphinx_client)

    def tearDown(self):
        unstub()

    def test_search_manager(self):
        # given
        value = {'id': 142564}
        expected_result = 'some value returned from SphinxSearch'
        # when
        search_manager = SearchManager()
        result = search_manager.get(value)
        # then
        verify(self.sphinx_client).SetServer('127.0.0.1', 9312)
        verify(self.sphinx_client).SetMatchMode(sphinxsearch.SPH_MATCH_ALL)
        verify(self.sphinx_client).SetRankingMode(sphinxsearch.SPH_RANK_WORDCOUNT)
        self.assertEqual(result, expected_result)
The main concept is to replace a module (with a mock) that is tested somewhere else (it has its own unittest module) and to record some behavior on it.
You replace the module you use with a mock:
self.sphinx_client = mock()
and then record on this mock that calling a specific method will return some data: simple values like strings, or mocked objects if you need to check behavior:
when(sphinxsearch).SphinxClient().thenReturn(self.sphinx_client)
In this case you are saying that if you import the sphinxsearch module and call SphinxClient() on it, you get the mocked object.
Then the main test comes in. You call the method or object under test (SearchManager here). Its body is exercised with the given values:
search_manager = SearchManager()
The "then" section verifies whether certain actions were made:
verify(self.sphinx_client).SetServer('127.0.0.1', 9312)
verify(self.sphinx_client).SetMatchMode(sphinxsearch.SPH_MATCH_ALL)
verify(self.sphinx_client).SetRankingMode(sphinxsearch.SPH_RANK_WORDCOUNT)
Here we check whether SetServer was called on self.sphinx_client with parameters '127.0.0.1' and 9312. The other two lines are self-explanatory, as above.
And here we do normal checks:
self.assertEqual(result, expected_result)
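For comparison, here is a minimal sketch of the same stub-and-verify flow using the standard mock library instead of Mockito (SearchManager and sphinxsearch are assumed from the example above, so this only runs where they are importable):

from mock import MagicMock, patch

# Stub: any call to sphinxsearch.SphinxClient() returns our mock.
sphinx_client = MagicMock()
with patch('sphinxsearch.SphinxClient', return_value=sphinx_client):
    search_manager = SearchManager()            # code under test
    result = search_manager.get({'id': 142564})

# Verify: was SetServer called exactly once with these arguments?
sphinx_client.SetServer.assert_called_once_with('127.0.0.1', 9312)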
So I have a Flask app that connects to a Postgres 12 DB using a user that has only SELECT privileges. Often I see my APIs return a 400 error, and the API does this when the executed SQL query returns None.
I have built a small wrapper class over the execute and executemany functions for some error handling.
import logging
import sys
import time
from functools import wraps

import psycopg
from psycopg import InterfaceError, OperationalError

logger = logging.getLogger(__name__)


def retry(fn):
    @wraps(fn)
    def wrapper(*args, **kw):
        cls = args[0]
        exc = None
        for x in range(cls._reconnectTries):
            try:
                return fn(*args, **kw)
            except (InterfaceError, OperationalError) as e:
                logger.warning(f"Database Connection {e} exception type: {type(e)}")
                logger.info(f"Idle for {cls._reconnectIdle} seconds")
                time.sleep(cls._reconnectIdle)
                cls._connect()
                exc = e
        logger.exception(f"Exiting the system, {exc}")
        sys.exit(exc)
    return wrapper


class Connection:
    _reconnectTries = 5
    _reconnectIdle = 2

    def __init__(self, conn_string):
        self._conn_string = conn_string
        self.conn = None
        self.cursor = None
        self._connect()

    def _connect(self):
        self.conn = psycopg.connect(self._conn_string)
        self.conn.autocommit = True
        self.cursor = self.conn.cursor()

    @retry
    def execute(self, **kwargs):
        if "query" in kwargs:
            # Flatten multi-line queries so postgres logs each query on a
            # single line, for easier log collection and debugging.
            kwargs["query"] = kwargs["query"].replace("\n", " ")
            kwargs["query"] = " ".join(kwargs["query"].split())
        return self.cursor.execute(**kwargs)

    @retry
    def executemany(self, **kwargs):
        return self.cursor.executemany(**kwargs)
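As a quick check of the flattening done in execute(), here is a self-contained sketch of the transformation, independent of the database:

# Demonstrates the whitespace-flattening applied to queries above.
q = """
SELECT
    json_build_object('id', 1)
FROM    optimus_store
"""
flat = " ".join(q.replace("\n", " ").split())
print(flat)  # -> SELECT json_build_object('id', 1) FROM optimus_store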
For the sake of simplicity, the query code looks somewhat like this:
store_detail_by_link_query = """
    SELECT
        json_build_object('id', store.id, 'uuid', store.uuid)
    FROM
        optimus_store store
    WHERE
        store.link = %(store_link_or_domain)s
        AND store.is_active = TRUE
        AND store.is_deleted = FALSE;
"""

optimus_connection = Connection(conn_string=CONN_STRING)

params = {
    "store_link_or_domain": "dipen28",
}

row = optimus_connection.execute(
    query=store_detail_by_link_query,
    params=params,
).fetchone()
The problem is, in the same API call, if I just check that the result is None and rerun the query, the result comes back the second time.
I know the data is there in the database. This happens in our production system and I am unable to reproduce it locally.
Any help is much appreciated.
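For what it's worth, a hedged diagnostic sketch (not a fix): log the connection's health when the first fetch returns None, using the .closed and .broken properties that psycopg 3 connections expose, then retry once. It reuses the Connection wrapper and logger from the snippet above:

def fetch_with_diagnostics(connection, query, params):
    # connection is the Connection wrapper above; logger likewise.
    row = connection.execute(query=query, params=params).fetchone()
    if row is None:
        logger.warning(
            "query returned None; conn closed=%s broken=%s",
            connection.conn.closed, connection.conn.broken,
        )
        row = connection.execute(query=query, params=params).fetchone()
    return row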
I'm trying to figure out how I can simply check that execute_db returns the result of a call to cursor.fetchone.
I'm not interested in checking whether the DB works; that will be done in an integration test later on.
I've written a small unit test already, but here I'm only mocking the return value. I want to find a way to test that the method with the given name is being called as well.
import sqlite3


class DataChecker:
    def __init__(self):
        # Initialize class
        self.conn = sqlite3.connect("pos.db")
        self.cursor = self.conn.cursor()

    def execute_db(self, query, params=None):
        # Execute SQL query with parameters and return data
        self.cursor.execute(query, [params])
        self.conn.commit()
        return self.cursor.fetchone()
Test:
from unittest.mock import Mock


def test_execute_db():
    mock_datachecker = Mock()
    mock_datachecker.cursor.fetchone.return_value = "one"
    assert DataChecker.execute_db(mock_datachecker, "SELECT * FROM Customers;", 1) == "one"
You would mock the fetchone method of sqlite3 as it is imported in the DataChecker module.
db.py
import sqlite3


class DataChecker:
    def __init__(self):
        # Initialize class
        self.conn = sqlite3.connect("pos.db")
        self.cursor = self.conn.cursor()

    def execute_db(self, query, params=None):
        # Execute SQL query with parameters and return data
        if params:
            self.cursor.execute(query, params)
        else:
            self.cursor.execute(query)
        self.conn.commit()
        return self.cursor.fetchone()
Then you could patch db.sqlite3 to mock the connect().cursor().fetchone method:
from unittest.mock import patch

from db import DataChecker


def test_execute_db():
    with patch('db.sqlite3') as mock_db:
        mock_db.connect().cursor().fetchone.return_value = "one"
        assert DataChecker().execute_db("SELECT * FROM Customers") == "one"
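Since the question also asks how to verify that fetchone itself is invoked, here is a small extension of the same test, a sketch relying on the call-recording that unittest.mock does automatically:

from unittest.mock import patch

from db import DataChecker


def test_execute_db_calls_fetchone():
    with patch('db.sqlite3') as mock_db:
        mock_db.connect().cursor().fetchone.return_value = "one"
        DataChecker().execute_db("SELECT * FROM Customers")
        # The mock records every call, so we can assert fetchone was
        # called exactly once, with no arguments.
        mock_db.connect().cursor().fetchone.assert_called_once_with()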
I am working on a program to store my picture metadata and thumbnails in a Postgres database using Python and psycopg2. In the example I have defined a class MyDbase with methods to create a table, store a value and load a value. Each of these methods needs a connection to the database and a cursor object to execute SQL commands. To avoid repeating the code that makes the connection and gets the cursor, I have made a nested class DbDecorators with a decorator connect.
My question: is this a proper way to handle this, specifically using the with statement and passing the cursor to the MyDbase method (func) inside the wrapper?
from functools import wraps

import psycopg2


class MyDbase:
    '''example using a decorator to connect to a dbase
    '''
    table_name = 'my_table'

    class DbDecorators:
        host = 'localhost'
        db_user = 'db_tester'
        db_user_pw = 'db_tester_pw'
        database = 'my_database'

        @classmethod
        def connect(cls, func):
            @wraps(func)
            def wrapper(*args, **kwargs):
                connect_string = f'host=\'{cls.host}\' dbname=\'{cls.database}\' '\
                                 f'user=\'{cls.db_user}\' password=\'{cls.db_user_pw}\''
                result = None
                try:
                    with psycopg2.connect(connect_string) as connection:
                        cursor = connection.cursor()
                        result = func(*args, cursor, **kwargs)
                except psycopg2.Error as error:
                    print(f'error while connecting to PostgreSQL {cls.database}: '
                          f'{error}')
                finally:
                    if connection:
                        cursor.close()
                        connection.close()
                        print(f'PostgreSQL connection to {cls.database} is closed')
                return result
            return wrapper

        @staticmethod
        def get_cursor(cursor):
            if cursor:
                return cursor
            print('no connection to database')
            raise ConnectionError('no connection to database')

    @classmethod
    @DbDecorators.connect
    def create_table(cls, *args):
        cursor = cls.DbDecorators.get_cursor(*args)
        sql_string = f'CREATE TABLE {cls.table_name} '\
                     f'(id SERIAL PRIMARY KEY, name VARCHAR(30));'
        print(sql_string)
        cursor.execute(sql_string)

    @classmethod
    @DbDecorators.connect
    def store_value(cls, name, *args):
        cursor = cls.DbDecorators.get_cursor(*args)
        sql_string = f'INSERT INTO {cls.table_name} (name) VALUES (%s);'
        print(sql_string)
        cursor.execute(sql_string, (name,))

    @classmethod
    @DbDecorators.connect
    def load_value(cls, _id, *args):
        cursor = cls.DbDecorators.get_cursor(*args)
        sql_string = f'SELECT * FROM {cls.table_name} where id = \'{_id}\';'
        print(sql_string)
        cursor.execute(sql_string)
        db_row = cursor.fetchone()
        return db_row


def test():
    my_db = MyDbase()
    my_db.create_table()
    my_db.store_value('John Dean')
    db_row = my_db.load_value(1)
    print(f'id: {db_row[0]}, name: {db_row[1]}')


if __name__ == '__main__':
    test()
Probably I did not get your request correctly, but why do you need a decorator instead of a context manager? You could define the DB client in a file you can import it from later, and then use it in a context manager:
from psycopg2 import SomeDataBase

db = SomeDataBase(credentials)


def create_table(table_name):
    with db:
        sql_string = f'CREATE TABLE {table_name} '\
                     f'(id SERIAL PRIMARY KEY, name VARCHAR(30));'
        db.cursor.execute(sql_string)
Using a context manager will not close the connection, only the cursor. So using the decorator pattern actually makes more sense here. More info on the context manager: https://www.psycopg.org/docs/usage.html (scroll down to the "with statement" section.)
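A short illustration of that point (a sketch; the DSN is a placeholder): exiting psycopg2's with-connection block ends the transaction but leaves the connection usable, so it still has to be closed explicitly:

import psycopg2

conn = psycopg2.connect('dbname=my_database user=db_tester')  # placeholder DSN
with conn:
    with conn.cursor() as cursor:
        cursor.execute('SELECT 1')   # committed when the block exits
print(conn.closed)  # 0 -> the connection is still open
conn.close()        # must be closed explicitly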
For my integration test, I wrote a custom base class on top of unittest.TestCase.
import os
import unittest

import transaction
from paste.deploy import appconfig
from pyramid.config import Configurator
from sqlalchemy import engine_from_config
from sqlalchemy.exc import IntegrityError
from sqlalchemy.orm import scoped_session, sessionmaker
from zope.sqlalchemy import ZopeTransactionExtension

# Base and populate are app-specific; this import is assumed.
from aurum.models import Base, populate


def initialize_sql(engine, dbsession):
    dbsession.configure(bind=engine)
    Base.metadata.bind = engine
    Base.metadata.drop_all(engine)  # ensure the database is clean!
    Base.metadata.create_all(engine)
    try:
        populate(dbsession)
    except IntegrityError:
        transaction.abort()


class HeavyTestBaseCase(unittest.TestCase):
    __TEST__ = False
    test_ini = 'test.ini'

    @classmethod
    def setUpClass(cls):
        TEST_ROOT = os.path.dirname(os.path.abspath(__file__))
        settings = appconfig('config:' + os.path.join(TEST_ROOT, cls.test_ini))
        cls.engine = engine_from_config(settings, prefix='sqlalchemy.')
        print 'Creating the tables on the test database %s' % cls.engine
        cls.dbsession = scoped_session(sessionmaker(
            extension=ZopeTransactionExtension()))
        config = Configurator(settings=settings)
        initialize_sql(cls.engine, cls.dbsession)

    def tearDown(self):
        transaction.abort()  # strange name for rollback ...

    @classmethod
    def tearDownClass(cls):
        Base.metadata.drop_all(cls.engine)
Now here is the test case:
from mock import Mock, patch

from aurum.models import User
from aurum.user import register_user


class TestRegisterUserIntegration(HeavyTestBaseCase):
    __TEST__ = True

    @classmethod
    def setUpClass(cls):
        cls.uid = 'uid1234'
        cls.username = 'user01'
        cls.password = 'password01'
        cls.masteru = 'masteru'
        cls.masterp = 'masterp'
        cls.gcs_patcher = patch('aurum.user.GCS', autospec=True)
        cls.gcs = cls.gcs_patcher.start()
        cls.gcs.return_value.register.return_value = cls.uid
        super(TestRegisterUserIntegration, cls).setUpClass()

    def test_register_user01_successful_return_useid_and_shared_key(self):
        result = register_user(self.username, self.password, self.masteru, self.masterp)
        self.assertEqual(result.keys(), ['user_id', 'shared_key'])
        self.assertEqual(result['user_id'], self.uid)

    def test_register_user01_successful_write_to_database_query_is_not_none(self):
        register_user(self.username, self.password, self.masteru, self.masterp)
        result = self.dbsession.query(User).filter_by(username=self.username).first()
        self.assertTrue(result is not None)
But rollback didn't do anything. One of the tests fails because of a duplicate-key constraint, which means the commit didn't get rolled back.
In the actual code, before returning I wrote transaction.commit() to commit the changes.
Any idea what's going on? Thanks.
Adapting @zzzek's advice, here's the traceback:
http://pastebin.com/K9fin7ZH
The actual code looks roughly like this:
def add_user(dbsession, username, password):
    with transaction.manager:
        user = User(...)
        dbsession.add(user)
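A plausible explanation (hedged, since the full app isn't shown): the with transaction.manager: block commits when it exits, so by the time the test's tearDown calls transaction.abort() there is nothing left to roll back. A sketch of letting the caller own the transaction instead:

# Sketch: no commit inside the function; the caller (the web framework in
# production, the test harness in tests) decides when to commit or abort.
def add_user(dbsession, username, password):
    user = User(username=username, password=password)  # fields assumed
    dbsession.add(user)
    return user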
In this sample code I want to use the variables from the function db_properties in the function connect_and_query. To accomplish that I chose to use return, and with that strategy the code works perfectly. But in this example the db.properties file only has 4 variables. That said, if the properties file had 20+ variables, should I continue using return? Or is there a more elegant/cleaner/correct way to do that?
import psycopg2
import sys
from ConfigParser import SafeConfigParser


class Main:
    def db_properties(self):
        cfgFile = r'c:\test\db.properties'  # raw string so \t is not read as a tab
        parser = SafeConfigParser()
        parser.read(cfgFile)
        dbHost = parser.get('database', 'db_host')
        dbName = parser.get('database', 'db_name')
        dbUser = parser.get('database', 'db_login')
        dbPass = parser.get('database', 'db_pass')
        return dbHost, dbName, dbUser, dbPass

    def connect_and_query(self):
        try:
            con = None
            dbHost = self.db_properties()[0]
            dbName = self.db_properties()[1]
            dbUser = self.db_properties()[2]
            dbPass = self.db_properties()[3]
            qry = "select star from galaxy"
            con = psycopg2.connect(host=dbHost, database=dbName, user=dbUser,
                                   password=dbPass)
            cur = con.cursor()
            cur.execute(qry)
            data = cur.fetchall()
            for result in data:
                qryResult = result[0]
                print "the test result is : " + qryResult
        except psycopg2.DatabaseError, e:
            print 'Error %s' % e
            sys.exit(1)
        finally:
            if con:
                con.close()


operation = Main()
operation.connect_and_query()
I'm using Python 2.7.
Regards
If there are a lot of variables, or if you want to easily change the variables being read, return a dictionary.
def db_properties(self, *variables):
    cfgFile = r'c:\test\db.properties'
    parser = SafeConfigParser()
    parser.read(cfgFile)
    return {
        variable: parser.get('database', variable) for variable in variables
    }

def connect_and_query(self):
    try:
        con = None
        config = self.db_properties(
            'db_host',
            'db_name',
            'db_login',
            'db_pass',
        )
        # or you can use:
        # variables = ['db_host', 'db_name', 'db_login', 'db_pass', 'db_whatever', 'db_whatever2', ...]
        # config = self.db_properties(*variables)
        # now you can use any variable like: config['db_host']
        # ---rest of the function here---
Edit: I refactored the code so you can specify the variables you want to load in the calling function itself.
You certainly don't want to call db_properties() 4 times; just call it once and store the result.
It's also almost certainly better to return a dict rather than a tuple, since as it is the caller needs to know what the method returns in order, rather than just having access to the values by their names. As the number of values getting passed around grows, this gets even harder to maintain.
e.g.:
class Main:
    def db_properties(self):
        cfgFile = r'c:\test\db.properties'
        parser = SafeConfigParser()
        parser.read(cfgFile)
        configDict = dict()
        configDict['dbHost'] = parser.get('database', 'db_host')
        configDict['dbName'] = parser.get('database', 'db_name')
        configDict['dbUser'] = parser.get('database', 'db_login')
        configDict['dbPass'] = parser.get('database', 'db_pass')
        return configDict

    def connect_and_query(self):
        try:
            con = None
            conf = self.db_properties()
            qry = "select star from galaxy"
            con = psycopg2.connect(host=conf['dbHost'], database=conf['dbName'],
                                   user=conf['dbUser'],
                                   password=conf['dbPass'])
NB: untested
You could change your db_properties to return a dict:
from functools import partial

# call as db_properties('db_host', 'db_name', ...)
def db_properties(self, *args):
    parser = SafeConfigParser()
    parser.read('config file')
    getter = partial(parser.get, 'database')
    return dict(zip(args, map(getter, args)))
But otherwise it's probably best to keep the parser as an attribute of the instance, and provide a convenience method...
class whatever(object):
    def __init__(self, *args, **kwargs):
        # blah blah blah
        cfgFile = r'c:\test\db.properties'
        self._parser = SafeConfigParser()
        self._parser.read(cfgFile)

    def db_config(self, key):
        # convenience lookup (a plain method, since it takes a key argument)
        return self._parser.get('database', key)
Then use con = psycopg2.connect(host=self.db_config('db_host')...)
I'd suggest returning a namedtuple:
from collections import namedtuple

# in db_properties()
return namedtuple("dbconfig", "host name user password")(
    parser.get('database', 'db_host'),
    parser.get('database', 'db_name'),
    parser.get('database', 'db_login'),
    parser.get('database', 'db_pass'),
)
Now you have an object that you can access either by index or by attribute.
config = self.db_properties()
print config[0]    # db_host
print config.host  # same