Mock: What is the right way to mock a SQLAlchemy database? - python

This is what I've done for testing the database:
@patch.object(query_functions, 'connection')
def test_service_reg_return_correct_data(self, mock_sqlalchemy):
    data = {
        "user_name": "testuser53",
        "password": "123456",
        "email_address": "testuser53@example.com",
        "dob": "2022-06-07 00:00:00",
        "address": "Ahmedabad",
    }
    m1 = mock.MagicMock()
    m1.get_json.return_value = data
    print("m", m1)
    with mock.patch("backened.routes.register.request", m1):
        response = user_register()
    self.assertTrue(mock_sqlalchemy.called)
    self.assertEqual(response.json, {'Message': 'New user Created'}, 201)
I've tested this function:
def user_register():
    request_data = request.get_json()
    insert = service_register(request_data)
    print(type(insert))
    return insert
Inside service_register(), I call run_insert() to execute the queries, i.e. the code in the query_functions.py file.
query_functions.py:
def connection():
    engine = create_engine('mysql://root:admin@172.17.0.2:3306/flask', poolclass=NullPool)
    conn = engine.connect()
    return conn

def run_insert(data):
    conn = connection()
    sql = text(CONST_INSERT)
    print(sql)
    conn.execute(sql, data)
    conn.close()
    return ("executed")
I want to know whether this is right or wrong: I was trying to mock my database because I don't want to insert data into the DB, and I'm not sure how the database mock actually happens here.
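Patching query_functions.connection does keep the test away from MySQL, as long as run_insert looks the function up as a module global (which it does here). A minimal sketch of verifying that directly, assuming query_functions is importable under this name and defines CONST_INSERT as in your project:

from unittest import TestCase, mock

import query_functions  # assumed import path for the module shown above


class TestRunInsert(TestCase):
    @mock.patch.object(query_functions, "connection")
    def test_run_insert_uses_mocked_connection(self, mock_connection):
        # connection() now returns a MagicMock, so no engine is created
        # and nothing is written to MySQL.
        result = query_functions.run_insert({"user_name": "testuser53"})

        mock_connection.assert_called_once()
        mock_connection.return_value.execute.assert_called_once()
        self.assertEqual(result, "executed")

One caveat about the original test: the 201 passed as the third argument to assertEqual is treated as the assertion's failure message, not as a status-code check; asserting on response.status_code separately would be clearer.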

Related

add API currency convert to postgres table in python

I have an exercise to do: through the psycopg2 library, connect to my database and get the top 10 customers along with their credit limits:
import psycopg2

connection = psycopg2.connect(
    host='host',
    user='user',
    password='password',
    dbname='db_name',
)
cursor = connection.cursor()
cursor.execute('SELECT customername, creditlimit FROM customers ORDER BY 2 DESC LIMIT 10')
for row in cursor:
    print(f"{row[0]}: {row[1]} USD")
This is the result of the query above (output screenshot omitted: ten customer names with their USD credit limits).
As the next step I need to connect to an API and convert this creditlimit into different currencies (EUR, JPY, GBP), so the final output will look like this:
Euro+ Shopping Channel: 227600.00 USD / 187183.93EUR / 24017262.40 JPY /160578.63 GBP
...
This is the code for the API (the key is valid, feel free to test):
import requests

headers = {
    "apikey": "b6iGXz0JZcr2iyV0SeLcd0dEpqOr8ngK"
}
currency = ['EUR', 'JPY', 'GBP']
params = {
    "from": "USD",
    "to": 'EUR',
    "amount": row[1]
}
url = "https://api.apilayer.com/exchangerates_data/convert"
response = requests.get(url, headers=headers, params=params)
if response.status_code == 200:
    data = response.json()
I connected to the API, but I'm having difficulties appending the conversions to my SQL data. Please advise.
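One way to combine the two pieces is to loop over the fetched rows and call the convert endpoint once per currency, collecting the pieces of each output line as you go. A rough sketch, assuming the endpoint returns the converted amount in a "result" field of the JSON payload (check the response body first):

import psycopg2
import requests

URL = "https://api.apilayer.com/exchangerates_data/convert"
HEADERS = {"apikey": "b6iGXz0JZcr2iyV0SeLcd0dEpqOr8ngK"}
CURRENCIES = ['EUR', 'JPY', 'GBP']

connection = psycopg2.connect(host='host', user='user',
                              password='password', dbname='db_name')
cursor = connection.cursor()
cursor.execute('SELECT customername, creditlimit FROM customers '
               'ORDER BY 2 DESC LIMIT 10')

for name, limit in cursor.fetchall():
    parts = [f"{limit} USD"]
    for code in CURRENCIES:
        params = {"from": "USD", "to": code, "amount": limit}
        response = requests.get(URL, headers=HEADERS, params=params)
        if response.status_code == 200:
            # assumed payload shape: {"result": <converted amount>, ...}
            parts.append(f"{response.json()['result']:.2f} {code}")
    print(f"{name}: {' / '.join(parts)}")

Note this makes 30 HTTP calls for 10 rows; fetching each rate once and multiplying locally would be cheaper if exact API-side conversion is not required.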

Query SQL Server JSON columns using SQLAlchemy

I'm looking for a way to replicate the functionality of SQL Server's JSON_VALUE function using a SQLAlchemy query. I'm using metadata.reflect to define my existing db tables in SQLAlchemy.
SQL:
SELECT Id,
       JSON_VALUE(BankDataJSON, '$.AccountName')
FROM BankData
SQLAlchemy Model:
db = SQLAlchemy()
db.Model.metadata.reflect(db.engine)

class BankData(db.Model):
    __table__ = db.Model.metadata.tables['BankData']
Endpoint / Query:
@cust_accts_bp.route('/api/CustomerAccts')
def get_cust_accts():
    custId = request.args.get('custId')
    db = SQLAlchemy(app)
    BankData = models.bank_data.BankData
    BankAccounts = models.bank_accounts.BankAccounts
    qry = db.session.query(BankAccounts.Id, BankAccounts.AccountNumber, BankAccounts.BankName,
                           BankData.AppId, BankData.CustomerId, BankAccounts.Filename,
                           BankData.BankDataJSON) \
        .filter(
            and_(BankData.Id == BankAccounts.BankDataId, BankData.CustomerId == custId)
        )
    engine = app.config['SQLALCHEMY_DATABASE_URI']
    df = pd.read_sql(qry.statement, engine)
    df['BankDataJSON'] = df['BankDataJSON'].apply(json.loads)  # parse string representation of JSON
    df['BankDataJSON'] = df['BankDataJSON'].map(lambda x: [x[i] for i in x if i == 'AccountName'][0])
    df = df.rename(columns={'BankDataJSON': 'BusinessName'})
    response = json.loads(df.to_json(orient="records"))
    return json.dumps(response)
Using this method, I have to manually deserialize the JSON object (BankDataJSON) into a Python dict and parse it to get the value I want ('AccountName'). If I were to use SQL Server's JSON_VALUE function, this would all be done for me.
JSON response:
[
    {
        "Id": 3003,
        "AccountNumber": "111111111",
        "BankName": "Wells Fargo",
        "AppId": 111111,
        "CustomerId": "555555",
        "Filename": "some filename.pdf",
        "BusinessName": "Some BusinessName"
    },
    {
        "Id": 3004,
        "AccountNumber": "22222222",
        "BankName": "Wells Fargo",
        "AppId": 111111,
        "CustomerId": "555555",
        "Filename": "Some filename",
        "BusinessName": "Some Businessname"
    }
]
How can I go about doing this? I also want to be able to replicate SQL Server's CROSS APPLY OPENJSON functionality for working with arrays of JSON objects in the future. Do I need to define the BankDataJSON column as a JSON type in my model? When I do that, I get an error about pyodbc's inability to deserialize JSON in the MSSQL dialect.
Maybe you can call the server's function directly in your query, something like this:

from sqlalchemy.sql import func

db = SQLAlchemy(app)
BankData = models.bank_data.BankData
qry = db.session.query(BankData.Id,
                       func.JSON_VALUE(BankData.BankDataJSON, '$.AccountName'))
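Since func.<name> renders whatever function name you give it verbatim, you can also label the computed column so it lands in the DataFrame ready-named, removing the manual json.loads and rename steps. A sketch against the same models and filter as in the question:

from sqlalchemy.sql import func

qry = db.session.query(
    BankAccounts.Id,
    BankAccounts.AccountNumber,
    func.JSON_VALUE(BankData.BankDataJSON, '$.AccountName').label('BusinessName')
).filter(
    and_(BankData.Id == BankAccounts.BankDataId, BankData.CustomerId == custId)
)
df = pd.read_sql(qry.statement, engine)  # already contains a 'BusinessName' column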

duplicate key error collection: mydatabase.customers index: _id_ dup key

I want to check whether the user exists; if it does, do not insert it.
This is my code:
#!/usr/bin/env python
# Python code to illustrate
# inserting data in MongoDB
from pymongo import MongoClient

try:
    myclient = MongoClient('10.1.3.18', 27017)
    print("Connected successfully!!!")
except:
    print("Could not connect to MongoDB")

# database
emp_rec1 = {
    "name": "Mr.Geek1",
    "eid": 24,
    "location": "delhi"
}
emp_rec2 = {
    "name": "Mr.Shaurya",
    "eid": 14,
    "location": "delhi"
}
emp_rec3 = {
    "name": "Mr.Shaurya111",
    "eid": 141111,
    "location": "delhi111111"
}
a = [emp_rec1, emp_rec2, emp_rec3]
mydb = myclient["mydatabase"]
#result = mydb.profiles.create_index([('user_id'],unique=True)
mycol = mydb["customers"]
#x = mycol.insert_one(a[2])
cursor = mycol.find()
for record in cursor:
    print(record)
    mydb.servers.getIndexes()
    if record['name'] != "Mr.Shaurya":
        x = mycol.insert_one(a[0])
        print(record)
Is this code correct? Or is there another solution?
If I run my code twice, I get this error:
pymongo.errors.DuplicateKeyError: E11000 duplicate key error collection:
mydatabase.customers index: _id_ dup key: { : ObjectId('5d7b9a6bc9a8569a44a6da2c') }
How can I prevent duplicate keys in MongoDB?
How can I use an index?
Assuming that eid is the unique key causing the duplicate key error, the code below should fix your problem.
When you loop through the cursor, you also want to loop through your array a and check whether each eid in a already exists in the database cursor. If it does not, that record is inserted with mycol.insert_one(i).
Code:
#!/usr/bin/env python
# Python code to illustrate
# inserting data in MongoDB
from pymongo import MongoClient

try:
    myclient = MongoClient('10.1.3.18', 27017)
    print("Connected successfully!!!")
except:
    print("Could not connect to MongoDB")

emp_rec1 = {
    "name": "Mr.Geek1",
    "eid": 24,
    "location": "delhi"
}
emp_rec2 = {
    "name": "Mr.Shaurya",
    "eid": 14,
    "location": "delhi"
}
emp_rec3 = {
    "name": "Mr.Shaurya111",
    "eid": 141111,
    "location": "delhi111111"
}
a = [emp_rec1, emp_rec2, emp_rec3]
mydb = myclient["mydatabase"]
mycol = mydb["customers"]
cursor = mycol.find()
for record in cursor:
    print(record)
    for i in a:
        if record['eid'] != i['eid']:
            x = mycol.insert_one(i)  # insert the record that is not in the DB yet
            print(record)
If you want to check strictly on the name of the user in the customers collection, the script is the same except for the comparison in the loop:

for record in cursor:
    print(record)
    for i in a:
        if record['name'] != i['name']:
            x = mycol.insert_one(i)
            print(record)
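For completeness, the more robust way to prevent duplicates is to let MongoDB enforce them: create a unique index on the natural key and upsert, so re-running the script never raises E11000. A minimal sketch, assuming eid is the field that must stay unique:

from pymongo import MongoClient, ASCENDING
from pymongo.errors import DuplicateKeyError

myclient = MongoClient('10.1.3.18', 27017)
mycol = myclient["mydatabase"]["customers"]

# Created once; MongoDB then rejects any second document with the same eid.
mycol.create_index([("eid", ASCENDING)], unique=True)

emp_rec1 = {"name": "Mr.Geek1", "eid": 24, "location": "delhi"}

# Upsert: insert if absent, update if present -- safe to run twice.
mycol.update_one({"eid": emp_rec1["eid"]}, {"$set": emp_rec1}, upsert=True)

# Or insert and handle the duplicate explicitly:
try:
    mycol.insert_one({"name": "Mr.Shaurya", "eid": 14, "location": "delhi"})
except DuplicateKeyError:
    print("user already exists, skipping insert")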

What are the possible ways for JSON data processing using SQL, Elasticsearch, or preprocessing in Python

I have a case study where I need to take data from a REST API, do some analysis on it using aggregate functions, joins, etc., and use the response data in JSON format to plot some retail graphs.
Approaches followed so far:
Read the data from the JSON, store it in Python variables, and run an INSERT per record. Obviously this is a costly operation, because every JSON line read triggers a separate insert into the database. For 33k rows it takes more than 20 minutes, which is inefficient.
This could be handled in Elasticsearch for faster processing, but complex operations like joins are not available in Elasticsearch.
If anybody can suggest the best approach (like preprocessing or post-processing in Python) for handling such scenarios, it would be helpful.
Thanks in advance
SQL script:
def store_data(AccountNo):
    db = MySQLdb.connect(host=HOST, user=USER, passwd=PASSWD, db=DATABASE, charset="utf8")
    cursor = db.cursor()
    insert_query = "INSERT INTO cstore (AccountNo) VALUES (%s)"
    cursor.execute(insert_query, (AccountNo,))  # note: params must be a tuple
    db.commit()
    cursor.close()
    db.close()
    return

def on_data(file_path):
    # This is the meat of the script... it reads the JSON file and stores each record
    try:
        # Decode the JSON
        testFile = open(file_path)
        datajson = json.load(testFile)
        #print (len(datajson))
        # grab the wanted data from each entry
        for i in range(len(datajson)):
            for cosponsor in datajson[i]:
                AccountNo = cosponsor['AccountNo']
                store_data(AccountNo)
    except Exception as exc:
        print(exc)
Edit 1: JSON added
{
    "StartDate": "1/1/18",
    "EndDate": "3/30/18",
    "Transactions": [
        {
            "CSPAccountNo": "41469300",
            "ZIP": "60098",
            "ReportDate": "2018-03-08T00:00:00",
            "POSCode": "00980030003",
            "POSCodeModifier": "0",
            "Description": "TIC TAC GUM WATERMEL",
            "ActualSalesPrice": 1.59,
            "TotalCount": 1,
            "Totalsales": 1.59,
            "DiscountAmount": 0,
            "DiscountCount": 0,
            "PromotionAmount": 0,
            "PromotionCount": 0,
            "RefundAmount": 0,
            "RefundCount": 0
        },
        {
            "CSPAccountNo": "41469378",
            "ZIP": "60098",
            "ReportDate": "2018-03-08T00:00:00",
            "POSCode": "01070080727",
            "POSCodeModifier": "0",
            "Description": "PAYDAY KS",
            "ActualSalesPrice": 2.09,
            "TotalCount": 1,
            "Totalsales": 2.09,
            "DiscountAmount": 0,
            "DiscountCount": 0,
            "PromotionAmount": 0,
            "PromotionCount": 0,
            "RefundAmount": 0,
            "RefundCount": 0
        }
    ]
}
I do not have your JSON file, so I don't know whether this runs as-is, but I would try something like the code below: read just the account infos into a list, then write them to the DB in one shot with executemany. I would expect a much better (lower) execution time than 20 minutes.
def store_data(records):
    db = MySQLdb.connect(host=HOST, user=USER, passwd=PASSWD, db=DATABASE, charset="utf8")
    cursor = db.cursor()
    # MySQLdb uses the %(name)s "pyformat" placeholder style, not :name
    insert_query = ("INSERT INTO cstore (AccountNo, ZIP, ReportDate) "
                    "VALUES (%(AccountNo)s, %(ZIP)s, %(ReportDate)s)")
    cursor.executemany(insert_query, records)  # one batched call instead of one insert per row
    db.commit()
    cursor.close()
    db.close()
    return

def on_data(file_path):
    try:
        # declare an empty list for all the account records
        accountno_list = list()
        # Decode the JSON file
        testFile = open(file_path)
        datajson = json.load(testFile)
        # grab the wanted data from each transaction
        for row in datajson['Transactions']:
            values = dict()
            values['AccountNo'] = row['CSPAccountNo']
            values['ZIP'] = row['ZIP']
            values['ReportDate'] = row['ReportDate']
            # from here on you can populate the attributes you need in a similar way
            accountno_list.append(values)
    except:
        pass
    store_data(accountno_list)
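If the file grows far beyond 33k rows, the list can also be flushed in bounded batches so memory stays flat; a rough sketch reusing store_data() above, with an arbitrary chunk size:

def store_in_chunks(records, chunk_size=1000):
    # Send the rows in bounded batches instead of one giant executemany call.
    for start in range(0, len(records), chunk_size):
        store_data(records[start:start + chunk_size])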

building json data from sql database cursor

Without knowing the structure of the JSON, how can I return a JSON object from the database query? All of the information is there; I just can't figure out how to build the object.
import MySQLdb
import json

db = MySQLdb.connect(host, user, password, db)
cursor = db.cursor()
cursor.execute(query)
rows = cursor.fetchall()
field_names = [i[0] for i in cursor.description]
json_string = json.dumps(dict(rows))
print field_names[0]
print field_names[1]
print json_string
db.close()
count
severity
{"321": "7.2", "1": "5.0", "5": "4.3", "7": "6.8", "1447": "9.3", "176": "10.0"}
The JSON object should look like:
{"data":[{"count":"321","severity":"7.2"},{"count":"1","severity":"5.0"},{"count":"5","severity":"4.3"},{"count":"7","severity":"6.8"},{"count":"1447","severity":"9.3"},{"count":"176","severity":"10.0"}]}
The problem you are encountering happens because you turn the fetched rows into a dict without their column names.
dict in Python expects either another dict, or an iterable of two-item tuples, where for each tuple the first item becomes the key and the second the value.
Since you only fetch two columns, you get the first one (count) as the key and the second (severity) as the value for each fetched row.
What you want to do is combine the rows with the column names, like so:
json_string = json.dumps([
    {description: value for description, value in zip(field_names, row)}
    for row in rows])
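To get exactly the desired shape, wrap that list under a "data" key; zip plus dict does the same pairing more tersely:

json_string = json.dumps(
    {"data": [dict(zip(field_names, row)) for row in rows]})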
1- You can use the pymysql DictCursor:
import pymysql

connection = pymysql.connect(db="test")
cursor = connection.cursor(pymysql.cursors.DictCursor)
cursor.execute("SELECT ...")
row = cursor.fetchone()
print row["key"]
2- MySQLdb also includes a DictCursor that you can use. You need to pass cursorclass=MySQLdb.cursors.DictCursor when making the connection.
import MySQLdb
import MySQLdb.cursors

connection = MySQLdb.connect(db="test", cursorclass=MySQLdb.cursors.DictCursor)
cursor = connection.cursor()
cursor.execute("SELECT ...")
row = cursor.fetchone()
print row["key"]
I got this to work using the collections library, although the code is a bit verbose:
import MySQLdb
import json
import collections

db = MySQLdb.connect(host, user, passwd, db)
cursor = db.cursor()
cursor.execute(query)
rows = cursor.fetchall()
field_names = [i[0] for i in cursor.description]
objects_list = []
for row in rows:
    # OrderedDict keeps the column order in the emitted JSON
    d = collections.OrderedDict()
    d[field_names[0]] = row[0]
    d[field_names[1]] = row[1]
    objects_list.append(d)
json_string = json.dumps(objects_list)
print json_string
db.close()
[{"count": 176, "severity": "10.0"}, {"count": 1447, "severity": "9.3"}, {"count": 321, "severity": "7.2"}, {"count": 7, "severity": "6.8"}, {"count": 1, "severity": "5.8"}, {"count": 1, "severity": "5.0"}, {"count": 5, "severity": "4.3"}]
