Return Missing Rows from Python SQL Query

Is there any way I can compare two different databases (PostgreSQL, SQL Server) and return the missing rows? One row that is in the SQL Server table is missing from the PostgreSQL table, and I have no clue how to return that answer.
I have two connections open: one for PostgreSQL (bpo_table_results) and one for SQL Server (rps_table_results).
PostgreSQL table:
date    count  amount
1/1/21  500    1,234,654.12

SQL Server table:
date    count  amount
1/1/21  500    1,234,654.12
1/2/21  4541   3,457,787.24

expected results:
The row in the amount of 3,457,787.24 is missing from your PostgreSQL table.
code:
def queryRPS(sql_server_conn, sql_server_cursor):
    rps_item_count_l = []
    rps_icl_amt_l = []
    rps_table_q_2 = f"""select * from rps..sendfile where processingdate = '{cd}' and datasetname like '%ICL%' """
    rps_table_results = sql_server_cursor.execute(rps_table_q_2).fetchall()
    for row in rps_table_results:
        rps_item_count = row[16]
        rps_item_count_l.append(rps_item_count)
        rps_icl_amt = row[18]
        rps_icl_amt_l.append(rps_icl_amt)
    return rps_item_count_l, rps_icl_amt_l  # the caller unpacks two values, so return them
def queryBPO(postgres_conn, postgres_cursor, rps_item_count_l, rps_icl_amt_l):
    bpo_results_l = []
    rps_results_l = []
    for rps_count, rps_amount in zip(rps_item_count_l, rps_icl_amt_l):
        rps_amount_f = str(rps_amount).rstrip('0')
        rps_amount_f = "{:,}".format(float(rps_amount_f))
        bpo_icl_awk_q_2 = """select * from ppc_data.icl_awk where num_items = '%s' and
            file_total = '%s' """ % (str(rps_count), str(rps_amount_f))
        postgres_cursor.execute(bpo_icl_awk_q_2)
        bpo_table_results = postgres_cursor.fetchall()

rps_item_count_l, rps_icl_amt_l = queryRPS(sql_server_conn, sql_server_cursor)
queryBPO(postgres_conn, postgres_cursor, rps_item_count_l, rps_icl_amt_l)
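One way to get the expected result: reduce both tables to comparable (count, amount) pairs and report the SQL Server pairs with no PostgreSQL match. A minimal sketch; the bpo_item_count_l / bpo_amount_l names are hypothetical stand-ins for lists collected from the PostgreSQL query, and amounts are assumed to be numeric:

def find_missing_rows(rps_rows, bpo_rows):
    # both inputs are assumed to be sequences of (count, amount) pairs
    bpo_keys = set(bpo_rows)
    for count, amount in rps_rows:
        if (count, amount) not in bpo_keys:
            print(f"The row in the amount of {float(amount):,.2f} is missing from your PostgreSQL table.")

# illustrative call: pair up the lists collected from each database
find_missing_rows(zip(rps_item_count_l, rps_icl_amt_l),
                  zip(bpo_item_count_l, bpo_amount_l))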

Related

Get all data API when inputs are empty

I created my first API where I can get data from my tables in BigQuery.
I can get all the data I need based on the two inputs below, but I am also trying to return the whole table when the inputs are empty, which I cannot get to work.
Thanks for your help
@app.route("/tracking", methods=['GET'])
def tracking_data():
    haulier_id_tracking = request.args.get('haulier_id_tracking')
    month_tracking = request.args.get('month_tracking')
    query_job = bq_client.query("""
        WITH t AS (
            SELECT *
            FROM mart.monthly_vehicle_stats
            WHERE dt_fr_month = '{month_tracking}-01' AND (haulier_id_tracking = '{haulier_id_tracking}'))
        SELECT TO_JSON_STRING(STRUCT(ARRAY_AGG(STRUCT(dt_fr_month, haulier_id_tracking, vehicle_id, nb_days_tracked,
            data_access, date_first_camp, invoiced)) AS data)) json
        FROM t
    """.format(month_tracking=month_tracking, haulier_id_tracking=haulier_id_tracking))
    for row in query_job:
        return json.loads(row["json"])
You can omit the WHERE clause when the inputs are empty, like this:
@app.route("/tracking", methods=['GET'])
def tracking_data():
    haulier_id_tracking = request.args.get('haulier_id_tracking')
    month_tracking = request.args.get('month_tracking')
    where_clause = ''
    # request.args.get returns None for a missing parameter, so test truthiness
    if haulier_id_tracking and month_tracking:
        where_clause = f"WHERE dt_fr_month = '{month_tracking}-01' AND (haulier_id_tracking = '{haulier_id_tracking}')"
    query_job = bq_client.query(f"""
        WITH t AS (
            SELECT * FROM mart.monthly_vehicle_stats {where_clause})
        SELECT TO_JSON_STRING(STRUCT(ARRAY_AGG(STRUCT(dt_fr_month, haulier_id_tracking, vehicle_id, nb_days_tracked,
            data_access, date_first_camp, invoiced)) AS data)) json
        FROM t
    """)
    for row in query_job:
        return json.loads(row["json"])
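As a side note, interpolating raw request arguments into the SQL string leaves the endpoint open to injection. BigQuery supports query parameters, so a safer variant is possible; this is only a sketch, assuming dt_fr_month is a DATE column (use a STRING parameter otherwise):

from google.cloud import bigquery

@app.route("/tracking", methods=['GET'])
def tracking_data():
    haulier_id_tracking = request.args.get('haulier_id_tracking')
    month_tracking = request.args.get('month_tracking')
    where_clause = ''
    job_config = bigquery.QueryJobConfig()
    if haulier_id_tracking and month_tracking:
        # named parameters (@month, @haulier) are bound server-side, never interpolated
        where_clause = "WHERE dt_fr_month = @month AND haulier_id_tracking = @haulier"
        job_config.query_parameters = [
            bigquery.ScalarQueryParameter("month", "DATE", f"{month_tracking}-01"),
            bigquery.ScalarQueryParameter("haulier", "STRING", haulier_id_tracking),
        ]
    query_job = bq_client.query(f"""
        WITH t AS (SELECT * FROM mart.monthly_vehicle_stats {where_clause})
        SELECT TO_JSON_STRING(STRUCT(ARRAY_AGG(STRUCT(dt_fr_month, haulier_id_tracking, vehicle_id,
            nb_days_tracked, data_access, date_first_camp, invoiced)) AS data)) json
        FROM t
    """, job_config=job_config)
    for row in query_job:
        return json.loads(row["json"])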

python peewee dynamically or + and clauses

I'd like to AND together two lists of multiple OR clauses on the same table.
The problem with the following code is that the query result is empty. If I filter on just 'indices' or just 'brokers', the result is fine.
...
query = query.join(StockGroupTicker, on=(Ticker.id == StockGroupTicker.ticker))

# indices
if "indices" in filter:
    where_indices = []
    for f in filter["indices"]:
        where_indices.append(StockGroupTicker.stock_index == int(f))
    if len(where_indices):
        query = query.where(peewee.reduce(peewee.operator.or_, where_indices))

# broker
if "brokers" in filter:
    where_broker = []
    for f in filter["brokers"]:
        where_broker.append(StockGroupTicker.stock_index == int(f))
    if len(where_broker):
        query = query.where(peewee.reduce(peewee.operator.or_, where_broker))

return query.distinct()
SQL query (update)
# index and broker
SELECT
DISTINCT `t1`.`id`,
`t1`.`symbol`,
`t1`.`type`,
`t1`.`name`,
`t1`.`sector`,
`t1`.`region`,
`t1`.`primary_exchange`,
`t1`.`currency`,
`t1`.`score`,
`t1`.`last_price`,
`t1`.`last_price_date`,
`t1`.`last_price_check`,
`t1`.`last_stock_split`,
`t1`.`next_earning`,
`t1`.`last_earnings_update`,
`t1`.`disused`,
`t1`.`source`,
`t1`.`source_intraday`,
`t1`.`created`,
`t1`.`modified`,
`t2`.`invest_score` AS `invest_score`
FROM
`ticker` AS `t1`
INNER JOIN `tickerstats` AS `t2` ON
(`t1`.`id` = `t2`.`ticker_id`)
INNER JOIN `stockgroupticker` AS `t3` ON
(`t1`.`id` = `t3`.`ticker_id`)
WHERE
(((((`t1`.`disused` IS NULL)
OR (`t1`.`disused` = 0))
AND (`t2`.`volume_mean_5` > 10000.0))
AND (`t3`.`stock_index_id` = 1))
AND (`t3`.`stock_index_id` = 10)
)
Thanks to @coleifer, the peewee solution is quite simple: I had to use an alias. Both OR groups previously filtered the same joined table, so the combined WHERE asked for stock_index_id to be two different values at once; joining the table a second time under an alias lets each group constrain its own join.
if "indices" in filter and filter["indices"]:
query = query.join(
StockGroupTicker, peewee.JOIN.INNER, on=(Ticker.id == StockGroupTicker.ticker)
)
where_indices = []
for f in filter["indices"]:
where_indices.append(StockGroupTicker.stock_index == int(f))
if len(where_indices):
query = query.where(peewee.reduce(peewee.operator.or_, where_indices))
if "brokers" in filter and filter["brokers"]:
BrokerGroupTicker = StockGroupTicker.alias()
query = query.join(
BrokerGroupTicker, peewee.JOIN.INNER, on=(Ticker.id == BrokerGroupTicker.ticker)
)
where_broker = []
for f in filter["brokers"]:
where_broker.append(BrokerGroupTicker.stock_index == int(f))
if len(where_broker):
query = query.where(peewee.reduce(peewee.operator.or_, where_broker))
return query.distinct()
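As an aside, the reduce/or_ pair usually comes from the standard library rather than from peewee itself; a minimal sketch of the same OR-group construction, assuming the models above:

import operator
from functools import reduce

# each comparison is a peewee expression; reduce ORs them into one composite clause
clauses = [StockGroupTicker.stock_index == int(f) for f in filter["indices"]]
if clauses:
    query = query.where(reduce(operator.or_, clauses))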

read_sql query returns an empty dataframe after I pass parameters as a dict in python pandas

I am trying to parameterize some parts of a SQL query using the below dictionary:
query_params = {'target': 'status',
                'date_from': '201712',
                'date_to': '201805',
                'drform_target': 'NPA'}
sql_data_sample = """select *
    from table_name
    where dt = %(date_to)s
    and %(target)s in (%(drform_target)s)
    ----------------------------------------------------
    union all
    ----------------------------------------------------
    (select *
    from table_name
    where dt = %(date_from)s
    and %(target)s in ('ACT')
    order by random() limit 50000);"""
df_data_sample = pd.read_sql(sql_data_sample, con=cnxn, params=query_params)
However, this returns a dataframe with no records at all. I am not sure what the error is, since no error is being thrown.
df_data_sample.shape
Out[7]: (0, 1211)
The final PostgreSQL query would be:
select *
from table_name
where dt = '201805'
and status in ('NPA')
----------------------------------------------------
union all
----------------------------------------------------
(select *
from table_name
where dt = '201712'
and status in ('ACT')
order by random() limit 50000);-- This part of random() is only for running it on my local and not on server.
Below is a small sample of data for replication. The original data has more than a million records and 1211 columns
service_change_3m  service_change_6m  dt      grp_m2     status
 0                 -2                 201805  $50-$75    NPA
 0                  0                 201805  < $25      NPA
 0                 -1                 201805  $175-$200  ACT
 0                  0                 201712  $150-$175  ACT
 0                  0                 201712  $125-$150  ACT
-1                  1                 201805  $50-$75    NPA
Can someone please help me with this?
UPDATE:
Based on a suggestion by @shmee, I am finally using:
target = 'status'
query_params = {'date_from': '201712',
                'date_to': '201805',
                'drform_target': 'NPA'}
sql_data_sample = """select *
    from table_name
    where dt = %(date_to)s
    and {0} in (%(drform_target)s)
    ----------------------------------------------------
    union all
    ----------------------------------------------------
    (select *
    from table_name
    where dt = %(date_from)s
    and {0} in ('ACT')
    order by random() limit 50000);""".format(target)
df_data_sample = pd.read_sql(sql_data_sample, con=cnxn, params=query_params)
Yes, I am quite confident that your issue results from trying to set column names in your query via parameter binding (and %(target)s in ('ACT')) as mentioned in the comments.
This results in your query restricting the result set to records where 'status' in ('ACT') (i.e. Is the string 'status' an element of a list containing only the string 'ACT'?). This is, of course, false, hence no record gets selected and you get an empty result.
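A quick way to see what actually gets sent is cursor.mogrify, assuming cnxn is a psycopg2 connection; note how the intended column name is rendered as a quoted string literal:

cur = cnxn.cursor()
# mogrify returns the query bytes after parameter binding
print(cur.mogrify("select * from table_name where %(target)s in ('ACT')",
                  {'target': 'status'}).decode())
# -> select * from table_name where 'status' in ('ACT')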
This should work as expected:
from psycopg2 import sql  # note: bind the sql module; `import psycopg2.sql` would require psycopg2.sql.SQL

col_name = 'status'
table_name = 'public.churn_data'
query_params = {'date_from': '201712',
                'date_to': '201805',
                'drform_target': 'NPA'}
sql_data_sample = """select *
    from {0}
    where dt = %(date_to)s
    and {1} in (%(drform_target)s)
    ----------------------------------------------------
    union all
    ----------------------------------------------------
    (select *
    from {0}
    where dt = %(date_from)s
    and {1} in ('ACT')
    order by random() limit 50000);"""
sql_data_sample = sql.SQL(sql_data_sample).format(sql.Identifier(table_name),
                                                  sql.Identifier(col_name))
df_data_sample = pd.read_sql(sql_data_sample, con=cnxn, params=query_params)
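One caveat on the snippet above: sql.Identifier('public.churn_data') quotes the dotted string as a single identifier ("public.churn_data"), which PostgreSQL will not resolve as schema.table. If the table really is schema-qualified, newer psycopg2 releases (2.8+, as far as I recall) accept the parts separately:

sql.Identifier('public', 'churn_data')  # renders as "public"."churn_data"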

How to use python to ETL between databases?

Using psycopg2, I'm able to select data from a table in one PostgreSQL database connection and INSERT it into a table in a second PostgreSQL database connection.
However, I'm only able to do it by setting the exact feature I want to extract, and writing out separate variables for each column I'm trying to insert.
Does anyone know of a good practice for either:
moving an entire table between databases, or
iterating through features while not having to declare variables for every column you want to move
or...?
Here's the script I'm currently using where you can see the selection of a specific feature, and the creation of variables (it works, but this is not a practical method):
import psycopg2
connDev = psycopg2.connect("host=host1 dbname=dbname1 user=postgres password=*** ")
connQa = psycopg2.connect("host=host2 dbname=dbname2 user=postgres password=*** ")
curDev = connDev.cursor()
curQa = connQa.cursor()
sql = ('INSERT INTO "tempHoods" (nbhd_name, geom) values (%s, %s);')
curDev.execute('select cast(geom as varchar) from "CCD_Neighborhoods" where nbhd_id = 11;')
tempGeom = curDev.fetchone()
curDev.execute('select nbhd_name from "CCD_Neighborhoods" where nbhd_id = 11;')
tempName = curDev.fetchone()
data = (tempName, tempGeom)
curQa.execute (sql, data)
#commit transactions
connDev.commit()
connQa.commit()
#close connections
curDev.close()
curQa.close()
connDev.close()
connQa.close()
One other note: Python lets you work explicitly with SQL functions and data-type casting, which matters for us because we work with the GEOMETRY data type. Above you can see I cast it to varchar and then insert it into an existing geometry column in the target table. This also works with MS SQL Server, which is a big deal in the geospatial community...
In your solution (your solution and your question order the statements differently), change the lines that start with 'sql = ' and the loop before the '#commit transactions' comment to:
sql_insert = 'INSERT INTO "tempHoods" (nbhd_id, nbhd_name, typology, notes, geom) values '
sql_values = ['(%s, %s, %s, %s, %s)']
data_values = []
# you can make this larger if you want
# ...try experimenting to see what works best
batch_size = 100
sql_stmt = sql_insert + ','.join(sql_values * batch_size) + ';'
for i, row in enumerate(rows, 1):
    data_values += row[:5]
    if i % batch_size == 0:
        curQa.execute(sql_stmt, data_values)
        data_values = []
if i % batch_size != 0:
    sql_stmt = sql_insert + ','.join(sql_values * (i % batch_size)) + ';'
    curQa.execute(sql_stmt, data_values)
BTW, you don't need to commit on the source connection: you don't need to commit a cursor if all you did was a bunch of SELECTs on it. The connection you INSERT into does still need a commit, though, since psycopg2 opens a transaction implicitly.
Here's my updated code based on Dmitry's brilliant solution:
import psycopg2
connDev = psycopg2.connect("host=host1 dbname=dpspgisdev user=postgres password=****")
connQa = psycopg2.connect("host=host2 dbname=dpspgisqa user=postgres password=****")
curDev = connDev.cursor()
curQa = connQa.cursor()
print("Truncating Source")
curQa.execute('delete from "tempHoods"')
connQa.commit()
# Get Data
curDev.execute('select nbhd_id, nbhd_name, typology, notes, cast(geom as varchar) from "CCD_Neighborhoods";')  # cast geom to varchar and insert into geometry column!
rows = curDev.fetchall()
sql_insert = 'INSERT INTO "tempHoods" (nbhd_id, nbhd_name, typology, notes, geom) values '
sql_values = ['(%s, %s, %s, %s, %s)']  # number of columns selecting / inserting
data_values = []
batch_size = 1000  # customize for size of tables...
sql_stmt = sql_insert + ','.join(sql_values * batch_size) + ';'
for i, row in enumerate(rows, 1):
    data_values += row[:5]  # relates to number of columns (%s)
    if i % batch_size == 0:
        curQa.execute(sql_stmt, data_values)
        connQa.commit()
        print("Inserting...")
        data_values = []
if i % batch_size != 0:
    sql_stmt = sql_insert + ','.join(sql_values * (i % batch_size)) + ';'
    curQa.execute(sql_stmt, data_values)
    print("Last Values...")
    connQa.commit()
# close connections
curDev.close()
curQa.close()
connDev.close()
connQa.close()
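For what it's worth, psycopg2's extras module ships a helper that does this batched VALUES expansion for you; a minimal sketch reusing the cursor and rows above:

from psycopg2.extras import execute_values

# execute_values builds multi-row VALUES statements in pages of page_size rows
execute_values(curQa,
               'INSERT INTO "tempHoods" (nbhd_id, nbhd_name, typology, notes, geom) VALUES %s',
               rows, page_size=1000)
connQa.commit()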

Python MySQLdb SELECT not returning proper value

Here's the code I'm working on:
poljeID = int(cursor.execute("SELECT poljeID FROM stanje"))
xkoord = cursor.execute("SELECT xkoord FROM polje WHERE poljeID = %s;", poljeID)
ykoord = cursor.execute("SELECT ykoord FROM polje WHERE poljeID = %s;", poljeID)
print xkoord, ykoord
It's a snippet; basically, it needs to fetch the ID of the field (poljeID) the agent is currently on (stanje) and use it to get the x and y coordinates of that field (xkoord, ykoord).
The initial values for the variables are:
poljeID = 1
xkoord = 0
ykoord = 0
The values that I get with that code are:
poljeID = 1
xkoord = 1
ykoord = 1
What am I doing wrong?
cursor.execute does not return the result of the query, it returns the number of rows affected. To get the result, you need to do cursor.fetchone() (or cursor.fetchall()) for each query.
(Note: really, the second and third queries should be done at once: SELECT xkoord, ykoord FROM ...)
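Putting both points together, a corrected version of the snippet (a sketch; note the one-element tuple for the query parameter):

cursor.execute("SELECT poljeID FROM stanje")
poljeID = int(cursor.fetchone()[0])  # fetch the value, not the affected-row count
cursor.execute("SELECT xkoord, ykoord FROM polje WHERE poljeID = %s;", (poljeID,))
xkoord, ykoord = cursor.fetchone()   # both coordinates in one round trip
print(xkoord, ykoord)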
