Cannot fetch data from MySQL database with Python

I'm trying to write a program that receives data from an XBee and then uploads the data to a database. The program is written in Python. I got the XBee part right, but when I try to execute a SQL command I get an error.
Here is the code:
from xbee import ZigBee
import serial
import MySQLdb

PORT = 'COM4'
BAUD_RATE = 9600

# Open serial port
ser = serial.Serial(PORT, BAUD_RATE)
zb = ZigBee(ser, escaped=True)

# DATABASE CONNECTION
db = MySQLdb.connect("localhost", "b3")

# prepare a cursor object using cursor() method
cursor = db.cursor()

# execute SQL query using execute() method.
cursor.execute("SELECT VERSION()")

# Fetch a single row using fetchone() method.
data = cursor.fetchone()
print "Database version : %s " % data

def checkSistem(sistemCode):
    sql = "SELECT count(*) FROM sistema WHERE Codigo = '" + sistemCode + "'"
    try:
        # Execute the SQL command
        cursor.execute(sql)
        # Fetch all the rows in a list of lists.
        results = cursor.fetchone()
        print results[0]
    except:
        print "Error: unable to fecth data"

while True:
    try:
        dataXBee = zb.wait_read_frame()
        data = dataXBee['rf_data'].decode("utf-8").split(",")
        checkSistem(data[0])
        print (data[1])
    except KeyboardInterrupt:
        break
And when I run the program I get this:
Database version : 5.6.26
Error: unable to fecth data
S1
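A minimal debugging sketch, assuming the connection arguments are exactly as in the question: MySQLdb.connect's positional arguments are (host, user, passwd, db), so connect("localhost", "b3") logs in as user b3 with no schema selected, and the bare except then hides the resulting error. Naming the arguments and printing the exception makes the real failure visible (your_schema is a placeholder):

import MySQLdb

# MySQLdb.connect's positional arguments are (host, user, passwd, db), so
# name them explicitly; 'your_schema' is a placeholder for the real database.
db = MySQLdb.connect(host="localhost", user="b3", db="your_schema")
cursor = db.cursor()

def checkSistem(sistemCode):
    # Parameterize the query instead of concatenating strings.
    sql = "SELECT count(*) FROM sistema WHERE Codigo = %s"
    try:
        cursor.execute(sql, (sistemCode,))
        print cursor.fetchone()[0]
    except MySQLdb.Error as e:
        # Print the underlying error instead of swallowing it.
        print "MySQL error: %s" % e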

Related

Unable to copy data into AWS RedShift

I have tried a lot, but I am unable to copy data available as a JSON file in an S3 bucket (I have read-only access to the bucket) to a Redshift table using Python and boto3. Below is the Python code I am using to copy the data. Using the same code, I was able to create the tables into which I am trying to copy.
import configparser
import psycopg2
from sql_queries import create_table_queries, drop_table_queries

def drop_tables(cur, conn):
    for query in drop_table_queries:
        cur.execute(query)
        conn.commit()

def create_tables(cur, conn):
    for query in create_table_queries:
        cur.execute(query)
        conn.commit()

def main():
    try:
        config = configparser.ConfigParser()
        config.read('dwh.cfg')

        # conn = psycopg2.connect("host={} dbname={} user={} password={} port={}".format(*config['CLUSTER'].values()))
        conn = psycopg2.connect(
            host=config.get('CLUSTER', 'HOST'),
            database=config.get('CLUSTER', 'DB_NAME'),
            user=config.get('CLUSTER', 'DB_USER'),
            password=config.get('CLUSTER', 'DB_PASSWORD'),
            port=config.get('CLUSTER', 'DB_PORT')
        )
        cur = conn.cursor()

        #drop_tables(cur, conn)
        #create_tables(cur, conn)

        qry = """copy DWH_STAGE_SONGS_TBL
                 from 's3://udacity-dend/song-data/A/A/A/TRAAACN128F9355673.json'
                 iam_role 'arn:aws:iam::xxxxxxx:role/MyRedShiftRole'
                 format as json 'auto';"""
        print(qry)
        cur.execute(qry)

        # execute a statement
        # print('PostgreSQL database version:')
        # cur.execute('SELECT version()')
        #
        # # display the PostgreSQL database server version
        # db_version = cur.fetchone()
        # print(db_version)

        print("Executed successfully")

        # close the communication with the PostgreSQL
        cur.close()
        conn.close()
    except Exception as error:
        print("Error while processing")
        print(error)

if __name__ == "__main__":
    main()
I don't see any error in the PyCharm console, but I see an Aborted status in the Redshift query console, and I can't see any reason why it was aborted (or I don't know where to look for that).
Another thing I have noticed: when I run the copy statement in the Redshift query editor, it runs fine and the data gets moved into the table. I tried deleting and recreating the cluster, but no luck. I cannot figure out what I am doing wrong. Thank you.
Quick read: it looks like you haven't committed the transaction, so the COPY is rolled back when the connection closes. You need to either change the connection configuration to "autocommit" or add an explicit commit().
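A minimal sketch of both options with psycopg2, reusing the names from the question's code:

# Option 1: turn on autocommit right after connecting, before cur.execute(qry).
conn.autocommit = True

# Option 2: keep the default transactional mode and commit explicitly
# once the COPY has executed.
cur.execute(qry)
conn.commit()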

Speedtest-cli python script, inserting sql with wrong values

Relatively new to Python scripts, so bear with me.
I have used speedtest-cli before. I have edited the script so it will insert the values into a SQL table as below; however, I'm having an issue with one of the inserts. It will insert ping and download fine, but the upload is always something like 2.74 or 2.75, and ONLY when run from a crontab. Very weird.
If I run the Python script from the CLI, it inserts the values fine.
This is my query; the values ping, download, and upload come from the speedtest-cli output.
Here is the full script
import os
import re
import subprocess
import time

import mysql.connector
from mysql.connector import Error
from mysql.connector import errorcode

print "----------------------------------"
print 'Started: {} {}'.format(time.strftime('%d/%m/%y %H:%M:%S'), "")

response = subprocess.Popen('speedtest-cli --simple', shell=True, stdout=subprocess.PIPE).stdout.read()
ping = re.findall('Ping:\s(.*?)\s', response, re.MULTILINE)
download = re.findall('Download:\s(.*?)\s', response, re.MULTILINE)
upload = re.findall('Upload:\s(.*?)\s', response, re.MULTILINE)

ping[0] = ping[0].replace(',', '.')
download[0] = download[0].replace(',', '.')
upload[0] = upload[0].replace(',', '.')

try:
    if os.stat('/var/www/html/speed/log.txt').st_size == 0:
        print 'Date,Time,Ping (ms),Download (Mbit/s),Upload (Mbit/s)'
except:
    pass

print 'PING: {}, DOWN: {}, UP: {}'.format(ping[0], download[0], upload[0])

try:
    connection = mysql.connector.connect(host='localhost',
                                         database='dev',
                                         user='dev',
                                         password='dev1')
    sql_insert_query = ("""INSERT INTO speedtest(ping, download, upload) VALUES (%s,%s,%s)""", (ping[0], download[0], upload[0]))
    cursor = connection.cursor()
    result = cursor.execute(*sql_insert_query)
    connection.commit()
    print ("Insert success into speedtest tbl")
except mysql.connector.Error as error:
    connection.rollback()  # roll back if any exception occurred
    print("Failed inserting record into speedtest table {}".format(error))
finally:
    # closing database connection.
    if connection.is_connected():
        cursor.close()
        connection.close()
        print("MySQL conn closed")

print 'Finished: {} {}'.format(time.strftime('%d/%m/%y %H:%M:%S'), "")
The manual run works fine; only from crontab do I get unexpected values. Not sure how to solve this.
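One way to narrow this down (a debugging sketch, not a confirmed diagnosis; the binary path and the log path are assumptions) is to log the raw speedtest-cli output the script sees under cron, since cron runs with a minimal environment and PATH:

import subprocess

# Use the binary's full path, since cron's PATH is minimal; adjust as needed.
response = subprocess.Popen('/usr/local/bin/speedtest-cli --simple',
                            shell=True, stdout=subprocess.PIPE).stdout.read()

# Append the raw output so cron runs and manual runs can be compared.
with open('/tmp/speedtest_raw.log', 'a') as f:
    f.write(response)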

Getting error on python while transferring data from SQL server to snowflake

I am getting the error below:
query = command % processed_params
TypeError: not all arguments converted during string formatting
I am trying to pull data from SQL Server and then insert it into Snowflake. Here is my code:
my below code
import pyodbc
import sqlalchemy
import snowflake.connector

driver = 'SQL Server'
server = 'tanmay'
db1 = 'testing'
tcon = 'no'
uname = 'sa'
pword = '123'

cnxn = pyodbc.connect(driver='{SQL Server}',
                      host=server, database=db1, trusted_connection=tcon,
                      user=uname, password=pword)
cursor = cnxn.cursor()
cursor.execute("select * from Admin_tbldbbackupdetails")
rows = cursor.fetchall()

#for row in rows:
#    #data = [(row[0], row[1],row[2], row[3],row[4], row[5],row[6], row[7])]
print (rows[0])

cnxn.commit()
cnxn.close()

connection = snowflake.connector.connect(user='****', password='****', account='*****')
cursor2 = connection.cursor()
cursor2.execute("USE WAREHOUSE FOOD_WH")
cursor2.execute("USE DATABASE Test")

sql1="INSERT INTO CN_RND.Admin_tbldbbackupdetails_ip"
"(id,dbname, dbpath, backupdate, backuptime, backupStatus, FaildMsg, Backupsource)"
"values (?,?,?,?,?,?,?,?)"
cursor2.execute(sql1,*rows[0])
It's obviously a string formatting error.
You missed providing a parameter for a %s placeholder.
If you cannot fix it, step back and try another approach.
Use another script to achieve the same thing and come back to your bug tomorrow :-)
My script does pretty much the same:
1. Connect to SQL Server
2. fetchmany
3. Multipart upload to S3
4. COPY INTO the Snowflake table
Details are here: Snowpipe-for-SQLServer
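For what it's worth, another plausible cause (an assumption, not confirmed in this thread) is the placeholder style: the Snowflake Python connector defaults to pyformat (%s) binding, so qmark-style ? placeholders have to be enabled before connecting. A sketch against the question's code:

import snowflake.connector

# Switch the connector to qmark binding so '?' placeholders work; this must
# be set before snowflake.connector.connect() is called.
snowflake.connector.paramstyle = 'qmark'

connection = snowflake.connector.connect(user='****', password='****', account='*****')
cursor2 = connection.cursor()
sql1 = ("INSERT INTO CN_RND.Admin_tbldbbackupdetails_ip "
        "(id, dbname, dbpath, backupdate, backuptime, backupStatus, FaildMsg, Backupsource) "
        "VALUES (?,?,?,?,?,?,?,?)")
# Pass the row as one sequence argument instead of unpacking it.
cursor2.execute(sql1, tuple(rows[0]))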

How to fetch data from read-only mysql database using python?

I am trying to fetch data in Python from a MySQL database using a username that has read-only permission. I am using the mysql.connector package to connect to the database.
It connects to the database properly, as I checked with the following:
connection = mysql.connector.connect(host = HOSTNAME, user = USERNAME, passwd = PASSWORD, db = DATABASE, port=PORT)
print(connection.cmd_statistics())
But when I try to fetch data from the database using a cursor, it returns 'None'.
My code is:
cursor = connection.cursor()
try:
    query1 = 'SELECT * FROM table_name'
    result = cursor.execute(query1)
    print(result)
finally:
    connection.close()
And the output is:
None
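A minimal sketch of the usual fix: in mysql.connector, cursor.execute() itself returns None; the rows have to be fetched from the cursor afterwards:

cursor = connection.cursor()
try:
    cursor.execute('SELECT * FROM table_name')
    rows = cursor.fetchall()  # execute() returns None; the rows live on the cursor
    for row in rows:
        print(row)
finally:
    connection.close()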
This works for Python 3.6.5 and MySQL Workbench 8.0, but I have not tried it with other Python versions.
import _mysql_connector

avi = _mysql_connector.MySQL()
avi.connect(host='127.0.0.1', user='root', port=3306, password='root', database='hr_table')

avi.query("select * from hr_table.countries")
row = avi.fetch_row()
while row:
    print(row)
    row = avi.fetch_row()
avi.free_result()

avi.close()

Add JSON or XML or CSV data to MySQL server

Is there any way of sending either JSON, XML, or CSV data to a local MySQL server?
I'm new to MySQL, and wasn't able to find anything online.
Any of the formats will work, as I have code that can convert all of my data into whichever format I require, i.e. JSON, XML, or CSV.
Any help is appreciated!
1) I am going to give you the answer for JSON >> How to store JSON data in a MySQL DB using Python?
If your JSON has the following format and you want to store the key/value pairs in a MySQL database table, you can follow the first example.
Example: 1
JSON format
{
    "first_key": 10,
    "second_key": 20
}
Python core script for JSON.
import sys
import json
import MySQLdb

# jdata is the incoming JSON string.
myjson = json.loads(jdata)

def dbconnect():
    try:
        db = MySQLdb.connect(
            host='localhost',
            user='root',
            passwd='',
            db='myjson_db'
        )
    except Exception as e:
        sys.exit("Can't connect to database")
    return db

db = dbconnect()
cursor = db.cursor()
sql = """INSERT INTO my_table (array_key, array_value) VALUES (%s, %s)"""
for array_key, array_value in myjson.items():
    cursor.execute(sql, (array_key, array_value))
db.commit()
If you want to store the data in only one column, you can follow the second example.
Example: 2
import sys
import json
import MySQLdb

# jdata is the incoming JSON string.
myjson = json.loads(jdata)

def dbconnect():
    try:
        db = MySQLdb.connect(
            host='localhost',
            user='root',
            passwd='',
            db='myjson_db'
        )
    except Exception as e:
        sys.exit("Can't connect to database")
    return db

db = dbconnect()
cursor = db.cursor()
sql = """INSERT INTO my_table (json_column) VALUES (%s)"""
cursor.execute(sql, (jdata,))  # insert the raw JSON string, not the parsed dict
db.commit()
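Worth noting, though not part of the original answer: MySQL 5.7+ also offers a native JSON column type that validates the document on insert; the insert itself looks the same (the table here is hypothetical):

# Hypothetical table using MySQL 5.7+'s native JSON type.
cursor.execute("CREATE TABLE IF NOT EXISTS my_json_table (json_column JSON)")
cursor.execute("INSERT INTO my_json_table (json_column) VALUES (%s)", (jdata,))
db.commit()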
2) Let's continue with XML >> How to store XML data in a MySQL DB using Python?
XML data
<?xml version="1.0" encoding="UTF-8" ?>
<first_key>10</first_key>
<second_key>20</second_key>
Next step: install a Python script that converts XML to JSON, such as the xml2json module, and import xml2json in our Python core script.
Python Core script for XML
import MySQLdb
import xml2json
import json
xml_data = json.loads(xml2json.xml2json(xmldata))
### data store functionality or logic is same as example 1 and example 2
def dbconnect():
try:
db = MySQLdb.connect(
host='localhost',
user='root',
passwd='',
db='myxml_db'
)
except Exception as e:
sys.exit("Can't connect to database")
return db
cursor = db.cursor()
sql = """INSERT INTO my_table (xml_data) VALUES (%s)"""
cursor.execute(sql, (xml_data))
3) Let's discuss CSV >> How to store CSV data in a MySQL DB using Python?
import sys
import csv
import MySQLdb

csv_data = csv.reader(open('my_csv_file.csv'))

def dbconnect():
    try:
        db = MySQLdb.connect(
            host='localhost',
            user='root',
            passwd='',
            db='mycsv_db'
        )
    except Exception as e:
        sys.exit("Can't connect to database")
    return db

db = dbconnect()
cursor = db.cursor()
for row in csv_data:
    cursor.execute('INSERT INTO my_csv_table(csv_first_column, csv_second_column) '
                   'VALUES (%s, %s)',
                   row)
db.commit()
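As a design note, row-by-row execute() gets slow for large files; cursor.executemany() batches the same insert (same assumed table and columns as above):

csv_rows = list(csv.reader(open('my_csv_file.csv')))
cursor.executemany(
    'INSERT INTO my_csv_table (csv_first_column, csv_second_column) '
    'VALUES (%s, %s)',
    csv_rows)
db.commit()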
I'm unaware of any way of inserting JSON, XML, or CSV into a MySQL database directly.
You can pass the data to a script that inserts it into the database using a module such as MySQL-python.
My python isn't great but hopefully this example should suffice.
#!/usr/bin/python
# Import the MySQL module to interact with the database.
import MySQLdb
# Import the json module to convert the JSON into a Python data structure.
import json

# Convert the JSON to a usable format.
data = json.loads(yourjson)

# Connect to the MySQL server.
db = MySQLdb.connect(host='yourhost',
                     user='youruser',
                     passwd='yourpassword',
                     db='yourschema')

# Create an object to handle SQL statements.
cur = db.cursor()

# Attempt to execute the SQL statement; if it fails, revert any changes.
try:
    # Parameters are passed as a sequence, and dict values are accessed by key.
    cur.execute('INSERT INTO table SET col1 = %s, col2 = %s', (data['foo'], data['bar']))
    db.commit()
except:
    db.rollback()
