I am trying to load data from a REST API into a SQL Server table using a Python script. The script below works, but the issue is that it takes too long to upload the data into the database: a few minutes for 8,000 records.
import requests
import pandas as pd
import pyodbc
import sys
from requests.auth import HTTPBasicAuth

# SQL connection
conn_str = (
    r'DRIVER={SQL Server Native Client 11.0};'
    r'SERVER=tcp:abcd.database.windows.net;'
    r'DATABASE=DEF;'
    r'UID=xxxxx;'
    r'PWD=yyyy;'
    r'Trusted_Connection=no;'
)
sqlconn = pyodbc.connect(conn_str)
cursor = sqlconn.cursor()
cursor.execute("TRUNCATE TABLE [dbo].[RM_Approved_Room_State]")
sqlconn.commit()

# API connection
baseURL = 'https://testing.org/api/fdf'
appRoomsResponse = requests.get(baseURL, verify=False)
appRoomsJson = appRoomsResponse.json()
appRoomsDF = pd.json_normalize(appRoomsJson)
appRoomsDF = appRoomsDF.fillna('')

try:
    cursor = sqlconn.cursor()
    for index, row in appRoomsDF.iterrows():
        cursor.execute(
            "INSERT INTO RM_Approved_Room_State(APPROVED_ROOM_STATEID,SOURCE_ROOMID,DEST_ROOMID,ENTITY_TYPEID) "
            "VALUES (?,?,?,?)",
            row['id'],
            row['sourceRoomRefId'],
            row['destinationRoomRefId'],
            row['entityRefId'],
        )
    sqlconn.commit()
except Exception:
    pass

# Load main Dim_appRooms table
cursor = sqlconn.cursor()
sqlconn.commit()
cursor.close()
sqlconn.close()
Is there something I am missing to increase the speed of the insert here? This is my first script in Python; any help is greatly appreciated.
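The slowness almost certainly comes from the row-by-row cursor.execute inside iterrows(): every row is a separate round trip to the server. Below is a minimal sketch of a batched alternative, assuming pyodbc 4.0.19 or later (for fast_executemany), a driver that supports it, and the same DataFrame and table as above.

# Build the full parameter list once, then send it in a single executemany call
params = list(appRoomsDF[['id', 'sourceRoomRefId',
                          'destinationRoomRefId', 'entityRefId']]
              .itertuples(index=False, name=None))

cursor = sqlconn.cursor()
cursor.fast_executemany = True  # bulk-sends the parameter array instead of one round trip per row
cursor.executemany(
    "INSERT INTO RM_Approved_Room_State"
    "(APPROVED_ROOM_STATEID,SOURCE_ROOMID,DEST_ROOMID,ENTITY_TYPEID) "
    "VALUES (?,?,?,?)",
    params,
)
sqlconn.commit()

Even without fast_executemany, replacing the per-row execute loop with a single executemany and one commit usually cuts the load time substantially; fast_executemany mainly pays off with a recent driver such as "ODBC Driver 17 for SQL Server".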
I can successfully connect to SQL Server from my Jupyter notebook with this script:
from sqlalchemy import create_engine
import pyodbc
import csv
import time
import urllib
params = urllib.parse.quote_plus('''DRIVER={SQL Server Native Client 11.0};
SERVER=SV;
DATABASE=DB;
TRUSTED_CONNECTION=YES;''')
engine = create_engine("mssql+pyodbc:///?odbc_connect=%s" % params)
I managed to execute some SQL scripts like this:
engine.execute("delete from table_name_X")
However, I can't execute stored procedures. I tried the following scripts, based on what I've seen about stored procedures with SQLAlchemy. They return something like "sqlalchemy.engine.result.ResultProxy at 0x173ed18e470", but the procedure is never actually executed (nothing happens):
# test 1
engine.execute('stored_procedure_name')
# test 2
from sqlalchemy import func
from sqlalchemy.orm import sessionmaker
session = sessionmaker(bind=engine)()
session.execute(func.upper('stored_procedure_name'))
Could you please give me the correct way to execute stored procedures?
The way you can call a stored procedure using pyodbc is:
cursor.execute("{CALL usp_StoreProcedure}")
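If the procedure takes parameters, the same ODBC call escape syntax accepts placeholders. A short sketch, where the procedure name and argument values are hypothetical:

# usp_StoreProcedure, 'abc' and 42 are placeholders for your procedure and its arguments
cursor.execute("{CALL usp_StoreProcedure (?, ?)}", ('abc', 42))
cursor.commit()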
I found a solution by following this link: https://github.com/mkleehammer/pyodbc/wiki/Calling-Stored-Procedures
Here is an example:
import pyodbc
import urllib
import sqlalchemy as sa

params = urllib.parse.quote_plus("DRIVER={SQL Server Native Client 11.0};"
                                 "SERVER=xxx.xxx.xxx.xxx;"
                                 "DATABASE=DB;"
                                 "UID=user;"
                                 "PWD=pass")
engine = sa.create_engine("mssql+pyodbc:///?odbc_connect={}".format(params))
connection = engine.raw_connection()
try:
    cursor = connection.cursor()
    cursor.execute("{CALL stored_procedure_name}")
    result = cursor.fetchall()
    print(result)
    connection.commit()
finally:
    connection.close()
I finally solved my problem with the following function:
def execute_stored_procedure(engine, procedure_name):
    res = {}
    connection = engine.raw_connection()
    try:
        cursor = connection.cursor()
        cursor.execute("EXEC " + procedure_name)
        cursor.close()
        connection.commit()
        res['status'] = 'OK'
    except Exception as e:
        res['status'] = 'ERROR'
        res['error'] = e
    finally:
        connection.close()
    return res
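A hypothetical call, assuming the engine created earlier and a procedure named dbo.my_procedure (a placeholder name):

# dbo.my_procedure is a placeholder procedure name
result = execute_stored_procedure(engine, 'dbo.my_procedure')
print(result)  # {'status': 'OK'} on success, {'status': 'ERROR', 'error': ...} on failure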
I cannot connect to my pythonanywhere.com database from my local project.
The error is:
mysql.connector.errors.InterfaceError: 2013: Lost connection to MySQL server during query
Thank you in advance!
from flask import Flask, request, url_for, redirect
from flask_restful import Resource, Api
import pymysql as MySQLdb
from flaskext.mysql import MySQL
import mysql.connector
import sshtunnel

app = Flask(__name__)
api = Api(app)

sshtunnel.SSH_TIMEOUT = 5.0
sshtunnel.TUNNEL_TIMEOUT = 5.0

with sshtunnel.SSHTunnelForwarder(
    ('ssh.pythonanywhere.com'),
    ssh_username='XXX', ssh_password='XXX',
    remote_bind_address=('bianca.mysql.pythonanywhere-services.com', 3306)
) as tunnel:
    connection = mysql.connector.connect(
        user='XXX', password='XXX',
        host='127.0.0.1', port=tunnel.local_bind_port,
        database='bianca$moviesdb',
    )
    connection.close()
    # print('after SSH connection')

conn = MySQLdb.connect("bianca.mysql.pythonanywhere-services.com", "XXX", "XXX", "bianca$moviesdb")
c = conn.cursor()
c.execute("SELECT * FROM reviews")
rows = c.fetchall()
for eachRow in rows:
    print(eachRow)

if __name__ == '__main__':
    app.run(port=5002)
Many things could be happening here: timeout errors, packet sizes, etc. Also check your firewall settings.
https://dev.mysql.com/doc/refman/5.7/en/error-lost-connection.html
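One more thing worth checking in the code above: the SSH tunnel is opened and then closed without being used for the query, and the pymysql connection goes straight to bianca.mysql.pythonanywhere-services.com instead of through the tunnel's local port; that hostname is generally not reachable from outside PythonAnywhere, which would produce exactly this kind of lost-connection error. A minimal sketch of running the query through the tunnel (credentials are placeholders):

import sshtunnel
import pymysql

sshtunnel.SSH_TIMEOUT = 5.0
sshtunnel.TUNNEL_TIMEOUT = 5.0

with sshtunnel.SSHTunnelForwarder(
    ('ssh.pythonanywhere.com'),
    ssh_username='XXX', ssh_password='XXX',
    remote_bind_address=('bianca.mysql.pythonanywhere-services.com', 3306)
) as tunnel:
    # Connect to the local end of the tunnel, not to the remote hostname
    conn = pymysql.connect(host='127.0.0.1', port=tunnel.local_bind_port,
                           user='XXX', passwd='XXX', db='bianca$moviesdb')
    c = conn.cursor()
    c.execute("SELECT * FROM reviews")
    for row in c.fetchall():
        print(row)
    conn.close()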
All,
I am attempting to load data into Blaze from a HiveServer2 Thrift server. I would like to do some analysis similar to what is posted here. Here is my current process.
import blaze as bz
import sqlalchemy
from impala.dbapi import connect

conn = connect(host='myhost.url.com', port=10000, database='mydb', user='hive', auth_mechanism='PLAIN')
engine = sqlalchemy.create_engine('hive://', creator=conn)
data = bz.data(engine)
I am able to make the connection and generate the engine, but when I run bz.data it fails with the error
TypeError: 'HiveServer2Connection' object is not callable
Any help is appreciated.
Answer
from pyhive import hive
import sqlalchemy
from impala.dbapi import connect

def conn():
    return connect(host='myhost.com', port=10000, database='database', user='username', auth_mechanism='PLAIN')

engine = sqlalchemy.create_engine('hive://', creator=conn)

# Workaround
import blaze as bz
data = bz.data(engine)
I was having this same issue when using impyla to connect to Impala with SQLAlchemy. Making conn a function that returns a new connection, instead of assigning a connection object to it, worked, because SQLAlchemy's creator argument expects a callable.
I'm using MySQLdb with Python 2.7 to allow Python to make connections to another MySQL server:

import MySQLdb

db = MySQLdb.connect(host="sql.domain.com",
                     user="dev",
                     passwd="*******",
                     db="appdb")
Instead of connecting normally like this, how can the connection be made through a SSH tunnel using SSH key pairs?
The SSH tunnel should ideally be opened by Python. The SSH tunnel host and the MySQL server are the same machine.
Only this worked for me
import pymysql
import paramiko
import pandas as pd
from paramiko import SSHClient
from sshtunnel import SSHTunnelForwarder
from os.path import expanduser

home = expanduser('~')
# pkeyfilepath is a placeholder for the path to your private key, e.g. '/.ssh/id_rsa'
mypkey = paramiko.RSAKey.from_private_key_file(home + pkeyfilepath)
# if you want to use an ssh password, pass ssh_password='your ssh password' below instead of ssh_pkey

sql_hostname = 'sql_hostname'
sql_username = 'sql_username'
sql_password = 'sql_password'
sql_main_database = 'db_name'
sql_port = 3306
ssh_host = 'ssh_hostname'
ssh_user = 'ssh_username'
ssh_port = 22
sql_ip = '1.1.1.1.1'  # not used in this example

with SSHTunnelForwarder(
        (ssh_host, ssh_port),
        ssh_username=ssh_user,
        ssh_pkey=mypkey,
        remote_bind_address=(sql_hostname, sql_port)) as tunnel:
    conn = pymysql.connect(host='127.0.0.1', user=sql_username,
                           passwd=sql_password, db=sql_main_database,
                           port=tunnel.local_bind_port)
    query = '''SELECT VERSION();'''
    data = pd.read_sql_query(query, conn)
    conn.close()
I'm guessing you'll need port forwarding. I recommend sshtunnel.SSHTunnelForwarder
import mysql.connector
import sshtunnel

with sshtunnel.SSHTunnelForwarder(
        (_host, _ssh_port),
        ssh_username=_username,
        ssh_password=_password,
        remote_bind_address=(_remote_bind_address, _remote_mysql_port),
        local_bind_address=(_local_bind_address, _local_mysql_port)
) as tunnel:
    connection = mysql.connector.connect(
        user=_db_user,
        password=_db_password,
        host=_local_bind_address,
        database=_db_name,
        port=_local_mysql_port)
    ...
from sshtunnel import SSHTunnelForwarder
import pymysql
import pandas as pd

tunnel = SSHTunnelForwarder(('SSH_HOST', 22), ssh_password=SSH_PASS, ssh_username=SSH_UNAME,
                            remote_bind_address=(DB_HOST, 3306))
tunnel.start()

conn = pymysql.connect(host='127.0.0.1', user=DB_UNAME, passwd=DB_PASS, port=tunnel.local_bind_port)
data = pd.read_sql_query("SHOW DATABASES;", conn)

Credits to https://www.reddit.com/r/learnpython/comments/53wph1/connecting_to_a_mysql_database_in_a_python_script/
If your private key file is encrypted, this is what worked for me:
import paramiko
import pymysql
import pandas as pd
from sshtunnel import SSHTunnelForwarder

mypkey = paramiko.RSAKey.from_private_key_file(<<file location>>, password='password')

sql_hostname = 'sql_hostname'
sql_username = 'sql_username'
sql_password = 'sql_password'
sql_main_database = 'sql_main_database'
sql_port = 3306
ssh_host = 'ssh_host'
ssh_user = 'ssh_user'
ssh_port = 22

with SSHTunnelForwarder(
        (ssh_host, ssh_port),
        ssh_username=ssh_user,
        ssh_pkey=mypkey,
        ssh_password='ssh_password',
        remote_bind_address=(sql_hostname, sql_port)) as tunnel:
    conn = pymysql.connect(host='localhost', user=sql_username,
                           passwd=sql_password, db=sql_main_database,
                           port=tunnel.local_bind_port)
    query = '''SELECT VERSION();'''
    data = pd.read_sql_query(query, conn)
    print(data)
    conn.close()
You can simply pass the path to the private key file: ssh_pkey='/home/userName/.ssh/id_ed25519' (documentation is here: https://sshtunnel.readthedocs.io/en/latest/).
If you use mysql.connector from Oracle, you must use the construction
cnx = mysql.connector.MySQLConnection(...)
Important: the construction
cnx = mysql.connector.connect(...)
does not work via SSH! It is a bug.
(The documentation is here: https://dev.mysql.com/doc/connector-python/en/connector-python-connectargs.html).
Also, your SQL statement must be correct: in case of an error on the SQL server side, you do not receive an error message back from the server.
import sshtunnel
import numpy as np
import mysql.connector

with sshtunnel.SSHTunnelForwarder(ssh_address_or_host='ssh_host',
                                  ssh_username="ssh_username",
                                  ssh_pkey='/home/userName/.ssh/id_ed25519',
                                  remote_bind_address=('localhost', 3306),
                                  ) as tunnel:
    cnx = mysql.connector.MySQLConnection(user='sql_username',
                                          password='sql_password',
                                          host='127.0.0.1',
                                          database='db_name',
                                          port=tunnel.local_bind_port)
    cursor = cnx.cursor()
    cursor.execute('SELECT * FROM db_name.tableName;')
    arr = np.array(cursor.fetchall())
    cursor.close()
    cnx.close()
This works for me:
import mysql.connector
import sshtunnel

with sshtunnel.SSHTunnelForwarder(
    ('ip-of-ssh-server', 'port-in-number-format'),
    ssh_username='ssh-username',
    ssh_password='ssh-password',
    remote_bind_address=('127.0.0.1', 3306)
) as tunnel:
    connection = mysql.connector.connect(
        user='database-username',
        password='database-password',
        host='127.0.0.1',
        port=tunnel.local_bind_port,
        database='databasename',
    )
    mycursor = connection.cursor()
    query = "SELECT * FROM datos"
    mycursor.execute(query)
Someone said this in another comment: if you use the Python mysql.connector from Oracle, then you must use the construction cnx = mysql.connector.MySQLConnection(...).
Important: the construction cnx = mysql.connector.connect(...) does not work via SSH! This bug cost me a whole day of trying to understand why connections were being dropped by the remote server:
import sshtunnel
import mysql.connector
import pandas as pd

with sshtunnel.SSHTunnelForwarder(
        (ssh_host, ssh_port),
        ssh_username=ssh_username,
        ssh_pkey=ssh_pkey,
        remote_bind_address=(sql_host, sql_port)) as tunnel:
    connection = mysql.connector.MySQLConnection(
        host='127.0.0.1',
        port=tunnel.local_bind_port,
        user=sql_username,
        password=sql_password)
    query = 'select version();'
    data = pd.read_sql_query(query, connection)
    print(data)
    connection.close()
If the username, password, host and port are all correct and the connection still fails, there is one more thing to try: the argument use_pure=True. This makes the connector use its pure-Python implementation (rather than the C extension) to handle the connection details and password. See the documentation of the mysql.connector.connect() arguments.
import sshtunnel
import mysql.connector
import pandas as pd

with sshtunnel.SSHTunnelForwarder(
        (ssh_host, ssh_port),
        ssh_username=ssh_username,
        ssh_pkey=ssh_pkey,
        remote_bind_address=(sql_host, sql_port)) as tunnel:
    connection = mysql.connector.MySQLConnection(
        host='127.0.0.1',
        port=tunnel.local_bind_port,
        user=sql_username,
        password=sql_password,
        use_pure=True)
    query = 'select version();'
    data = pd.read_sql_query(query, connection)
    print(data)
    connection.close()
Paramiko is the best Python module for SSH tunnelling. Check out the code here:
https://github.com/paramiko/paramiko/blob/master/demos/forward.py
As said in the comments, this one works perfectly:
SSH Tunnel for Python MySQLdb connection
Best practice is to parameterize the connection variables.
Here is how I have implemented it. Works like a charm!
import mysql.connector
import sshtunnel
import pandas as pd
import configparser

config = configparser.ConfigParser()
config.read('c:/work/tmf/data_model/tools/config.ini')

ssh_host = config['db_qa01']['SSH_HOST']
ssh_port = int(config['db_qa01']['SSH_PORT'])
ssh_username = config['db_qa01']['SSH_USER']
ssh_pkey = config['db_qa01']['SSH_PKEY']
sql_host = config['db_qa01']['HOST']
sql_port = int(config['db_qa01']['PORT'])
sql_username = config['db_qa01']['USER']
sql_password = config['db_qa01']['PASSWORD']

with sshtunnel.SSHTunnelForwarder(
        (ssh_host, ssh_port),
        ssh_username=ssh_username,
        ssh_pkey=ssh_pkey,
        remote_bind_address=(sql_host, sql_port)) as tunnel:
    connection = mysql.connector.connect(
        host='127.0.0.1',
        port=tunnel.local_bind_port,
        user=sql_username,
        password=sql_password)
    query = 'select version();'
    data = pd.read_sql_query(query, connection)
    print(data)
    connection.close()
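For reference, the config.ini read by the snippet above would contain a section like the following (section and key names taken from the code; the values are placeholders):

[db_qa01]
SSH_HOST = ssh.example.com
SSH_PORT = 22
SSH_USER = ssh_username
SSH_PKEY = /home/username/.ssh/id_rsa
HOST = sql.example.com
PORT = 3306
USER = sql_username
PASSWORD = sql_password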