Config file for connecting Python to SQL Server via pyodbc

I am really confused about writing a config file for connecting Python to SQL Server with pyodbc. I have written a class that connects to the database, but my boss insists the connection details belong in a config file, and I'm struggling with how to do that, or even why. My code so far is below. As you can tell I'm new to coding, so can someone please help me understand the purpose of a config file and walk me through the process?
import pyodbc
import sqlalchemy as sa
import urllib
import pandas as pd

class SQL_Database:
    def __init__(self, database, driver='SQL Server', server=r'.\TEST_SERVER'):
        self.driver = driver
        self.server = server
        self.database = database

    def create_server_connection(self):
        connection = None
        try:
            connection = pyodbc.connect(f'Driver={self.driver};'
                                        f'Server={self.server};'
                                        f'Database={self.database};'
                                        'Trusted_Connection=yes;')
            print("SQL Server database connection successful")
        except pyodbc.Error as err:
            print(f"Connection failed: {err}")
        return connection

conn = SQL_Database(database='index_changes').create_server_connection()

Here's an example of loading the values from a JSON file.
Create a config file named config.json:
{
    "driver": "DriverName",
    "server": "ServerName",
    "database": "DatabaseName"
}
Read in the config parameters in your class:
import pyodbc
import json

class SQL_Database:
    def __init__(self):
        with open('path/to/config.json', 'r') as fh:
            config = json.load(fh)
        self.driver = config['driver']
        self.server = config['server']
        self.database = config['database']

        connection = pyodbc.connect(
            f'Driver={self.driver};'
            f'Server={self.server};'
            f'Database={self.database};'
            'Trusted_Connection=yes;'
        )

SQL_Database()
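A small refinement, offered as a sketch: take the config path as a constructor argument instead of hard-coding it, so tests and other environments can point the class at a different file, and keep config.json out of version control so credentials never land in the repository. The file name passed at the bottom is an assumption for illustration.
import json
import pyodbc

class SQL_Database:
    def __init__(self, config_path):
        # Load connection settings from the JSON config file
        with open(config_path) as fh:
            config = json.load(fh)
        self.driver = config['driver']
        self.server = config['server']
        self.database = config['database']

    def create_server_connection(self):
        # Build the pyodbc connection string from the loaded settings
        return pyodbc.connect(f'Driver={self.driver};'
                              f'Server={self.server};'
                              f'Database={self.database};'
                              'Trusted_Connection=yes;')

conn = SQL_Database(config_path='config.json').create_server_connection()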

Related

How to execute DBCC CHECKIDENT with Python?

I have a Python script that I am trying to use to execute the statement below on my SQL Server:
DBCC CHECKIDENT('TableName', RESEED, 0)
My script looks like this:
qry = '''DBCC CHECKIDENT('TableName', RESEED, 0)'''

def mssql_cmd(qry, env):
    # Import dependencies
    import json
    import urllib
    import pandas as pd
    import pyodbc
    import sqlalchemy as sa
    try:
        # Read the config json file into a config dict
        with open("../parameters/config.json") as cf:
            config = json.load(cf)
        # Try to establish the connection to MSSQL
        params = urllib.parse.quote_plus(f'DRIVER={config[env][0]["driver"]};'
                                         f'Server={config[env][0]["server"]};'
                                         f'Database={config[env][0]["database"]};'
                                         f'User={config[env][0]["user"]};'
                                         f'Password={config[env][0]["password"]};'
                                         f'Trusted_connection={config[env][0]["Trusted_connection"]};')
        # Establish the engine
        engine = sa.create_engine("mssql+pyodbc:///?odbc_connect=%s" % params)
        db = engine.connect()
        print("Connection to Data Warehouse -- SUCCESSFUL")
        if db.connect():
            try:
                db.execute(qry)
                db.close()
                engine.dispose()
            except Exception as e:
                print(e)
    except Exception as e:
        print(e)
I don't get any errors; the script executes, but it doesn't reset the auto-generated ID on the table.
If I replace the line
db.execute(qry)
with
data = pd.read_sql(qry, db)
then I am able to extract data.
So the connection works if I run a plain query, but I can't make it run the function that resets my auto-generated ID.
Does anyone have any clue as to what I am doing wrong here?
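One likely cause, offered as a guess rather than a confirmed diagnosis: SQLAlchemy runs statements on engine.connect() inside an implicit transaction, so the DBCC command can be rolled back rather than committed when the connection closes, while plain SELECTs appear to work because they return their data regardless. A minimal sketch, assuming SQLAlchemy 1.4 and the same params as above, that runs the statement on an autocommitting connection:
import sqlalchemy as sa

# Assumes `params` was built with urllib.parse.quote_plus as in the question
engine = sa.create_engine("mssql+pyodbc:///?odbc_connect=%s" % params)

# AUTOCOMMIT commits each statement immediately, so the reseed
# is not rolled back when the connection closes
with engine.connect().execution_options(isolation_level="AUTOCOMMIT") as db:
    db.execute(sa.text("DBCC CHECKIDENT('TableName', RESEED, 0)"))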

How to get the column names in Redshift using Python boto3

I want to get the column names in Redshift using Python boto3. So far I have:
Created a Redshift cluster
Inserted data into it
Configured Secrets Manager
Configured a SageMaker notebook
Opened the Jupyter notebook and wrote the code below
import boto3
import time

client = boto3.client('redshift-data')
response = client.execute_statement(
    ClusterIdentifier="test",
    Database="dev",
    SecretArn="{SECRET-ARN}",
    Sql="SELECT `COLUMN_NAME` FROM `INFORMATION_SCHEMA`.`COLUMNS` WHERE `TABLE_SCHEMA`='dev' AND `TABLE_NAME`='dojoredshift'"
)
I got the response but there is no table schema inside it
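That is expected behaviour for the Redshift Data API: execute_statement is asynchronous and returns only a statement Id, not rows. A minimal sketch of polling for completion and fetching the result, with the MySQL-style backticks replaced by plain identifiers since Redshift follows PostgreSQL syntax; the cluster and secret values are the placeholders from above:
import time
import boto3

client = boto3.client('redshift-data')
response = client.execute_statement(
    ClusterIdentifier="test",
    Database="dev",
    SecretArn="{SECRET-ARN}",
    Sql="SELECT column_name FROM information_schema.columns WHERE table_name = 'dojoredshift'"
)

# Poll until the statement finishes, then fetch the rows
while True:
    desc = client.describe_statement(Id=response['Id'])
    if desc['Status'] in ('FINISHED', 'FAILED', 'ABORTED'):
        break
    time.sleep(1)

if desc['Status'] == 'FINISHED':
    result = client.get_statement_result(Id=response['Id'])
    print([rec[0].get('stringValue') for rec in result['Records']])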
Below is the code I used to connect; I am getting a timeout:
import psycopg2

HOST = 'xx.xx.xx.xx'
PORT = 5439
USER = 'aswuser'
PASSWORD = 'Password1!'
DATABASE = 'dev'

def db_connection():
    conn = psycopg2.connect(host=HOST, port=PORT, user=USER, password=PASSWORD, database=DATABASE)
    return conn
To get the IP address, go to https://ipinfo.info/html/ip_checker.php and pass in your Redshift cluster hostname, e.g. xx.xx.us-east-1.redshift.amazonaws.com, or you can see it on the cluster page itself.
I got this error while running the code above:
OperationalError: could not connect to server: Connection timed out
Is the server running on host "x.xx.xx..xx" and accepting
TCP/IP connections on port 5439?
I fixed it with the code below, after adding the security group rules mentioned above.
import logging
import boto3
import psycopg2

logger = logging.getLogger(__name__)

# Credentials can be set using different methodologies. For this test,
# I ran from my local machine and used the CLI command "aws configure"
# to set my access key and secret access key.
client = boto3.client(service_name='redshift',
                      region_name='us-east-1')

# Use boto3 to get the database password instead of hardcoding it in the code
cluster_creds = client.get_cluster_credentials(
    DbUser='awsuser',
    DbName='dev',
    ClusterIdentifier='redshift-cluster-1',
    AutoCreate=False)

try:
    # Database connection below uses the DbPassword that boto3 returned
    conn = psycopg2.connect(
        host='redshift-cluster-1.cvlywrhztirh.us-east-1.redshift.amazonaws.com',
        port='5439',
        user=cluster_creds['DbUser'],
        password=cluster_creds['DbPassword'],
        database='dev'
    )
    # Verify that the connection worked
    cursor = conn.cursor()
    cursor.execute("SELECT VERSION()")
    results = cursor.fetchone()
    ver = results[0]
    if ver is None:
        print("Could not find version")
    else:
        print("The version is " + ver)
except Exception:
    logger.exception('Failed to open database connection.')
    print("Failed")

How can I reliably keep an SSH tunnel and MySQL connection open with my Python Flask API?

I have built an API in Flask that performs classification on text messages with Keras. I am currently using sshtunnel and MySQLdb to connect to a MySQL database to fetch messages from a remote database. The entire application is wrapped in a Docker container.
I am able to establish a connection to the remote database and successfully query it, but I am opening and closing a new ssh tunnel every time a POST request comes into the API, and this slows down performance.
I have tried to open a single ssh tunnel and database connection "to rule them all", but the connection gets stale if there is no activity after an hour or so, and then API requests take forever and a day to complete.
How have you done this? Is this slowness unavoidable or is there a way to periodically refresh the ssh and database connections?
This is how I am connecting to my database for every incoming request:
with SSHTunnelForwarder(
    (host, 22),
    ssh_username=ssh_username,
    ssh_private_key=ssh_private_key,
    remote_bind_address=('127.0.0.1', 3306)
) as server:
    conn = db.connect(host='127.0.0.1',
                      port=server.local_bind_port,
                      user=user,
                      passwd=password,
                      db=database)
Okay, I figured it out. I created a DB object as suggested in this answer, but with a slight modification: I keep track of when the database connection was created and re-establish it once it is more than 1,600 seconds (roughly half an hour) old. This means an occasional query takes slightly longer while the connection is rebuilt, but the rest run much faster and the connection doesn't go stale.
I've included some code below. I realize the code isn't perfect, but it's what has worked for me so far.
import time

import MySQLdb as mydb
import pandas as pd
from sshtunnel import SSHTunnelForwarder

# host, ssh_username, ssh_private_key, ssh_pwd, user, password and
# database are credentials defined elsewhere
class DB:
    def __init__(self):
        self.conn = None
        self.server = None
        self.open_ssh_tunnel()
        self.connect()
        self.last_connected_time = time.time()

    def open_ssh_tunnel(self):
        connection_success = False
        while not connection_success:
            try:
                self.server = SSHTunnelForwarder(
                    (host, 22),
                    ssh_username=ssh_username,
                    ssh_private_key=ssh_private_key,
                    ssh_password=ssh_pwd,
                    remote_bind_address=('127.0.0.1', 3306))
                connection_success = True
            except Exception:
                time.sleep(0.5)
        self.server.start()

    def connect(self):
        connection_success = False
        while not connection_success:
            try:
                self.conn = mydb.connect(host='127.0.0.1',
                                         port=self.server.local_bind_port,
                                         user=user,
                                         passwd=password,
                                         db=database)
                connection_success = True
            except Exception:
                time.sleep(0.5)

    def query(self, sql):
        result = None
        current_time = time.time()
        # Proactively rebuild the tunnel and connection before they go stale
        if current_time - self.last_connected_time > 1600:
            self.last_connected_time = current_time
            self.server.close()
            self.conn.close()
            self.open_ssh_tunnel()
            self.connect()
        try:
            result = pd.read_sql_query(sql, self.conn).values
            self.conn.commit()
        except Exception:
            # The connection went stale anyway; rebuild it and retry once
            self.server.close()
            self.conn.close()
            self.open_ssh_tunnel()
            self.connect()
            result = pd.read_sql_query(sql, self.conn).values
        return result
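For completeness, a hypothetical usage sketch; the table name is made up for illustration:
db = DB()
rows = db.query("SELECT * FROM messages LIMIT 10")
print(rows)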

Python 3.6: MySQL/Class/Inheritance connection issue

I'm trying to run the code below to connect to a database using Python 3.6, and I'm not sure I'm using Python correctly. I read in the config file, create an instance of the Dao class, and pass the config details up to Dao's parent class, Db.
When I then go and try to open and close a connection on the Dao object, it says that dbhost isn't set.
Any help greatly appreciated.
Error
Traceback (most recent call last):
File "parse.py", line 12, in <module>
sources = daoObj.getSourceUrls()
File "E:\classes\Dao.py", line 14, in getSourceUrls
conn = super().open()
File "E:\classes\Db.py", line 22, in open
conn = pymysql.connect(dbhost, dbuser, dbpass, dbname);
NameError: name 'dbhost' is not defined
parse.py
import configparser

from classes.Dao import Dao

# Load configuration settings
config = configparser.ConfigParser()
config.read("./config.ini")

# Create instance of database class
daoObj = Dao(config)

# Test database connection
daoObj.test()
Dao.py
from classes.Db import Db

class Dao(Db):
    """Contains all SQL queries used for database interaction"""
    node = None

    def __init__(self, config):
        """Default constructor"""
        super().__init__(config)

    def getSourceUrls(self):
        conn = super().open()
        super().close(conn)
Db.py
import pymysql

class Db:
    """Database connection class, handles all opening and closing of MySQL database connections."""
    dbuser = None
    dbpass = None
    dbhost = None
    dbname = None

    def __init__(self, config):
        """Default constructor"""
        # Assign the database login credentials
        dbuser = config["DB"]["USER"]
        dbpass = config["DB"]["PASS"]
        dbhost = config["DB"]["HOST"]
        dbname = config["DB"]["DATABASE"]

    def open(self):
        """Open database connection."""
        conn = None
        try:
            conn = pymysql.connect(dbhost, dbuser, dbpass, dbname)
        except pymysql.err.InternalError as e:
            print("Error connecting to database.")
        return conn

    def close(self, conn):
        """Close passed in database connection"""
        conn.close()
config.ini
[DB]
USER=username
PASS=password
HOST=127.0.0.1
DATABASE=database
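The traceback points at the cause: the constructor assigns the credentials to local variables rather than instance attributes, so open() later references names that were never defined in any enclosing scope. A sketch of a fix, assigning to self and reading the attributes back the same way:
import pymysql

class Db:
    """Database connection class."""

    def __init__(self, config):
        # Assign to instance attributes, not constructor locals
        self.dbuser = config["DB"]["USER"]
        self.dbpass = config["DB"]["PASS"]
        self.dbhost = config["DB"]["HOST"]
        self.dbname = config["DB"]["DATABASE"]

    def open(self):
        """Open database connection."""
        conn = None
        try:
            conn = pymysql.connect(host=self.dbhost, user=self.dbuser,
                                   password=self.dbpass, db=self.dbname)
        except pymysql.err.InternalError:
            print("Error connecting to database.")
        return conn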

How to connect to a remote Windows MSSQL server from Ubuntu using sqlalchemy

I am trying to connect remotely to MSSQL, which is on Windows, from Ubuntu using SQLAlchemy. I created a DSN like below.
dbinfo.py:
username = 'XXX'
pw = 'XXX'
host = '190.122.12.214'
drivername = 'SQL Server'
database = 'XXX'
extra_param=''
and I imported the dbinfo.py file into db_handler.py:
import transaction
from z3c.saconfig import Session as SASession
from z3c.saconfig import EngineFactory
from zope import component
from zope.sqlalchemy import mark_changed

# sqlalchemy
import sqlalchemy as sa
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base

from redindia.loginpage import dbinfo

info = {
    'username': dbinfo.username,
    'pw': dbinfo.pw,
    'host': dbinfo.host,
    'drivername': dbinfo.drivername,
    'database': dbinfo.database,
    'extra_param': ''
}

drivername = str(info['drivername'])
username = str(info['username'])
host = str(info['host'])
database = str(info['database'])
extra_param = str(info['extra_param'])

def getDb():
    pass

def getSession(testing=False):
    try:
        return SASession()
    except component.ComponentLookupError:
        pass
    # construct url to open database
    _testing_ = ''
    if testing:
        _testing_ = '_testing'
    if info['pw'] != '':
        DSN = drivername + '://' + username + ':' + info['pw'] + '#' + host + '/' + database + _testing_ + '?charset=utf8' + extra_param
    else:
        DSN = drivername + '://' + username + '#' + host + '/' + database + _testing_ + '?charset=utf8' + extra_param
    engine_factory = EngineFactory(DSN, pool_recycle=7200)
    engine = engine_factory()
    # create a global session
    from z3c.saconfig import GloballyScopedSession
    utility = GloballyScopedSession(bind=engine)  # i think, without engine, it will find the one provided above
    from z3c.saconfig.interfaces import IScopedSession
    component.provideUtility(utility, provides=IScopedSession)
    return SASession()

session = getSession()
engine = session.get_bind()
Base = declarative_base(engine)
Base.metadata.reflect()
tables = Base.metadata.tables
and then I connect with the details below:
def callStoreProcedure(self):
    form = self.request.form
    area = form.get('Area')
    session = getSession()
    result = session.execute("select * from BBBB")
    result_set = result.fetchall()
    return result_set
and I configured the ODBC connectivity settings:
etc/odbc.ini:
[SQL Server]
Description=my dsn
Driver=SQL Server
Database=XXX
Servername=190.122.12.214
UID=XXX
PWD=XXX
etc/odbcinst.ini:
[SQL Server]
Description = sql Driver
Driver = /usr/local/lib/libtdsodbc.so
Setup=/usr/local/lib/libtdsS.so
UsageCount = 1
I configured the settings as above, but I can't connect to MSSQL. I am getting the error below:
"ArgumentError: Could not parse rfc1738 URL from string 'SQL Server://XXX:XXX#190.122.12.214/XXX?charset=utf8'"
Can anyone please help me solve this issue? Thanks in advance.
The SQLAlchemy engine URL should begin with either mssql or mssql+pyodbc. See the Engine Configuration documentation.
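For illustration, a sketch of what such a URL could look like with the placeholders from the question; note the '@' (not '#') between the credentials and the host, and the driver name is assumed to match the [SQL Server] entry in odbcinst.ini:
from sqlalchemy import create_engine

engine = create_engine(
    "mssql+pyodbc://XXX:XXX@190.122.12.214/XXX"
    "?driver=SQL+Server&charset=utf8"
)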
I created the db_handler.py file. It contains the details of the database connectivity. The details are below.
db_handler.py:
from sqlalchemy import create_engine

def getSession(self):
    # '@' separates the credentials from the host in an rfc1738 URL
    DSN = "mssql://UID:PWD@IPADDRESS/DBNAME"
    return DSN
Our .py file:
from xxxx.yyyy.db_handler import getSession
from sqlalchemy import create_engine

def callStoreProcedure(self):
    form = self.request.form
    DSN = getSession(self)
    engine = create_engine(DSN)
    cursor = engine.execute("select * from tablename")
    result = cursor.fetchall()
    return result
Now I have connected to the database.
