pyodbc: connecting to the database a second time fails - python

I'm trying to use Python and pyodbc to access SQL Server 2008. The first connection works. Then, after the program finishes its job, it closes the connection. When the program tries to access the database and connect to it again, it fails in the statement:
self.conn = pyodbc.connect(DRIVER=self.DRIVER, SERVER=self.SERVER, DATABASE=self.DATABASE, UID=self.UID, PWD=self.PWD, charset="UTF-8")
but the first time it works fine. Does anyone know why? Below is the Python code:
import pyodbc

class ODBC_MS:
    def __init__(self, DRIVER, SERVER, DATABASE, UID, PWD):
        ''' initialization '''
        self.DRIVER = DRIVER
        self.SERVER = SERVER
        self.DATABASE = DATABASE
        self.UID = UID
        self.PWD = PWD

    def _GetConnect(self):
        ''' Connect to the DB '''
        if not self.DATABASE:
            raise NameError("no database name given")
        try:
            self.conn = pyodbc.connect(DRIVER=self.DRIVER, SERVER=self.SERVER,
                                       DATABASE=self.DATABASE, UID=self.UID,
                                       PWD=self.PWD, charset="UTF-8")
        except Exception, e:
            print e.message
        else:
            self.cur = self.conn.cursor()
            if not self.cur:
                raise NameError("connection failed!")
            else:
                return self.cur, self.conn

    def ExecNoQuery(self, conn, cursor, sql):
        cursor.execute(sql)
        ret = conn.commit()
        return ret

    def _UnConnect(self, conn, cursor):
        conn.close()
if __name__ == '__main__':
    ms = ODBC_MS('{SQL SERVER}', r'<server>', '<db>', '<user>', '<password>')
    cursor, conn = ms._GetConnect()          # connect
    sql = "create table XX for example"
    ret = ms.ExecNoQuery(conn, cursor, sql)  # sql operation
    ms._UnConnect(conn, cursor)              # close db

    # access the database a second time
    ms = ODBC_MS('{SQL SERVER}', r'<server>', '<db>', '<user>', '<password>')
    cursor, conn = ms._GetConnect()          # fails here, I don't know why
    sql = "create table XX for example"
    ret = ms.ExecNoQuery(conn, cursor, sql)  # sql operation
    ms._UnConnect(conn, cursor)              # close db
The second time the program calls ms._GetConnect(), the statement self.conn = pyodbc.connect(DRIVER=self.DRIVER, SERVER=self.SERVER, DATABASE=self.DATABASE, UID=self.UID, PWD=self.PWD, charset="UTF-8") fails.
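One thing worth ruling out first: _GetConnect catches the connection error, prints it, and then falls through, so the caller's cursor, conn = ms._GetConnect() fails on a None return and the real ODBC error stays hidden. A minimal diagnostic variant (a sketch, assuming the rest of the class above is unchanged) re-raises so the driver's actual message for the second attempt becomes visible:

    def _GetConnect(self):
        ''' Connect to the DB, letting connection errors propagate '''
        if not self.DATABASE:
            raise NameError("no database name given")
        try:
            self.conn = pyodbc.connect(DRIVER=self.DRIVER, SERVER=self.SERVER,
                                       DATABASE=self.DATABASE, UID=self.UID,
                                       PWD=self.PWD, charset="UTF-8")
        except Exception as e:
            # log and re-raise instead of swallowing the error, so the second
            # failure shows the underlying ODBC message instead of a TypeError
            print(e)
            raise
        self.cur = self.conn.cursor()
        return self.cur, self.conn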

Related

What should I put as the first parameter when calling the "get_Tables_byName" function?

I'm writing Python code to read from a MySQL database:

import pymysql

def create_server_connection(host, user, password):
    connection = None
    try:
        connection = pymysql.connect(host='localhost',
                                     user='root',
                                     password='pwd',
                                     database='raw_data',
                                     cursorclass=pymysql.cursors.DictCursor)
        print("MySQL Database connection successful")
    except pymysql.Error as error:
        print(f"Error: '{error}'")
    return connection

def read_query(connection, query):
    cur = connection.cursor()
    result = None
    try:
        cur.execute(query)
        result = cur.fetchall()
        return result
    except pymysql.Error as error:
        print(f"Error: '{error}'")
    return cur

def get_Tables_byName(cursor, tableName):
    q1 = f'''
        SELECT table_name FROM raw_data.tables
        WHERE table_name LIKE '{tableName}'; '''
    res = []
    cursor.execute(q1)
    for row in cursor:
        res.append(row[0])
    return res

get_Tables_byName(cursor, 'data_31942010201')
If I want to call the get_Tables_byName function, what should I pass as the first parameter? If I pass cursor, I get NameError: name 'cursor' is not defined.
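For what it's worth, the name cursor only exists once you create it from a connection, so a hedged sketch of the call site (assuming the functions above and valid credentials) could look like this:

    # create the connection first, then take a cursor from it
    connection = create_server_connection('localhost', 'root', 'pwd')
    cursor = connection.cursor()   # this is what defines the name `cursor`

    tables = get_Tables_byName(cursor, 'data_31942010201')
    print(tables)

Note that because the connection is created with cursorclass=pymysql.cursors.DictCursor, each row is a dict keyed by column name rather than a tuple, so row[0] inside get_Tables_byName may need adjusting (or the cursorclass argument dropped).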

Best practice to handle a MySQL connection in a long-running Python script

I currently have a long-running Python script (multiple days) that occasionally executes queries against a MySQL db. The queries are executed from different threads.
I connect to the db as follows:
import mysql.connector
import time

class DB():
    connection = None

    def __init__(self, host, user, password, database):
        self.host = host
        self.user = user
        self.password = password
        self.database = database
        self.connect()

    def connect(self):
        try:
            self.connection = mysql.connector.connect(
                host=self.host,
                user=self.user,
                password=self.password,
                database=self.database,
                autocommit=True,
            )
        except Exception as exception:
            print("[DBCONNECTION] {exception} Retrying in 5 seconds.".format(exception=str(exception)))
            time.sleep(5)
            self.connect()

db = DB("11.111.11.111", "mydb", "mydb", "mydb")
One of the functions to execute a query:
def getUsersDB(self):
    db.connection.ping(reconnect=True)
    cursor = db.connection.cursor()
    cursor.execute("SELECT * FROM Users")
    users = cursor.fetchall()
    return users
I believe I'm far from good practice in how I handle the connection. What is the correct way to handle a case like this?
The problem with your approach is that db.connection.ping(reconnect=True) doesn't promise you a live connection.
You can read the function's description here:
https://dev.mysql.com/doc/connector-python/en/connector-python-api-mysqlconnection-ping.html
You can try to use this:
class DB:
    connection = None

    def __init__(
        self,
        host: str,
        user: str,
        password: str,
        database: str,
    ) -> None:
        self.host = host
        self.user = user
        self.password = password
        self.database = database
        self.connection = self.init_conn()

    def init_conn(self):
        return mysql.connector.connect(
            host=self.host,
            user=self.user,
            password=self.password,
            database=self.database,
        )

    def get_cursor(self):
        # This will try to reconnect also.
        # In case it fails, you will have to create a new connection.
        try:
            self.connection.ping(
                reconnect=True,
                attempts=3,
                delay=5,
            )
        except mysql.connector.InterfaceError as err:
            self.connection = self.init_conn()
        return self.connection.cursor()
And use the DB object like this:
def getUsersDB(self):
    cursor = db.get_cursor()  # cursor from a live connection
    cursor.execute("SELECT * FROM Users")
    users = cursor.fetchall()
    return users
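Since the question mentions that queries come from different threads, it is also worth noting that a single mysql.connector connection should not be shared between threads. A minimal sketch (with the same placeholder credentials as above) using the connector's built-in connection pool, so each query borrows a dedicated connection and returns it when done:

    import mysql.connector.pooling

    # a small pool shared by all threads; each get_connection() hands out a
    # dedicated connection, and close() returns it to the pool
    pool = mysql.connector.pooling.MySQLConnectionPool(
        pool_name="app_pool",
        pool_size=5,
        host="11.111.11.111",
        user="mydb",
        password="mydb",
        database="mydb",
    )

    def get_users_db():
        conn = pool.get_connection()
        try:
            cursor = conn.cursor()
            cursor.execute("SELECT * FROM Users")
            return cursor.fetchall()
        finally:
            conn.close()  # returns the connection to the pool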

Auto reconnect to a PostgreSQL database

I have the following code:
import psycopg2
conn = psycopg2.connect(database="****", user="postgres", password="*****", host="localhost", port="5432")
print ("Opened database successfully")
cur = conn.cursor()
cur.execute('''select * from xyz''')
print ("Table created successfully")
conn.commit()
conn.close()
Like this, I have some 50-60 complex queries, but the problem is that sometimes the PostgreSQL database throws the error below:
Traceback (most recent call last):
  File "C:/Users/ruw2/Desktop/SQL.py", line 87, in <module>
    cur.execute('''select * from xyz;''')
psycopg2.DatabaseError: server closed the connection unexpectedly
    This probably means the server terminated abnormally before or while processing the request.
It looks like the connection is lost. How can I automatically reconnect to the PostgreSQL database?
I appreciate your help.
I would rely on decorators - retry and reconnect:
import psycopg2
from tenacity import retry, wait_exponential, stop_after_attempt
from typing import Callable

def reconnect(f: Callable):
    def wrapper(storage: "DbStorage", *args, **kwargs):
        if not storage.connected():
            storage.connect()
        try:
            return f(storage, *args, **kwargs)
        except psycopg2.Error:
            storage.close()
            raise
    return wrapper

class DbStorage:
    def __init__(self, conn: str):
        self._conn: str = conn
        self._connection = None

    def connected(self) -> bool:
        return self._connection and self._connection.closed == 0

    def connect(self):
        self.close()
        self._connection = psycopg2.connect(self._conn)

    def close(self):
        if self.connected():
            # noinspection PyBroadException
            try:
                self._connection.close()
            except Exception:
                pass
        self._connection = None

    @retry(stop=stop_after_attempt(3), wait=wait_exponential())
    @reconnect
    def get_data(self):  # pass here required params to get data from DB
        # ..
        cur = self._connection.cursor()
        cur.execute('''select * from xyz''')
        # ..
Catch the exception and reconnect:
while True:
    conn = psycopg2.connect(database="****", user="postgres", password="*****", host="localhost", port="5432")
    cur = conn.cursor()
    try:
        cur.execute('''select * from xyz''')
    except psycopg2.OperationalError:
        continue
    break
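As written, this loop reconnects immediately and retries forever. A hedged variation (using the same placeholder DSN and query) that bounds the retries and pauses between attempts could look like this:

    import time

    for attempt in range(5):
        conn = psycopg2.connect(database="****", user="postgres", password="*****",
                                host="localhost", port="5432")
        cur = conn.cursor()
        try:
            cur.execute('''select * from xyz''')
            break
        except psycopg2.OperationalError:
            time.sleep(2)   # brief pause before reconnecting
    else:
        raise RuntimeError("could not run the query after 5 attempts")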

Python uncaught exception when inserting data into mysql database

I have a Python AWS Lambda function that takes JSON records, checks them to see if they have the required keys, and then inserts them into a MySQL db (AWS RDS Aurora). The function (def handler) gets invoked whenever a new record comes into the stream.
At the moment, Lambda is reporting some errors, but when I look at the CloudWatch logs I don't see any errors, which leads me to believe that maybe I'm not handling or catching the exception. Can anyone tell me where the issue might be?
from __future__ import print_function

import base64
import json
import pymysql

RDS_HOST = 'host'
DB_USER = 'dummy_user'
DB_PASSWORD = 'password1234'
DB_NAME = 'crazy_name'
DB_TABLE = 'wow_table'

class MYSQL(object):
    '''
    This is a wrapper class for PyMySQL
    '''
    CONNECTION_TIMEOUT = 30

    def __init__(self, host, user, password, database, table):
        self.host = host
        self.user = user
        self.password = password
        self.database = database
        self.table = table
        self.connection = self.connect()

    def connect(self):
        '''
        Connects to MySQL instance
        '''
        try:
            connection = pymysql.connect(
                host=self.host,
                user=self.user,
                password=self.password,
                db=self.database,
                connect_timeout=self.CONNECTION_TIMEOUT
            )
            return connection
        except Exception as ex:
            print(ex)
            print("ERROR: Unexpected error: Could not connect to AuroraDB instance")

    def execute(self, account_id, external_ref_id, timestamp):
        '''
        Executes command given a MySQL connection
        '''
        with self.connection.cursor() as cursor:
            sql = ('INSERT INTO ' +
                   self.database +
                   '.' +
                   self.table +
                   '(`account_id`, `external_reference_id`, `registration`, `c_name`, `c_id`, `create_date`)' +
                   ' VALUES (%s, %s, DATE_FORMAT(STR_TO_DATE(%s,"%%Y-%%M-%%d %%H:%%i:%%s"),"%%Y-%%m-%%d %%H:%%i:%%s"), %s, %s, current_timestamp())' +
                   ' ON DUPLICATE KEY UPDATE create_date = VALUES(create_date)')
            cursor.execute(sql, (
                account_id,
                external_ref_id,
                timestamp,
                'bingo',
                300)
            )
            self.connection.commit()

    def close_connection(self):
        '''
        Closes connection to MySQL
        '''
        self.connection.close()

def get_data_from_kinesis_object(obj):
    '''
    Retrieves data from kinesis event
    '''
    return obj['kinesis']['data']

def decode_data(data):
    '''
    Decodes record via base64
    '''
    return base64.b64decode(data)

def split_records_into_record(records):
    '''
    Splits a record of records into an array of records
    '''
    return records.split('\n')

def parse_record(record):
    '''
    Parses record into JSON
    '''
    if record:
        return json.loads(record)

def is_record_valid(record):
    '''
    Checks for keys in the event;
    returns True if they all exist
    and False if they don't all exist
    '''
    return all(key in record for key in (
        'eventName',
        'sourceType',
        'AccountId',
        'Timestamp',
        'ExternalReferenceId'
    ))

def handler(event, context):
    """
    This function inserts data into an Aurora RDS instance
    """
    mysql = MYSQL(RDS_HOST, DB_USER, DB_PASSWORD, DB_NAME, DB_TABLE)
    for obj in event['Records']:
        records = decode_data(get_data_from_kinesis_object(obj))
        split_records = split_records_into_record(records)
        for record in split_records:
            parsed_record = parse_record(record)
            if is_record_valid(parsed_record):
                mysql.execute(
                    parsed_record['AccountId'],
                    parsed_record['ExternalReferenceId'],
                    str(parsed_record['Timestamp'])
                )
    mysql.close_connection()

Login into Server AND MySQL from Python

I'm trying to log in to my MySQL server that I'm running on DigitalOcean, but unfortunately I have no clue how to push the login through Python. I've got the MySQL part implemented, but I don't know how to log in to the actual server itself (the computer). What other code do I need to add to accomplish this? I've already added the variables mySqlUser and mySqlPassword to the top of the file.
Here is the code I have so far:
import MySQLdb

class Database:
    host = 'some ip address'
    user = 'root'
    password = '123'
    mySqlUser = 'root'
    mySqlPassword = 'someotherpassword'
    db = 'test'

    def __init__(self):
        self.connection = MySQLdb.connect(self.host, self.user, self.password, self.db)
        self.cursor = self.connection.cursor()

    def insert(self, query):
        try:
            self.cursor.execute(query)
            self.connection.commit()
        except:
            self.connection.rollback()

    def query(self, query):
        cursor = self.connection.cursor(MySQLdb.cursors.DictCursor)
        cursor.execute(query)
        return cursor.fetchall()

    def __del__(self):
        self.connection.close()

if __name__ == "__main__":
    db = Database()

    # CleanUp Operation
    del_query = "DELETE FROM basic_python_database"
    db.insert(del_query)

    # Data Insert into the table
    query = """
        INSERT INTO basic_python_database
        (`name`, `age`)
        VALUES
        ('Mike', 21),
        ('Michael', 21),
        ('Imran', 21)
        """
    # db.query(query)
    db.insert(query)

    # Data retrieved from the table
    select_query = """
        SELECT * FROM basic_python_database
        WHERE age = 21
        """
    people = db.query(select_query)
    for person in people:
        print "Found %s " % person['name']
You can try this:

    def __init__(self):
        self.host = 'some ip address'
        self.user = 'root'
        self.password = '123'
        self.mySqlUser = 'root'
        self.mySqlPassword = 'someotherpassword'
        self.db = 'test'
        self.connection = MySQLdb.connect(self.host, self.user, self.password, self.db)
        self.cursor = self.connection.cursor()

or

    def __init__(self, host, user, password, db):
        self.connection = MySQLdb.connect(host, user, password, db)
        self.cursor = self.connection.cursor()

You'd better pass the parameters in when you instantiate the class, instead of hard-coding the values in the class.
Just a suggestion, and don't mind my English (:
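The question also asks about logging in to the server itself, not just MySQL. If that means the MySQL port is only reachable after an SSH login to the droplet, one common approach is an SSH tunnel. A sketch using the third-party sshtunnel package (the hosts and credentials below are placeholders, and it assumes MySQL listens on 127.0.0.1:3306 on the server):

    import MySQLdb
    from sshtunnel import SSHTunnelForwarder  # pip install sshtunnel

    # open an SSH connection to the server and forward a local port to MySQL
    with SSHTunnelForwarder(
        ('some ip address', 22),            # SSH endpoint of the droplet
        ssh_username='root',
        ssh_password='123',
        remote_bind_address=('127.0.0.1', 3306),
    ) as tunnel:
        connection = MySQLdb.connect(
            host='127.0.0.1',
            port=tunnel.local_bind_port,    # forwarded port chosen by sshtunnel
            user='root',                    # mySqlUser
            passwd='someotherpassword',     # mySqlPassword
            db='test',
        )
        cursor = connection.cursor()
        cursor.execute("SELECT VERSION()")
        print(cursor.fetchone())
        connection.close()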
