Python idiom for 'Try until no exception is raised' - python

I want my code to automatically try multiple ways to create a database connection. As soon as one works, the code should move on (i.e. it shouldn't try the other ways anymore). If they all fail, well, then the script can just blow up.
So in - what I thought was, but most likely isn't - a stroke of genius I tried this:
import psycopg2
from getpass import getpass

# ouch, global variable, ooh well, it's just a simple script eh
CURSOR = None

def get_cursor():
    """Create database connection and return standard cursor."""
    global CURSOR
    if not CURSOR:
        # try to connect and get a cursor
        try:
            # first try the bog standard way: db postgres, user postgres and local socket
            conn = psycopg2.connect(database='postgres', user='postgres')
        except psycopg2.OperationalError:
            # maybe user pgsql?
            conn = psycopg2.connect(database='postgres', user='pgsql')
        except psycopg2.OperationalError:
            # maybe it was postgres, but on localhost? prolly need password then
            conn = psycopg2.connect(database='postgres', user='postgres', host='localhost', password=getpass())
        except psycopg2.OperationalError:
            # or maybe it was pgsql and on localhost
            conn = psycopg2.connect(database='postgres', user='pgsql', host='localhost', password=getpass())
        # allright, nothing blew up, so we have a connection
        # now make a cursor
        CURSOR = conn.cursor()
    # return existing or new cursor
    return CURSOR
But it seems that the second and subsequent except clauses aren't catching the OperationalErrors. Probably because Python only catches an exception once per try...except statement?
Is that so? If not: is there anything else I'm doing wrong? If so: how do you do something like this then? Is there a standard idiom?
(I know there are ways around this problem, like having the user specify the connection parameters on the command line, but that's not my question ok :) )
EDIT:
I accepted retracile's excellent answer and I took in gnibbler's comment for using the for..else construct. The final code became (sorry, I'm not really following the max characters per line limit from pep8):
EDIT 2: As you can see from the comment on the Cursor class: I don't really know what to call this kind of class. It's not really a singleton (I can have multiple different instances of Cursor), but when calling get_cursor I do get the same cursor object every time. So it's like a singleton factory? :)
import psycopg2
from getpass import getpass
import sys

class UnableToConnectError(Exception):
    pass

class Cursor:
    """Cursor singleton factory?"""
    def __init__(self):
        self.CURSOR = None
    def __call__(self):
        if self.CURSOR is None:
            # try to connect and get a cursor
            attempts = [
                {'database': 'postgres', 'user': 'postgres'},
                {'database': 'postgres', 'user': 'pgsql'},
                {'database': 'postgres', 'user': 'postgres', 'host': 'localhost', 'password': None},
                {'database': 'postgres', 'user': 'pgsql', 'host': 'localhost', 'password': None},
            ]
            for attempt in attempts:
                if 'password' in attempt:
                    attempt['password'] = getpass(stream=sys.stderr)  # tty and stderr are default in 2.6, but 2.5 uses sys.stdout, which I don't want
                try:
                    conn = psycopg2.connect(**attempt)
                    attempt.pop('password', None)
                    sys.stderr.write("Successfully connected using: %s\n\n" % attempt)
                    break  # no exception raised, we have a connection, break out of for loop
                except psycopg2.OperationalError:
                    pass
            else:
                raise UnableToConnectError("Unable to connect: exhausted standard permutations of connection dsn.")
            # allright, nothing blew up, so we have a connection
            # now make a cursor
            self.CURSOR = conn.cursor()
        # return existing or new cursor
        return self.CURSOR

get_cursor = Cursor()

Approximately:
attempts = [
    {'database': 'postgres', 'user': 'pgsql', ...},
    {'database': 'postgres', 'user': 'postgres', 'host': 'localhost', 'password': getpass()},
    ...
]
conn = None
for attempt in attempts:
    try:
        conn = psycopg2.connect(**attempt)
        break
    except psycopg2.OperationalError:
        pass
if conn is None:
    raise a ruckus
CURSOR = conn.cursor()
Now, if you don't want to call getpass() unless it is necessary, you'd want to check if 'password' in attempt: attempt['password'] = getpass() or so.
Now about that global....
class MyCursor:
    def __init__(self):
        self.CURSOR = None
    def __call__(self):
        if self.CURSOR is None:
            <insert logic here>
        return self.CURSOR

get_cursor = MyCursor()
... though I think there are a couple of other ways to accomplish the same thing.
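For instance, here is a sketch of one such alternative (not from the answer above, just illustrating the same "compute once, reuse forever" idea): stash the cursor on the function object itself instead of in a class:

import psycopg2

def get_cursor():
    """Create the connection on the first call, then keep returning the same cursor."""
    if getattr(get_cursor, 'cursor', None) is None:
        # the attempts/for...else logic from above would go here;
        # a single connect call keeps the sketch short
        conn = psycopg2.connect(database='postgres', user='postgres')
        get_cursor.cursor = conn.cursor()
    return get_cursor.cursor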
Bringing it all together:
class MyCursor:
    def __init__(self):
        self.CURSOR = None
    def __call__(self):
        if self.CURSOR is None:
            attempts = [
                {'database': 'postgres', 'user': 'postgres'},
                {'database': 'postgres', 'user': 'pgsql'},
                {'database': 'postgres', 'user': 'postgres', 'host': 'localhost', 'password': True},
                {'database': 'postgres', 'user': 'pgsql', 'host': 'localhost', 'password': True},
            ]
            conn = None
            for attempt in attempts:
                if 'password' in attempt:
                    attempt['password'] = getpass()
                try:
                    conn = psycopg2.connect(**attempt)
                    break  # that didn't throw an exception, we're done
                except psycopg2.OperationalError:
                    pass
            if conn is None:
                raise RuntimeError("nothin' worked")  # i.e. raise a ruckus
            self.CURSOR = conn.cursor()
        return self.CURSOR

get_cursor = MyCursor()
Note: completely untested

You're close. Probably the best thing to do in this case is nesting the second and subsequent attempts in the except block. Thus the critical part of your code would look like:
if not CURSOR:
    # try to connect and get a cursor
    try:
        # first try the bog standard way: db postgres, user postgres and local socket
        conn = psycopg2.connect(database='postgres', user='postgres')
    except psycopg2.OperationalError:
        # maybe user pgsql?
        try:
            conn = psycopg2.connect(database='postgres', user='pgsql')
        except psycopg2.OperationalError:
            # maybe it was postgres, but on localhost? prolly need password then
            try:
                conn = psycopg2.connect(database='postgres', user='postgres', host='localhost', password=getpass())
            except psycopg2.OperationalError:
                # or maybe it was pgsql and on localhost
                conn = psycopg2.connect(database='postgres', user='pgsql', host='localhost', password=getpass())

Related

What is the right way to design Python / SQLite application without repeating connection part?

I'm trying to develop a simple application where multiple business objects are saved to an SQLite database. I have several business objects for which I want to write save/update/delete methods. For each of the classes, and for each method within those classes, I always create a new connection. For example:
import sqlite3
db = "mydb.db"

class BusinessObject1:
    ...
    def save_to_db(self, db):
        conn = sqlite3.connect(db)
        cur = conn.cursor()
        with conn:
            cur.execute("...")
    def delete_from_db(self, db):
        conn = sqlite3.connect(db)
        cur = conn.cursor()
        with conn:
            cur.execute("...")

class BusinessObject2:
    ...
    def save_to_db(self, db):
        conn = sqlite3.connect(db)
        cur = conn.cursor()
        with conn:
            cur.execute("...")
    def delete_from_db(self, db):
        conn = sqlite3.connect(db)
        cur = conn.cursor()
        with conn:
            cur.execute("...")
It doesn't feel like a good design (not DRY). Can someone propose a better design for this? I have around 20 business objects with 4-8 methods each, plus a record table in which all these objects are included. Typing conn = sqlite3.connect(db) every time simply cannot be the right way. And if I decide to switch to MySQL or PostgreSQL, I would need to refactor the whole project.
Thanks!
You might want to separate the connection handling from the class, or you can do as the comment above suggests and create the connection inside the class constructor (a sketch of that follows below).
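A minimal sketch of the constructor approach, assuming the same sqlite3 setup as in the question (the BaseObject name and the shared base class are just illustrative, not from the original code):

import sqlite3

class BaseObject:
    """Shared persistence plumbing: the connection is created once, in __init__."""
    def __init__(self, db):
        self.conn = sqlite3.connect(db)

    def save_to_db(self):
        cur = self.conn.cursor()
        with self.conn:  # commits on success, rolls back on error
            cur.execute("...")

class BusinessObject1(BaseObject):
    pass  # only the business-specific SQL differs per subclass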
If for some reason you can't do either, you can still add a staticmethod to your class that handles the connection. It's not ideal, but it's still better than your current solution:
@staticmethod
def create_connection(db_file):
    conn = None
    try:
        with sqlite3.connect(db_file) as conn:
            return conn
    except sqlite3.Error as e:
        print("Can't connect to database, error:", e, sep="\n")
    return conn
Then you add a parameter to your other methods, which will be the connection returned by create_connection:
def save_to_db(self, connection):
    cur = connection.cursor()
    cur.execute("...")
This way you can separate your connection object from your class object:
if __name__ == "__main__":
    conn = BusinessObject1.create_connection(db)
    b1 = BusinessObject1(...)
    b1.save_to_db(conn)

How to avoid writing "MySQLdb.connect" again and again?

I wonder if there is a way to avoid writing MySQLdb.connect(host='localhost', user='xyz', password='xyz', db='xyz')
again and again in Python?
Example:
def Add_New_User(self):
    self.db = MySQLdb.connect(host='localhost', user='xyz', password='xyz', db='xyz')
    self.cur = self.db.cursor()
After two or three lines (whenever a new def is required, and a lot of defs are required), I need to write the same connection string all over again, many times:
def Add_New_User(self):
    self.db = MySQLdb.connect(host='localhost', user='xyz', password='xyz', db='xyz')
    self.cur = self.db.cursor()
I hope there is a simple way to do this, e.g. the connection code written once and saved in, say, MyConString.py, so that in a new def in, say, index.py I could simply call MyConString.
Create a connection function:
def conn(database):
    import mysql.connector
    connection = mysql.connector.connect(host='localhost',
                                         database=database,
                                         user='root',
                                         password='yourpass')
    cursor = connection.cursor(buffered=True)
    return connection, cursor
Now, whenever you have to connect in any function do this:
def insert():
    connection_data = conn("mydatabase")
    connection = connection_data[0]
    cursor = connection_data[1]
    cursor.execute("show tables;")
    connection.commit()
    connection.close()
You can even make an entirely new script for functions that are repeatedly used to keep your main file clean.
Then you can just add import connections (say connections.py is the file that contains the conn() function):
# This is your main.py
import connections

def insert():
    connection_data = connections.conn("mydatabase")
    connection = connection_data[0]
    cursor = connection_data[1]
    cursor.execute("show tables;")
    connection.commit()
    connection.close()
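A variation on the same idea (just a sketch, assuming the connections.py module above with its conn() function) is to wrap it in a context manager so the commit/close bookkeeping isn't repeated either:

import contextlib
import connections

@contextlib.contextmanager
def db_cursor(database):
    connection, cursor = connections.conn(database)
    try:
        yield cursor
        connection.commit()
    finally:
        connection.close()

def insert():
    with db_cursor("mydatabase") as cursor:
        cursor.execute("show tables;")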
Hope this helped!

"AttributeError: __exit__" when importing data in SQL database

I am currently following along with a book (cpp for quantitative finance) and I am trying to import the symbols for the S&P500 from Wikipedia into an SQL database I've created. However, I am getting "AttributeError: __exit__" on my "with con" statement (see below). I have read posts about similar errors but I cannot seem to fix mine. I am extremely new to Python, so perhaps there is some fundamental misunderstanding on my part. I have included the relevant code below; any advice would be hugely appreciated.
"""
Insert the S&P500 symbols into the MySQL database.
"""
# Connect to the MySQL instance
db_host = 'localhost'
db_user = 'sec_user'
db_pass = 'database_password'
db_name = 'database_name'
con = mdb.connect(
host=db_host, user=db_user, passwd=db_pass, db=db_name
)
# Create the insert strings
column_str = """ticker, instrument, name, sector,
currency, created_date, last_updated_date
"""
insert_str = ("%s, " * 7)[:-2]
final_str = "INSERT INTO symbol (%s) VALUES (%s)" % \
(column_str, insert_str)
# Using the MySQL connection, carry out
# an INSERT INTO for every symbol
with con:
cur = con.cursor()
cur.executemany(final_str, symbols)
if __name__ == "__main__":
symbols = obtain_parse_wiki_snp500()
insert_snp500_symbols(symbols)
print("%s symbols were successfully added." % len(symbols))
The error is telling you that the object returned by mdb.connect is not a context manager, that is, it cannot be used in a with statement. You'll need to close the connection manually once you've finished with it (con.close()) or use a package that provides a connection that is a context manager.
A quick study of commonly used connectors suggests you want to use pymysql
>>> import MySQLdb
>>> import mysql.connector
>>> import pymysql
>>> params = {'host': 'localhost', 'user': 'root', 'password': '', 'database': 'test'}
>>> for pkg in (MySQLdb, mysql.connector, pymysql):
...     conn = pkg.connect(**params)
...     try:
...         with conn:
...             pass
...     except AttributeError as ex:
...         print(pkg.__name__, 'failed with', ex)
...
MySQLdb failed with __enter__
mysql.connector failed with __enter__
If you have to use a connection that is not a context manager, you can emulate it in a try/except/finally suite:
import MySQLdb

conn = MySQLdb.connect(host='localhost', user='root', password='', database='test')
try:
    cursor = conn.cursor()
    cursor.execute('SELECT * FROM my_table;')
    for row in cursor.fetchall():
        print(row)
    cursor.close()
    conn.commit()
except:
    # log the error here
    conn.rollback()
finally:
    conn.close()
Or you can make your own context manager using the tools provided in contextlib:
import contextlib
import MySQLdb

@contextlib.contextmanager
def managed_connection(conn):
    try:
        yield conn
        conn.commit()
    except:
        # log the error here
        conn.rollback()
    finally:
        conn.close()

conn = MySQLdb.connect(host='localhost', user='root', password='', database='test')
with managed_connection(conn) as mc:
    cursor = mc.cursor()
    cursor.execute('SELECT * FROM my_table;')
    for row in cursor.fetchall():
        print(row)
    cursor.close()
(You can make a cursor context manager too, or have the context manager yield a cursor rather than the connection).
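For example, a sketch of the cursor-yielding variant (same assumptions and style as the block above; managed_cursor is just an illustrative name):

import contextlib
import MySQLdb

@contextlib.contextmanager
def managed_cursor(conn):
    cursor = conn.cursor()
    try:
        yield cursor
        conn.commit()
    except:
        # log the error here
        conn.rollback()
    finally:
        cursor.close()
        conn.close()

conn = MySQLdb.connect(host='localhost', user='root', password='', database='test')
with managed_cursor(conn) as cursor:
    cursor.execute('SELECT * FROM my_table;')
    for row in cursor.fetchall():
        print(row)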

python try/except in loop

I'm using Python 3 and I have this loop, which iterates over this list:
self.settings["db"]["host"] = ["db-0", "db-1"]
My problem is that return self.conn seems to return the first option (db-0) all the time instead of trying db-1. I have 2 DB container servers, and when I stop one of them, for example db-0, it should try psycopg2.connect with db-1:
def db_conn(self):
    try:
        for server in self.settings["db"]["host"]:
            self.conn = psycopg2.connect(
                host=server,
                user=self.settings["db"]["user"],
                password=self.settings["db"]["password"],
                database=self.settings["db"]["database"],
                connect_timeout=5,
            )
            return self.conn
    except Exception:
        pass
If the loop has not succeeded I don't want it to return self.conn, only if the try worked.
I also tried :
def db_conn(self):
    try:
        for server in self.settings["db"]["host"]:
            self.conn = psycopg2.connect(
                host=server,
                user=self.settings["db"]["user"],
                password=self.settings["db"]["password"],
                database=self.settings["db"]["database"],
                connect_timeout=5,
            )
    except Exception:
        pass
    return self.conn
You are looping within a try. Do it the other way around: push the try down inside the loop. The DbC contract the current code is attempting to offer is to return a valid connection. Let's make that more explicit by writing a very simple helper. We will spell out its contract in a docstring.
def first_true(iterable, default=False, pred=None):
    # from https://docs.python.org/3/library/itertools.html
    return next(filter(pred, iterable), default)

def get_conn_or_none(self, server):
    """Returns DB connection to server, or None if that's not possible."""
    try:
        return psycopg2.connect(
            host=server,
            user=self.settings["db"]["user"],
            password=self.settings["db"]["password"],
            database=self.settings["db"]["database"],
            connect_timeout=5,
        )
    except Exception:
        return None
Now db_conn is simply:
def db_conn(self):
    return first_true(map(self.get_conn_or_none, self.settings["db"]["host"]))
That uses the same logic as in your question.
You may want to have db_conn additionally check whether all connection attempts failed, and raise a fatal error in that case.
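A minimal sketch of that extra check, reusing the helpers above (the exception type is only illustrative):

def db_conn(self):
    conn = first_true(map(self.get_conn_or_none, self.settings["db"]["host"]), default=None)
    if conn is None:
        raise ConnectionError("could not connect to any configured database server")
    self.conn = conn
    return conn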
BTW, it's very odd that you're apparently storing a list of server hostnames
in self.settings["db"]["host"], given that an individual user / pw / db is
stored in the other fields.
Consider renaming that list to self.servers.
You can try to check the connection status before returning it:
def db_conn(self):
    try:
        for server in self.settings["db"]["host"]:
            self.conn = psycopg2.connect(
                host=server,
                user=self.settings["db"]["user"],
                password=self.settings["db"]["password"],
                database=self.settings["db"]["database"],
                connect_timeout=5,
            )
            if not self.conn.closed:  # psycopg2 has no isOk(); .closed is 0 while the connection is usable
                return self.conn
    except Exception:
        pass
This code worked for me:
def db_conn(self):
    for server in self.settings["db"]["host"]:
        try:
            self.print_info("TRYING", server)
            self.conn = psycopg2.connect(
                host=server,
                user=self.settings["db"]["user"],
                password=self.settings["db"]["password"],
                database=self.settings["db"]["database"],
            )
        except:
            self.print_info("SERVER DOWN", server)
            continue
        return self.conn
continue skips the rest of the loop body when I get an exception (a failed connection), and the for loop then moves on to the next item in the list.

How to enable MySQL client auto re-connect with MySQLdb?

I came across the PHP way of doing the trick:
my_bool reconnect = 1;
mysql_options(&mysql, MYSQL_OPT_RECONNECT, &reconnect);
but no luck with MySQLdb (python-mysql).
Can anybody please give a clue? Thanks.
I solved this problem by creating a function that wraps the cursor.execute() method since that's what was throwing the MySQLdb.OperationalError exception. The other example above implies that it is the conn.cursor() method that throws this exception.
import MySQLdb

class DB:
    conn = None

    def connect(self):
        self.conn = MySQLdb.connect()

    def query(self, sql):
        try:
            cursor = self.conn.cursor()
            cursor.execute(sql)
        except (AttributeError, MySQLdb.OperationalError):
            self.connect()
            cursor = self.conn.cursor()
            cursor.execute(sql)
        return cursor

db = DB()
sql = "SELECT * FROM foo"
cur = db.query(sql)
# wait a long time for the Mysql connection to timeout
cur = db.query(sql)
# still works
I had problems with the proposed solution because it didn't catch the exception. I am not sure why.
I have solved the problem with the ping(True) statement which I think is neater:
import MySQLdb
con=MySQLdb.Connect()
con.ping(True)
cur=con.cursor()
Got it from here: http://www.neotitans.com/resources/python/mysql-python-connection-error-2006.html
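If the connection can sit idle for a long time, one related pattern (just a sketch, assuming MySQLdb's ping() accepts the reconnect flag as shown above) is to ping right before each query rather than only once at connect time:

import MySQLdb

con = MySQLdb.Connect()

def query(sql, params=None):
    # re-establish the connection if the server has dropped it since the last call
    con.ping(True)
    cur = con.cursor()
    cur.execute(sql, params)
    return cur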
If you are using Ubuntu Linux, there was a patch added to the python-mysql package that added the ability to set that same MYSQL_OPT_RECONNECT option (see here). I have not tried it though.
Unfortunately, the patch was later removed due to a conflict with autoconnect and transactions (described here).
The comments from that page say:
1.2.2-7 Published in intrepid-release on 2008-06-19
python-mysqldb (1.2.2-7) unstable; urgency=low
[ Sandro Tosi ]
* debian/control
- list items lines in description starts with 2 space, to avoid reformat
on webpages (Closes: #480341)
[ Bernd Zeimetz ]
* debian/patches/02_reconnect.dpatch:
- Dropping patch:
Comment in Storm which explains the problem:
# Here is another sad story about bad transactional behavior. MySQL
# offers a feature to automatically reconnect dropped connections.
# What sounds like a dream, is actually a nightmare for anyone who
# is dealing with transactions. When a reconnection happens, the
# currently running transaction is transparently rolled back, and
# everything that was being done is lost, without notice. Not only
# that, but the connection may be put back in AUTOCOMMIT mode, even
# when that's not the default MySQLdb behavior. The MySQL developers
# quickly understood that this is a terrible idea, and removed the
# behavior in MySQL 5.0.3. Unfortunately, Debian and Ubuntu still
# have a patch right now which *reenables* that behavior by default
# even past version 5.0.3.
I needed a solution that works similarly to Garret's, but for cursor.execute(), as I want to let MySQLdb handle all escaping duties for me. The wrapper module ended up looking like this (usage below):
#!/usr/bin/env python
import MySQLdb

class DisconnectSafeCursor(object):
    db = None
    cursor = None

    def __init__(self, db, cursor):
        self.db = db
        self.cursor = cursor

    def close(self):
        self.cursor.close()

    def execute(self, *args, **kwargs):
        try:
            return self.cursor.execute(*args, **kwargs)
        except MySQLdb.OperationalError:
            self.db.reconnect()
            self.cursor = self.db.cursor()
            return self.cursor.execute(*args, **kwargs)

    def fetchone(self):
        return self.cursor.fetchone()

    def fetchall(self):
        return self.cursor.fetchall()

class DisconnectSafeConnection(object):
    connect_args = None
    connect_kwargs = None
    conn = None

    def __init__(self, *args, **kwargs):
        self.connect_args = args
        self.connect_kwargs = kwargs
        self.reconnect()

    def reconnect(self):
        self.conn = MySQLdb.connect(*self.connect_args, **self.connect_kwargs)

    def cursor(self, *args, **kwargs):
        cur = self.conn.cursor(*args, **kwargs)
        return DisconnectSafeCursor(self, cur)

    def commit(self):
        self.conn.commit()

    def rollback(self):
        self.conn.rollback()

disconnectSafeConnect = DisconnectSafeConnection
Using it is trivial, only the initial connect differs. Extend the classes with wrapper methods as per your MySQLdb needs.
import mydb
db = mydb.disconnectSafeConnect()
# ... use as a regular MySQLdb.connections.Connection object
cursor = db.cursor()
# no more "2006: MySQL server has gone away" exceptions now
cursor.execute("SELECT * FROM foo WHERE bar=%s", ("baz",))
You can separate the commit and the close for the connection... it's not pretty, but it does the job.
class SqlManager(object):
    """
    Class that handle the database operation
    """
    def __init__(self, server, database, username, pswd):
        self.server = server
        self.dataBase = database
        self.userID = username
        self.password = pswd

    def Close_Transation(self):
        """
        Commit the SQL Query
        """
        try:
            self.conn.commit()
        except Sql.Error, e:
            print "-- reading SQL Error %d: %s" % (e.args[0], e.args[1])

    def Close_db(self):
        try:
            self.conn.close()
        except Sql.Error, e:
            print "-- reading SQL Error %d: %s" % (e.args[0], e.args[1])

    def __del__(self):
        print "close connection with database.."
        self.conn.close()
I had a similar problem with MySQL and Python, and the solution that worked for me was to upgrade MySQL to 5.0.27 (on Fedora Core 6; your system may work fine with a different version).
I tried a lot of other things, including patching the Python libraries, but upgrading the database was a lot easier and (I think) a better decision.
In addition to Liviu Chircu's solution ... add the following method to DisconnectSafeCursor:
def __getattr__(self, name):
    return getattr(self.cursor, name)
and the original cursor properties like "lastrowid" will keep working.
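For instance (a sketch reusing the db object from the usage example above; the table and column names are placeholders):

cursor = db.cursor()
cursor.execute("INSERT INTO foo (bar) VALUES (%s)", ("baz",))
print(cursor.lastrowid)  # delegated to the wrapped MySQLdb cursor via __getattr__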
Your other bet is to work around dropped connections yourself with code.
One way to do it would be the following:
import MySQLdb

class DB:
    conn = None

    def connect(self):
        self.conn = MySQLdb.connect()

    def cursor(self):
        try:
            return self.conn.cursor()
        except (AttributeError, MySQLdb.OperationalError):
            self.connect()
            return self.conn.cursor()

db = DB()
cur = db.cursor()
# wait a long time for the Mysql connection to timeout
cur = db.cursor()
# still works
