I'm trying to run the code below to connect to a database using Python 3.6. I'm not sure I'm using Python correctly: I read in the config file, create an instance of the Dao class, and pass the config details back to the parent class of Dao, which is Db.
When I then go and try to open and close a connection on the Dao object, it says that dbhost isn't set.
Any help greatly appreciated.
Error
Traceback (most recent call last):
  File "parse.py", line 12, in <module>
    sources = daoObj.getSourceUrls()
  File "E:\classes\Dao.py", line 14, in getSourceUrls
    conn = super().open()
  File "E:\classes\Db.py", line 22, in open
    conn = pymysql.connect(dbhost, dbuser, dbpass, dbname);
NameError: name 'dbhost' is not defined
parse.py
import configparser
from classes.Dao import Dao
# Load configuration settings
config = configparser.ConfigParser()
config.read("./config.ini")
# Create instance of database class
daoObj = Dao(config)
# Test database connection
daoObj.test()
Dao.py
from classes.Db import Db
class Dao(Db):
    """Contains all SQL queries used for database interaction"""
    node = None

    def __init__(self, config):
        """Default constructor"""
        super().__init__(config)

    def getSourceUrls(self):
        conn = super().open()
        super().close(conn)
Db.py
import pymysql
class Db:
    """Database connection class, handles all opening and closing of MySQL database connections."""
    dbuser = None
    dbpass = None
    dbhost = None
    dbname = None

    def __init__(self, config):
        """Default constructor"""
        # Assign the database login credentials
        dbuser = config["DB"]["USER"]
        dbpass = config["DB"]["PASS"]
        dbhost = config["DB"]["HOST"]
        dbname = config["DB"]["DATABASE"]

    def open(self):
        """Open database connection."""
        conn = None
        try:
            conn = pymysql.connect(dbhost, dbuser, dbpass, dbname);
        except pymysql.err.InternalError as e:
            print("Error connecting to database.")
        return conn

    def close(self, conn):
        """Close passed in database connection"""
        conn.close()
config.ini
[DB]
USER=username
PASS=password
HOST=127.0.0.1
DATABASE=database
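For illustration, the NameError arises because __init__ binds the config values to local names while open() reads bare names that are never defined in its scope. A minimal sketch of Db.py with the values stored on the instance instead is shown below; it is only a sketch (it also switches to keyword arguments for pymysql.connect and a broader exception class), not necessarily the exact intended fix.

import pymysql


class Db:
    """Database connection class, handles opening and closing of MySQL connections."""

    def __init__(self, config):
        """Default constructor"""
        # Bind the credentials to the instance instead of to local variables
        self.dbuser = config["DB"]["USER"]
        self.dbpass = config["DB"]["PASS"]
        self.dbhost = config["DB"]["HOST"]
        self.dbname = config["DB"]["DATABASE"]

    def open(self):
        """Open a database connection."""
        conn = None
        try:
            conn = pymysql.connect(host=self.dbhost, user=self.dbuser,
                                   password=self.dbpass, db=self.dbname)
        except pymysql.MySQLError:
            # Base class for PyMySQL errors, so connection failures are caught too
            print("Error connecting to database.")
        return conn

    def close(self, conn):
        """Close the passed-in database connection."""
        conn.close()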
Related
I'm attempting to run an executable, "main.exe", that was built from three Python modules (with "main.py" being the main script) using the PyInstaller module. The command that was used to build the executable from the scripts is:
pyinstaller --onefile main.py
This script invokes functions from the "tictactoe_office_release.py" script, which establishes a connection to a MySQL 8.0.31 server database for performing CRUD operations. When running the executable from the command line, I receive the following string of errors:
Error: 'Authentication plugin 'caching_sha2_password' is not supported'
Traceback (most recent call last):
File "main.py", line 24, in <module>
File "tictactoe_office_release.py", line 42, in __init__
File "mysql_python_lib.py", line 124, in __init__
File "mysql_python_lib.py", line 96, in read_query
AttributeError: 'NoneType' object has no attribute 'cursor'
[25743] Failed to execute script 'main' due to unhandled exception!
It is important to note, however, that my main.py script executes without errors when run outside of the executable. Now, I have troubleshot the errors using numerous comments from "Authentication Plugin 'caching_sha2_password' is not supported", including the following:
1) Uninstalling 'mysql-connector' and installing 'mysql-connector-python'
2) Setting the 'auth_plugin' parameter to 'mysql_native_password' in the 'mysql.connector.connect()' function calls (sketched below)
3) Modifying the MySQL encryption by running
ALTER USER 'root'@'localhost' IDENTIFIED WITH caching_sha2_password BY 'Panther021698';
but am receiving the same error after I re-build and run the executable.
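For clarity, step 2 above refers to passing auth_plugin directly to the connector. A minimal sketch of such a call is below; the host, user, and password values are placeholders, not the real credentials.

import mysql.connector

# Sketch only: force the client-side authentication plugin instead of the
# default caching_sha2_password.
conn = mysql.connector.connect(
    host="localhost",                      # placeholder
    user="root",                           # placeholder
    passwd="your_password",                # placeholder
    auth_plugin="mysql_native_password",
)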
The relevant code in my "tictactoe_office_release.py" module, which defines the functions used for communication between the Python interpreter, the MySQL server, and the database, is provided below:
from distutils.util import execute
import mysql.connector
from mysql.connector import Error
from mysql.connector.locales.eng import client_error
class mysql_python_connection:
    ''' Provide class definition for creating connection to MySQL server,
    initializing database, and executing queries '''

    def __init__(self):
        self.host_name = "localhost"
        self.user_name = "root"
        self.passwd = "Panther021698"

    def create_server_connection(self):
        ''' This function establishes a connection between Python
        interpreter and the MySQL Community Server that we are attempting
        to connect to '''
        self.connection = None  # Close any existing connections
        try:
            self.connection = mysql.connector.connect(
                host = self.host_name,
                user = self.user_name,
                passwd = self.passwd
            )
            print("MySQL connection successful")
        except Error as err:
            print(f"Error: '{err}'")

    def create_database(self, query):
        ''' This function initializes a new database
        on the connected MySQL server '''
        cursor = self.connection.cursor()
        try:
            cursor.execute(query)
            print("Database created successfully")
        except Error as err:
            print(f"Error: '{err}'")

    def create_db_connection(self, db_name):
        ''' This function establishes a connection between Python,
        the MySQL Community Server and a database that we
        are initializing on the server '''
        self.connection = None  # Close any existing connections
        self.db_name = db_name
        try:
            self.connection = mysql.connector.connect(
                host = self.host_name,
                user = self.user_name,
                passwd = self.passwd,
                database = self.db_name
            )
            print("MySQL Database connection successful")
        except Error as err:
            print(f"Error: '{err}'")

    def execute_query(self, query):
        ''' This function takes SQL queries stored
        in Python as strings and passes them
        to the "cursor.execute()" method to
        execute them on the server '''
        cursor = self.connection.cursor()
        try:
            cursor.execute(query)
            self.connection.commit()  # Implements commands detailed in SQL queries
            print(query + "Query successful")
        except Error as err:
            print(f"Error: '{err}'")

    def read_query(self, query):
        ''' This function reads and returns data from
        a MySQL database using the specified query '''
        cursor = self.connection.cursor()
        print("cursor datatype is ")
        print(type(cursor))
        #result = None
        try:
            cursor.execute(query)
            result = cursor.fetchall()
            return result
        except Error as err:
            print(f"Error: '{err}'")
Additionally, my MySQL environment variables are provided in the image below.
I'm getting the following error:
Traceback (most recent call last):
File "/databricks/spark/python/pyspark/serializers.py", line 473, in dumps
return cloudpickle.dumps(obj, pickle_protocol)
File "/databricks/spark/python/pyspark/cloudpickle/cloudpickle_fast.py", line 73, in dumps
cp.dump(obj)
File "/databricks/spark/python/pyspark/cloudpickle/cloudpickle_fast.py", line 563, in dump
return Pickler.dump(self, obj)
TypeError: cannot pickle 'psycopg2.extensions.cursor' object
PicklingError: Could not serialize object: TypeError: cannot pickle 'psycopg2.extensions.cursor' object
while running the script below:
def get_connection():
    conn_props = brConnect.value
    print(conn_props)
    #extract value from broadcast variables
    database = conn_props.get("database")
    user = conn_props.get("user")
    pwd = conn_props.get("password")
    host = conn_props.get("host")
    db_conn = psycopg2.connect(
        host = host,
        user = user,
        password = pwd,
        database = database,
        port = 5432
    )
    return db_conn


def process_partition_up(partition, db_cur):
    updated_rows = 0
    try:
        for row in partition:
            process_row(row, myq, db_cur)
    except Exception as e:
        print("Not connected")
    return updated_rows


def update_final(df, db_cur):
    df.rdd.coalesce(2).foreachPartition(lambda x: process_partition_up(x, db_cur))


def etl_process():
    for id in ['003']:
        conn = get_connection()
        for t in ['email_table']:
            query = f'''(select * from public.{t} where id= '{id}') as tab'''
            df_updated = load_data(query)
            if df_updated.count() > 0:
                q1 = insert_ops(df_updated, t)  #assume this function returns a insert query
                query_props = q1
                sc = spark.sparkContext
                brConnectQ = sc.broadcast(query_props)
                db_conn = get_connection()
                db_cur = db_conn.cursor()
                update_final(df_updated, db_cur)
                conn.commit()
        conn.close()
Explanation:
Here etl_process() internally calls get_connection(), which returns a psycopg2 connection object. After that it calls update_final(), which takes a dataframe and a psycopg2 cursor object as arguments.
update_final() then calls process_partition_up() on each partition (df.rdd.coalesce(2).foreachPartition), passing in the partition data and the psycopg2 cursor object.
After passing the psycopg2 cursor object to process_partition_up(), I'm not getting a cursor object; instead I'm getting the error above.
Can anyone help me resolve this error?
Thank you.
I think that you don't understand what's happening here.
You are creating a database connection in your driver (etl_process), and then trying to ship that live connection from the driver, across your network, to the executor to do the work (your lambda in foreachPartition is executed on the executor).
That is what Spark is telling you with "cannot pickle 'psycopg2.extensions.cursor'": it can't serialize your live connection to the database to ship it to an executor.
You need to call conn = get_connection() from inside process_partition_up; this will initialize the connection to the database from inside the executor (along with any other bookkeeping you need to do).
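A minimal sketch of that pattern, under the assumption that get_connection(), process_row(), and myq from the question are defined at module level so Spark can serialize references to them:

# Sketch only: the psycopg2 connection and cursor are created inside the
# partition function, i.e. on the executor, so no live connection is pickled.
def process_partition_up(partition):
    updated_rows = 0
    db_conn = get_connection()      # opens the connection on the executor
    db_cur = db_conn.cursor()
    try:
        for row in partition:
            process_row(row, myq, db_cur)
            updated_rows += 1
        db_conn.commit()
    except Exception as e:
        print("Not connected", e)
    finally:
        db_cur.close()
        db_conn.close()
    return updated_rows


def update_final(df):
    # Only the partition data crosses the network; each executor builds its own connection.
    df.rdd.coalesce(2).foreachPartition(process_partition_up)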
FYI: the worst part, which I want to call out, is that this code will work on your local machine, because locally the executor and the driver are one and the same.
I am really confused by writing a config file for connecting Python to SQL Server, specifically using pyodbc. I have written a class which connects to the database. However, my boss keeps yelling at me that the connection should be in a config file, and I'm struggling with how to do it and even understand it. So far my code is below. As you can tell, I'm new to coding, so can someone please help me understand the purpose of a config file and help me with the process?
import pyodbc
import sqlalchemy as sa
import urllib
import pandas as pd
class SQL_Database:

    def __init__(self, database, driver='SQL Server', server='.\TEST_SERVER'):
        self.driver = driver
        self.server = server
        self.database = database

    def create_server_connection(self):
        connection = None
        try:
            connection = pyodbc.connect(f'Driver={self.driver};'
                                        f'Server={self.server};'
                                        f'Database={self.database};'
                                        'Trusted_Connection=yes;')
            print("MySQL Database connection successful")
        except pyodbc.Error as err:
            print("Connection failed")
        return connection


conn = SQL_Database(database='index_changes').create_server_connection()
Here's an example of loading the values from a JSON file.
Create a config file named config.json.
{
    "driver": "DriverName",
    "server": "ServerName",
    "database": "DatabaseName"
}
Read in the config parameters in your class.
import pyodbc
import json
class SQL_Database():

    def __init__(self):
        with open('path/to/config.json','r') as fh:
            config = json.load(fh)

        self.driver = config['driver']
        self.server = config['server']
        self.database = config['database']

        connection = pyodbc.connect(
            f'Driver={self.driver};'
            f'Server={self.server};'
            f'Database={self.database};'
        )


SQL_Database()
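If you prefer to keep the create_server_connection() method from the question, the same idea can be combined with it. A minimal sketch, assuming config.json sits next to the script (the path is an assumption):

import json
import pyodbc


class SQL_Database:

    def __init__(self, config_path='config.json'):   # assumed location of the config file
        with open(config_path, 'r') as fh:
            config = json.load(fh)
        self.driver = config['driver']
        self.server = config['server']
        self.database = config['database']

    def create_server_connection(self):
        connection = None
        try:
            connection = pyodbc.connect(f'Driver={self.driver};'
                                        f'Server={self.server};'
                                        f'Database={self.database};'
                                        'Trusted_Connection=yes;')
            print("Database connection successful")
        except pyodbc.Error:
            print("Connection failed")
        return connection


conn = SQL_Database().create_server_connection()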
I want to create a query timeout in SQLAlchemy. I have an Oracle database.
I have tried the following code:
import sqlalchemy
engine = sqlalchemy.create_engine('oracle://db', connect_args={'querytimeout': 10})
I got the following error:
TypeError: 'querytimeout' is an invalid keyword argument for this function
I would like a solution that looks like this:
connection.execute('query').set_timeout(10)
Maybe it is possible to set the timeout in the SQL query itself? I found how to do it in PL/SQL, but I need plain SQL.
How can I set a query timeout?
The only way you can set a connection timeout for the Oracle engine from SQLAlchemy is to create and configure sqlnet.ora.
Linux
Create a sqlnet.ora file in the folder
/opt/oracle/instantclient_19_9/network/admin
Windows
For Windows, create the corresponding \network\admin folder, e.g.
C:\oracle\instantclient_19_9\network\admin
Example sqlnet.ora file
SQLNET.INBOUND.CONNECT_TIMEOUT = 120
SQLNET.SEND_TIMEOUT = 120
SQLNET.RECV_TIMEOUT = 120
You can find more parameters here: https://docs.oracle.com/cd/E11882_01/network.112/e10835/sqlnet.htm
The way to do it in Oracle is via resource manager. Have a look here
timeout decorator
Get your session handle as you normally would. (Notice that the session has not actually connected yet.) Then, test the session in a function that is decorated with wrapt_timeout_decorator.timeout.
#!/usr/bin/env python3
from time import time
from cx_Oracle import makedsn
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy.sql import text
from wrapt_timeout_decorator import timeout
class ConnectionTimedOut(Exception):
    pass


class Blog:

    def __init__(self):
        self.port = None

    def connect(self, connection_timeout):
        @timeout(connection_timeout, timeout_exception=ConnectionTimedOut)
        def test_session(session):
            session.execute(text('select dummy from dual'))

        session = sessionmaker(bind=self.engine())()
        test_session(session)
        return session

    def engine(self):
        return create_engine(
            self.connection_string(),
            max_identifier_length=128
        )

    def connection_string(self):
        driver = 'oracle'
        username = 'USR'
        password = 'solarwinds123'
        return '%s://%s:%s@%s' % (
            driver,
            username,
            password,
            self.dsn()
        )

    def dsn(self):
        host = 'hn.com'
        dbname = 'ORCL'
        print('port: %s expected: %s' % (
            self.port,
            'success' if self.port == 1530 else 'timeout'
        ))
        return makedsn(host, self.port, dbname)

    def run(self):
        self.port = 1530
        session = self.connect(connection_timeout=4)
        for r in session.execute(text('select status from v$instance')):
            print(r.status)

        self.port = 1520
        session = self.connect(connection_timeout=4)
        for r in session.execute(text('select status from v$instance')):
            print(r.status)


if __name__ == '__main__':
    Blog().run()
In this example, the network is firewalled with port 1530 open. Port 1520 is blocked and leads to a TCP connection timeout. Output:
port: 1530 expected: success
OPEN
port: 1520 expected: timeout
Traceback (most recent call last):
  File "./blog.py", line 68, in <module>
    Blog().run()
  File "./blog.py", line 62, in run
    session = self.connect(connection_timeout=4)
  File "./blog.py", line 27, in connect
    test_session(session)
  File "/home/exagriddba/lib/python3.8/site-packages/wrapt_timeout_decorator/wrapt_timeout_decorator.py", line 123, in wrapper
    return wrapped_with_timeout(wrap_helper)
  File "/home/exagriddba/lib/python3.8/site-packages/wrapt_timeout_decorator/wrapt_timeout_decorator.py", line 131, in wrapped_with_timeout
    return wrapped_with_timeout_process(wrap_helper)
  File "/home/exagriddba/lib/python3.8/site-packages/wrapt_timeout_decorator/wrapt_timeout_decorator.py", line 145, in wrapped_with_timeout_process
    return timeout_wrapper()
  File "/home/exagriddba/lib/python3.8/site-packages/wrapt_timeout_decorator/wrap_function_multiprocess.py", line 43, in __call__
    self.cancel()
  File "/home/exagriddba/lib/python3.8/site-packages/wrapt_timeout_decorator/wrap_function_multiprocess.py", line 51, in cancel
    raise_exception(self.wrap_helper.timeout_exception, self.wrap_helper.exception_message)
  File "/home/exagriddba/lib/python3.8/site-packages/wrapt_timeout_decorator/wrap_helper.py", line 178, in raise_exception
    raise exception(exception_message)
__main__.ConnectionTimedOut: Function test_session timed out after 4.0 seconds
Caution
Do not decorate the function that calls sessionmaker, or you will get:
_pickle.PicklingError: Can't pickle <class 'sqlalchemy.orm.session.Session'>: it's not the same object as sqlalchemy.orm.session.Session
SCAN
This implementation is a "connection timeout" without regard to underlying cause. The client could time out before trying all available SCAN listeners.
I have config.ini:
[mysql]
host=localhost
port=3306
user=root
passwd=abcdefgh
db=testdb
unix_socket=/opt/lampp/var/mysql/mysql.sock
I have this class:
#!/usr/bin/python
import MySQLdb,ConfigParser
config = ConfigParser.ConfigParser()
config.read("config.ini")
class MySQL( object ):

    def __init__( self ):
        self.host = config.get("mysql","host")
        self.port = config.get("mysql","port")
        self.user = config.get("mysql","user")
        self.passwd = config.get("mysql","passwd")
        self.db = config.get("mysql","db")
        self.unix_socket = config.get("mysql","unix_socket")

        self.conn = MySQLdb.Connect(self.host,
                                    self.port,
                                    self.user,
                                    self.passwd,
                                    self.db,
                                    self.unix_socket)
        self.cursor = self.conn.cursor ( MySQLdb.cursors.DictCursor )

    def __del__( self ):
        self.cursor.close()
        self.conn.close()
and this:
#!/usr/bin/env python
from mysql import MySQL
class Incident( MySQL ):

    def getIncidents( self ):
        self.cursor.execute("""*VALID QUERY*""")
        return self.cursor.fetchall()
and finally this:
import subprocess, os, alarm
from Queue import Queue
from incident_model import Incident
fileQueue = Queue()
def enumerateFilesPath():
    global fileQueue
    incident = Incident()
    incidents = incident.getIncidents()
    for i in incidents:
        fileQueue.put("MD5")


def main():
    global fileQueue
    enumerateFilesPath()
Output:
Traceback (most recent call last):
  File "./mwmonitor.py", line 202, in <module>
    main()
  File "./mwmonitor.py", line 184, in main
    enumerateFilesPath()
  File "./mwmonitor.py", line 86, in enumerateFilesPath
    incident = Incident()
  File "/usr/share/mwanalysis/core/mysql.py", line 23, in __init__
    self.unix_socket)
  File "/usr/lib/pymodules/python2.6/MySQLdb/__init__.py", line 81, in Connect
    return Connection(*args, **kwargs)
  File "/usr/lib/pymodules/python2.6/MySQLdb/connections.py", line 170, in __init__
    super(Connection, self).__init__(*args, **kwargs2)
TypeError: an integer is required
Exception AttributeError: "'Incident' object has no attribute 'cursor'" in <bound method Incident.__del__ of <Incident object at 0xa03d46c>> ignored
If someone can help detect and correct the error, I would greatly appreciate it.
Thanks in advance.
Your __del__ method is causing confusion. Specifically, it refers to self.cursor and self.conn which may never get created if, for example, MySQLdb.Connect raises an exception (which is what seems to happen).
I suggest you modify your class as follows:
class MySQL( object ):

    def __init__( self ):
        self.conn = None
        self.cursor = None
        self.host = config.get("mysql","host")
        self.port = config.get("mysql","port")
        self.user = config.get("mysql","user")
        self.passwd = config.get("mysql","passwd")
        self.db = config.get("mysql","db")
        self.unix_socket = config.get("mysql","unix_socket")

        self.conn = MySQLdb.Connect(self.host,
                                    self.port,
                                    self.user,
                                    self.passwd,
                                    self.db,
                                    self.unix_socket)
        self.cursor = self.conn.cursor ( MySQLdb.cursors.DictCursor )

    def __del__( self ):
        if self.cursor is not None:
            self.cursor.close()
        if self.conn is not None:
            self.conn.close()
This won't solve the problem, but it should give better diagnostics.
Now to the actual problem that you're experiencing. I strongly suspect that you're supplying the arguments to Connect in the wrong order, or the types aren't quite right, or something along those lines. To quote the docstring for Connection.__init__:
Create a connection to the database. It is strongly recommended
that you only use keyword parameters. Consult the MySQL C API
documentation for more information.
host
    string, host to connect
user
    string, user to connect as
passwd
    string, password to use
db
    string, database to use
port
    integer, TCP/IP port to connect to
unix_socket
    string, location of unix_socket to use
...
"It is strongly that you only use keyword parameters." I recommend that you do just that when you call MySQLdb.Connect. Also, make sure that port is an int and not a string.
I suspect it's expecting port to be an integer rather than a string. Try:
self.port = int(config.get("mysql","port"))
I am not sure if this is a connectivity error. Have you checked the type of the incident_model?
TypeError: an integer is required
Exception AttributeError: "'Incident' object has no attribute 'cursor'" in