Get PostGIS table list with PyQGIS 3.10 and newer - python

I have to load some PostGIS layers with PyQGIS into QGIS projects. Until version 3.8 of QGIS I had a working solution. The connection is made with this code:
from qgis.core import QgsDataSourceUri, QgsVectorLayer, QgsSettings, QgsCredentials
import re
import time
import db_manager.db_plugins.postgis.connector as con
...
class PgConnection:
    def __init__(self, host, dbname, port):
        self.uri = QgsDataSourceUri()
        self.host = host
        self.dbname = dbname
        self.port = port
        self.authcfg = None
        self.user = ''
        self.passwd = ''
        # Look for a stored connection matching host and database
        settings = QgsSettings()
        settings.beginGroup('PostgreSQL/connections')
        for connectionName in settings.childGroups():
            if settings.value(f'{connectionName}/host') == host and \
                    settings.value(f'{connectionName}/database') == dbname:
                self.authcfg = settings.value(f'{connectionName}/authcfg')
                break
        if self.authcfg is None:
            # No stored auth config: ask the user for credentials
            self.uri.setConnection(self.host, port, self.dbname, None, None)
            connInfo = self.uri.connectionInfo()
            (success, user, passwd) = QgsCredentials.instance().get(connInfo, None, None)
            if success:
                self.uri.setPassword(passwd)
                self.uri.setUsername(user)
        else:
            self.uri.setConnection(self.host, self.port, self.dbname, None, None, authConfigId=self.authcfg)
Now I need to get all tables from a specific schema. Until QGIS version 3.8 I used the following code for this (in the same class):
    def getPgTableNames(self, schema):
        tablenames = con.PostGisDBConnector(self.uri)
        return tablenames.getTables(schema)
Since version 3.10 this is no longer working, as con.PostGisDBConnector(self.uri) throws an error. What is the correct way to get the table names?

It took a while to find my error. The code above does not establish a connection to the database; I did this later when adding layers to the project. However, I had to add the following line:
self.conn = QgsProviderRegistry.instance().providerMetadata('postgres').createConnection(self.uri.uri(),{})
The function to return the table list is now:
    def getPgTableNames(self):
        tablenames = self.conn.tables(self.schema)
        return tablenames
And the complete code:
from qgis.core import QgsDataSourceUri, QgsVectorLayer, QgsProviderRegistry, QgsSettings, QgsCredentials
import re
import time

# Class definitions
class PgConnection:
    def __init__(self, host, dbname, port, schema):
        self.uri = QgsDataSourceUri()
        self.host = host
        self.dbname = dbname
        self.port = port
        self.schema = schema
        self.authcfg = None
        self.user = ''
        self.passwd = ''
        pgProvider = QgsProviderRegistry.instance().providerMetadata('postgres')
        # Look for a stored connection matching host and database
        settings = QgsSettings()
        settings.beginGroup('PostgreSQL/connections')
        for connectionName in settings.childGroups():
            if settings.value(f'{connectionName}/host') == host and \
                    settings.value(f'{connectionName}/database') == dbname:
                self.authcfg = settings.value(f'{connectionName}/authcfg')
                break
        if self.authcfg is None:
            # No stored auth config: ask the user for credentials
            self.uri.setConnection(self.host, port, self.dbname, None, None)
            connInfo = self.uri.connectionInfo()
            (success, user, passwd) = QgsCredentials.instance().get(connInfo, None, None)
            if success:
                self.uri.setPassword(passwd)
                self.uri.setUsername(user)
        else:
            self.uri.setConnection(self.host, self.port, self.dbname, None, None, authConfigId=self.authcfg)
        # Establish the actual provider connection
        self.conn = pgProvider.createConnection(self.uri.uri(), {})

    def getPgTableNames(self):
        tablenames = self.conn.tables(self.schema)
        return tablenames

# Specify connection data
host = 'some_host'
dbname = 'some_db'
schema = 'some_schema'
port = '1234'

# Connect to PostGIS
uri = PgConnection(host, dbname, port, schema)
tableList = uri.getPgTableNames()
for t in tableList:
    print(t.defaultName())

I just use the connection name and it works. I have a connection named ftth, and using that name I can get the connection directly:
conn = QgsProviderRegistry.instance().providerMetadata('postgres').createConnection('ftth')
conn.tables()
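Note that conn.tables() returns QgsAbstractDatabaseProviderConnection.TableProperty objects rather than plain strings, so to get actual names you still call a method on each entry; a minimal sketch, assuming a schema named public (a placeholder):

conn = QgsProviderRegistry.instance().providerMetadata('postgres').createConnection('ftth')
for t in conn.tables('public'):
    print(t.tableName())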

Related

Python simple DNS resolver: memory leak

This code is a DNS resolver that checks a DB for an entry not older than 5 minutes.
#!/usr/bin/python3
from MySQLdb import _mysql as MySQL
from dnslib import RR, QTYPE, RCODE, A
from dnslib.label import DNSLabel
from dnslib.server import DNSServer, BaseResolver
from time import sleep, time

class MariaResolver(BaseResolver):
    DELTA = 300

    def __init__(self):
        self.password = "********************"

    def resolve(self, request, handler):
        reply = request.reply()
        qname = request.q.qname
        fqdn = str(request.q.qname)
        try:
            if fqdn.find("iut-") == -1:
                reply.header.rcode = RCODE.REFUSED
            else:
                hostname = fqdn.split(".")[0]
                timestamp = int(time()) - self.DELTA
                query = "SELECT ip FROM dns WHERE record='{}' AND timestamp>{}"
                # A new connection is opened for every request
                db = MySQL.connect("localhost", "dns", self.password, "salles")
                db.query(query.format(hostname, timestamp))
                result = db.store_result()
                row = result.fetch_row(how=1)
                if row:
                    ip = row[0]["ip"].decode("utf-8")
                    reply.add_answer(RR(qname, QTYPE.A, ttl=0,
                                        rdata=A(ip)))
                else:
                    reply.header.rcode = RCODE.REFUSED
                # Not reached if anything above raises
                db.close()
        except Exception as e:
            print(e)
            reply.header.rcode = RCODE.REFUSED
        return reply

if __name__ == '__main__':
    resolver = MariaResolver()
    udp_server = DNSServer(resolver, port=53)
    udp_server.start_thread()
    while udp_server.isAlive():
        sleep(0.1)
This code leaks over time and I do not understand why.
In the Proxmox screenshot, you can see the service restarted at the end.
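One plausible contributor to the leak, assuming exceptions do occur in practice: db.close() is only reached on the happy path, so every failed request leaks a connection. A minimal sketch of a helper that closes the connection on every path (the function name lookup_ip is hypothetical):

from MySQLdb import _mysql as MySQL

def lookup_ip(password, hostname, timestamp):
    # Open one connection per call, but close it on every path via finally
    db = MySQL.connect("localhost", "dns", password, "salles")
    try:
        db.query("SELECT ip FROM dns WHERE record='{}' AND timestamp>{}"
                 .format(hostname, timestamp))
        row = db.store_result().fetch_row(how=1)
        return row[0]["ip"].decode("utf-8") if row else None
    finally:
        db.close()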

Data Tables uploading to local PostgreSQL

I am running a data parser/web scraper with Python. The parser then pushes the data (SQL tables) to PostgreSQL. However, I can't find the tables in pgAdmin. This is part of a full-stack Django web app, using Docker, which I did not create but am trying to get running locally. As far as I can tell the Docker containers are working as intended, and so is the data-parsing script. Since I don't know much about issues like this, please let me know if there is anything else I should include.
Database connection in Python:
import psycopg2
import logging
import sys
import os

class DatabaseConnection(object):
    def __init__(self, user="postgres", password="1234", host="127.0.0.1", port="5432", database="postgres",
                 course_table="course_info", prereqs_table="prereqs", antireqs_table="antireqs",
                 requirements_table="requirements", communications_table="communications",
                 breadth_table="breadth_table"):
        if os.getenv("UWPATH_ENVIRONMENT") is not None and os.getenv("UWPATH_ENVIRONMENT") == "docker":
            host = "db"
        if os.getenv("DB_PASS") is not None:
            password = os.getenv("DB_PASS")
        if os.getenv("DB_USER") is not None:
            user = os.getenv("DB_USER")
        if os.getenv("DB_NAME") is not None:
            database = os.getenv("DB_NAME")
        if os.getenv("DB_HOST") is not None:
            host = os.getenv("DB_HOST")
        if os.getenv("DB_PORT") is not None:
            port = os.getenv("DB_PORT")
        self.connection = psycopg2.connect(user=user, password=password, host=host, port=port, database=database)
        self.cursor = self.connection.cursor()
        self.course_table = course_table
        self.prereqs_table = prereqs_table
        self.antireqs_table = antireqs_table
        self.requirements_table = requirements_table
        self.communications_table = communications_table
        self.breadth_table = breadth_table
        self.root = self.__Logger()

    def __Logger(self):
        self.logger = logging.getLogger()
        if not len(self.logger.handlers):
            self.logger.setLevel(logging.DEBUG)
            handler = logging.StreamHandler(sys.stdout)
            handler.setLevel(logging.DEBUG)
            formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
            handler.setFormatter(formatter)
            self.logger.addHandler(handler)
        return self.logger

    def execute(self, command):
        try:
            # self.root.info(command)
            print(command)
            self.cursor.execute(command)
            return True
        except Exception as e:
            print(command)
            self.root.error(e)
            return False

    def commit(self):
        if self.connection:
            self.connection.commit()

    def close(self):
        self.connection.close()

    def select(self, what, table, condition=""):
        """
        SELECT <what> FROM <table> <condition>;
        :param what: string
        :param table: string
        :param condition: string
        :return: list
        """
        command = "SELECT " + what + " FROM " + table + " " + condition + ";"
        self.execute(command)
        return self.cursor.fetchall()
Trying to access the backend in the browser returns this, which makes me believe the tables don't exist in PostgreSQL.
Output is in a txt file (roughly 300,000 lines; I don't know if this is useful in analyzing the problem, but I thought I'd include it either way).
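One thing worth checking with this class, since psycopg2 does not autocommit by default: anything written through execute() stays invisible to other sessions, including pgAdmin, until commit() is called on the same connection. A minimal usage sketch (the inserted row and column are hypothetical):

db = DatabaseConnection()
db.execute("INSERT INTO course_info (course) VALUES ('CS101')")  # hypothetical row/column
db.commit()  # without this, pgAdmin never sees the insert
db.close()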

Best practice to handle a MySQL connection in a long running python script

I currently have a long-running Python script (multiple days) which occasionally executes queries against a MySQL DB. The queries are executed from different threads.
I connect to the DB as follows:
import mysql.connector
import time

class DB():
    connection = None

    def __init__(self, host, user, password, database):
        self.host = host
        self.user = user
        self.password = password
        self.database = database
        self.connect()

    def connect(self):
        try:
            self.connection = mysql.connector.connect(
                host = self.host,
                user = self.user,
                password = self.password,
                database = self.database,
                autocommit = True,
            )
        except Exception as exception:
            print("[DBCONNECTION] {exception} Retrying in 5 seconds.".format(exception = str(exception)))
            time.sleep(5)
            self.connect()

db = DB("11.111.11.111", "mydb", "mydb", "mydb")
One of the functions to execute a query:
def getUsersDB(self):
    db.connection.ping(reconnect=True)
    cursor = db.connection.cursor()
    cursor.execute("SELECT * FROM Users")
    users = cursor.fetchall()
    return users
I believe I'm far from good practice in how I handle the connection. What is the correct way to handle such a case?
The problem with your approach is that db.connection.ping(reconnect=True) doesn't guarantee you a live connection.
You can read the function's description here:
https://dev.mysql.com/doc/connector-python/en/connector-python-api-mysqlconnection-ping.html
You can try to use this:
class DB:
    connection = None

    def __init__(
        self,
        host: str,
        user: str,
        password: str,
        database: str
    ) -> None:
        self.host = host
        self.user = user
        self.password = password
        self.database = database
        self.connection = self.init_conn()

    def init_conn(
        self,
    ):
        return mysql.connector.connect(
            host=self.host,
            user=self.user,
            password=self.password,
            database=self.database,
        )

    def get_cursor(
        self,
    ):
        # This will try to reconnect also.
        # In case it fails, you will have to create a new connection
        try:
            self.connection.ping(
                reconnect=True,
                attempts=3,
                delay=5,
            )
        except mysql.connector.InterfaceError as err:
            self.connection = self.init_conn()
        return self.connection.cursor()
And use the DB object like this:
def getUsersDB(self):
    cursor = db.get_cursor()  # cursor from a live connection
    cursor.execute("SELECT * FROM Users")
    users = cursor.fetchall()
    return users
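Since the question mentions queries coming from different threads, note that a single MySQL Connector/Python connection is not safe to share across threads. A minimal sketch of one connection per thread via threading.local (the class name ThreadLocalDB is hypothetical):

import threading
import mysql.connector

class ThreadLocalDB:
    def __init__(self, **conn_kwargs):
        self._local = threading.local()
        self._conn_kwargs = conn_kwargs

    def get_cursor(self):
        # Each thread lazily opens and then reuses its own connection
        conn = getattr(self._local, "connection", None)
        if conn is None:
            conn = mysql.connector.connect(**self._conn_kwargs)
            self._local.connection = conn
        conn.ping(reconnect=True, attempts=3, delay=5)
        return conn.cursor()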

Login into Server AND MySQL from Python

I'm trying to log in to my MySQL server that I'm running on DigitalOcean, but unfortunately I have no clue how to push the login through Python. I've got the MySQL part implemented, but don't know how to log in to the actual server itself (the computer). What other code do I need to add to accomplish this? I've already added the variables mySqlUser and mySqlPassword to the top of the file.
Here is the code I have so far:
import MySQLdb

class Database:
    host = 'some ip address'
    user = 'root'
    password = '123'
    mySqlUser = 'root'
    mySqlPassword = 'someotherpassword'
    db = 'test'

    def __init__(self):
        self.connection = MySQLdb.connect(self.host, self.user, self.password, self.db)
        self.cursor = self.connection.cursor()

    def insert(self, query):
        try:
            self.cursor.execute(query)
            self.connection.commit()
        except:
            self.connection.rollback()

    def query(self, query):
        cursor = self.connection.cursor(MySQLdb.cursors.DictCursor)
        cursor.execute(query)
        return cursor.fetchall()

    def __del__(self):
        self.connection.close()

if __name__ == "__main__":
    db = Database()
    # CleanUp operation
    del_query = "DELETE FROM basic_python_database"
    db.insert(del_query)
    # Data insert into the table
    query = """
        INSERT INTO basic_python_database
        (`name`, `age`)
        VALUES
        ('Mike', 21),
        ('Michael', 21),
        ('Imran', 21)
        """
    # db.query(query)
    db.insert(query)
    # Data retrieved from the table
    select_query = """
        SELECT * FROM basic_python_database
        WHERE age = 21
        """
    people = db.query(select_query)
    for person in people:
        print("Found %s " % person['name'])
You can try this:
def __init__(self):
    self.host = 'some ip address'
    self.user = 'root'
    self.password = '123'
    self.mySqlUser = 'root'
    self.mySqlPassword = 'someotherpassword'
    self.connection = MySQLdb.connect(self.host, self.user, self.password, self.db)
    self.cursor = self.connection.cursor()
or
def __init__(self):
    self.connection = MySQLdb.connect(host, user, password, db)
    self.cursor = self.connection.cursor()
And you'd better pass the parameters when instantiating your class instead of hard-coding the values in the class.
Just a suggestion, and don't mind my English (:
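Neither snippet above covers the other half of the question, logging in to the server machine itself. One common pattern, assuming SSH access to the DigitalOcean box, is to tunnel the MySQL port over SSH; a minimal sketch using the third-party sshtunnel package (all hosts and credentials are placeholders):

import MySQLdb
from sshtunnel import SSHTunnelForwarder  # pip install sshtunnel

with SSHTunnelForwarder(
    ('some ip address', 22),                 # SSH login to the server (the computer)
    ssh_username='root',
    ssh_password='123',
    remote_bind_address=('127.0.0.1', 3306)  # MySQL as seen from the server
) as tunnel:
    # The MySQL login then goes through the local end of the tunnel
    connection = MySQLdb.connect(
        host='127.0.0.1',
        port=tunnel.local_bind_port,
        user='root',
        passwd='someotherpassword',
        db='test'
    )
    cursor = connection.cursor()
    cursor.execute("SELECT VERSION()")
    print(cursor.fetchone())
    connection.close()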

Using attributes of an object in python

I have one class, which is reading data from a JSON file:
import os
import json
from db_connection import DB_Connection

class RA_Admin:
    def __init__(self):
        data = None
        self.load_access_data()

    def load_access_data(self):
        with open('../docs/db_data.json') as data_file:
            self.data = json.load(data_file)

a = RA_Admin()
db_con = DB_Connection(a.data)
db_con.read_data()
I wrote a second class to connect to a database:
import mysql.connector

class DB_Connection:
    def __init__(self, data):
        database_connection = None
        cursor = None
        user = data["database"]["user"]
        paw = data["database"]["paw"]
        ip_address = data["database"]["ip_adress"]
        db_name = data["database"]["database_name"]
        port = data["database"]["port"]

    def read_data(self):
        database_connection = mysql.connector.connect(user = self.user, password = self.paw, host=self.ip_address, port=self.port, database=self.db_name)
        self.cursor = database_connection.cursor(dictionary=True)
I get the following error:
database_connection = mysql.connector.connect(user = self.user, password = self.paw, host=self.ip_address, port=self.port, database=self.db_name)
AttributeError: DB_Connection instance has no attribute 'user'
I can print the user and the other attributes in the __init__ method, so why are they not available in read_data?
You need self:
self.user = data["database"]["user"]
self.paw = data["database"]["paw"]
....
See: What is the purpose of self in Python?
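Applied to the class in the question, a minimal sketch of the corrected __init__, storing every value the later methods need on self:

import mysql.connector

class DB_Connection:
    def __init__(self, data):
        self.database_connection = None
        self.cursor = None
        # Stored on self, so read_data() can see them
        self.user = data["database"]["user"]
        self.paw = data["database"]["paw"]
        self.ip_address = data["database"]["ip_adress"]
        self.db_name = data["database"]["database_name"]
        self.port = data["database"]["port"]

    def read_data(self):
        self.database_connection = mysql.connector.connect(
            user=self.user, password=self.paw, host=self.ip_address,
            port=self.port, database=self.db_name)
        self.cursor = self.database_connection.cursor(dictionary=True)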
