I am trying to store a pickled nested dictionary in PostgreSQL (I am aware that this is a quick & dirty method and that I won't be able to access the dictionary contents from PostgreSQL - usually bad practice):
# boilerplate, preamble and upstream work.
import psycopg2
import pickle

''' Inputs: nd = dictionary to be pickled '''
pickled = pickle.dumps(nd)
connection = psycopg2.connect(user="-----",
                              password="----",
                              host="----",
                              port="----",
                              database="----")
name = 'database1'
print('Connected...')
cursor = connection.cursor()
print(connection.get_dsn_parameters(),"\n")
cursor.execute("CREATE TABLE thetable (name TEXT, ablob BYTEA)")
print('Created Table...')
cursor.execute("INSERT INTO thetable VALUES(%s)",(psycopg2.Binary(pickled),))
connection.commit()
print('Added Data...')
cursor.close()
connection.close()
print('Connection closed...')
When it comes to data retrieval, I am having many issues importing the data from Postgres - essentially the data is to be opened, unpickled back into the dictionary and visualised. I have tried:
import psycopg2
import pickle
from io import BytesIO
connection = psycopg2.connect(user="----",
                              password="----",
                              host="----",
                              port="----",
                              database="----")
cursor = connection.cursor()
cursor.execute("SELECT ablob FROM thetable")
result, = cursor.fetchone()
cursor.close()
connection.rollback()
result = BytesIO(result)
print(pickle.load(result))
I followed this recipe: https://www.oreilly.com/library/view/python-cookbook/0596001673/ch08s08.html, and consulted "Insert an image in postgresql database" and "saving python object in postgres table with pickle", but I have been unable to get the pickled dictionary back.
Any advice in achieving this is greatly appreciated!
When your CREATE TABLE lists two columns, your INSERT has to name the columns it wants to fill, unless it supplies values for all of them. Your INSERT passes a single value into a two-column table, which is why it fails.
import psycopg2
import pickle
d = {
    "foo": "bar"
}
p = pickle.dumps(d)
connection = psycopg2.connect(database = "test")
cursor = connection.cursor()
cursor.execute("CREATE TABLE thetable (name TEXT, ablob BYTEA)")
cursor.execute("INSERT INTO thetable VALUES(%s,%s)",('test',p))
connection.commit()
cursor.close()
connection.close()
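A variant that names the target columns explicitly is a bit more robust if the table definition changes later (a minimal sketch of the same insert):

cursor.execute("INSERT INTO thetable (name, ablob) VALUES (%s, %s)", ('test', p))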
and reading
import psycopg2
import pickle
connection = psycopg2.connect(database = "test")
cursor = connection.cursor()
cursor.execute("SELECT ablob FROM thetable WHERE name='test';")
result = cursor.fetchone()
print(pickle.loads(result[0]))
cursor.close()
connection.close()
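Note that on Python 3, psycopg2 returns a BYTEA column as a memoryview, and pickle.loads accepts any bytes-like object, so the BytesIO detour from the question is unnecessary. A minimal sketch of the retrieval, assuming the table created above:

cursor.execute("SELECT ablob FROM thetable WHERE name='test';")
row = cursor.fetchone()
print(pickle.loads(row[0]))  # row[0] is a memoryview; pickle.loads handles it directly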
Related
My goal is to page through the API, store the results as a JSON feed, and then send them to a MySQL DB. Once stored, I want to check whether any new rows have been added; if so, truncate the table and insert all the rows again (maybe not the best approach?). However, for some strange reason nothing is being stored in MySQL anymore, and my prints aren't working. Any thoughts on what I messed up?
PYTHON
import requests
import json
import sys
import MySQLdb

def dbconnect():
    try:
        db = MySQLdb.connect(
            host='localhost',
            user='root',
            passwd='',
            db='watch',
        )
    except Exception as e:
        sys.exit("Can't connect to database")
    return db

# init db
db = dbconnect()
cursor = db.cursor()

# Start getting all entries
def get_all_cracked_entries():
    # results will be appended to this list
    all_time_entries = []
    # loop through all pages and return JSON object
    for page in range(1, 4):
        url = "https://api.watch.com/api?page=" + str(page)
        response = requests.get(url=url).json()
        all_time_entries.append(response)
        for product in response:
            print("id:", product["_id"])
            print("title:", product["title"])
            print("slug:", product["slug"])
            print("releaseDate:", product["releaseDate"])
            cursor.execute("INSERT INTO jsondump (id, title, slug, releaseDate) VALUES (%s,%s,%s,%s)", (product["_id"], product["title"], product["slug"], product["releaseDate"]))
            db.commit()

    # Check row count
    cursor.execute("SELECT * FROM `jsondump`")
    cursor.fetchall()
    rc = cursor.rowcount
    print("%d" % rc)
    if rc > rc + 1:
        rs = cursor.fetchall()
    else:
        cursor.execute("TRUNCATE TABLE jsondump")
        for product in response:
            print("id:", product["_id"])
            print("title:", product["title"])
            print("slug:", product["slug"])
            print("releaseDate:", product["releaseDate"])
            print('---')
            db = dbconnect()
            cursor = db.cursor()
            cursor.execute("INSERT INTO jsondump (id, title, slug, releaseDate) VALUES (%s,%s,%s)", (product["_id"], product["title"], product["slug"], product["releaseDate"]))
            db.commit()
    cursor.close()

    # prettify JSON
    data = json.dumps(all_time_entries, sort_keys=True, indent=0)
    return data
SAMPLE JSON
[{
    "_id": "xxxxxxx",
    "releaseDate": "2020-02-13T21:00:00-03:00",
    "slug": "table-manners",
    "title": "Table Manners"
}]
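For what it's worth, here is a sketch of the row-count check the question describes, assuming a hypothetical flat list all_products of product dicts (note that all_time_entries above appends one list per page, so its length is the page count, not the row count):

cursor.execute("SELECT COUNT(*) FROM jsondump")
(db_count,) = cursor.fetchone()
if db_count != len(all_products):
    # counts differ, so wipe the table and reload everything from the API
    cursor.execute("TRUNCATE TABLE jsondump")
    cursor.executemany(
        "INSERT INTO jsondump (id, title, slug, releaseDate) VALUES (%s, %s, %s, %s)",
        [(p["_id"], p["title"], p["slug"], p["releaseDate"]) for p in all_products],
    )
    db.commit()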
I have an issue running my SQL queries on a Postgres database hosted at ElephantSQL:
This is my code to connect (the db name, user and password are replaced by placeholders):
DATABASE_URL = 'postgres://YYYY:ZZZZ@drona.db.elephantsql.com:5432/YYYY'
import urllib.parse as up
import psycopg2

# ---------------------------- CONNECT ELEPHANT DB
def ElephantConnect():
    up.uses_netloc.append("postgres")
    url = up.urlparse(DATABASE_URL)
    conn = psycopg2.connect(dbname='YYYY',
                            user='YYYY',
                            password='ZZZZ',
                            host='drona.db.elephantsql.com',
                            port='5432')
    cursor = conn.cursor()
    # cursor.execute("CREATE TABLE notes(id integer primary key, body text, title text);")
    # conn.commit()
    # conn.close()
    return conn
This code seems to connect to the db fine.
My issue is when I want to delete all the rows of a table:
def update(df, table_name, deleteYes='Yes'):
    conn = ElephantConnect()
    db = create_engine(DATABASE_URL)
    cursor = conn.cursor()
    if deleteYes == 'Yes':  # delete
        queryCount = "SELECT count(*) FROM {};".format(table_name)
        queryDelete = "DELETE FROM {};".format(table_name)
        count = db.execute(queryCount)
        rows_before = count.fetchone()[0]
        try:
            db.execute(queryDelete)
            logging.info('Deleted {} rows from table {}'.format(rows_before, table_name))
        except:
            logging.info('Delete error on table {}'.format(table_name))
    else:
        pass
It seems that when I run db.execute(queryDelete), it goes into the exception branch. I get no error message, but the count query works fine...
Thanks
I think the reason for the error is that there are foreign keys referencing the table. To be sure, assign the exception to a variable and print it:
except Exception as ex:
    print(ex)
By the way, if you want to quickly delete all of the rows from a table, it is much more efficient to truncate the table instead of deleting the rows:
truncate table table_name
DELETE is more useful when you only want to remove rows that match some condition:
delete from table_name where ...
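Applied to the update function from the question, the except block would become something like this (a sketch; note that if another table references table_name through a foreign key, a plain TRUNCATE is also rejected unless you add CASCADE or truncate the referencing tables in the same statement):

try:
    db.execute(queryDelete)
    logging.info('Deleted {} rows from table {}'.format(rows_before, table_name))
except Exception as ex:
    # log the actual cause, e.g. a foreign key violation
    logging.info('Delete error on table {}: {}'.format(table_name, ex))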
I have this code
import MySQLdb
db = MySQLdb.connect("localhost", "root", "password", "db_name")
cursor = db.cursor()
cursor.execute("INSERT INTO directedEdges (`originLbl`, `targetLbl`) VALUES
('user1#enron.com', 'user2#enron.com' )")
data = cursor.fetchone()
print data
but when I execute this script the output is None and I can't insert the values into the db. Why?
At first I thought it was a problem with the db connection, but if I execute
import MySQLdb
db = MySQLdb.connect("localhost", "root", "password", "db_name")
cursor = db.cursor()
cursor.execute("SELECT * FROM directedEdges")
data = cursor.fetchone()
print(data)
I see the content of the table directedEdges.
Thanks
You issued the cursor.fetchone() call immediately after inserting into the database, so there is no queried data to fetch; fetchone() only returns rows after a SELECT. You also never commit the INSERT. Try this:
import MySQLdb
db = MySQLdb.connect("localhost", "root", "password", "db_name")
cursor = db.cursor()
cursor.execute("INSERT INTO directedEdges (`originLbl`, `targetLbl`) VALUES
('user1#enron.com', 'user2#enron.com' )")
# Commit your insert
db.commit()
# Query for data
cursor.execute("SELECT * FROM directedEdges")
data = cursor.fetchone()
print(data)
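As a side note, it is safer to let the driver bind the values instead of inlining them into the SQL string; a sketch of the same insert with placeholders:

cursor.execute(
    "INSERT INTO directedEdges (`originLbl`, `targetLbl`) VALUES (%s, %s)",
    ('user1@enron.com', 'user2@enron.com')
)
db.commit()  # persist the insert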
I am trying to update a MySQL table using Python variables. Below is the code that is not working for me:
import mysql.connector
conn = mysql.connector.connect(
    host=host,
    user=user,
    passwd=password,
    database=database
)
cur = conn.cursor()
cur.execute("update player_list set country = '%s', region = '%s',name = '%s' where id = %s "
% (country, region,name, id))
Running the cur.execute line raises the following error:
mysql.connector.errors.InternalError: Unread result found
The id column is an integer, in case it matters.
This error means a previous query on the connection still has unfetched rows, so it looks like you need to specify buffered mode so each result set is read in full. Please refer to the official documentation and change your code to use buffered=True when creating your cursor:
https://dev.mysql.com/doc/connector-python/en/connector-python-api-mysqlcursorbuffered.html
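With mysql-connector-python that means passing buffered=True when the cursor is created (a minimal sketch):

cur = conn.cursor(buffered=True)  # every SELECT is fetched in full, leaving no unread rows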
Try
with conn.cursor() as cur:
    sql = "update player_list set country = '%s', region = '%s', name = '%s' where id = %s" % (country, region, name, id)
    cur.execute(sql)
    conn.commit()
and add buffered=True to your connection, like
connection = mysql.connector.connect([...], buffered=True)
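Separately, the %-formatting above builds the SQL string by hand; passing the values as a parameter tuple lets the driver do the quoting and avoids SQL injection (a sketch):

sql = "update player_list set country = %s, region = %s, name = %s where id = %s"
cur.execute(sql, (country, region, name, id))  # the driver quotes each value safely
conn.commit()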
I need to use multiple connections in my Python test code, but the problem I'm facing is that the second connection does not see statements executed in the first one. As far as I understand, autocommit should be ON by default.
Here is the code
import testing.postgresql
from sqlalchemy import create_engine
def test_simple():
    with testing.postgresql.Postgresql() as postgresql:
        try:
            engine = create_engine(postgresql.url())
            conn = engine.connect().connection
            with conn.cursor() as cur:
                cur.execute("""CREATE TABLE country (id integer, name text);
                               INSERT INTO country(id, name) VALUES (1, 'Mali');
                               INSERT INTO country(id, name) VALUES (2, 'Congo');
                            """)
                # OK
                cur.execute('select * from country')
                countries = cur.fetchall()
                print(str(countries))

            # ERROR psycopg2.ProgrammingError: relation "country" does not exist
            conn1 = engine.connect().connection
            with conn1.cursor() as cur1:
                cur1.execute('select * from country')
                countries1 = cur1.fetchall()
                print(str(countries1))
        finally:
            conn.close()
            conn1.close()
How can I use multiple connections in my test?
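For what it's worth, psycopg2 connections do not autocommit by default: the first connection's CREATE/INSERT statements sit in an open transaction that conn1 cannot see. A minimal sketch of one fix under that assumption, since conn here is the raw psycopg2 connection:

# after the INSERTs on the first connection:
conn.commit()            # make the new table visible to other connections
# or, before executing anything:
conn.autocommit = True   # run every statement in its own transaction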