Psycopg2 - Inserting complex query with strings + numbers? [duplicate] - python

I have a tuple as below
data = ({'weather station name': 'Substation', 'wind': '6 km/h', 'barometer': '1010.3hPa', 'humidity': '42%', 'temperature': '34.5 C', 'place_id': '001D0A00B36E', 'date': '2016-05-10 09:48:58'})
I am trying to push the values from the above tuple to the postgres table using the code below:
# Question code — raises "ValueError: unsupported format character 'f'".
# NOTE(review): indentation was lost when this snippet was pasted; the bodies of
# try/except/finally would need to be indented for this to be valid Python.
try:
con = psycopg2.connect("dbname='WeatherForecast' user='postgres' host='localhost' password='****'")
cur = con.cursor()
# BUG: psycopg2 accepts only %s / %(name)s placeholders; %(temperature)f and
# %(place_id)d are not supported and trigger the ValueError. Also note the
# query is given `final_weather_data`, a name not defined in this snippet
# (the dict shown above is named `data`).
cur.executemany("""INSERT INTO weather_data(temperature,humidity,wind,barometer,updated_on,place_id) VALUES (%(temperature)f, %(humidity)f, %(wind)f, %(barometer)f, %(date)s, %(place_id)d)""", final_weather_data)
# fetchone() after an INSERT with no RETURNING clause produces no result set.
ver = cur.fetchone()
print(ver)
except psycopg2.DatabaseError as e:
print('Error {}'.format(e))
sys.exit(1)
finally:
# NOTE(review): `con` is unbound here if connect() itself raised.
if con:
con.close()
Where datatype of each field in the DB is as below:
id serial NOT NULL,
temperature double precision NOT NULL,
humidity double precision NOT NULL,
wind double precision NOT NULL,
barometer double precision NOT NULL,
updated_on timestamp with time zone NOT NULL,
place_id integer NOT NULL,
When I run the code to push the data into the postgres table using psycopg2, it raises the error "ValueError: unsupported format character 'f'".
I suspect the issue is in the formatting. I am using Python 3.4.

Have a look at the documentation:
The variables placeholder must always be a %s, even if a different placeholder (such as a %d for integers or %f for floats) may look more appropriate:
>>> cur.execute("INSERT INTO numbers VALUES (%d)", (42,)) # WRONG
>>> cur.execute("INSERT INTO numbers VALUES (%s)", (42,)) # correct
While, your SQL query contains all type of placeholders:
"""INSERT INTO weather_data(temperature,humidity,wind,barometer,updated_on,place_id)
VALUES (%(temperature)f, %(humidity)f, %(wind)f, %(barometer)f, %(date)s, %(place_id)d)"""

Related

connect a json file data to mysql database (empty table)

I'm a beginner in Python, trying to connect to a database from Python with a JSON file loaded in my program so I can read it and eventually analyze it for certain things. But I can't connect to it, and I have tried different approaches while still getting the same error.
# Question code — loads tweets from a JSON-lines file into MySQL.
# NOTE(review): loop/conditional indentation was lost when this was pasted.
import mysql.connector
import json
# create the key
from mysql.connector import cursor
mydb = mysql.connector.connect(host='localhost', port='3306', user='root', password='nihad147', database='tweets')
mycursor = mydb.cursor()
# BUG: the trailing comma after categorie_id (before the closing paren) is a
# SQL syntax error, and there are 7 placeholders while val_tweet below only
# supplies 6 values (nothing for categorie_id).
sql_tweet = """INSERT INTO tweet ( tweet_id,
id_user,
text,
tweet_location,
created_at,
name_screen,
categorie_id,
)
VALUES (%s,%s,%s,%s,%s,%s,%s)"""
sql_user = """INSERT INTO tweetuser (
id_user,
name_screen,
location_user,
count_followers,
friends_count,
statuse_count)
VALUES (%s,%s,%s,%s,%s,%s)"""
# BUG: the string opens with four quote characters, so the SQL text begins
# with a stray '"'; there is no comma after `longitude`, and the column list
# is never closed with ')' before VALUES. This statement is also never
# executed with values anywhere below.
sql_location = """"insert into tweet_location (
location_id,
latitude,
longitude
tweet_id
VALUES(%s,%s,%s,%s)"""
myJsonFile = open('tweets.json', encoding="utf-8")
mycursor.execute("DELETE FROM tweet")
mycursor.execute("DELETE FROM tweetuser")
mycursor.execute("DELETE FROM tweet_location")
c = 0
for line in myJsonFile:
c = c + 1
print("tweet number ", c, " is uploading to the server")
data = json.loads(line)
# insert into tweet
val_tweet = (
data['tweet_id'], data['user_id_str'], data['raw_text'],data['location']['address']['city'],data['date'], data['user_screen_name'])
# BUG: cursor.execute() takes (statement, params); passing sql_location as the
# second positional argument makes it the parameter set. This was presumably
# meant to be mycursor.execute(sql_tweet, val_tweet).
mycursor.execute(sql_tweet,sql_location, val_tweet)
mydb.commit()
# Check whether the user already exists.
# NOTE(review): building SQL by string concatenation is vulnerable to SQL
# injection — prefer "WHERE id_user = %s" with a parameter tuple.
user = "SELECT * FROM tweetuser WHERE id_user = '" + str(data['user_id_str']) + "'"
mycursor.execute(user)
myresult = mycursor.fetchall()
row_count = mycursor.rowcount
if row_count == 0:
val_user = (data['user_id_str'], data['user_screen_name'], data['location']['address']['city'],data['user_followers_count'],
data['user_friends_count'], data['user_statuses_count'])
mycursor.execute(sql_user, val_user)
mydb.commit()
print('done')
here's an example of json file data :
{
"tweet_id":"1261276320878788609",
"date":"Fri May 15 12:44:42 +0000 2020",
"raw_text":"برنامج وطني لدعم المبدعين في مواجهة #كورون",
"geo_source":"user_location",
"location":{
"address":{
"country":"Tunisia",
"country_code":"tn",
"state_district":"غزالة",
"county":"العرب",
"state":"Bizerte"
},
"response":"{'place_id': 235309103, 'licence': 'Data © OpenStreetMap contributors, ODbL 1.0. https://osm.org/copyright', 'osm_type': 'relation', 'osm_id': 7124228, 'boundingbox': ['37.105957', '37.2033466', '9.4739053', '9.6124953'], 'lat': '37.1551868', 'lon': '9.54834183807249', 'display_name': 'العرب, غزالة, Bizerte, Tunisia', 'class': 'boundary', 'type': 'administrative', 'importance': 0.45, 'icon': '/data/nominatimimages/mapicons/poi_boundary_administrative.p.20.png','address':{'county': 'العرب', 'state_district': 'غزالة', 'state': 'Bizerte', 'country': 'Tunisia', 'country_code': 'tn'}}",
"geohash":"snwg37buskzd",
"query_term":"arab",
"lon":9.54834183807249,
"lat":37.1551868
},
"user_friends_count":61,
"user_description":"I love UAE and his great leadership",
"user_created_at":"Wed Oct 09 11:41:41 +0000 2013",
"user_screen_name":"SikandarMirani",
"user_id_str":"706377881",
"user_verified":false,
"user_statuses_count":50804,
"user_followers_count":946,
"user_location":"Dubai United Arab Emirates"
}
Thanks to you guys, I was able to solve the previous error: I hadn't checked the data type of the user id — it has to be BIGINT, not INT, since the values are large.
I had no problem connecting my JSON file to my database, but the data got inserted only into the tweetuser table, not into the tweet table.
The tweet table is empty.
I would appreciate any kind of help. Thank you.
The error
mysql.connector.errors.DataError: 1264 (22003): Out of range value for column 'id_user' at row 1
suggests that the value you are trying to use as the id_user is numerically too large.
Since you haven't posted the table definitions, my guess is you are using MEDIUMINT or SMALLINT or TINYINT for id_user and the actual user ID that you are trying to write into the database is too large for that data type.
In your example user_id_str is 706377881; however, the maximum values for MEDIUMINT are 8388607 (signed) and 16777215 (unsigned), respectively.
Check the data types in the table definitions.
You are connecting to your DB, that is not the problem.
The problem is that the user id that you are trying to insert has a length that surpasses the maximum allowed by MySQL for the datatype of that field. See here and here for more info related to your error.

SQLite3: ""sqlite3.OperationalError: no such column: dateandtime"" when making Primary Key?

I'm currently trying to create a database using SQLite3 with Python; however, I'm having trouble setting up a Primary Key. I'm aware of what one is, and how it uniquely identifies rows in the table, but I want to change it from the standard "rowid" it comes with to the current date. When I try to add things into the table, however, it comes up with this:
File "Economic_Analyser.py", line 258, in <module>
startup()
File "Economic_Analyser.py", line 243, in startup
c.execute("INSERT INTO economicdata VALUES (dateandtime, up_GDPgrowthRate, up_GDP, up_GNP, up_GDPperCapita, up_GDPagriculture, up_GDPconstruction, up_GDPmanufacturing, up_GDPmining, up_GDPpublicadmin, up_GDPservices, up_GDPtransport, up_GDPtourism, up_UnemploymentRate, up_EmploymentRate, up_InflationRate, up_CPI, up_InterestRate, up_BalanceOfTrade, up_CurrentAccount, up_Imports, up_Exports, up_FDI, up_GovernmentSpending, up_GovernmentDebt, up_BusinessConfidence, up_Bankruptcies, up_CompetitiveRank, up_CorruptionRank, up_ConsumerConfidence, up_CorporateTaxRate, up_IncomeTaxRate)")
sqlite3.OperationalError: no such column: dateandtime
As you can see from my actual code below, I've declared that the date is the Primary Key but cannot add the data to it. I've changed the code multiple times based on what I've seen other people do but it hasn't worked. Just to clarify this isn't all of my code - just the parts that I think matter. Any help would be appreciated!!
# Question code — creates the SQLite schema on first run, then inserts data.
# NOTE(review): block indentation was lost when this snippet was pasted.
try:
# NOTE(review): existence check via open() leaks the file handle; a bare
# except also swallows unrelated errors (e.g. permission problems).
data_attempt = open("Economic_Analyser.db")
except:
print("- Database not found. Creating 'Economic_Analyser.db' .")
databasevariables()
c.execute("""CREATE TABLE economicdata (
dateandtime text NOT NULL PRIMARY KEY,
GDPgrowthRate decimal NOT NULL,
GDP decimal NOT NULL,
GNP decimal NOT NULL,
GDPperCapita decimal NOT NULL,
GDPagriculture decimal NOT NULL,
GDPconstruction decimal NOT NULL,
GDPmanufacturing decimal NOT NULL,
GDPmining decimal NOT NULL,
GDPpublicadmin decimal NOT NULL,
GDPservices decimal NOT NULL,
GDPtransport decimal NOT NULL,
GDPtourism decimal NOT NULL,
UnemploymentRate decimal NOT NULL,
EmploymentRate decimal NOT NULL,
InflationRate decimal NOT NULL,
CPI decimal NOT NULL,
InterestRate decimal NOT NULL,
BalanceOfTrade decimal NOT NULL,
CurrentAccount decimal NOT NULL,
Imports decimal NOT NULL,
Exports decimal NOT NULL,
FDI decimal NOT NULL,
GovernmentSpending decimal NOT NULL,
GovernmentDebt decimal NOT NULL,
BusinessConfidence decimal NOT NULL,
Bankruptcies decimal NOT NULL,
CompetitiveRank decimal NOT NULL,
CorruptionRank decimal NOT NULL,
ConsumerConfidence decimal NOT NULL,
CorporateTaxRate decimal NOT NULL,
IncomeTaxRate decimal NOT NULL
)""")
conn.commit()
c.execute("""CREATE TABLE users (
username text,
password text
)""")
conn.commit()
conn.close()
if internet_access == True:
databasevariables()
# BUG (cause of "no such column: dateandtime"): the names inside VALUES(...)
# are unquoted identifiers, so SQLite parses them as column references, not
# as the Python variables' values. The INSERT needs "?" placeholders plus a
# values tuple passed as execute()'s second argument.
c.execute("INSERT INTO economicdata VALUES (dateandtime, up_GDPgrowthRate, up_GDP, up_GNP, up_GDPperCapita, up_GDPagriculture, up_GDPconstruction, up_GDPmanufacturing, up_GDPmining, up_GDPpublicadmin, up_GDPservices, up_GDPtransport, up_GDPtourism, up_UnemploymentRate, up_EmploymentRate, up_InflationRate, up_CPI, up_InterestRate, up_BalanceOfTrade, up_CurrentAccount, up_Imports, up_Exports, up_FDI, up_GovernmentSpending, up_GovernmentDebt, up_BusinessConfidence, up_Bankruptcies, up_CompetitiveRank, up_CorruptionRank, up_ConsumerConfidence, up_CorporateTaxRate, up_IncomeTaxRate)")
conn.commit()
conn.close()
print("- Most recent data has been saved.")
else:
print("- Failed.")
def databasevariables():
# Opens the database and publishes the connection and cursor as globals
# used by the code above.
global conn
conn = sqlite3.connect("Economic_Analyser.db")
global c
c = conn.cursor()
You're putting the column names in the wrong place in your INSERT statement. They should go immediately after the tablename enclosed in parens/brackets, then you need a placeholder for each value to be inserted.
Instead you may do something like this:
# Build a parameterized INSERT for the economicdata table: one "?" placeholder
# per column, with the actual values supplied separately to execute().
columns = ['dateandtime',
           'up_GDPgrowthRate',
           'up_GDP',
           'up_GNP',
           'up_GDPperCapita',
           'up_GDPagriculture',
           'up_GDPconstruction',
           'up_GDPmanufacturing',
           'up_GDPmining',
           'up_GDPpublicadmin',
           'up_GDPservices',
           'up_GDPtransport',
           'up_GDPtourism',
           'up_UnemploymentRate',
           'up_EmploymentRate',
           'up_InflationRate',
           'up_CPI',
           'up_InterestRate',
           'up_BalanceOfTrade',
           'up_CurrentAccount',
           'up_Imports',
           'up_Exports',
           'up_FDI',
           'up_GovernmentSpending',
           'up_GovernmentDebt',
           'up_BusinessConfidence',
           'up_Bankruptcies',
           'up_CompetitiveRank',
           'up_CorruptionRank',
           'up_ConsumerConfidence',
           'up_CorporateTaxRate',
           'up_IncomeTaxRate']
# "?" * len(columns) is a string of question marks; join() puts a comma
# between each, yielding "?,?,?,...".
placeholders = ",".join("?" * len(columns))
# BUG FIX: interpolating the list object directly (f"... ({columns}) ...")
# embeds the Python list repr — brackets and quotes included — into the SQL,
# which is a syntax error. Join the column names with commas instead.
insert_stmt = (f"INSERT INTO economicdata ({','.join(columns)}) "
               f"VALUES ({placeholders});")
# BUG FIX: the statement has 32 placeholders, so execute() needs the values as
# its second argument; `values` must be a 32-item sequence in the same order
# as `columns`, e.g. (dateandtime, up_GDPgrowthRate, ..., up_IncomeTaxRate).
c.execute(insert_stmt, values)

using python json object insert query [duplicate]

This question already has answers here:
How do I put a variable’s value inside a string (interpolate it into the string)?
(9 answers)
Closed last month.
I am very new to Python. I have a JSON object; below is the code.
jsondata = json.loads(data)
jsondata looks like this
{u'approvalId': u'0', u'size-1': 202, u'indRate': u'0.003', u'orderNo': u'ROMA2-20200508-00001', u'brokerSymbol': u'', u'requestedFor': u'r101115', u'aggUnit': u'', u'repId': u'O70', u'noGoodShares': 0, u'requestedBy': u'r101115', u'status': 4, u'timestamp': u'May 08, 2020 02:29:52', u'symbol': u'IBM', u'broker': u'APOC RT', u'usedShares': 3, u'pubKey': u'O70.ROMA2-20200508-00001', u'locateBrokerId': u'APOC', u'goodShares': 500, u'rtLeafId': 900059, u'availableShares': 497, u'requestedShares': 500, u'brokerOrderNo': u'', u'indicativeRate': 0.0030000000000000001, u'pendingShares': 0}
i want to insert this in sybase database not all few of the details. below insert statement i tried.
cur = conn.cursor()
# BUG: this assignment is a Python syntax error — the unescaped inner double
# quotes (d["status"] etc.) terminate the surrounding string, and Python will
# not substitute variables inside a plain string literal in any case. The
# values must be passed via execute() parameters (or an f-string at minimum).
sql = "INSERT into RT24.dbo.SLOrd(status, rtLeafId, orderNo, repId, symbol, broker, timestamp, modifiedTime) VALUES (d["status"], d["rtLeafId"], d["orderNo"], d["repId"], d["symbol"\
], d["broker"], d["timestamp"], d["timestamp"])"
cur.execute(sql)
conn.commit()
Can someone please help me with how I can do this?
Regards,
Prashant.
You can't stick variables in the middle of a string like that. You should put placeholders in the SQL string, and then provide a parameters dictionary as the second argument to cur.execute().
# Parameterized insert for python-sybase: named placeholders use Sybase's
# @name variable syntax (the original's '#' characters appear to be a
# transcription artifact). @timestamp is deliberately reused for both the
# timestamp and modifiedTime columns.
sql = """INSERT into RT24.dbo.SLOrd(status, rtLeafId, orderNo, repId, symbol, broker, timestamp, modifiedTime)
VALUES (@status, @rtLeafId, @orderNo, @repId, @symbol, @broker, @timestamp, @timestamp)"""
# BUG FIX: the original dict listed the timestamp key twice — a duplicate
# literal key, where the second silently overwrites the first. One entry per
# named parameter is sufficient, even when the placeholder is reused.
cur.execute(sql, {"@status": d["status"], "@rtLeafId": d["rtLeafId"], "@orderNo": d["orderNo"], "@repId": d["repId"], "@symbol": d["symbol"], "@broker": d["broker"], "@timestamp": d["timestamp"]})
See the description of the execute() method in the python-sybase manual
If this isn't working, you can use string formatting, but then you have the possibility of SQL-injection if you don't sanitize the data.
# Fallback: interpolate the values straight into the SQL text. The resulting
# statement is identical to the f-string version; the caveat about SQL
# injection from unsanitized data applies.
insert_template = (
    "INSERT into RT24.dbo.SLOrd(status, rtLeafId, orderNo, repId, symbol, broker, timestamp, modifiedTime)\n"
    "VALUES ({status}, {rtLeafId}, '{orderNo}', '{repId}', '{symbol}', '{broker}', '{timestamp}', '{timestamp}')"
)
sql = insert_template.format(
    status=d["status"],
    rtLeafId=d["rtLeafId"],
    orderNo=d["orderNo"],
    repId=d["repId"],
    symbol=d["symbol"],
    broker=d["broker"],
    timestamp=d["timestamp"],
)
cur.execute(sql)

load CSV into MySQL table with ODO python package - date error 1292

I'm trying to import a simple CSV file that I downloaded from Quandl into a MySQL table with the odo python package
# Question code — bulk-load a CSV into MySQL via odo.
# NOTE(review): the '#' before "localhost" looks like a mangled '@' — a
# SQLAlchemy URL has the form "mysql+pymysql://user:password@host/db"; confirm.
# NOTE(review): the Windows path is not a raw string, so sequences like "\U"
# in "\Uploads" are treated as escape sequences — an r'...' literal (or
# forward slashes) would be safer.
t = odo('C:\ProgramData\MySQL\MySQL Server 5.6\Uploads\WIKI_20160725.partial.csv', 'mysql+pymysql://' + self._sql._user+':'
+ self._sql._password +'#localhost/testDB::QUANDL_DATA_WIKI')
The first row looks like this in the CSV:
A 7/25/2016 46.49 46.52 45.92 46.14 1719772 0 1 46.49 46.52 45.92 46.14 1719772
The MySQL table is defined as follows:
Ticker varchar(255) NOT NULL,
Date date NOT NULL,
Open numeric(15,2) NULL,
High numeric(15,2) NULL,
Low numeric(15,2) NULL,
Close numeric(15,2) NULL,
Volume bigint NULL,
ExDividend numeric(15,2),
SplitRatio int NULL,
OpenAdj numeric(15,2) NULL,
HighAdj numeric(15,2) NULL,
LowAdj numeric(15,2) NULL,
CloseAdj numeric(15,2) NULL,
VolumeAdj bigint NULL,
PRIMARY KEY(Ticker,Date)
It throws an exception 1292 with the following info:
sqlalchemy.exc.InternalError: (pymysql.err.InternalError) (1292, "Incorrect date value: '7/25/2016' for column 'Date' at row 1") [SQL: 'LOAD DATA INFILE %(path)s\n INTO TABLE QUANDL_DATA_WIKI\n CHARACTER SET %(encoding)s\n FIELDS\n TERMINATED BY %(delimiter)s\n ENCLOSED BY %(quotechar)s\n ESCAPED BY %(escapechar)s\n LINES TERMINATED BY %(lineterminator)s\n IGNORE %(skiprows)s LINES\n '] [parameters: {'path': 'C:\ProgramData\MySQL\MySQL Server 5.6\Uploads\WIKI_20160725.partial.csv', 'quotechar': '"', 'skiprows': 0, 'lineterminator': '\r\n', 'escapechar': '\', 'delimiter': ',', 'encoding': 'utf8'}]
Does anyone have an idea what is wrong with the date in the first row? It doesn't seem to be accepted by the MySQL database.
mysql has problems with date conversions. I noticed when I defined the date field as a varchar
Date varchar(255) NOT NULL
then the csv file was read properly
in my SQL the conversion of the string to date format then looks like this:
STR_TO_DATE(Date, "%m/%d/%Y")

Insert dictionary keys values into a MySQL database

I have executed this code to insert a dictionary into my table in database,
# Question code — insert a dict into MySQL with %(key)s pyformat placeholders.
d = {'err': '0', 'tst': '0', 'Type o': 'FTP', 'recip': 'ADMIN', 'id': '101', 'origin': 'REPORT', 'Type recip': 'SMTP', 'date': '2010-01-10 18:47:52'}
db = MySQLdb.connect("localhost","admin","password","database")
cursor = db.cursor()
# NOTE(review): dict keys containing spaces ('Type o', 'Type recip') are
# unusual but legal in %(key)s substitution; as shown this statement looks
# well-formed, so the reported 1064 error presumably came from a variant of
# the query not visible here — verify the exact statement that was executed.
cursor.execute("""INSERT INTO mytable(ID, ERR, TST, DATE, ORIGIN, TYPE_O, RECIP, TYPE_RECIP) VALUES (%(id)s, %(err)s, %(tst)s, %(date)s, %(origin)s, %(Type o)s, %(recip)s, %(Type recip)s)""", d)
db.commit()
db.close()
Create statement of my table:
CREATE TABLE mytable (
`ID` tinyint unsigned NOT NULL,
`ERR` tinyint NOT NULL,
`TST` tinyint unsigned NOT NULL,
`DATE` datetime NOT NULL,
`ORIGIN` varchar(30) NOT NULL,
`TYPE_O` varchar(10) NOT NULL,
`RECIP` varchar(30) NOT NULL,
`TYPE_RECIP` varchar(10) NOT NULL,
PRIMARY KEY (`ID`,`DATE`)
) ENGINE = InnoDB;
But i have an error, it says:
1064, "you have an error in your SQL syntax; check the manual that
corresponds to you MySQL server version... )
Be aware of SQL injections and use the second argument to execute for inserting your query parameters:
# Parameterized INSERT: the driver fills each %(name)s marker from the dict
# `d`, quoting/escaping every value — this is what prevents SQL injection.
# NOTE(review): "table" here is a placeholder name; a real table called
# `table` would need backtick-quoting in MySQL.
cursor.execute("""
INSERT INTO
table
(name, age, origin, date)
VALUES
(%(name)s, %(age)s, %(origin)s, %(date)s)
""", d)

Categories

Resources