I need to save data from a PLC to a PostgreSQL database, for which I have this code:
#!/usr/bin/env python
'''
Created on 12.09.2016
@author: Stefan Rossmann
'''
from easymodbus.modbusClient import *
import psycopg2
conn = psycopg2.connect(host="127.0.0.1",
                        port="5432",
                        user="americo",
                        password="123456",
                        database="rapidscada")
modbusClient = ModbusClient('192.168.5.1', 502)
#modbusClient = ModbusClient('COM4')
modbusClient.connect()
#discreteInputs = modbusClient.read_discreteinputs(3, 1)
holding_registers = modbusClient.read_holdingregisters(3, 1)
print (holding_registers)
cursor = conn.cursor()
cursor.execute("INSERT INTO mitablita (datito, detalle) VALUES(%s, %s)", (holding_registers, 'tg1010_2'))
conn.commit() # <- We MUST commit to reflect the inserted data
cursor.close()
conn.close()
modbusClient.close()
It gives me a data type error, which is strange because the value being saved and the table column are both integers.
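A likely cause, assuming easymodbus behaves as its examples show: read_holdingregisters returns a list of register values, so the statement binds a Python list where the integer column expects a single value. A minimal sketch of the fix, indexing the one register that was read:
holding_registers = modbusClient.read_holdingregisters(3, 1)
cursor.execute("INSERT INTO mitablita (datito, detalle) VALUES (%s, %s)",
               (holding_registers[0], 'tg1010_2'))  # [0] extracts the single integer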
I am trying to execute the following script, but I get neither the desired results nor an error message, and I can't figure out where I'm going wrong.
import pyodbc
cnxn = pyodbc.connect("Driver={SQL Server Native Client 11.0};"
                      "Server=mySRVERNAME;"
                      "Database=MYDB;"
                      "uid=sa;pwd=MYPWD;"
                      "Trusted_Connection=yes;")
cursor = cnxn.cursor()
cursor.execute('select DISTINCT firstname,lastname,coalesce(middlename,\' \') as middlename from Person.Person')
for row in cursor:
    print('row = %r' % (row,))
Any ideas? Any help is appreciated :)
You have to use a fetch method with the cursor. For example:
for row in cursor.fetchall():
    print('row = %r' % (row,))
EDIT:
The fetchall function returns all remaining rows in a list.
If there are no rows, an empty list is returned.
If there are a lot of rows, this will use a lot of memory.
Unread rows are stored by the database driver in a compact format and are often sent in batches from the database server.
Reading in only the rows you need at one time will save a lot of memory.
If we are going to process the rows one at a time, we can use the cursor itself as an iterator.
Moreover, we can simplify the loop, since cursor.execute() always returns the cursor:
for row in cursor.execute("select bla, anotherbla from blabla"):
    print(row.bla, row.anotherbla)
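For the batched middle ground described above, a minimal sketch using fetchmany (the batch size of 1000 is an arbitrary choice):
while True:
    rows = cursor.fetchmany(1000)  # fetch up to 1000 rows per round trip
    if not rows:
        break
    for row in rows:
        print('row = %r' % (row,))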
Documentation
I found this information useful for retrieving data from a SQL database into Python as a data frame.
import pandas as pd
import pymssql

con = pymssql.connect(server='use-et-aiml-cloudforte-aiops-db.database.windows.net',
                      user='login_username', password='login_password',
                      database='database_name')

# read_sql executes the query itself, so no separate cursor is needed
query = "SELECT * FROM <TABLE_NAME>"
df = pd.read_sql(query, con)
con.close()
df
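As an aside, read_sql also accepts bind parameters, which avoids building the query by string concatenation; a small sketch assuming a hypothetical integer id column:
df = pd.read_sql("SELECT * FROM <TABLE_NAME> WHERE id = %s", con, params=[42])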
import mysql.connector as mc
import pandas as pd

# connection creation
conn = mc.connect(host='localhost', user='root', passwd='password')
print(conn)

# create cursor object
cur = conn.cursor()
print(cur)

cur.execute('show databases')
for i in cur:
    print(i)

query = "Select * from employee_performance.employ_mod_recent"
emp_data = pd.read_sql(query, conn)
emp_data
This is the script that I have written so far. The first blocker is that I am not able to install the MySQLdb package - maybe I could use a different module?
import soundcloud
import pandas as pd
from pandas import DataFrame
import MySQLdb
client = soundcloud.Client(client_id='696b5ca70f5401cc46c9011c78831877')
userId = '110652450'
tracks = client.get('/users/'+userId+'/tracks')
data = []
for x in tracks:
    data.append({'Track_Name': x.title, 'plays': str(x.playback_count)})
df = pd.DataFrame(data)
database = MySQLdb.connect(host="127.0.0.1", user="root", passwd="XXX", db="soundcloudstore")
cursor = database.cursor()

query = """INSERT INTO Tracks (Track_Name, Plays) VALUES (%s,%s)"""
for x in df:
    Track_Name = df[['Track_Name']].value
    Plays = df[['plays']].value
    values = (Track_Name, Plays)
    cursor.execute(query, values)
cursor.close()
database.commit()
database.close()
Download the adapter here: https://dev.mysql.com/doc/connector-python/en/connector-python-installation.html
Then you would use it like so:
import mysql.connector

data = []
for x in tracks:
    data.append((x.title, str(x.playback_count)))

conn = mysql.connector.connect(user='', password='',
                               host='',
                               database='')
cursor = conn.cursor()

q = """INSERT INTO Tracks (Track_Name, Plays) VALUES (%s,%s)"""
cursor.executemany(q, data)
conn.commit()  # mysql.connector does not autocommit by default
This will save you from loading everything into a dataframe for no reason, and executemany is optimized for inserts.
I have some Python code that selects data from Oracle Spatial and inserts it into Spatialite. My problem is that the cursor contains the geometry in binary, and I can't figure out how to read the binary into the Spatialite insert statement. To add to this: it all works if I use WKT, but some of the geometries are too long, hence the reason for the binary format.
Can anyone help please?
# Import system modules
import cx_Oracle
from pyspatialite import dbapi2 as sl_db

def db_connect():
    # Build connection from TNS names
    o_db = cx_Oracle.connect("xxxxx", "xxxxx", "xxxxx_gl_dev")
    cursor = o_db.cursor()
    return cursor

def db_lookup(cursor):
    # Select records
    sql = "SELECT sdo_util.to_wkbgeometry(a.shape), a.objectid FROM span a WHERE a.objectid = 1382372"
    cursor.execute(sql)
    row = cursor.fetchall()
    return row

def db_insert(row):
    # Insert rows into the new Spatialite table
    database_name = 'C:\\Temp\\MYDATABASE.sqlite'
    db_connection = sl_db.connect(database_name)
    db_cursor = db_connection.cursor()
    sql = 'INSERT INTO "SPAN_OFL" ("geometry", "OBJECTID") VALUES (GeomFromWKB(?, 27700), ?);'
    db_cursor.executemany(sql, row)
    db_connection.commit()
    db_connection.close()

# main code
cursor = db_connect()
row = db_lookup(cursor)
db_insert(row)
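One likely culprit, assuming cx_Oracle hands back the WKB column as a LOB object rather than raw bytes: each LOB has to be read into a buffer before the ? placeholder can bind it. A hedged sketch of that conversion step (the read() call and the Python 2 buffer wrapper are the assumptions here):
def rows_to_buffers(rows):
    # read each cx_Oracle LOB into raw bytes that sqlite can bind as a BLOB
    return [(buffer(wkb.read()), objectid) for wkb, objectid in rows]

db_cursor.executemany(sql, rows_to_buffers(row))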
I'm trying to export a python dataframe to a SQL Server table.
Is there a better way to do this? I'm getting errors.
Dataframe - results_out
Output SQL table - FraudCheckOutput
cnn_out = pyodbc.connect('driver={SQL Server};server=XYZ;database=BulkLog;uid=sa;pwd=test')
results_out.to_sql(con=cnn_out, name='FraudCheckOutput', if_exists='replace', flavor='sqlite_master')
Thanks.
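For reference, a minimal sketch of the usual route, assuming SQLAlchemy is available: to_sql expects a SQLAlchemy engine (or a sqlite3 connection) rather than a raw pyodbc connection, and the flavor argument was deprecated and later removed.
import pandas as pd
from sqlalchemy import create_engine

# hypothetical connection URL rebuilt from the question's DSN values
engine = create_engine('mssql+pyodbc://sa:test@XYZ/BulkLog?driver=SQL+Server')

# to_sql issues the CREATE TABLE and the INSERTs itself
results_out.to_sql('FraudCheckOutput', engine, if_exists='replace', index=False)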
OK, this is supposed to get the work done:
import pypyodbc

def database_insert(query, params=()):
    conn_params = 'driver={SQL Server};server=XYZ;database=BulkLog;uid=sa;pwd=test'
    conn = None  # so the finally clause is safe if connect() fails
    try:
        conn = pypyodbc.connect(conn_params)
    except pypyodbc.Error as e:
        print(str(e))
    else:
        if conn.connected:
            db = conn.cursor()
            db.execute(query, params).commit()
    finally:
        if conn:
            conn.close()
SQL_INSERT_QUERY = """
    INSERT INTO table_name (
        [field_name1],
        [field_name2]
    )
    VALUES (
        ?,
        ?
    )
"""

database_insert(SQL_INSERT_QUERY, (1, 'value string'))
In pyodbc, usage is very similar:
cnxn = pyodbc.connect('DRIVER={SQL Server};SERVER=localhost;DATABASE=testdb;UID=me;PWD=pass')
cursor = cnxn.cursor()
cursor.execute("insert into products(id, name) values ('pyodbc', 'awesome library')")
cnxn.commit()
More on http://code.google.com/p/pyodbc/wiki/GettingStarted
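A parameterized variant of the same insert, using pyodbc's ? placeholders so the values are bound rather than quoted into the SQL string:
cursor.execute("insert into products(id, name) values (?, ?)", ('pyodbc', 'awesome library'))
cnxn.commit()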
I'm attempting to get a Python script to insert data into a database without having it drop the table first. I'm sure this isn't hard to do, but I can't seem to get the code right.
Here is the full Python script:
#!/usr/bin/python
# -*- coding: utf-8 -*-
import requests
import hashlib
import time
import MySQLdb
#Dont forget to fill in PASSWORD and URL TO saveTemp (twice) in this file
sensorids = ["28-000004944b63", "28-000004c01b2c"]
avgtemperatures = []
for sensor in range(len(sensorids)):
    temperatures = []
    for polltime in range(0, 3):
        text = ''
        while text.split("\n")[0].find("YES") == -1:
            # Open the file that we viewed earlier so that python can see what is in it. Replace the serial number as before.
            tfile = open("/sys/bus/w1/devices/" + sensorids[sensor] + "/w1_slave")
            # Read all of the text in the file.
            text = tfile.read()
            # Close the file now that the text has been read.
            tfile.close()
            time.sleep(1)
        # Split the text with new lines (\n) and select the second line.
        secondline = text.split("\n")[1]
        # Split the line into words, referring to the spaces, and select the 10th word (counting from 0).
        temperaturedata = secondline.split(" ")[9]
        # The first two characters are "t=", so get rid of those and convert the temperature from a string to a number.
        temperature = float(temperaturedata[2:])
        # Put the decimal point in the right place and display it.
        temperatures.append(temperature / 1000 * 9.0 / 5.0 + 32.0)
    avgtemperatures.append(sum(temperatures) / float(len(temperatures)))
print avgtemperatures[0]
print avgtemperatures[1]
#connect to db
db = MySQLdb.connect("localhost","user","password","temps" )
#setup cursor
cursor = db.cursor()
#create temps table
cursor.execute("DROP TABLE IF EXISTS temps")
sql = """CREATE TABLE temps (
temp1 FLOAT,
temp2 FLOAT )"""
cursor.execute(sql)
#insert to table
try:
    cursor.execute("""INSERT INTO temps VALUES (%s,%s)""", (avgtemperatures[0], avgtemperatures[1]))
    db.commit()
except:
    db.rollback()
#show table
cursor.execute("""SELECT * FROM temps;""")
print cursor.fetchall()
# output: ((188L, 90L),)
db.close()
This is the part I need assistance with.
If I have it drop the table it works fine, but I don't want it to drop the table - just insert the new data into the same table.
#connect to db
db = MySQLdb.connect("localhost","user","pasword1","temps" )
#setup cursor
cursor = db.cursor()
#create temps table
cursor.execute("DROP TABLE IF EXISTS temps")
sql = """CREATE TABLE temps (
temp1 FLOAT,
temp2 FLOAT )"""
cursor.execute(sql)
#insert to table
try:
    cursor.execute("""INSERT INTO temps VALUES (%s,%s)""", (avgtemperatures[0], avgtemperatures[1]))
    db.commit()
except:
    db.rollback()
#show table
cursor.execute("""SELECT * FROM temps;""")
print cursor.fetchall()
# output: ((188L, 90L),)
db.close()
You shouldn't have to drop a table each time you want to enter data. In fact, it defeats the whole purpose of the database, since you would remove all the previous data each time you run your script.
You should create the table only if it does not already exist. Use the following:
sql = """CREATE TABLE IF NOT EXISTS temps (
temp1 FLOAT,
temp2 FLOAT )"""
cursor.execute(sql)
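Putting that into the question's own database section, a minimal sketch with the DROP removed (the connection values are the question's placeholders):
db = MySQLdb.connect("localhost", "user", "password", "temps")
cursor = db.cursor()

# create the table only on the first run; later runs keep the existing rows
cursor.execute("""CREATE TABLE IF NOT EXISTS temps (
    temp1 FLOAT,
    temp2 FLOAT )""")

# append the new readings to the existing data
try:
    cursor.execute("""INSERT INTO temps VALUES (%s,%s)""", (avgtemperatures[0], avgtemperatures[1]))
    db.commit()
except:
    db.rollback()

db.close()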
I've had this problem with updates. Try adding COMMIT to the end of your SQL. I use psycopg2 to connect to a PostgreSQL database; here is an example:
import psycopg2

def simple_insert():
    sql = '''INSERT INTO films VALUES ('UA502', 'Bananas', 105, '1971-07-13', 'Comedy', '82 minutes'); COMMIT;'''
    try:
        conn = psycopg2.connect(database)
        cur = conn.cursor()
        cur.execute(sql)
    except:
        raise
I think your problem is that you're not saving the transaction, and the COMMIT command should fix it.
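Equivalently, psycopg2 can commit from the connection object instead of embedding COMMIT in the SQL string; a minimal sketch of the same insert:
import psycopg2

def simple_insert():
    sql = "INSERT INTO films VALUES ('UA502', 'Bananas', 105, '1971-07-13', 'Comedy', '82 minutes')"
    conn = psycopg2.connect(database)  # 'database' as in the snippet above
    cur = conn.cursor()
    cur.execute(sql)
    conn.commit()  # persist the transaction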