I have the following code:
def create_table():
    c.execute('CREATE TABLE IF NOT EXISTS TEST(SITE TEXT, SPORT TEXT, TOURNAMENT TEXT, TEAM_1 TEXT, TEAM_2 TEXT, DOUBLE_CHANCE_1X TEXT, DOUBLE_CHANCE_X2 TEXT, DOUBLE_CHANCE_12 TEXT, DRAW_1 TEXT, DRAW_2 TEXT, DATE_ODDS TEXT, TIME_ODDS TEXT)')
create_table()
def data_entry():
    c.execute("INSERT INTO TEST(SITE, SPORT, TOURNAMENT, TEAM_1, TEAM_2, DOUBLE_CHANCE_1X, DOUBLE_CHANCE_X2, DOUBLE_CHANCE_12, DATE_ODDS, TIME_ODDS) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
              (Site, sport.strip(), tournament.strip(), team_1.strip(), team_2.strip(), x_odd.strip(), y_odd.strip(), z_odd.strip(), Date_odds, Time_odds))
    conn.commit()
def double_chance():
    c.execute("UPDATE TEST SET DOUBLE_CHANCE_1X = x_odd, DOUBLE_CHANCE_X2 = y_odd, DOUBLE_CHANCE_12 = z_odd WHERE TOURNAMENT = tournament and TEAM_1 = team_1 and TEAM_2 = team_2 and DATE_ODDS = Date_odds and TIME_ODDS = Time_odds")
    conn.commit()
driver.get(link)
Date_odds = time.strftime('%Y-%m-%d')
Time_odds = time.strftime('%H:%M')
sport = (driver.find_element_by_xpath(".//*[@id='breadcrumb']/li[2]/a")).text  # example: Football
tournament = (driver.find_element_by_xpath(".//*[@id='breadcrumb']/li[4]/a")).text  # example: Premier League
try:
    div = (driver.find_element_by_xpath(".//*[@id='breadcrumb']/li[5]/a")).text  # to find any division, if one exists
except NoSuchElementException:
    div = ""
market = driver.find_element_by_xpath(".//*[contains(@id,'ip_market_name_')]")
market_name = market.text
market_num = market.get_attribute('id')[-9:]
print market_num
team_1 = (driver.find_element_by_xpath(".//*[@id='ip_marketBody" + market_num + "']/tr/td[1]//*[contains(@id,'name')]")).text
team_2 = (driver.find_element_by_xpath(".//*[@id='ip_marketBody" + market_num + "']/tr/td[3]//*[contains(@id,'name')]")).text
print sport, tournament, market_name, team_1, team_2
data_entry()  # first SQL call
for ip in driver.find_elements_by_xpath(".//*[contains(@id,'ip_market3')]"):
    num = ip.get_attribute('id')[-9:]
    type = (driver.find_element_by_xpath(".//*[contains(@id,'ip_market_name_" + num + "')]")).text
    if type == 'Double Chance':
        print type
        print num
        x_odd = (driver.find_element_by_xpath(".//*[@id='ip_market" + num + "']/table/tbody/tr/td[1]//*[contains(@id,'price')]")).text
        y_odd = (driver.find_element_by_xpath(".//*[@id='ip_market" + num + "']/table/tbody/tr/td[2]//*[contains(@id,'price')]")).text
        z_odd = (driver.find_element_by_xpath(".//*[@id='ip_market" + num + "']/table/tbody/tr/td[3]//*[contains(@id,'price')]")).text
        print x_odd, y_odd, z_odd
        double_chance()  # second SQL call
c.close()
conn.close()
Update:
Based on the answer below I updated the code, but I can't make it work.
When I run it, I get the following error:
sqlite3.OperationalError: no such column: x_odd
What should I do?
Update 2:
I found the solution:
I created a unique ID column so that I can select exactly the row I want when I run the second SQL query. That way it doesn't modify any other rows:
def double_chance():
    c.execute("UPDATE TEST SET DOUBLE_CHANCE_1X = (?), DOUBLE_CHANCE_X2 = (?), DOUBLE_CHANCE_12 = (?) WHERE ID = (?)", (x_odd, y_odd, z_odd, ID_unique))
    conn.commit()
Now it works perfectly.
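Side note: the unique ID doesn't have to be generated by hand; sqlite3 exposes the implicit rowid of the last INSERT through cursor.lastrowid. A minimal self-contained sketch (in-memory database and made-up values, just to show the idea):
import sqlite3

conn = sqlite3.connect(':memory:')
c = conn.cursor()
c.execute('CREATE TABLE TEST (TEAM_1 TEXT, DOUBLE_CHANCE_1X TEXT)')
c.execute('INSERT INTO TEST (TEAM_1) VALUES (?)', ('Team A',))
ID_unique = c.lastrowid  # rowid of the row we just inserted
conn.commit()
c.execute('UPDATE TEST SET DOUBLE_CHANCE_1X = ? WHERE rowid = ?', ('1.85', ID_unique))
conn.commit()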
Use the UPDATE statement to update columns in an existing row.
UPDATE TEST SET DRAW_1 = value1, DRAW_2 = value2 WHERE column3 = value3;
If data_entry() is always called first, then change the statement in the second function to an UPDATE. If not, you will need to check whether the row exists in both cases and INSERT or UPDATE accordingly, as in the sketch below.
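Here is a sketch of that check-then-write pattern (insert_or_update is a hypothetical helper, and only two of the question's columns are used to keep it short):
def insert_or_update(c, conn, tournament, x_odd):
    # look for an existing row first
    c.execute("SELECT 1 FROM TEST WHERE TOURNAMENT = ?", (tournament,))
    if c.fetchone() is None:
        c.execute("INSERT INTO TEST (TOURNAMENT, DOUBLE_CHANCE_1X) VALUES (?, ?)", (tournament, x_odd))
    else:
        c.execute("UPDATE TEST SET DOUBLE_CHANCE_1X = ? WHERE TOURNAMENT = ?", (x_odd, tournament))
    conn.commit()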
Just started using ib_insync. I am trying to get the tick data into a dataframe.
Here is the relevant code:
def onPendingTickers(tickers, conn=conn):
    for t in tickers:
        # 'CREATE TABLE IF NOT EXISTS {} (timestamp timestamp, bid_qty INT, bid REAL, ask REAL, ' \
        # 'ask_qty INT, high REAL, low REAL, close REAL, open REAL, contractID INT)'
        # print(t)
        c.execute('INSERT INTO {} (timestamp, bid_qty, bid, ask, ask_qty, high, low, close, open, contractID)'
                  ' VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?);'.format(t.contract.pair()),
                  (t.time, t.bidSize, t.bid, t.ask, t.askSize, t.high, t.low, t.close, t.open, t.contract.conId))
        # print(t.time, t.bidSize, t.bid, t.ask, t.askSize, t.high, t.low, t.close, t.open, t.contract.conId)
    conn.commit()
ib.pendingTickersEvent += onPendingTickers
ib.sleep(60*60)
ib.pendingTickersEvent -= onPendingTickers
When I run this code in a terminal, it prints the ticker, but I am not sure what exactly needs to be changed here.
If you just want to get ticks without displaying the information, here's some sample code that you should be able to run:
from ib_insync import *
import pandas as pd
import numpy as np
# Connect to IB; args are (IP address, port, client ID)
def ibConnect(port, clientID):
    connection = ib.connect('127.0.0.1', port, clientID)
    ib.sleep(0)
    return

# Disconnect from IB
def ibDisconnect():
    ib.disconnect()
    ib.sleep(0)
    return

# Set up a futures contract
def ibFuturesContract(symbol, expirationDate, exchange):
    futuresContract = Future(symbol, expirationDate, exchange)
    return futuresContract

# Realtime ticks subscription
def ibGetTicker(contract):
    ticker = ib.ticker(contract)
    return [ticker]
ib = IB()
ibConnect(7496,300)
contract = ibFuturesContract('YM',20210618,'ECBOT')
# Start the real-time tick subscription
ib.reqMktData(contract, '', False, False)
# Real Time Ticks
global ticker
ticker = ibGetTicker(contract)
# Get just the last tick each second and put it into a data table
x = 0
while x < 10:
    ib.sleep(1)
    if ticker is not None:
        df = util.df(ticker)
        if x == 0:
            dt = df
        else:
            dt = dt.append(df)
    x = x + 1
print(dt)
ib.cancelMktData(contract)
ibDisconnect()
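One caveat: DataFrame.append was deprecated in pandas 1.4 and removed in pandas 2.0, so on a current install the accumulation step would need pd.concat instead. A drop-in replacement for the if/else inside the loop:
if x == 0:
    dt = df
else:
    dt = pd.concat([dt, df], ignore_index=True)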
I keep getting the error AttributeError: 'NodeList' object has no attribute 'data', but I am just trying to check whether that node is empty and, if so, pass -1 instead of the value. My understanding was that temp_pub.getElementsByTagName("pages").data should return None. How do I fix this?
(P.S. I've tried both != None and is None.)
xmldoc = minidom.parse('pubsClean.xml')
#loop through <pub> tags to find number of pubs to grab
root = xmldoc.getElementsByTagName("root")[0]
pubs = [a.firstChild.data for a in root.getElementsByTagName("pub")]
num_pubs = len(pubs)
count = 0
while(count < num_pubs):
    temp_pages = 0
    #get data from each <pub> tag
    temp_pub = root.getElementsByTagName("pub")[count]
    temp_ID = temp_pub.getElementsByTagName("ID")[0].firstChild.data
    temp_title = temp_pub.getElementsByTagName("title")[0].firstChild.data
    temp_year = temp_pub.getElementsByTagName("year")[0].firstChild.data
    temp_booktitle = temp_pub.getElementsByTagName("booktitle")[0].firstChild.data
    #handling no value
    if temp_pub.getElementsByTagName("pages").data != None:
        temp_pages = temp_pub.getElementsByTagName("pages")[0].firstChild.data
    else:
        temp_pages = -1
    temp_authors = temp_pub.getElementsByTagName("authors")[0]
    temp_author_array = [a.firstChild.data for a in temp_authors.getElementsByTagName("author")]
    num_authors = len(temp_author_array)
    count = count + 1
XML being processed
<pub>
<ID>5010</ID>
<title>Model-Checking for L<sub>2</sub></title>
<year>1997</year>
<booktitle>Universität Trier, Mathematik/Informatik, Forschungsbericht</booktitle>
<pages></pages>
<authors>
<author>Helmut Seidl</author>
</authors>
</pub>
<pub>
<ID>5011</ID>
<title>Locating Matches of Tree Patterns in Forest</title>
<year>1998</year>
<booktitle>Universität Trier, Mathematik/Informatik, Forschungsbericht</booktitle>
<pages></pages>
<authors>
<author>Andreas Neumann</author>
<author>Helmut Seidl</author>
</authors>
</pub>
Full code from edit (with the switch to ElementTree):
#for execute command to work
import sqlite3
import xml.etree.ElementTree as ET
con = sqlite3.connect("publications.db")
cur = con.cursor()
from xml.dom import minidom
#use this to clean the foreign characters
import re
def anglicise(matchobj):
    if matchobj.group(0) == '&amp;':
        return matchobj.group(0)
    else:
        return matchobj.group(0)[1]
outputFilename = 'pubsClean.xml'
with open('test.xml') as inXML, open(outputFilename, 'w') as outXML:
    outXML.write('<root>\n')
    for line in inXML.readlines():
        if (line.find("<sub>") or line.find("</sub>")):
            newline = line.replace("<sub>", "")
            newLine = newline.replace("</sub>", "")
            outXML.write(re.sub('&[a-zA-Z]+;', anglicise, newLine))
    outXML.write('\n</root>')
tree = ET.parse('pubsClean.xml')
root = tree.getroot()
xmldoc = minidom.parse('pubsClean.xml')
#loop through <pub> tags to find number of pubs to grab
root2 = xmldoc.getElementsByTagName("root")[0]
pubs = [a.firstChild.data for a in root2.getElementsByTagName("pub")]
num_pubs = len(pubs)
count = 0
while(count < num_pubs):
    temp_pages = 0
    #get data from each <pub> tag
    temp_ID = root.find(".//ID").text
    temp_title = root.find(".//title").text
    temp_year = root.find(".//year").text
    temp_booktitle = root.find(".//booktitle").text
    #handling no value
    if root.find(".//pages").text:
        temp_pages = root.find(".//pages").text
    else:
        temp_pages = -1
    temp_authors = root.find(".//authors")
    temp_author_array = [a.text for a in temp_authors.findall(".//author")]
    num_authors = len(temp_author_array)
    count = count + 1
    #process results into sqlite
    pub_params = (temp_ID, temp_title)
    cur.execute("INSERT OR IGNORE INTO publication (id, ptitle) VALUES (?, ?)", pub_params)
    cur.execute("INSERT OR IGNORE INTO journal (jtitle, pages, year, pub_id, pub_title) VALUES (?, ?, ?, ?, ?)", (temp_booktitle, temp_pages, temp_year, temp_ID, temp_title))
    x = 0
    while(x < num_authors):
        cur.execute("INSERT OR IGNORE INTO authors (name, pub_id, pub_title) VALUES (?, ?, ?)", (temp_author_array[x], temp_ID, temp_title))
        cur.execute("INSERT OR IGNORE INTO wrote (name, jtitle) VALUES (?, ?)", (temp_author_array[x], temp_booktitle))
        x = x + 1
con.commit()
con.close()
print("\nNumber of entries processed: ", count)
You can use the attributes property to get a dictionary-like object and then query the dictionary:
if temp_pub.getElementsByTagName("pages").attributes.get('data'):
As the error message suggests, getElementsByTagName() returns neither a single node nor None, but a NodeList. So you should check the length to see whether the returned list contains any items:
if len(temp_pub.getElementsByTagName("pages")) > 0:
    temp_pages = temp_pub.getElementsByTagName("pages")[0].firstChild.data
or you can pass the list directly to if, since an empty list is falsy:
if temp_pub.getElementsByTagName("pages"):
    temp_pages = temp_pub.getElementsByTagName("pages")[0].firstChild.data
Side note: despite the title and the tag of this question, your code suggests that you're using minidom instead of ElementTree. Your code could be simpler with ElementTree, for example:
# minidom
temp_ID = temp_pub.getElementsByTagName("ID")[0].firstChild.data
# finding a single element can be done with ElementTree's find()
temp_ID = temp_pub.find(".//ID").text
...
# minidom
temp_author_array = [a.firstChild.data for a in temp_authors.getElementsByTagName("author")]
# finding multiple elements can be done with ElementTree's findall()
temp_author_array = [a.text for a in temp_authors.findall(".//author")]
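Putting both points together, a minimal ElementTree sketch of the per-pub loop (assuming the same pubsClean.xml your cleaning step produces):
import xml.etree.ElementTree as ET

tree = ET.parse('pubsClean.xml')
for temp_pub in tree.getroot().iter('pub'):
    temp_ID = temp_pub.find('ID').text
    # findtext returns '' for an empty <pages></pages> and None when the tag
    # is missing; both are falsy, so this falls back to -1 in either case
    temp_pages = temp_pub.findtext('pages') or -1
    temp_author_array = [a.text for a in temp_pub.findall('.//author')]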
Getting an error:
c.execute('CREATE TABLE IF NOT EXISTS top15' + today + '(symbol TEXT, ending_price REAL, volume REAL, percent REAL)')
AttributeError: 'str' object has no attribute 'execute'
Below is original code where no error occurs.
conn = sqlite3.connect('Top15.db')
c = conn.cursor()
def create_table():
    c.execute('CREATE TABLE IF NOT EXISTS top15' + today + '(symbol TEXT, ending_price REAL, volume REAL, percent REAL)')
f15= ['hgd', 'bbd.a', 'mri.u', 'iam', 'hnd', 'tth', 'bbd.b', 'bbd.pr.c', 'esp', 'enl', 'rmp', 'amm', 'vrx', 'xtc', 'cxr']
f45=['4,433,389', '2.96', '-13.7', '1,209,421', '2.25', '-13.1', '3,000', '8.60', '-8.5', '1,000', '1.06', '-7.8', '1,180,466', '21.76', '-7.6', '41,777', '0.97', '-7.6', '32,423,597', '1.89', '-7.4', '43,737', '15.20', '-7.3', '87,604', '1.96', '-7.1', '5,239', '34.00', '-6.2', '2,688,261', '1.83', '-5.7', '63,301', '1.39', '-5.4', '1,664,689', '41.83', '-5.4', '63,453', '13.45', '-5.3', '1,642,197', '36.48', '-5.0']
def dynamic_data_entry():
    volume = f45[i]
    ending_price = f45[i+1]
    percent = f45[i+2]
    symbol = f15[z]
    c.execute("INSERT INTO top15" + today + "(symbol, ending_price, volume, percent) VALUES (?, ?, ?, ?)",
              (symbol, ending_price, volume, percent))
    conn.commit()
create_table()
for i, z in zip(range(0,45,3),range(15)):
dynamic_data_entry()
c.close()
conn.close()
Below is the new setup. Nothing else has changed other than turning the two lists (f15, f45) into a single list (result) with internal lists. However, now I get the c.execute error. I've read about c.execute errors and cannot find a solution.
conn = sqlite3.connect('Top15.db')
c = conn.cursor()
def create_table():
    c.execute('CREATE TABLE IF NOT EXISTS top15' + today + '(symbol TEXT, ending_price REAL, volume REAL, percent REAL)')
f15=[15 list items]
f45=[45 list items]
f45i = iter(f45)
result = [[a, c, b, d] for (a, b, c, d) in zip(f15, f45i, f45i, f45i)]
result = filter(lambda l: l[0].count('.') <= 1, result)
print result
result=[['hgd', '2.96', '4,433,389', '-13.7'], ['bbd.a', '2.25', '1,209,421', '-13.1'], ['mri.u', '8.60', '3,000', '-8.5'], ['iam', '1.06', '1,000', '-7.8'], ['hnd', '21.76', '1,180,466', '-7.6'], ['tth', '0.97', '41,777', '-7.6'], ['bbd.b', '1.89', '32,423,597', '-7.4'], ['esp', '1.96', '87,604', '-7.1'], ['enl', '34.00', '5,239', '-6.2'], ['rmp', '1.83', '2,688,261', '-5.7'], ['amm', '1.39', '63,301', '-5.4'], ['vrx', '41.83', '1,664,689', '-5.4'], ['xtc', '13.45', '63,453', '-5.3'], ['cxr', '36.48', '1,642,197', '-5.0']]
def dynamic_data_entry():
    symbol = result[i][0]
    ending_price = result[i][1]
    volume = result[i][2]
    percent = result[i][3]
    c.execute("INSERT INTO top15" + today + "(symbol, ending_price, volume, percent) VALUES (?, ?, ?, ?)",
              (symbol, ending_price, volume, percent))
    conn.commit()
create_table()
i = 0
while i < len(result):
    dynamic_data_entry()
    i += 1
c.close()
conn.close()
Is there something about the lists inside the list creating the problem? I'm not sure why this change would now result in this error when the original code worked perfectly fine.
Using psycopg2, I'm able to select data from a table in one PostgreSQL database connection and INSERT it into a table in a second PostgreSQL database connection.
However, I'm only able to do it by setting the exact feature I want to extract, and writing out separate variables for each column I'm trying to insert.
Does anyone know of a good practice for either:
moving an entire table between databases, or
iterating through features while not having to declare variables for every column you want to move
or...?
Here's the script I'm currently using where you can see the selection of a specific feature, and the creation of variables (it works, but this is not a practical method):
import psycopg2
connDev = psycopg2.connect("host=host1 dbname=dbname1 user=postgres password=*** ")
connQa = psycopg2.connect("host=host2 dbname=dbname2 user=postgres password=*** ")
curDev = connDev.cursor()
curQa = connQa.cursor()
sql = ('INSERT INTO "tempHoods" (nbhd_name, geom) values (%s, %s);')
curDev.execute('select cast(geom as varchar) from "CCD_Neighborhoods" where nbhd_id = 11;')
tempGeom = curDev.fetchone()
curDev.execute('select nbhd_name from "CCD_Neighborhoods" where nbhd_id = 11;')
tempName = curDev.fetchone()
data = (tempName, tempGeom)
curQa.execute(sql, data)
#commit transactions
connDev.commit()
connQa.commit()
#close connections
curDev.close()
curQa.close()
connDev.close()
connQa.close()
One other note: Python lets you work explicitly with SQL functions and data-type casting, which for us is important since we work with the GEOMETRY data type. Above you can see I'm casting it to TEXT and then dumping it into an existing geometry column in the target table; this will also work with MSSQL Server, which is a huge feature in the geospatial community...
In your solution (your solution and your question order the statements differently), change the lines that start with 'sql = ' and the loop before the '#commit transactions' comment to:
sql_insert = 'INSERT INTO "tempHoods" (nbhd_id, nbhd_name, typology, notes, geom) values '
sql_values = ['(%s, %s, %s, %s, %s)']
data_values = []
# you can make this larger if you want
# ...try experimenting to see what works best
batch_size = 100
sql_stmt = sql_insert + ','.join(sql_values*batch_size) + ';'
for i, row in enumerate(rows, 1):
    data_values += row[:5]
    if i % batch_size == 0:
        curQa.execute(sql_stmt, data_values)
        data_values = []
if (i % batch_size != 0):
    sql_stmt = sql_insert + ','.join(sql_values*(i % batch_size)) + ';'
    curQa.execute(sql_stmt, data_values)
BTW, I don't think you need to commit. You don't begin any transactions, so there should not be any need to commit them. Certainly, you don't need to commit a cursor if all you did was run a bunch of selects on it.
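Alternatively, psycopg2 ships a helper that does this batching for you: psycopg2.extras.execute_values (available in psycopg2 2.7 and later). A sketch reusing the names from your script (rows and curQa):
from psycopg2.extras import execute_values

execute_values(curQa,
               'INSERT INTO "tempHoods" (nbhd_id, nbhd_name, typology, notes, geom) VALUES %s',
               [row[:5] for row in rows],
               page_size=100)  # rows per generated INSERT statement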
Here's my updated code based on Dmitry's brilliant solution:
import psycopg2
connDev = psycopg2.connect("host=host1 dbname=dpspgisdev user=postgres password=****")
connQa = psycopg2.connect("host=host2 dbname=dpspgisqa user=postgres password=****")
curDev = connDev.cursor()
curQa = connQa.cursor()
print "Truncating Source"
curQa.execute('delete from "tempHoods"')
connQa.commit()
#Get Data
curDev.execute('select nbhd_id, nbhd_name, typology, notes, cast(geom as varchar) from "CCD_Neighborhoods";') #cast geom to varchar and insert into geometry column!
rows = curDev.fetchall()
sql_insert = 'INSERT INTO "tempHoods" (nbhd_id, nbhd_name, typology, notes, geom) values '
sql_values = ['(%s, %s, %s, %s, %s)'] #number of columns selecting / inserting
data_values = []
batch_size = 1000 #customize for size of tables...
sql_stmt = sql_insert + ','.join(sql_values*batch_size) + ';'
for i, row in enumerate(rows, 1):
    data_values += row[:5]  # relates to number of columns (%s)
    if i % batch_size == 0:
        curQa.execute(sql_stmt, data_values)
        connQa.commit()
        print "Inserting..."
        data_values = []
if (i % batch_size != 0):
    sql_stmt = sql_insert + ','.join(sql_values*(i % batch_size)) + ';'
    curQa.execute(sql_stmt, data_values)
    print "Last Values..."
    connQa.commit()
# close connections
curDev.close()
curQa.close()
connDev.close()
connQa.close()
I'm having trouble inserting data into my table. I have a list of stocks that I pass to the function getStockData.
I use a for loop to iterate through the list and get the data for each ticker symbol. At the end I put all the information into a dictionary. My final step is to insert the data into a table. I've been unsuccessful at inserting the data in the dictionary into my table.
def getStockData(x):
    nowdate = raw_input("What Is Todays Date?: ")
    print "Todays list has %d stocks on it\n" % len(x)
    for stock in x:
        stockPrice = ystockquote.get_price(stock)
        stockPriceChange = ystockquote.get_change(stock)
        originalPrice = float(stockPrice) + (float(stockPriceChange) * -1)
        changePercentage = (float(stockPriceChange) / originalPrice) * 100
        stockDict = {'Date': nowdate, 'Ticker Symbol': stock, 'Closing Price': stockPrice,
                     'Price Change': stockPriceChange, 'Percentage Changed': changePercentage}
        conn = db.connect('stocks.db')
        cursor = conn.cursor()
        cursor.execute('insert into losers values (?, ?, ?, ?, ?)',
                       (stockDict['Date'], stockDict['Ticker Symbol'], stockDict['Price Change'],
                        stockDict['Percentage Changed'], stockDict['Closing Price']))
        conn.close()
I think you forgot to commit your data to your DB before closing.
Try
conn.commit()
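Call it on the connection that performed the INSERTs, before conn.close(). A minimal self-contained sketch (assuming the losers table from your code already exists; the row values here are made up):
import sqlite3 as db

conn = db.connect('stocks.db')
cursor = conn.cursor()
cursor.execute('insert into losers values (?, ?, ?, ?, ?)',
               ('2015-07-01', 'ABC', '-1.05', '-0.99', '105.35'))
conn.commit()  # without this, the pending INSERT is rolled back at close
conn.close()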