import time
import subprocess
from tkinter import *
from w1thermsensor import W1ThermSensor
import datetime
import sqlite3
root = Tk()
id = 1
conn = sqlite3.connect('temp_sensor2.db')
c = conn.cursor()
sensor = W1ThermSensor()
temperature = sensor.get_temperature()
t = int(time.time())
date = str (datetime.datetime.fromtimestamp(t).strftime('%d-%m-%Y %H:%M:%S'))
global tempLabel1
def get_temp(period_ms):
    temperature = sensor.get_temperature()
    tempLabel1['text'] = temperature
    tempLabel1.after(period_ms, get_temp, period_ms)
    c.execute('''INSERT INTO datetemp VALUES (?, ?, ?)''', (id, date, temperature))
    conn.commit()
root.title('Temperature')
tempLabel2 = Label(root, text="Temperature is ")
tempLabel2.pack()
tempLabel1 = Label(root, width=25)
tempLabel1.pack()
get_temp(1000)
root.mainloop()
I have a program here that monitors temperature and automatically updates a tkinter label. I also want it to update a SQLite DB, but it enters multiple entries into the table with the exact same datestamp (although the temperature reading will be different). Any ideas would be appreciated!
t = int(time.time())
date = str (datetime.datetime.fromtimestamp(t).strftime('%d-%m-%Y %H:%M:%S'))
These two lines of code run once, when your program starts up, and the date variable never changes after that.
If you move these lines inside the get_temp() function, then it will use the current timestamp.
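For example, a minimal sketch of get_temp() with the timestamp taken on every call, reusing the sensor, label, and datetemp insert from the question:

def get_temp(period_ms):
    # take the timestamp inside the callback so every insert gets a fresh value
    t = int(time.time())
    date = datetime.datetime.fromtimestamp(t).strftime('%d-%m-%Y %H:%M:%S')
    temperature = sensor.get_temperature()
    tempLabel1['text'] = temperature
    tempLabel1.after(period_ms, get_temp, period_ms)
    c.execute('''INSERT INTO datetemp VALUES (?, ?, ?)''', (id, date, temperature))
    conn.commit()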
I'm trying to make a line graph with Python, but the graph only appears partially at the end of the canvas in the GUI.
import sqlite3
###----------------Connecting to the database-------------#####
DB = sqlite3.connect("personal_project.db")
CURSOR = DB.cursor()
###----------------create the SQL command to create the table and save data-------------######
COMMAND1 = """CREATE TABLE IF NOT EXISTS
    balance (
        UserID INTEGER PRIMARY KEY,
        Date TEXT,
        Amount TEXT,
        Description)"""
CURSOR.execute(COMMAND1)
from tkinter import *
from tkinter import messagebox
import matplotlib.pyplot as plt
from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg
###----------------Create the window-------------#####
main_WINDOW = Tk()
main_WINDOW.title("Study App")
main_WINDOW.geometry("1940x1080")  # width x height
main_WINDOW.configure(bg="#ffffff")
###------Show Information Using Graph-------###
graquery = '''SELECT Date, Amount FROM balance'''
CURSOR.execute(graquery)
graresults = CURSOR.fetchall()
Date = [result[0] for result in graresults]
Amount = [result[1] for result in graresults]
figure = plt.figure()
plt.plot(Date, Amount)
plt.xlabel('Date')
plt.ylabel('Amount')
plt.title('Balance Graph')
gracanvas = Canvas(main_WINDOW, width=1070, height=452)
gracanvas.pack()
gracanvas.place(x=356, y=270)
figure_canvas = FigureCanvasTkAgg(figure, gracanvas)
gracanvas.create_window(0,0,window=figure_canvas.get_tk_widget())
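One possible culprit (an untested guess, not a confirmed fix): Canvas.create_window() anchors the embedded widget at its center by default, so create_window(0, 0, ...) centers the figure on the canvas's top-left corner and only a corner of it stays visible. A sketch of the last few lines with the widget anchored at its top-left instead, continuing from the code above:

figure_canvas = FigureCanvasTkAgg(figure, gracanvas)
figure_canvas.draw()
# anchor='nw' places the widget's top-left corner at (0, 0) instead of its center
gracanvas.create_window(0, 0, anchor='nw', window=figure_canvas.get_tk_widget())
main_WINDOW.mainloop()  # assuming the event loop isn't already started further down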
How do I make the checkboxes stay ticked? I want to save their state to a local database. Here is where I gave up:
import sqlite3
import tkinter as tk
from tkinter import ttk
from tkinter import *
from tkinter.ttk import *
from sqlite3 import *
import json
box = Tk()
box.geometry('600x450')
box.title('November Assessment Study List')
box.resizable(False,False)
checkbox1 = tk.StringVar()
checkbox2 = tk.StringVar()
checkbox3 = tk.StringVar()
checkboxes = []
sql_as_text = json.dumps(checkboxes)
checkboxes.append(checkbox1)
checkboxes.append(checkbox2)
checkboxes.append(checkbox3)
connection = sqlite3.connect("Checkedboxes.db")
cursor = connection.cursor()
# cursor.execute("CREATE TABLE Valees (checked integer)")
# query = "INSERT INTO Valees (checked) VALUES (?)"
cursor.execute("INSERT INTO Valees (checked) VALUES )")
# cursor.execute(query, [sql_as_text])
r = cursor.fetchall()
def btn1_checked():
    w = checkbox1.get()
    checkboxes.append(w)
    print(str(checkboxes))
    print(r)
def openNewWindow1():
    newWindow = Toplevel(box)
    newWindow.title("Maths")
    newWindow.geometry("400x200")
    ttk.Checkbutton(newWindow, text='Algebra', command=btn1_checked, variable=checkbox1, onvalue='1', offvalue='0').pack()
    ttk.Checkbutton(newWindow, text='Calculus', onvalue='1', offvalue='0').pack()
    ttk.Checkbutton(newWindow, text='Trig', onvalue='1', offvalue='0').pack()
btn = Button(box,
             text="Maths",
             command=openNewWindow1)
btn.pack(pady=10)
cursor.close()
connection.commit()
connection.close()
box.mainloop()
You could also use a local file. It would be a different approach, but then there is no need to set up a database at all.
But you were right: you need to store the state of the checkbox somewhere.
checkbox1 = True
f = open("file.txt", "a")  # "a" will append to the end of the file
f.write("checkbox1=" + str(checkbox1) + "\n")  # "\n" starts a new line
f.close()
# open the file and read it back
f = open("file.txt", "r")
print(f.read())
Something like this ;)
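To actually bring the boxes back ticked on the next run, the saved value has to be read back in and pushed into the checkbox variable before the window is shown. A rough sketch along the same lines; the file name and the "name=value" line format are just the ones from the snippet above, and everything else (the StringVar default, the save_state helper) is an assumption, not part of the original code:

import tkinter as tk
from tkinter import ttk

# load previously saved states, stored one per line as e.g. "checkbox1=1"
saved = {}
try:
    with open("file.txt", "r") as f:
        for line in f:
            name, _, value = line.strip().partition("=")
            saved[name] = value
except FileNotFoundError:
    pass  # first run, nothing saved yet

box = tk.Tk()
checkbox1 = tk.StringVar(value=saved.get("checkbox1", "0"))

def save_state():
    # rewrite the file with the current state every time the box is toggled
    with open("file.txt", "w") as f:
        f.write("checkbox1=" + checkbox1.get() + "\n")

ttk.Checkbutton(box, text="Algebra", variable=checkbox1,
                onvalue="1", offvalue="0", command=save_state).pack()
box.mainloop()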
I'm going to start out by saying this is the first Python program I have ever written and I have no real background in the language, so my code is probably pretty rough. Take it easy on me!
When I wrote this it worked fine for a small number of tickers, but with a file of 6k+ it is very slow. Now, I know there are other ways I can improve performance, BUT I want to try and tackle the async thing first.
I thought it was as simple as making the read_ticker_file function async and adding await in front of the yfin_options() call, but obviously that didn't work.
I'm thinking I possibly need to restructure the way things are called, but I'm kind of stuck here. Hoping someone can point me in the right direction! Thanks in advance.
import logging
import pyodbc
import config
import yahoo_fin as yfin
from yahoo_fin import options
from datetime import datetime, date
from selenium import webdriver
def main():
    read_ticker_file()


def init_selenium():
    driver = webdriver.Chrome(config.CHROME_DRIVER)
    return driver


def yfin_options(symbol):
    logging.basicConfig(filename='yfin.log', level=logging.INFO)
    logging.basicConfig(filename='no_options.log', level=logging.ERROR)
    try:
        # get all options dates (in epoch) from dropdown on yahoo finance options page
        dates = get_exp_dates(symbol)
        # iterate each date to get all calls and insert into sql db
        for date in dates:
            arr = yfin.options.get_calls(symbol, date)
            arr_length = len(arr.values)
            i = 0
            for x in range(0, arr_length):
                strike = str(arr.values[i][2])
                volume = str(arr.values[i][8])
                open_interest = str(arr.values[i][9])
                convert_epoch = datetime.fromtimestamp(int(date))
                try:
                    sql_insert(symbol, strike, volume, open_interest, convert_epoch)
                    i += 1
                except Exception as insert_fail:
                    print("I failed at sqlinsert {0}".format(insert_fail))
            file_name_dir = "C:\\temp\\rh\\options{0}{1}.xlsx".format(symbol, date)
            logging.info(arr.to_excel(file_name_dir))
    except Exception as e:
        bad_tickers_file_dir = config.BAD_TICKERS
        f = open(bad_tickers_file_dir, "a")
        f.write(symbol)
        f.write('\n')


def sql_insert(symbol, strike, volume, open_interest, exp_date):
    conn_string = ('Driver={{SQL Server}};'
                   'Server={0};'
                   'Database={1};'
                   'Trusted_Connection=yes;').format(config.SERVER, config.DATABASE)
    conn = pyodbc.connect(conn_string)
    cursor = conn.cursor()
    insert_string = """INSERT INTO dbo.options (Ticker, Strike, Volume, OpenInterest, expDate)
                       VALUES (?, ?, ?, ?, ?)"""
    cursor.execute(insert_string, symbol, strike, volume, open_interest, str(exp_date))
    conn.commit()


def get_exp_dates(symbol):
    url = "https://finance.yahoo.com/quote/" + symbol + "/options?p=" + symbol
    chromedriver = init_selenium()
    chromedriver.get(url)
    # Yahoo Finance options dropdown class name (find better way to do this)
    select_dropdown = chromedriver.find_element_by_css_selector("div[class='Fl(start) Pend(18px)'] > select")
    options_list = [x for x in select_dropdown.find_elements_by_tag_name("option")]
    dates = []
    for element in options_list:
        dates.append(element.get_attribute("value"))
    return dates


def read_ticker_file():
    file1 = open(config.TICKER_FILE, 'r')
    lines = file1.readlines()
    count = 0
    # loop to read each ticker in file
    for line in lines:
        count += 1
        line = line.strip('\n')
        line = line.strip()
        yfin_options(line)


if __name__ == "__main__":
    main()
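For what it's worth, a hedged sketch of one direction that often helps in cases like this: the yahoo_fin and selenium calls are blocking, so just marking functions async won't overlap any work, but a thread pool can run several tickers at once without touching yfin_options itself. The worker count below is an arbitrary assumption, and this is a drop-in variant of read_ticker_file, not the original poster's code:

from concurrent.futures import ThreadPoolExecutor, as_completed

def read_ticker_file():
    with open(config.TICKER_FILE, 'r') as file1:
        tickers = [line.strip() for line in file1 if line.strip()]
    # each yfin_options(ticker) call still blocks, but they now block in parallel threads
    with ThreadPoolExecutor(max_workers=8) as pool:
        futures = {pool.submit(yfin_options, ticker): ticker for ticker in tickers}
        for future in as_completed(futures):
            future.result()  # surface any exception raised inside a worker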
I am currently using Psycopg2 to run 4 separate SQL queries from 4 different tables. The data itself needs to be separated for what I intend to use it for, which is why I am doing it this way. Of the 4 SQL tables that I am pulling from, 3 are under 2mil rows, while the 4th is significantly larger at nearly 24mil rows. It is a very simple statement, basically:
SELECT row1, row2, row3, row4 FROM largetable WHERE row1 = {value};
This usually returns 10-20 matching rows.
I am designing an app for my coworkers to look up this data and display it via a Tkinter window (which I will leave out of the MCVE). Given what they need to do with it, I need it to populate as fast as possible. The entire load-and-populate runs about 10 seconds, with about 5-6 seconds spent solely on this one SQL query. The script grants read-only access to the database, so manipulating the table is not possible.
Here is an MCVE of the part I need to speed up in my py script. The SQL files all follow the simple outline above but pull from different tables. We can say query_d is the largest.
import psycopg2
from config import config
import tkinter as tk
from tkinter import *
from tkinter.ttk import *
import tkinter.messagebox
def get_val():
    class GetValue(tk.Tk):
        def __init__(self):
            tk.Tk.__init__(self)
            self.label = tk.Label(text="Input Control")
            self.label.config(font=("Ariel", 24))
            self.entry = tk.Entry(self)
            self.entry.config(font=("Ariel", 18), justify='center')
            self.button = tk.Button(self, text="Populate",
                                    command=self.on_button)
            self.label.pack()
            self.entry.pack()
            self.button.pack(pady=5)
            self.bind("<Return>", self.on_button)
            self.bind("<KP_Enter>", self.on_button)
            self.entry.focus_force()

        def on_button(self, event=None):
            global val
            try:
                val = int(self.entry.get())
            except:
                tk.messagebox.showerror("Invalid Entry", "Entry must be a number.")
            else:
                if control:
                    conn = None
                    try:
                        params = config()
                        conn = psycopg2.connect(**params)
                        cur = conn.cursor()
                        global value
                        value = {'value': val}
                        query_a = open("query_a.sql", "r")
                        a = query_a.read()
                        a = a.format(**value)
                        cur.execute(a)
                        global response_a
                        response_a = cur.fetchall()
                        query_a.close()
                        query_b = open("query_b.sql", "r")
                        b = query_b.read()
                        b = b.format(**value)
                        cur.execute(b)
                        global response_b
                        response_b = cur.fetchall()
                        query_b.close()
                        query_c = open("query_c.sql", "r")
                        c = query_c.read()
                        c = c.format(**value)
                        cur.execute(c)
                        global response_c
                        response_c = cur.fetchall()
                        query_c.close()
                        query_d = open("query_d.sql", "r")
                        d = query_d.read()
                        d = d.format(**value)
                        cur.execute(d)
                        global response_d
                        response_d = cur.fetchall()
                        query_d.close()
                    finally:
                        if conn is not None:
                            conn.close()

    app = GetValue()
    app.mainloop()

if __name__ == '__main__':
    get_val()
With these factors in mind, is it possible to speed up this query?
Per @jordanm and @Belayer, I added an index to each table and the time went from about 7-8 seconds to about 0.12 seconds.
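For reference, a minimal sketch of the kind of index that makes this difference (using the placeholder table and column names from the query above; the real names and index name will differ):

CREATE INDEX idx_largetable_row1 ON largetable (row1);

With an index on the column in the WHERE clause, Postgres can locate the 10-20 matching rows directly instead of scanning all ~24 million.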
OK, so I've recently written a Python-based punchclock program for my company to replace their old paper-and-stamp punchcard system.
The idea is that everyone has an access key card to get in the doors in the morning, so we're using an RFID reader connected to a Surface Pro just inside the door to clock in with. The reader is programmed to read the card and issue a 'RETURN' event. When that hits, the number it just input (EmpID) is run against a SQLite table (Emps) that returns the corresponding name (EmpName). Then EmpName is inserted into a different table (Swipes), along with theDate and theTime. The EmpName and theTime values are also displayed in the text beneath the input box with an .Update() command.
It... mostly works. It'll switch names when different cards are run over the reader, but it won't change the time. When I check the DB with SQLiteStudio, my entries have been made, but they all share the same timestamp.
Here's a snip of the frontend code:
import PySimpleGUI as sg
import datetime
import TimeclockBackEnd as be
layout = [[sg.Text('Swipe Your Card')],
          [sg.InputText(size=(6,1), key='IN', focus=True)],
          [sg.Text('', key='theName', size=(45,1))],
          [sg.Text('', key='theTime', size=(45,1))],
          [sg.Button('', key='RETURN', visible=False, bind_return_key=True)]]
window = sg.Window('Clock!', layout)
rightNow = datetime.datetime.now()
theTime = rightNow.strftime("%H:%M:%S")
theDate = rightNow.strftime("%Y-%m-%d")
while True:
    event, value = window.Read()
    cardNumber = value['IN']
    if event is None:
        break
    elif event == 'RETURN':
        nameOfSwiper = be.Swipe(cardNumber)
        be.submitToDB(nameOfSwiper, theDate, theTime)
        window['theName'].Update(nameOfSwiper)
        window['theTime'].Update(theTime)
        window['IN'].Update('')
and here's the code it calls on in the backend (be):
import sqlite3
def Swipe(EmpID):
    con = sqlite3.connect('Timeclock.db')
    cur = con.cursor()
    cur.execute("SELECT EmpName FROM Emps WHERE EmpID=?", (EmpID,))
    returnedValue = cur.fetchall()
    con.commit()
    con.close()
    delisted = ''.join(map(str, returnedValue))
    stripped = str(delisted).strip('()')
    strippedAgain = str(stripped).strip(',')
    swiperName = str(strippedAgain).strip("''")
    return swiperName
def submitToDB(EmpName, theDate, theTime):
    con = sqlite3.connect('Timeclock.db')
    cur = con.cursor()
    cur.execute("INSERT INTO Swipes VALUES (?, ?, ?)", (EmpName, theDate, theTime))
    con.commit()
    con.close()
Again, it writes to the DB just fine; the only thing I'm having an issue with is that theTime doesn't change from the value that was set on the initial swipe. These are the main working parts of the code, but if you don't see anything wrong here, feel free to check my GitHub; I've got the full thing there.
Thanks for helping out!
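A hedged guess at the cause: rightNow, theTime, and theDate are computed once, before the while loop, so every swipe reuses the timestamp from program start (the same issue as the temperature question at the top of this page). A minimal sketch of taking the timestamp inside the event handler instead, using the same names as the frontend snippet above:

while True:
    event, value = window.Read()
    if event is None:
        break
    elif event == 'RETURN':
        # take the timestamp at the moment of the swipe, not at program start
        rightNow = datetime.datetime.now()
        theTime = rightNow.strftime("%H:%M:%S")
        theDate = rightNow.strftime("%Y-%m-%d")
        cardNumber = value['IN']
        nameOfSwiper = be.Swipe(cardNumber)
        be.submitToDB(nameOfSwiper, theDate, theTime)
        window['theName'].Update(nameOfSwiper)
        window['theTime'].Update(theTime)
        window['IN'].Update('')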