How to timestamp data collected using PySerial and export to csv? - python

I'm trying to collect serial data from multiple devices, timestamp and export it to a .csv file. I want to write separate modules for each device such that they return the data to the main module and all the writing to csv is done in that.
The following program writes date and time to csv but not the data returned from the device module.
import time
import csv
from threading import Thread
import fio2
def Csv_creator():
my_file = open('test_csv.csv', 'w+')
with my_file:
new_file = csv.writer(my_file)
def Timestamp():
date_now = time.strftime('%d/%m/%y')
time_now = time.strftime('%H:%M:%S')
return [date_now,time_now]
def Write_loop():
Csv_creator()
fio2.Initialize()
while True:
with open('test_csv.csv', 'a') as f:
[date_now,time_now] = Timestamp()
fio2_data = fio2.Reader()
print fio2_data
to_write = [date_now,time_now,fio2_data]
csv_file = csv.writer(f)
csv_file.writerow(to_write)
t = Thread(target=Write_loop)
t.daemon = True
t.start()
raw_input("Press any key to stop \n")
The device module is shown below. It works fine on its own, but I have a hard time making it return the value and having that value written to the csv file.
import serial

# Shared port object for this device module; opened once at import time.
ser = serial.Serial("COM6",
                    baudrate=2400,
                    bytesize=serial.EIGHTBITS,
                    parity=serial.PARITY_ODD,
                    timeout=1,
                    writeTimeout=1)


def Initialize():
    """Report whether the shared serial port is usable."""
    try:
        ser.isOpen()
        print("\n Serial is open")
    except serial.SerialException:
        # Was a bare `except:` -- that also swallowed KeyboardInterrupt etc.
        print ("Error: serial Not Open")


def Reader():
    """Read one line from the device and return it as a string.

    Returns a short status string (not an exception) on failure so the
    caller's CSV loop keeps running.
    """
    if ser.isOpen():
        try:
            # readline() blocks up to `timeout` seconds; decode bytes -> str.
            # (Removed the no-op `x = (x)` from the original.)
            return ser.readline().decode()
        except (serial.SerialException, UnicodeDecodeError):
            return "unable to print"
    else:
        return "cannot open serial port"

Rather than opening the file each time in your loop, I would suggest moving it outside:
# Open the CSV once and reuse a single writer object for every row.
with open('test_csv.csv', 'a') as f:
    csv_file = csv.writer(f)
    while True:
        stamp = Timestamp()
        reading = fio2.Reader()
        csv_file.writerow([stamp[0], stamp[1], reading])

I figured it out. I had to remove some garbage letters that were associated with the decimal values. First, I change the received data to string and replaced the garbage letters. Here's how I changed it:
date_now, time_now = Timestamp()
# Strip the CR/LF terminator and the stray NUL garbage from the raw reading.
fio2_data = str(fio2.Reader())
for junk in ("\r\n", "\x000"):
    fio2_data = fio2_data.replace(junk, "")
write_list = [date_now, time_now, fio2_data]

Related

Reading from a sensor and writing to a CSV file

I have a sensor which prints to my COM4 port. I can read from this using serial like:
import serial

ser = serial.Serial('COM4')
# Busy-poll the port and print each complete line as it arrives.
while True:
    if ser.in_waiting > 0:
        print(ser.readline())
The problem I'm experiencing is if I stop the script midstream then the changes to the file seem to get thrown away, even if I include a step of writing the header in before the while True loop.
import csv
import serial

ser = serial.Serial('COM4')
# Fixes versus the broken version:
#  * `while true` -> `while True` (lowercase `true` is a NameError)
#  * open the file ONCE, outside the loop -- re-opening it in mode 'w'
#    every iteration truncated it, which is why data "got thrown away"
#  * writerow() wants a sequence of fields, not raw bytes (it would emit
#    one column per byte); decode and split the line into fields
with open("csv-file-name.csv", 'w', newline='') as csvfile:
    writer = csv.writer(csvfile, delimiter=",")
    while True:
        if ser.in_waiting > 0:
            temp = ser.readline()
            writer.writerow(temp.decode().strip().split(","))
Check out this page for more info.
I ended up, following Pranav Hosangadi's advice, handling the sigterm call manually as
import serial
import signal

interrupted = False


def signal_handler(signum, frame):
    """Flag the main loop to stop (renamed arg: don't shadow the signal module)."""
    global interrupted
    interrupted = True


signal.signal(signal.SIGINT, signal_handler)
signal.signal(signal.SIGTERM, signal_handler)

if __name__ == '__main__':
    ser = serial.Serial('COM4')
    # The original referenced an undefined `filename`; give it a concrete name.
    with open("output.csv", 'w') as f:
        while not interrupted:
            if ser.in_waiting > 0:
                # readline() returns bytes; decode before writing to a
                # text-mode file (f.write(bytes) raises TypeError).
                f.write(ser.readline().decode())
readline is blocking, so in_waiting is not necessary.
Instead of print, just use write to a file:
import serial

# Iterating a Serial object yields one line per iteration (readline under
# the hood), so no in_waiting polling is needed; writelines() drains the
# iterator and writes each line verbatim.
port = serial.Serial('COM4')
with open("output.csv", "w") as output:
    output.writelines(port)

Saving Multiple files in Python

I am trying to create a new file each time the following runs. At the moment it creates one file and just overwrites it. Is there a way to make it not overwrite, and instead create a new file for each loop?
import xml.etree.ElementTree as ET
import time
import csv

with open('OrderCSV.csv', newline='') as csvfile:
    reader = csv.DictReader(csvfile)
    for index, row in enumerate(reader):
        orders_data = ET.Element('orders_data')
        orders = ET.SubElement(orders_data, 'orders')

        # Order details
        order_reference = ET.SubElement(orders, 'order reference')
        order_reference.set('', "12345")
        order_date = ET.SubElement(order_reference, 'order_date')
        order_priority = ET.SubElement(order_reference, 'order_priority')
        order_category = ET.SubElement(order_reference, 'order_category')
        delivery_service = ET.SubElement(order_reference, 'delivery_service')
        delivery_service.text = row['delivery_service']

        # strftime has 1-second resolution, so rows processed within the
        # same second produced the SAME name and overwrote each other.
        # Suffixing the row index makes every file name unique, and the
        # with-block closes (flushes) the file, which the original never did.
        timestr = f'{time.strftime("%Y%m%d%H%M%S")}-{index}'
        with open(timestr, "wb") as myfile:
            myfile.write(ET.tostring(orders_data))
You could see if the file already exists and wait a bit
# Keep regenerating the timestamp until it names a file that does not
# exist yet, sleeping briefly so the clock can tick over.
while True:
    timestr = time.strftime("%Y%m%d%H%M%S")
    if not os.path.exists(timestr):
        break
    time.sleep(.1)

with open(timestr, "wb") as myfile:
    myfile.write(ET.tostring(orders_data))
Instead of waiting you could just add seconds. This will cause the file names to drift forward in time if you process a lot of them per second.
# Start from the current time and ADD A SECOND whenever the candidate name
# is taken, instead of sleeping.  (The original kept formatting the SAME
# `mytime` and sleeping, so it spun forever once the name existed.)
# File names drift forward in time if many rows are processed per second.
mytime = time.time()
while True:
    timestr = time.strftime("%Y%m%d%H%M%S", time.localtime(mytime))
    if not os.path.exists(timestr):
        break
    mytime += 1

with open(timestr, "wb") as myfile:
    mydata = ET.tostring(orders_data)
    myfile.write(mydata)
Another option is to get a single timestamp before the loop and update it as you go.
# Take ONE timestamp before the loop and derive a unique name per row.
mytime = time.strftime("%Y%m%d%H%M%S")
for index, row in enumerate(reader):
    # ... build the XML for this row ...
    # BUG FIX: the original wrote f"mytime-{index}", which produces the
    # literal word "mytime" -- the variable must be interpolated too:
    filename = f"{mytime}-{index}"
    # ... write the file under `filename` ...
....
Change the variable name each time you run the loop. I would also suggest using a with statement for opening the file, since you have to close it after you open it:
# The context manager guarantees the file is flushed and closed.
with open(timestr, mode='wb') as myfile:
    myfile.write(mydata)
edit: only flaw I can imagine in your code is not closing the file after opening it

Adding data to JSON file

I am working with a RaspberryPi using python. I want to send the data from the temp sensor to a JSON file. However, I am not sure how to do this. I would really appreciate some guidance on this matter. Thank you!
Here is my code:
import grovepi
import math
from time import sleep
from grove_rgb_lcd import *

sensor = 4   # DHT sensor on digital port D4
blue = 0     # module type 0 = blue DHT11
white = 1    # module type 1 = white DHT22 (unused here)
setRGB(0, 255, 0)

while True:
    try:
        temp, humidity = grovepi.dht(sensor, blue)
        # NaN readings mean the sensor glitched; show only valid pairs.
        if not (math.isnan(temp) or math.isnan(humidity)):
            print("temp = %.02f C humidity =%.02f%%" % (temp, humidity))
            t = str(temp)
            h = str(humidity)
            setText("Temp:" + t + "C\n" + "Humidity :" + h + "%")
    except (IOError, TypeError) as e:
        print(str(e))
        setText("")
    except KeyboardInterrupt as e:
        print(str(e))
        setText("")
        break
    sleep(0.05)
You can use the json module for this, I have listed to functions below that show reading and writing to JSON files:
import json
def read_json(file_path: str) -> dict:
    """Open *file_path*, parse it as JSON, and return the result."""
    with open(file_path, "r") as fh:
        return json.load(fh)
def store_json(data: dict, file_path: str):
    """Serialize the dict *data* to *file_path* as a JSON document."""
    with open(file_path, "w") as fh:
        json.dump(data, fh)
Make sure you pass a dict to store_json(), or it will error out.
In your case I think you want:
# Bundle the two readings into a dict and persist it as JSON.
data = dict(temp=t, humidity=h)
store_json(data, "path/to/file.json")

Capture the last timestamp, without reading the complete file using Python

I am fairly new to Python and I am trying to capture the last line of a syslog file, but I am unable to do so. This is a huge log file, so I want to avoid loading the complete file into memory. I just want to read the last line of the file and capture the timestamp for further analysis.
I have the below code which captures all the timestamps into a python dict which take a really long time to run for it to get to the last timestamp once it completed my plan was to reverse the list and capture the first object in the index[0]:
The lastFile function uses glob module and gives me the most latest log file name which is being fed into recentEdit of the main function.
Is there a better way of doing this?
Script1:
#!/usr/bin/python
import glob
import os
import re


def lastFile(pattern):
    """Return the path matching the glob *pattern* with the newest creation time."""
    return max(glob.glob(pattern), key=os.path.getctime)


def main():
    syslogDir = r'Location/*'
    listOfFiles = glob.glob(syslogDir)
    recentEdit = lastFile(syslogDir)
    print(recentEdit)

    # NOTE(review): the dots in this pattern match ANY character, not a
    # literal separator -- consider escaping them to tighten the match.
    stamp_re = re.compile(
        r'\d{4}.\d{2}.\d{2}T\d{2}.\d{2}.\d{2}.\d+.\d{2}.\d{2}')
    astack = []
    with open(recentEdit, "r") as f:
        for line in f:
            print([stamp_re.findall(line)])


if __name__ == '__main__':
    main()
Script2:
###############################################################################
###############################################################################
#The readline() gives me the first line of the log file which is also not what I am looking for:
#!/usr/bin/python
import glob
import os
import re


def lastFile(pattern):
    """Most recently created file matching the glob *pattern*."""
    candidates = glob.glob(pattern)
    return max(candidates, key=os.path.getctime)


def main():
    syslogDir = r'Location/*'
    listOfFiles = glob.glob(syslogDir)
    recentEdit = lastFile(syslogDir)
    print(recentEdit)

    # readline() returns the FIRST line of the file -- kept as-is because
    # the question is demonstrating exactly that behavior.
    with open(recentEdit, "r") as f:
        print(f.readline())


if __name__ == '__main__':
    main()
I really appreciate your help!!
Sincerely.
If you want to go directly to the end of the file, follow these steps:
1.Every time your program runs persist or store the last '\n' index.
2.If you have persisted index of last '\n' then you can directly seek to that index using
file.seek(yourpersistedindex)
3.after this when you call file.readline() you will get the lines starting from yourpersistedindex.
4. Store this index every time you run your script.
For Example:
you file log.txt has content like:
timestamp1 \n
timestamp2 \n
timestamp3 \n
import pickle

# Load the byte offset of the last '\n' persisted by a previous run, if any.
lastNewLineIndex = None
try:
    with open('pickledfile', 'rb') as rfile:
        lastNewLineIndex = pickle.load(rfile)
except:
    pass

logfile = open('log.txt', 'r')
newLastNewLineIndex = None
if lastNewLineIndex:
    # Jump straight to the remembered offset and read from there --
    # no need to scan the whole (huge) file again.
    logfile.seek(lastNewLineIndex)
    print(logfile.readline())
    # tell() gives the current offset; persist it for the next run.
    newLastNewLineIndex = logfile.tell()
    logfile.close()
else:
    # First run: scan the file once, remembering the offset of the
    # final newline seen.
    counter = 0
    for c in logfile.read():
        if c == '\n':
            newLastNewLineIndex = counter
        counter += 1

# Save the new offset so the next invocation can seek directly to it.
with open('pickledfile', 'wb') as wfile:
    pickle.dump(newLastNewLineIndex, wfile)

I want to save python variable result to a file [duplicate]

This question already has answers here:
Python 2.7: Print to File
(6 answers)
Closed 6 years ago.
when this files run gives corresponding output values as;
# test BLE Scanning software
# jcs 6/8/2014
import MySQLdb as my
import blescan
import sys
import bluetooth._bluetooth as bluez
dev_id = 0
db = my.connect(host="localhost",
user="root",
passwd="root",
db="test"
)
cursor = db.cursor()
try:
sock = bluez.hci_open_dev(dev_id)
print "ble thread started"
except:
print "error accessing bluetooth device..."
sys.exit(1)
blescan.hci_le_set_scan_parameters(sock)
blescan.hci_enable_le_scan(sock)
while True:
returnedList = blescan.parse_events(sock, 10)
print "----------"
for beacon in returnedList:
print beacon
sql = "insert into beacon VALUES(null, '%s')" % \
(beacon)
number_of_rows = cursor.execute(sql)
db.commit()
db.close()
I want output stored in a text file
cf:68:cc:c7:33:10,b9407f30f5f8466eaff925556b57fe6d,13072,52423,-74,-78
cf:68:cc:c7:33:10,74696d6f74650e160a181033c7cc68cf,46608,13255,-52,-77
da:f4:2e:a0:70:b1,b9407f30f5f8466eaff925556b57fe6d,28849,11936,-74,-79
da:f4:2e:a0:70:b1,74696d6f74650e160a18b170a02ef4da,46769,28832,46,-78
dd:5d:d3:35:09:dd,8aefb0316c32486f825be26fa193487d,1,1,-64,-78
c3:11:48:9b:cf:fa,8aefb0316c32486f825be26fa193487d,0,0,-64,-73
fd:5b:12:7f:02:e4,b9407f30f5f8466eaff925556b57fe6d,740,4735,-74,-79
fd:5b:12:7f:02:e4,74696d6f74650e160a18e4027f125bfd,46820,639,18,-80
dd:5d:d3:35:09:dd,8aefb0316c32486f825be26fa193487d,1,1,-64,-77
and so on... and I then want to store these values in a text file. Is it possible to generate the text file as part of the script? Thanks
if what you want is to store the data in file, so you can open it later the simplest way is to use the Pickle (or the cPickle) module
something like:
import cPickle
#store the data in file
with open('/path/to/file.txt', 'wb') as f:
cPickle.dump(returnedList, f)
#to read the data
loaded_returnedList = cPickle.load(open('/path/to/file.txt'))
print loaded_returnedList == returnedList # Prints True
now if what you want is store the data for visual store (maybe to open it in excel later), the csv module is for you
import csv

# Each beacon becomes one CSV row; writerows() handles the whole list.
with open('/path/to/file', 'w') as f:
    writer = csv.writer(f)
    writer.writerows(returnedList)
Try this for your loop
while True:
returnedList = blescan.parse_events(sock, 10)
print "----------"
f = open('bluez.txt', 'a+')
for beacon in returnedList:
print beacon
f.write(','.join(beacon)+'\n')
f.close()
EDIT: now appending mode

Categories

Resources