I am working with a Raspberry Pi using Python. I want to write the data from the temperature sensor to a JSON file, but I am not sure how to do this. I would really appreciate some guidance. Thank you!
Here is my code:
import grovepi
import math
from time import sleep
from grove_rgb_lcd import *

sensor = 4
blue = 0
white = 1

setRGB(0, 255, 0)

while True:
    try:
        [temp, humidity] = grovepi.dht(sensor, blue)
        if math.isnan(temp) == False and math.isnan(humidity) == False:
            print("temp = %.02f C humidity =%.02f%%" % (temp, humidity))
            t = str(temp)
            h = str(humidity)
            setText("Temp:" + t + "C\n" + "Humidity :" + h + "%")
    except (IOError, TypeError) as e:
        print(str(e))
        setText("")
    except KeyboardInterrupt as e:
        print(str(e))
        setText("")
        break
    sleep(0.05)
You can use the json module for this. I have listed two functions below that show reading from and writing to JSON files:
import json

def read_json(file_path: str) -> dict:
    """Takes in a JSON file path and returns its contents."""
    with open(file_path, "r") as json_file:
        content = json.load(json_file)
    return content

def store_json(data: dict, file_path: str):
    """Takes in a Python dict and stores it as a .json file."""
    with open(file_path, "w") as json_file:
        json.dump(data, json_file)
Make sure you pass a JSON-serializable object (such as a dict) to store_json(), or json.dump() will raise a TypeError.
In your case I think you want:
data = {}
data["temp"] = t
data["humidity"] = h
store_json(data, "path/to/file.json")
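If you want the file to keep updating while the sensor loop runs, a rough sketch (reusing the store_json() helper above; the readings.json file name is just an example) would be to build the dict inside your try block, right after you update the LCD:

[temp, humidity] = grovepi.dht(sensor, blue)
if not math.isnan(temp) and not math.isnan(humidity):
    # collect the latest reading and overwrite the JSON file with it
    data = {"temp": temp, "humidity": humidity}
    store_json(data, "readings.json")

Note that this overwrites the file on every pass; if you want a history of readings, append each dict to a list and dump the whole list instead.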
Related
How to dump data into a JSON file
As you can see in the Python code below, I am trying to dump data into a JSON file, but I am struggling to do it.
import time
import json
import os

def long_function(name):
    cache_path = 'cache.json'
    if not os.path.isfile(cache_path):
        with open(cache_path, 't') as json_file:
            cache_file_data = [name]
            jsondump(cache_file_data, json_file)
    else:
        with open(cache_path, 'r') as json_file:
            cache_file_data = json.load(json_file)
    if name in cache_file_data:
        print("Name already exist")
        return name
    else:
        cache_file_data.append(name)
        for e in range(5):
            time.sleep(1)
            print(e+1)
        with open(cache_path, 'w') as json_file:
            jsondump(cache_file_data, json_file)
        print("New Name added in cache")
        return name

print(long_function('nitu'))
Please help me resolve this problem.
import json

# JSON data:
x = '{"organization": "New_holn", "city": "Noida", "country": "India"}'

# python object to be appended
y = {"pin": 117845}

# parsing JSON string:
z = json.loads(x)

# appending the data
z.update(y)

# the result is a JSON string:
print(json.dumps(z))
Follow this same pattern. As for your code, there are two errors. First, the file mode in the if branch is not defined correctly; you have

with open(cache_path, 't') as json_file:

when it should be

with open(cache_path, 'w') as json_file:

Second, you are never actually dumping the data: jsondump is not a defined function, so call json.dump instead.
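Putting both fixes into your function, a corrected version might look like this (the rest of the logic is left exactly as in your question):

import time
import json
import os

def long_function(name):
    cache_path = 'cache.json'
    if not os.path.isfile(cache_path):
        # create the cache file with the first name; 'w' is the correct write mode
        with open(cache_path, 'w') as json_file:
            cache_file_data = [name]
            json.dump(cache_file_data, json_file)
    else:
        with open(cache_path, 'r') as json_file:
            cache_file_data = json.load(json_file)
    if name in cache_file_data:
        print("Name already exist")
        return name
    else:
        cache_file_data.append(name)
        for e in range(5):
            time.sleep(1)
            print(e + 1)
        # dump the updated list back to the cache file
        with open(cache_path, 'w') as json_file:
            json.dump(cache_file_data, json_file)
        print("New Name added in cache")
        return name

print(long_function('nitu'))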
I am currently extracting trade data from a JSON file to create graphs. I have made a file to load in my JSON data, but I want to create a function that allows me to extract specific data points (like a getter). How would I go about this?
The function is meant to store the data, but I'm not sure how to connect it back to my loaded JSON file.
This is my class so far:
class TradeInfo():
    def __init__(self, sym, vol, pChange, gcount):
        self.symbol = sym
        self.volume = vol
        self.pChange = pChange
        self.gcount = gcount

    def getSymbol(self):
        return (self.symbol)

    def getVolume(self):
        return (self.volume)

    def getPriceChange(self):
        return (self.pChange)

    def getCount(self):
        return (self.gcount)
Below is the output I receive when I load my JSON file in a separate function:
[screenshot of the loaded JSON output]
This is the code to load my JSON file
def loadfile(infileName, biDir=True):
    try:
        filename = infileName
        with open(filename) as f:
            fileObj = json.load(f)
            fileObj = json.dumps(fileObj, indent=4)
    except IOError as e:
        print("Error in file processing: " + str(e))
    return fileObj
Let's say your JSON looks like this:
{
    "marketId": "LTC-AUD",
    "bestBid": "67.62",
    "bestAsk": "68.15",
    "lastPrice": "67.75",
    "volume24h": "190.19169781",
    "volumeQte24h": "12885.48752662",
    "price24h": "1.37",
    "pricePct24h": "2.06",
    "low24h": "65.89",
    "high24h": "69.48",
    "timestamp": "2020-10-10T11:14:19.270000Z"
}
So your loadfile function should look something like this:
import json

def load_file(infile_name) -> dict:
    try:
        with open(infile_name) as f:
            return json.load(f)
    except IOError as e:
        print("Error in file processing: " + str(e))

data = load_file("sample_json.json")
print(json.dumps(data, indent=2, sort_keys=True))
print(data['timestamp'])
Output:
{
  "bestAsk": "68.15",
  "bestBid": "67.62",
  "high24h": "69.48",
  "lastPrice": "67.75",
  "low24h": "65.89",
  "marketId": "LTC-AUD",
  "price24h": "1.37",
  "pricePct24h": "2.06",
  "timestamp": "2020-10-10T11:14:19.270000Z",
  "volume24h": "190.19169781",
  "volumeQte24h": "12885.48752662"
}
2020-10-10T11:14:19.270000Z
I've simplified your function and removed the redundant biDir argument, because you're not using it anywhere.
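To connect the loaded data back to your TradeInfo class, you could add a small helper that maps the dict onto the constructor. This is only a sketch: the keys come from the sample JSON above, and since that sample has no obvious count field, gcount is filled with a placeholder, so adjust both for your real data.

def trade_from_dict(d: dict) -> TradeInfo:
    # map the JSON fields onto the constructor arguments
    return TradeInfo(sym=d["marketId"],
                     vol=float(d["volume24h"]),
                     pChange=float(d["pricePct24h"]),
                     gcount=0)  # placeholder: no count field in the sample JSON

data = load_file("sample_json.json")
trade = trade_from_dict(data)
print(trade.getSymbol(), trade.getVolume(), trade.getPriceChange())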
I'm trying to collect serial data from multiple devices, timestamp it, and export it to a .csv file. I want to write a separate module for each device so that each one returns its data to the main module, where all the writing to CSV is done.
The following program writes the date and time to the CSV, but not the data returned from the device module.
import time
import csv
from threading import Thread
import fio2

def Csv_creator():
    my_file = open('test_csv.csv', 'w+')
    with my_file:
        new_file = csv.writer(my_file)

def Timestamp():
    date_now = time.strftime('%d/%m/%y')
    time_now = time.strftime('%H:%M:%S')
    return [date_now, time_now]

def Write_loop():
    Csv_creator()
    fio2.Initialize()
    while True:
        with open('test_csv.csv', 'a') as f:
            [date_now, time_now] = Timestamp()
            fio2_data = fio2.Reader()
            print fio2_data
            to_write = [date_now, time_now, fio2_data]
            csv_file = csv.writer(f)
            csv_file.writerow(to_write)

t = Thread(target=Write_loop)
t.daemon = True
t.start()
raw_input("Press any key to stop \n")
The device module is shown below. It works fine on its own, but I'm having a hard time making it return the value and have it written to the CSV file.
import serial

ser = serial.Serial("COM6",
                    baudrate=2400,
                    bytesize=serial.EIGHTBITS,
                    parity=serial.PARITY_ODD,
                    timeout=1,
                    writeTimeout=1)

def Initialize():
    global ser
    try:
        ser.isOpen()
        print("\n Serial is open")
    except:
        print("Error: serial Not Open")

def Reader():
    global ser
    if (ser.isOpen()):
        try:
            x = ser.readline().decode()
            x = (x)
            return x
        except:
            return "unable to print"
    else:
        return "cannot open serial port"
Rather than opening the file each time in your loop, I would suggest moving it outside:
with open('test_csv.csv', 'a') as f:
    csv_file = csv.writer(f)
    while True:
        date_now, time_now = Timestamp()
        fio2_data = fio2.Reader()
        csv_file.writerow([date_now, time_now, fio2_data])
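Opening the file once also means the csv.writer is only created once. One thing to keep in mind: because the loop never leaves the with block, rows can sit in the write buffer for a while, and since the thread is a daemon it may be stopped before the buffer is flushed; calling f.flush() after each writerow() avoids losing rows.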
I figured it out. I had to remove some garbage characters that were attached to the decimal values. First, I converted the received data to a string and replaced the garbage characters. Here's how I changed it:
[date_now,time_now] = Timestamp()
fio2_data = str(fio2.Reader()).replace("\r\n","")
fio2_data = fio2_data.replace("\x000","")
write_list = [date_now,time_now,fio2_data]
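Putting the single open() and that cleanup together, the write loop could look roughly like this (a sketch; fio2, Timestamp() and Csv_creator() are the same ones from the question):

def Write_loop():
    Csv_creator()
    fio2.Initialize()
    with open('test_csv.csv', 'a') as f:
        csv_file = csv.writer(f)
        while True:
            date_now, time_now = Timestamp()
            # strip the stray control characters before writing
            fio2_data = str(fio2.Reader()).replace("\r\n", "").replace("\x000", "")
            csv_file.writerow([date_now, time_now, fio2_data])
            f.flush()  # push each row to disk straight away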
I've only learnt the basics of Python, so please forgive me, but I was not able to determine the fix from the other posts. I open my JSON files with 'r', and I think I'm writing to them in 'r' as well, but it doesn't like that, and changing it to 'r' doesn't help :(
For the following section:
if isinstance(to_write, list):
    self.log_file.write(''.join(to_write) + "<r/>")
else:
    self.log_file.write(str(to_write) + "<r/>")
self.log_file.flush()
The error I get is: a bytes-like object is required, not 'str'
import math
import time
from random import randint
import json
from instagram.client import InstagramAPI


class Bot:
    def __init__(self, config_file, tags_file):
        # Loading the configuration file, it has the access_token, user_id and other configs
        self.config = json.load(config_file)

        # Loading the tags file, it will be kept up to date while the script is running
        self.tags = json.load(tags_file)

        # Log file to output the debugging info about the script to html
        self.filename = self.config["path"] + self.config["prefix_name"] + time.strftime("%d%m%Y") + ".html"
        self.log_file = open(self.filename, "wb")

        # Initializing the Instagram API with our access token
        self.api = InstagramAPI(access_token=self.config["access_token"], client_secret=self.config['client_secret'])

        # Likes per tag rate
        self.likes_per_tag = math.trunc(min(self.config["follows_per_hour"],
                                            self.config["likes_per_hour"]) / len(self.tags["tags"]))

    def save_tags(self):
        j = json.dumps(self.tags, indent=4)
        f = open('tags.json', 'w')
        print >> f, j
        f.close()

    def insta_write(self, to_write):
        if self.filename != self.config["path"] + self.config["prefix_name"] + time.strftime("%d%m%Y") + ".html":
            self.log_file.close()
            self.filename = self.config["path"] + self.config["prefix_name"] + time.strftime("%d%m%Y") + ".html"
            self.log_file = open(self.filename, "wb")
        if isinstance(to_write, list):
            self.log_file.write(''.join(to_write) + "<r/>")
        else:
            self.log_file.write(str(to_write) + "<r/>")
        self.log_file.flush()

    def going_sleep(self, timer):
        sleep = randint(timer, 2 * timer)
        self.insta_write("SLEEP " + str(sleep))
        time.sleep(sleep)

    def like_and_follow(self, media, likes_for_this_tag):
        try:
            var = self.api.user_relationship(user_id=media.user.id)
            if self.config["my_user_id"] != media.user.id:
                self.insta_write("--------------")
                self.insta_write(var)
                if var.outgoing_status == 'none':
                    self.insta_write("LIKE RESULT:")
                    self.insta_write(self.api.like_media(media_id=media.id))
                    self.insta_write("FOLLOW RESULT:")
                    self.insta_write(self.api.follow_user(user_id=media.user.id))
                    likes_for_this_tag -= 1
                    self.going_sleep(self.config["sleep_timer"])
                else:
                    self.going_sleep(self.config["sleep_timer"] / 2)
        except Exception as e:
            self.insta_write(str(e))
            self.insta_write("GOING SLEEP 30 min")
            time.sleep(1800)
            self.like_and_follow(media, likes_for_this_tag)
        return likes_for_this_tag

    def run(self):
        while True:
            for tag in self.tags["tags"].keys():
                tag = str(tag)
                self.insta_write("--------------------")
                self.insta_write("TAG: " + tag)
                self.insta_write("--------------------")
                self.insta_write("--------------------")
                self.insta_write("DICTIONARY STATUS:")

                for keys, values in self.tags["tags"].items():
                    self.insta_write(keys)
                    if values is not None:
                        self.insta_write(values)

                likes_for_this_tag = self.likes_per_tag

                while likes_for_this_tag > 0 and self.tags["tags"][tag] != 0:
                    if self.tags["tags"][tag] is None:
                        media_tag, self.tags["tags"][tag] = self.api.tag_recent_media(tag_name=tag,
                                                                                      count=likes_for_this_tag)
                    else:
                        media_tag, self.tags["tags"][tag] = self.api.tag_recent_media(tag_name=tag,
                                                                                      count=likes_for_this_tag,
                                                                                      max_tag_id=self.tags["tags"][tag])

                    self.insta_write("API CALL DONE")

                    if len(media_tag) == 0 or self.tags["tags"][tag] is None:
                        self.tags["tags"][tag] = 0
                        likes_for_this_tag = 0
                    else:
                        self.insta_write(self.tags["tags"][tag])
                        self.tags["tags"][tag] = self.tags["tags"][tag].split("&")[-1:][0].split("=")[1]
                        self.save_tags()

                        for m in media_tag:
                            likes_for_this_tag = self.like_and_follow(m, likes_for_this_tag)

            if reduce(lambda r, h: r and h[1] == 0, self.tags["tags"].items(), True):
                self.insta_write("END")
                exit(1)


if __name__ == '__main__':
    bot = Bot(open("config_bot.json", "r"), open("tags.json", "r"))
    bot.run()
You opened the file as binary:
self.log_file = open(self.filename, "wb")
but you are writing str Unicode strings to it. Either open the file in text mode (with an encoding set) or encode each string separately.
Opening the file in text mode is easiest:
self.log_file = open(self.filename, "w", encoding="utf8")
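If you would rather keep the file in binary mode, the alternative is to encode each string yourself before writing. A sketch of insta_write() doing that (str() is applied to every list item first, since join() only accepts strings):

def insta_write(self, to_write):
    # build the text first, then encode it for the binary file handle
    if isinstance(to_write, list):
        text = ''.join(str(item) for item in to_write) + "<r/>"
    else:
        text = str(to_write) + "<r/>"
    self.log_file.write(text.encode("utf8"))
    self.log_file.flush()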
In my case, the reason for the error was a conflict between the json.load function and another function named load from a different module. Explicitly specifying which load function to use, i.e. json.load, solved the problem.
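As an illustration of that kind of name clash (a made-up example, not taken from the code above), a wildcard import can shadow a bare load, and qualifying the call removes the ambiguity:

from pickle import *   # wildcard import brings in pickle's load() under the bare name
import json

with open("config.json") as f:
    # a bare load(f) here would resolve to pickle.load and fail on a text-mode JSON file;
    # qualifying the call makes it unambiguous:
    data = json.load(f)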
import re, urllib.request

patern = re.compile(r'image/\w*\W*\w*\.\jpg', re.I|re.M)

file = open('APODLinks.txt','r')
rf = file.read()
a = rf.split('\n')
file.close()

def lic(li):
    if not li:
        pass
    else:
        print(li[0])
        f.write('http://apod.nasa.gov/apod/%s\n' % li[0])

def main():
    for i in range(len(a)):
        ur = urllib.request.urlopen(a[i])
        mf = re.findall(patern, str(ur.read()))
        lic(mf)

f = open('APODImgs.txt','w')
main()
f.close()
What's wrong with my code? I'm trying to write a txt file with all the jpg pictures
from Astronomy Picture of the Day, but the file APODImgs.txt is empty...
The mf list is sometimes empty; maybe that is my problem...
The APODLinks.txt file contains URLs like this:
apod.nasa.gov/apod/ap140815.html
apod.nasa.gov/apod/ap140814.html
apod.nasa.gov/apod/ap140813.html
7000 lines of URLs
The APODImgs.txt file should end up looking like this:
apod.nasa.gov/apod/image/1408/Persei93_1abolfath.jpg
apod.nasa.gov/apod/image/1408/Supermoon_20140810.JPG
apod.nasa.gov/apod/image/1408/m57_nasagendler_3000.jpg
apod.nasa.gov/apod/image/1408/HebesChasma_esa_1024.jpg
...
Please help and sorry for my English...
Most probably not li is always true in lic, because your regexp never matches anything.
To figure it out, print the HTTP response body:
urr = urllib.request.urlopen(a[i]).read()
print(repr(urr))
mf = re.findall(patern, str(urr))
print(repr(mf))
lic(mf)
I changed my code and it works!!! Compared to the first version, the regex no longer has the stray \j escape (it now ends in \.jpg), lic() prints a message when nothing matches, and urlopen is wrapped in a try/except:
import re, urllib.request

patern = re.compile(r'image/\w*\W*\w*\.jpg', re.I|re.M)

file = open('APODLinks.txt','r')
rf = file.read()
a = rf.split('\n')
file.close()

def lic(li):
    if not li:
        print("No matches found")
    else:
        print('http://apod.nasa.gov/apod/%s' % li[0])
        f.write('http://apod.nasa.gov/apod/%s\n' % li[0])

def main():
    for i in range(len(a)):
        try:
            ur = urllib.request.urlopen(a[i])
        except:
            print('404 not found!')
        mf = re.findall(patern, str(ur.read()))
        lic(mf)

f = open('APODImgs.txt','w')
main()
f.close()