I can't retrieve values from the datastore - Python

I want to retrieve some values I put in the datastore with a model class named "Ani". I have tried using the script below to do that, but I am having problems with it. Can someone please help me with it?
import random
import getpass
import sys

# Add the Python SDK to the package path.
# Adjust these paths accordingly.
sys.path.append('/root/google_appengine')
sys.path.append('/root/google_appengine/lib/yaml/lib')

from google.appengine.ext.remote_api import remote_api_stub
from google.appengine.ext import db
import models

# Your app ID and remote API URL path go here.
APP_ID = 'silasanio'
REMOTE_API_PATH = '/remote_api'

def auth_func():
    email_address = raw_input('Email address: ')
    password = getpass.getpass('Password: ')
    return email_address, password

def initialize_remote_api(app_id=APP_ID, path=REMOTE_API_PATH):
    remote_api_stub.ConfigureRemoteApi(app_id, path, auth_func)
    remote_api_stub.MaybeInvokeAuthentication()

def main():
    initialize_remote_api()
    result = db.GqlQuery("SELECT * FROM Ani ORDER BY date DESC LIMIT 1")
    for res in result:
        mean_value = res.mean
        std_dev = res.stdev
        print(mean_value)

if __name__ == '__main__':
    main()
I am getting the error below:
raise KindError('No implementation for kind \'%s\'' % kind)
google.appengine.ext.db.KindError: No implementation for kind 'Ani'
Please, I need some help with it.
Thanks

You need to import the file where you define the class Ani before you can run queries on the data.
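A minimal sketch of what that looks like. The module name is whichever file actually defines Ani; it is shown as models here only because the script already imports it, so adjust the import if the class lives elsewhere:

# Import the module that defines the Ani model so App Engine can map
# the kind name 'Ani' to a Python class before the query runs.
from models import Ani  # adjust if Ani is defined in a different file

result = db.GqlQuery("SELECT * FROM Ani ORDER BY date DESC LIMIT 1")
for res in result:
    print(res.mean)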

How can I convert an Azure Function to a regular Python script?

I am very new to Python and I have run into a problem: I have to convert an Azure Function to a normal Python script. I have not worked with Azure before, so I am somewhat clueless. The code below analyzes a document and returns key-value pairs, but I am not sure how to convert it into a regular Python script and run it locally.
import logging
from azure.ai.formrecognizer import DocumentAnalysisClient
from azure.core.credentials import AzureKeyCredential
from azure.storage.blob import BlockBlobService, PublicAccess
import json
import re
import uuid

logger = logging.getLogger(__name__)

import azure.functions as func


def upload_blob(account_name, container_name, account_key, blob_name):
    # Create the BlockBlobService that is used to call the Blob service for the storage account
    blob_service_client = BlockBlobService(
        account_name=account_name,
        account_key=account_key)
    # Set the permission so the blobs are public.
    blob_service_client.set_container_acl(container_name, public_access=PublicAccess.Container)
    #blob_name = doc_path.split('/')[-1][:-4] + str(uuid.uuid4()) + ".pdf"
    # Upload the created file, use blob_name for the blob name
    #blob_service_client.create_blob_from_path(container_name, blob_name, doc_path)
    blob_url = blob_service_client.make_blob_url(container_name, blob_name)
    return blob_url


def delete_blob(account_name, container_name, account_key, blob_name):
    blob_service_client = BlockBlobService(
        account_name=account_name,
        account_key=account_key)
    # Delete blob from container
    blob_service_client.delete_blob(container_name, blob_name)


def search_value(kvs, search_key):
    for key, value in kvs.items():
        if re.search(search_key, key, re.IGNORECASE):
            return value


def analyze_general_documents(endpoint, api_key, doc_url):
    document_analysis_client = DocumentAnalysisClient(
        endpoint=endpoint, credential=AzureKeyCredential(api_key)
    )
    poller = document_analysis_client.begin_analyze_document_from_url("prebuilt-document", doc_url)
    result = poller.result()
    #print("----Key-value pairs found in document----")
    kvs = {}
    content = result.content.replace("\n", "").replace("\r", "").strip()
    for kv_pair in result.key_value_pairs:
        if kv_pair.key:
            key = kv_pair.key.content
            if kv_pair.value:
                val = kv_pair.value.content
                kvs[key] = val
    return content, kvs


def main(req: func.HttpRequest) -> func.HttpResponse:
    try:
        # Query parameters
        endpoint = ""
        api_key = ""
        account_name = ""
        container_name = ""
        account_key = ""

        if "blob_name" in req.get_json() and "search_keys" in req.get_json():
            blob_name = req.get_json()["blob_name"]
            search_keys = req.get_json()["search_keys"]
            logger.info(" search_keys = " + str(search_keys))

            # Upload file to Azure Storage container.
            logger.info("Creating blob url")
            blob_url = upload_blob(account_name, container_name, account_key, blob_name)
            #logger.info("Blob url = " + str(blob_url))

            # Analyze the document
            content, kvs = analyze_general_documents(endpoint, api_key, blob_url)
            #logger.info("content = " + content)
            #logger.info("kvs = " + str(kvs))

            # Search for specified keys
            search_results = {}
            for search_key in search_keys:
                val = search_value(kvs, search_key)
                if val:
                    search_results[search_key] = val
            #logger.info("search_results = " + str(search_results))

            # Delete the uploaded file
            delete_blob(account_name, container_name, account_key, blob_name)

            # Return search results
            return func.HttpResponse(json.dumps(search_results))
        else:
            return func.HttpResponse("Please pass in end_point, api_key, and blob_name", status_code=400)
    except Exception as e:
        return func.HttpResponse("Error: " + str(e), status_code=500)
First things first: this may not be the full solution to your problem, but it should help you derive the next steps. In my opinion the script should be rewritten, since it is based on an old library that may no longer be maintained. That said, below are some ideas; this is by no means a complete solution and must not be used with production data.
Your imported libraries can remain as-is. Just note that when you install them through pip install {library_name}, you need the old azure-storage package and not azure-storage-blob, since the latter does not provide BlockBlobService.
Also, if you want to run the script from the command line, you will want to pass in the parameters that the function originally receives through the HTTP request; argparse works well for this. Furthermore, you should not hard-code the credentials in your script file but rather export them as environment variables, which also requires the os module.
That said, your imports would look like this:
import logging
from azure.ai.formrecognizer import DocumentAnalysisClient
from azure.core.credentials import AzureKeyCredential
from azure.storage.blob import BlockBlobService, PublicAccess
import json
import re
import uuid
# Importing argparse for being able to pass parameters
import argparse
# Importing os to read environment variables
import os
You would not need import azure.functions as func any more.
Because the script runs locally, you can pass the parameters blobname and searchkeys when executing it. That would require something like this:
parser = argparse.ArgumentParser()
parser.add_argument("-n", "--blobname", type=str)
# nargs='+' accepts one or more search keys, so search_keys becomes a list
# instead of a single string (which the later loop would iterate character by character).
parser.add_argument("-s", "--searchkeys", type=str, nargs='+')
args = parser.parse_args()

blob_name = args.blobname
search_keys = args.searchkeys
This would allow you to keep the variable names as they are right now.
As initially mentioned, the functions can remain as-is; however, the credentials should not live inside the script. With os imported, you could add this...
# Query parameters
endpoint = os.getenv('form_recognizer_endpoint')
api_key = os.getenv('form_recognizer_api_key')
account_name = os.getenv('storage_account_name')
container_name = os.getenv('storage_container_name')
account_key = os.getenv('storage_account_key')
...and then use the export functionality of your shell to set them as environment variables, e.g.:
export form_recognizer_endpoint="your_endpoint"
export form_recognizer_api_key="your_api_key"
export storage_account_name="your_account_name"
export storage_container_name="your_container"
export storage_account_key="your_key"
Next, you could remove the surrounding def main and the try-except block, and also the if-statement, so that your main block would be along these lines:
logger.info(" search_keys = "+str(search_keys))
# Upload file to Azure Storage container.
logger.info("Creating blob url")
blob_url = upload_blob(account_name, container_name, account_key, blob_name)
#logger.info("Blob url = "+str(blob_url))
# Analyze the document
content, kvs = analyze_general_documents(endpoint, api_key, blob_url)
#logger.info("content = "+content)
#logger.info("kvs = "+str(kvs))
# Search for specified keys
search_results = {}
for search_key in search_keys:
val = search_value(kvs, search_key)
if val:
search_results[search_key] = val
#logger.info("search_results = "+str(search_results))
# Delete the uploaded file
delete_blob(account_name, container_name, account_key, blob_name)
Finally, you could change the return line to print the result, e.g.:
# Return search results
print(json.dumps(search_results))
It could be executed like this: python script.py --blobname testfile.pdf --searchkeys "text"
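Putting the pieces together, a minimal skeleton of the resulting script could look roughly like this (a sketch only; it assumes the helper functions upload_blob, analyze_general_documents, search_value and delete_blob from the original code are kept unchanged and the environment variables are exported as shown above):

if __name__ == "__main__":
    # Command-line parameters replace the HTTP request body.
    parser = argparse.ArgumentParser()
    parser.add_argument("-n", "--blobname", type=str, required=True)
    parser.add_argument("-s", "--searchkeys", type=str, nargs='+', required=True)
    args = parser.parse_args()

    # Credentials come from environment variables instead of the script.
    endpoint = os.getenv('form_recognizer_endpoint')
    api_key = os.getenv('form_recognizer_api_key')
    account_name = os.getenv('storage_account_name')
    container_name = os.getenv('storage_container_name')
    account_key = os.getenv('storage_account_key')

    # Same flow as the original function body, minus the HTTP layer.
    blob_url = upload_blob(account_name, container_name, account_key, args.blobname)
    content, kvs = analyze_general_documents(endpoint, api_key, blob_url)

    search_results = {}
    for search_key in args.searchkeys:
        val = search_value(kvs, search_key)
        if val:
            search_results[search_key] = val

    delete_blob(account_name, container_name, account_key, args.blobname)
    print(json.dumps(search_results))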

Python flask server to retrieve certain records

I have the following Python code for a Flask server. I am trying to have this part of the code list all of my vehicles that match the horsepower I enter through my browser. I want it to return all the car names that match the horsepower, but what I have doesn't seem to be working; it returns nothing. I know the issue is somewhere in the for loop, but I don't know how to fix it.
This is my first time doing something like this and I've been trying multiple things for hours without being able to figure it out. Could you please help?
from flask import Flask
from flask import request
import os, json

app = Flask(__name__, static_folder='flask')

@app.route('/HORSEPOWER')
def horsepower():
    horsepower = request.args.get('horsepower')
    message = "<h3>HORSEPOWER "+str(horsepower)+"</h3>"
    path = os.getcwd() + "/data/vehicles.json"
    with open(path) as f:
        data = json.load(f)
    for record in data:
        horsepower = int(record["Horsepower"])
        if horsepower == record:
            car = record["Car"]
    return message
The following example should meet your expectations.
from flask import Flask
from flask import request
import os, json

app = Flask(__name__)

@app.route('/horsepower')
def horsepower():
    # The type of the URL parameter is automatically converted to integer.
    horsepower = request.args.get('horsepower', type=int)

    # Read the file which is located in the data folder relative to the
    # application root directory.
    path = os.path.join(app.root_path, 'data', 'vehicles.json')
    with open(path) as f:
        data = json.load(f)

    # Build a list of car names, keeping only the records whose
    # horsepower matches the passed parameter.
    cars = [record['Car'] for record in data if horsepower == int(record["Horsepower"])]

    # The result is then output separated by commas.
    return f'''
        <h3>HORSEPOWER {horsepower}</h3>
        <p>{','.join(cars)}</p>
    '''
There are many ways to write the loop; I used a short variant (a list comprehension) in the example. A more explicit version looks like this:
cars = []
for record in data:
    if horsepower == int(record['Horsepower']):
        cars.append(record['Car'])
As a tip:
Pay attention to cases where you overwrite the value of a variable by reusing the same name.
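That is exactly what happens in the original loop: the horsepower value read from the request is overwritten by each record's value, so the comparison can never do what was intended.

horsepower = request.args.get('horsepower')   # value from the URL
for record in data:
    horsepower = int(record["Horsepower"])    # overwrites the URL value
    if horsepower == record:                  # now compares a number to a dict
        car = record["Car"]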

Using Variables from Imported files in Python

I am trying to get a variable and its value from another Python file that has been imported. I have two files, Main.py and Write.py. In the main file I am trying to set the variable user_id equal to the new_id variable from the imported file Write.py. new_id creates a hash for the new user when they scan their tag (RFID). The issue is that after the tag is scanned, the variable user_id still remains empty. I think I might be grabbing the variable before the tag is scanned; any thoughts? I have posted the code below along with some comments.
from tkinter import *
# Second file
import Write
from tkcalendar import DateEntry
from firebase import firebase

data = {}
global user_id

# Firebase
firebase = firebase.FirebaseApplication("https://xxxxxxx.firebaseio.com/", None)

# Button click
def sub():
    global user_id
    # Setting variables from user input
    name = entry_1.get()
    last = entry_2.get()
    number = phone.get()
    # Issue is here
    try:
        # Calling function from other file
        Write.scan()
        if Write.scan():
            # Getting the new user id
            user_id = new_id
        # User info being sent to the database
        data = {
            'Name #': name,
            'Last': last,
            'Number': number,
            'Card #': user_id
        }
        results = firebase.post('xxxxxxxx/User', data)
    except Exception as e:
        print(e)
Write.py
import string
from random import *
import RPi.GPIO as GPIO
from mfrc522 import SimpleMFRC522

reader = SimpleMFRC522()

# Function being called
def scan():
    try:
        # Creating user hash
        c = string.digits + string.ascii_letters
        new_id = "".join(choice(c) for x in range(randint(25, 25)))
        print("Please Scan tag")
        # Writing to tag
        reader.write(new_id)
        if reader.write(new_id):
            print("Tag Scanned")
        else:
            print("Scan Tag First")
        print("Scanning Complete")
    finally:
        GPIO.cleanup()
I'm not sure if this is the actual issue or just the formatting here, but the indentation of your try: statement in the main file is wrong.

Pass a flag from one Python script to another

Hello, if I have a flag in one script, is it possible to pass a real-time change to another? For example, I wrote the script below, let's name it script1.py, which reads data from a serial connection and saves it to a .txt file. When the data I receive is '0', I would like to pass the sent_json flag to script2, so that when script2 gets the trigger, it POSTs the data. Any suggestions?
while True:
    try:
        a = ser.readline()
        timestamped = str(datetime.datetime.now())
        suma = timestamped + "\t " + a.decode('utf-8')
        f = open("current_data.txt", 'a')
        f.write(suma)
        if (a.decode().strip() == '0'):
            sent_json = True
            saveData()
            print("New data is saved!")
            sent_json = False
    except:
        print("Unexpected error: ")
        break
and I have another script, let's name it script2.py, which contains the main Flask app:
import sqlite3, json
from flask import Flask, render_template, request
from serialNumber_id import serial_number
import sys

app = Flask(__name__)

@app.route("/")
def PostData():
    ''' Connect to DB, set the temperature to 2 decimal float, POST Data to DB'''
    with open("data.json") as dataFile:
        data = json.load(dataFile)
    for key, value in data.items():
        temperature = "{0:.2f}".format(float(value['data']))
        date = value['date']
        conn = sqlite3.connect('sensordata.db')
        cur = conn.cursor()
        cur.execute("""INSERT INTO Temperature_data(temperature, currentdat, currenttime, device) VALUES ((?), (?), time("now"), (?))""", (temperature, date, serial_number))
        conn.commit()
    open('data.json', 'w').close()

#######
# Code something like:
while True:
    if sent_json:
        do something
    else:
        do something

# if __name__ == "__main__":
#     app.run(host='0.0.0.0', port=8181, debug=True)
Note that in script2 I have tried:
from script1 import sent_json
Also, the scripts are in the same folder.
The absolute simplest thing to do, since you're polling anyway, is to write to some kind of shared resource like a file: script1 writes something, maybe a timestamp, to a file, and script2 continually polls that file to check whether it has changed.
Let me warn you that this, like anything that relies on the filesystem, is a terrible solution if you want performance or efficiency.
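A minimal sketch of that idea; the file name and polling interval here are arbitrary choices, not something from your scripts:

# script1.py side: signal that something changed.
import time

def signal_change(path="flag.txt"):
    # Write the current timestamp; the reader only cares that the content changed.
    with open(path, "w") as f:
        f.write(str(time.time()))

# script2.py side: poll the shared file for changes.
import os
import time

def poll_for_change(path="flag.txt", interval=1.0):
    last = None
    while True:
        if os.path.exists(path):
            with open(path) as f:
                current = f.read()
            if current != last:
                last = current
                print("Flag file changed, POST the data here")
        time.sleep(interval)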
Why not just POST the data to your flask app instead of trying to pass it from script to script?
# script1.py
import requests

my_data = {'name': 'FooBar'}
requests.post('http://localhost:3000/', json=my_data)

# script2.py
from flask import request
...

@app.route("/", methods=["POST"])
def PostData():
    data = request.get_json(force=True)
    print(data)
    # {'name':'FooBar'}
    do_something(data)
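Applied to the loop in script1.py, the idea could look roughly like this (a sketch; it assumes requests is imported and that the Flask app from script2.py is listening on port 3000):

if (a.decode().strip() == '0'):
    saveData()
    print("New data is saved!")
    # Instead of toggling a flag, tell the Flask app directly that new data is ready.
    requests.post('http://localhost:3000/', json={'sent_json': True})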

CSS file not being applied to TPL in bottle

This is my first question here, and I'm pretty sure it's a case of me not knowing completely what I'm doing.
When using a .tpl file (the one in question is 'comments.tpl', stored in /views), it does not retrieve the .css file I would like associated with it.
I've read through a few questions posted here and elsewhere, but I have yet to find something that I can comprehend as a solution.
Here is my code:
import bottle
import pymongo
import sys
from bottle import static_file

def connect_db():
    db = pymongo.MongoClient()
    db = db
    return db.reddit.comments

@bottle.route('/static/css/style.css')
def stylesheets(comments):
    return static_file(comments, root='./static/css')

@bottle.route('/')
def home_page():
    db = connect_db()
    result = db.find(limit=10)
    return bottle.template('comments', username="YOU", result=result)

@bottle.route('/test_page')
def test_page():
    return "You made it!<br> <a href='/'>Now come back!</a><br>"

bottle.debug(True)
bottle.run(host='localhost', port=8080)
The files are laid out like this:
/---cmt_page.py
---/views/comments.tpl
---/static/css/style.css
Any help would be fantastic, thanks in advance!
-Sam
///////////////EDIT//////////////////
Sorted.
Changed:
@bottle.route('/static/css/style.css')
def stylesheets(comments):
    return static_file(comments, root='./static/css')
To:
@bottle.route('/static/css/<style>')
def stylesheets(style):
    return static_file(style, root='./static/css')
Always read the bloody documentation kids....
-Sam
Try an absolute path in root='./static/css'.
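A minimal sketch of that suggestion, building the absolute path from the script's own location rather than the current working directory (the folder names are taken from the layout shown in the question):

import os
import bottle
from bottle import static_file

# Absolute path to the static/css folder next to cmt_page.py.
CSS_ROOT = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'static', 'css')

@bottle.route('/static/css/<filename>')
def stylesheets(filename):
    return static_file(filename, root=CSS_ROOT)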
