I'm trying to use the Bottle framework in Python with sqlite3. I made a Todo List application, but the first time I try to POST data I get a different error, and from the second time on I get 'database is locked'.
Can anyone help?
# -*- coding: utf-8 -*-
import os, sqlite3
from bottle import route, run, get, post, request, template

# sqlite from here ----------------
dbname = "todo.db"
connection = sqlite3.connect(dbname)
dbcontrol = connection.cursor()

# Making table from here --------------------
create_table = '''create table todo_list (todo text)'''

@route("/")
def index():
    todo_list = get_todo()
    return template("index", todo_list=todo_list)
I think I need more specific code here.
#route("/enter", method=["POST"])
def enter():
conn = sqlite3.connect("todo.db")
todo=request.POST.getunicode("todo_list")
save_todo(todo)
return redirect("/")
def save_todo(todo):
connection = sqlite3.connect('todo.db')
dbcontrol = connection.cursor()
insert="insert into todo_list(todo) values('{0}')".format(todo)
dbcontrol.execute(insert)
connection.commit()
def get_todo():
connection=sqlite3.connect('todo.db')
dbcontrol = connection.cursor()
select = "select * from todo_list"
dbcontrol.execute(select)
row = dbcontrol.fetchall()
return row
run(host="localhost", port=8080, debug=True)
Install the bottle-sqlite plugin with:
$ pip install bottle-sqlite
An example from the plugin's documentation:
import bottle
import bottle.ext.sqlite
from bottle import template, HTTPError

app = bottle.Bottle()
plugin = bottle.ext.sqlite.Plugin(dbfile='/tmp/test.db')
app.install(plugin)

@app.route('/show/:item')
def show(item, db):
    row = db.execute('SELECT * from items where name=?', (item,)).fetchone()
    if row:
        return template('showitem', page=row)
    return HTTPError(404, "Page not found")
Important notes from the plugin
Routes that do not expect a db keyword argument are not affected.
The connection handle is configured so that sqlite3.Row objects can be
accessed both by index (like tuples) and case-insensitively by name.
At the end of the request cycle, outstanding transactions are
committed and the connection is closed automatically. If an error
occurs, any changes to the database since the last commit are rolled
back to keep the database in a consistent state.
Also take a look at the Configuration section of the plugin documentation.
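For the Todo app in the question, the same routes could look roughly like the following on top of the plugin. This is only a sketch, assuming todo.db and the todo_list table already exist and the index template is unchanged; the point is that the plugin hands each request its own db handle and commits/closes it afterwards, so no long-lived module-level connection is left around to trigger 'database is locked'.

# -*- coding: utf-8 -*-
# Sketch: the Todo routes from the question rewritten to use bottle-sqlite.
# Assumes todo.db exists and the todo_list table was created beforehand.
import bottle
import bottle.ext.sqlite
from bottle import request, template, redirect

app = bottle.Bottle()
app.install(bottle.ext.sqlite.Plugin(dbfile='todo.db'))

@app.route("/")
def index(db):
    # 'db' is injected by the plugin and closed automatically after the request
    todo_list = db.execute("select * from todo_list").fetchall()
    return template("index", todo_list=todo_list)

@app.route("/enter", method="POST")
def enter(db):
    todo = request.POST.getunicode("todo_list")
    # parameterized insert; the plugin commits at the end of the request
    db.execute("insert into todo_list(todo) values (?)", (todo,))
    return redirect("/")

bottle.run(app, host="localhost", port=8080, debug=True)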
Related
I need to add a link to a photo in my database using a REST API. I tried to pass it as a string, but I get an error.
My PUT call:
http://127.0.0.1:3000/addphoto/3&http://www.clker.com/cliparts/8/m/S/S/c/x/number-3-coner-num-hi.png
and what I get:
127.0.0.1 - - [09/Apr/2022 09:10:51] "PUT /addphoto/3%26http://www.clker.com/cliparts/8/m/S/S/c/x/number-3-coner-num-hi.png HTTP/1.1" 404
Here is the code for the PUT route:
@app.route('/addphoto/<int:id>&<string:link1>', methods=['PUT'])
def addphoto(id, link1):
    connection = psycopg2.connect(db.data)
    cursor = connection.cursor()
    update_query = f"""UPDATE public."Items" SET photo = '{link1}' WHERE "Id" = {id}"""
    cursor.execute(update_query)
    connection.commit()
    return getitems()
It seems you want to pass the link through path parameters. That is error-prone here, because the URL contains characters such as /, : and & that interfere with Flask's route matching rules.
I suggest passing it through query parameters instead, like this:
from flask import request

@app.route('/addphoto', methods=['PUT'])
def addphoto():
    id = request.args.get("id")
    link1 = request.args.get("link")
    connection = psycopg2.connect(db.data)
    cursor = connection.cursor()
    # use a parameterized query instead of string formatting to avoid SQL injection
    update_query = 'UPDATE public."Items" SET photo = %s WHERE "Id" = %s'
    cursor.execute(update_query, (link1, id))
    connection.commit()
    return getitems()
Then you can call it with a request like this:
http://127.0.0.1:5000/addphoto?id=200&link=https://www.youtube.com/watch?v=ei7kbQhK1hA
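If the link itself contains reserved characters such as & or ?, make sure the client URL-encodes the query parameters. As a hypothetical client-side sketch using Python's requests library (the port and route match the examples above):

import requests

# Hypothetical client call for the route above; requests URL-encodes the params for you
link = "http://www.clker.com/cliparts/8/m/S/S/c/x/number-3-coner-num-hi.png"
resp = requests.put("http://127.0.0.1:5000/addphoto", params={"id": 3, "link": link})
print(resp.status_code)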
I am writing some tests with pytest; I want to test creating a user (and sending the email) through the POST method.
After some debugging, I know the issue is that I end up with two in-memory databases, even though both sessions come from the same SessionLocal().
How can I fix this? I tried db.flush(), but it doesn't work.
This is the code for the POST route:
@router.post("/", response_model=schemas.User)
def create_user(
    *,
    db: Session = Depends(deps.get_db),  # get_db is SessionLocal()
    user_in: schemas.UserCreate,
    current_user: models.User = Depends(deps.get_current_active_superuser),
) -> Any:
    """
    Create new user.
    """
    user = crud.user.get_by_email(db, email=user_in.email)
    if user:
        raise HTTPException(
            status_code=400,
            detail="The user with this username already exists in the system.",
        )
    user = crud.user.create(db, obj_in=user_in)
    print("====post====")
    print(db.query(models.User).count())
    print(db)
    if settings.EMAILS_ENABLED and user_in.email:
        send_new_account_email(
            email_to=user_in.email, username=user_in.email, password=user_in.password
        )
    return user
and the test code is:
def test_create_user_new_email(
    client: TestClient, superuser_token_headers: dict, db: Session  # db is SessionLocal()
) -> None:
    username = random_email()
    password = random_lower_string()
    data = {"email": username, "password": password}
    r = client.post(
        f"{settings.API_V1_STR}/users/", headers=superuser_token_headers, json=data,
    )
    assert 200 <= r.status_code < 300
    created_user = r.json()
    print("====test====")
    print(db.query(User).count())
    print(db)
    user = crud.user.get_by_email(db, email=username)
    assert user
    assert user.email == created_user["email"]
and the test result is
> assert user
E assert None
====post====
320
<sqlalchemy.orm.session.Session object at 0x7f0a9f660910>
====test====
319
<sqlalchemy.orm.session.Session object at 0x7f0aa09c4d60>
Your code does not provide enough information to pinpoint the problem; the key issues are probably in the parts that are only described by your comments (deps.get_db and SessionLocal()).
It also seems like you are confusing SQLAlchemy sessions with databases. If you are not familiar with these concepts, I highly recommend having a look at the SQLAlchemy documentation.
Looking at your code structure, though, it seems you are using FastAPI.
If you want to test SQLAlchemy with pytest, I recommend using pytest fixtures that wrap each test in a SQL transaction.
Here is my suggestion on how to implement such a test. I'll assume that you want to run the tests against your actual database rather than creating a new database just for the tests. This implementation is heavily based on this GitHub gist (the author added a "feel free to use" statement, so I assume he is OK with me copying his code here):
# test.py
import pytest
from sqlalchemy import create_engine
from sqlalchemy.orm import Session
from fastapi.testclient import TestClient

from myapp.models import BaseModel
from myapp.main import app  # import your fastapi app
from myapp.database import get_db  # import the dependency

client = TestClient(app)


# scope="session" means that the engine will last for the whole test session
@pytest.fixture(scope="session")
def engine():
    return create_engine("postgresql://localhost/test_database")


# at the end of the test session, drop the created metadata (a fixture with yield runs its teardown after the yield)
@pytest.fixture(scope="session")
def tables(engine):
    BaseModel.metadata.create_all(engine)
    yield
    BaseModel.metadata.drop_all(engine)


# here scope="function" (the default), so each time a test finishes the database is cleaned
@pytest.fixture
def dbsession(engine, tables):
    """Returns an sqlalchemy session, and after the test tears down everything properly."""
    connection = engine.connect()
    # begin the transaction that will be rolled back after the test
    transaction = connection.begin()
    # use the connection with the already started transaction
    session = Session(bind=connection)

    yield session

    session.close()
    # roll back the broader transaction
    transaction.rollback()
    # put back the connection to the connection pool
    connection.close()

## end of the github gist code

@pytest.fixture
def db_fastapi(dbsession):
    def override_get_db():
        db = dbsession
        try:
            yield db
        finally:
            db.close()

    client.app.dependency_overrides[get_db] = override_get_db
    yield dbsession


# Now you can run your test
def test_create_user_new_email(db_fastapi):
    username = random_email()
    # ...
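With the override in place, the route handled by the TestClient and the assertions in the test share the same transaction-wrapped session. As a rough sketch of how the original test could be completed on top of the db_fastapi fixture (random_email, random_lower_string, settings, crud and superuser_token_headers are assumed to be the same helpers and fixtures as in your original test):

def test_create_user_new_email(db_fastapi, superuser_token_headers: dict) -> None:
    username = random_email()
    password = random_lower_string()
    data = {"email": username, "password": password}
    r = client.post(
        f"{settings.API_V1_STR}/users/", headers=superuser_token_headers, json=data,
    )
    assert 200 <= r.status_code < 300
    # the route and the test now share the same session, so the new user is visible here
    user = crud.user.get_by_email(db_fastapi, email=username)
    assert user
    assert user.email == username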
I have a Flask API based on the Flask-RESTPlus extension, hosted on Google App Engine. The API does a basic job of fetching data from a Google Cloud SQL PostgreSQL instance. It works fine otherwise, but sometimes it starts returning InterfaceError: cursor already closed.
Strangely, when I do a gcloud app deploy, the API starts working fine again.
Here's the basic structure of the API:
import simplejson as json
import psycopg2
from flask import Flask, jsonify
from flask_restplus import Api, Resource, fields
from psycopg2.extras import RealDictCursor

app = Flask(__name__)
app.config['SWAGGER_UI_JSONEDITOR'] = True

api = Api(app=app,
          doc='/docs',
          version="1.0",
          title="Title",
          description="description")
app.config['SWAGGER_UI_JSONEDITOR'] = True

ns_pricing = api.namespace('cropPricing')

db_user = "xxxx"
db_pass = "xxxx"
db_name = "xxxxx"
cloud_sql_connection_name = "xxxxxx"

conn = psycopg2.connect(user=db_user,
                        password=db_pass,
                        host='xxxxx',
                        dbname=db_name)

@ns_pricing.route('/list')
class States(Resource):
    def get(self):
        """
        list all the states for which data is available
        """
        cur = conn.cursor(cursor_factory=RealDictCursor)
        query = """
                SELECT
                    DISTINCT state
                FROM
                    db.table
                """
        conn.commit()
        cur.execute(query)
        states = json.loads(json.dumps(cur.fetchall()))

        if len(states) == 0:
            return jsonify(data=[],
                           status="Error",
                           message="Requested data not found")
        else:
            return jsonify(status="Success",
                           message="Successfully retrieved states",
                           data=states)
What should I fix to not see the error anymore?
It would be good to use an ORM such as SQLAlchemy / Flask-SQLAlchemy, which would handle establishing and re-establishing the connection for you.
If you stay with plain psycopg2, you can use try/except to catch the exception and re-establish the connection:
try:
    cur.execute(query)
except psycopg2.InterfaceError as err:
    print(err)
    conn = psycopg2.connect(....)
    cur = conn.cursor()
    cur.execute(query)
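If you keep the module-level connection, the same retry idea can be factored into a small helper so every route can reuse it. This is only a sketch built on the names from the question (db_user, db_pass, db_name and the host placeholder), not a drop-in fix:

import psycopg2
from psycopg2.extras import RealDictCursor

def query_with_retry(conn, query):
    """Run a read query; reconnect once if the connection has gone stale."""
    try:
        cur = conn.cursor(cursor_factory=RealDictCursor)
        cur.execute(query)
        return conn, cur.fetchall()
    except (psycopg2.InterfaceError, psycopg2.OperationalError):
        # connection was dropped (e.g. the instance was recycled); reconnect and retry once
        conn = psycopg2.connect(user=db_user, password=db_pass, host='xxxxx', dbname=db_name)
        cur = conn.cursor(cursor_factory=RealDictCursor)
        cur.execute(query)
        return conn, cur.fetchall()

The handler would then call conn, states = query_with_retry(conn, query) so that the refreshed connection is kept for later requests.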
I am trying to test by mocking a database query, but I am receiving an error:
AssertionError: Expected call: execute()
Not called
and create_table() is reported as not defined.
I want execute() to be called, and I want create_table() to return a response I can assert against pre-defined values.
app.py
from flask import Flask, g, jsonify
import mysql.connector

app = Flask(__name__)

@app.before_request
def before_request():
    g.db = mysql.connector.connect(user='root', password='root', database='mysql')

def create_table():
    cursor = g.db.cursor()  # here g is imported from the Flask module
    cursor.execute('CREATE TABLE IF NOT EXISTS Man (id INT NOT NULL AUTO_INCREMENT PRIMARY KEY, name VARCHAR(40))')
    data = dict(Table='Man is created')
    resp = jsonify(data)
    cursor.close()
    return resp
test.py
import unittest
from app import *
from mock import patch

class Test(unittest.TestCase):
    def test_create(self):
        with patch("app.g") as mock_g:
            mock_g.db.cursor()
            mock_g.execute.assert_called_with()
            resp = create_table()
            assertEqual(json, '{"Table":"Testmysql is created","Columns": ["id","name","email"]}')
What am I doing wrong? Can someone please tell me how to fix it?
I believe you need to commit your changes before closing the cursor, or the write won't be persisted. Try adding g.db.commit() before cursor.close() (commit is a method on the connection, not on the cursor).
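As a sketch of how the test itself might be structured so the assertion targets the right object: call create_table() first, then assert on the mocked cursor's execute rather than on mock_g.execute (this assumes app.py imports jsonify at module level, as shown above):

import unittest
from unittest.mock import patch

from app import create_table

class Test(unittest.TestCase):
    def test_create(self):
        # patch the "g" object that create_table() reads, and jsonify so no app context is needed
        with patch("app.g") as mock_g, patch("app.jsonify") as mock_jsonify:
            mock_cursor = mock_g.db.cursor.return_value
            create_table()                        # run the code under test first
            mock_cursor.execute.assert_called()   # the CREATE TABLE statement was issued
            mock_jsonify.assert_called_with(dict(Table='Man is created'))

if __name__ == "__main__":
    unittest.main()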
I am writing a verify-email-address Python file for Google App Engine. (Yes, I know Django has this built in, but I wanted to write my own because that is how I learn.)
Below is the Python code. The code returns "Email Account Verified", which suggests to me that the queries worked. However, when I look at the "active" column in the database, it is still 0.
If I run the query string that logging.info("%s", db_query) prints directly in the database itself, it works and the column is updated to 1.
All my other Python code (with UPDATEs) works fine; the only difference is that those files are called from my iOS app and this one is called from a browser.
# Make the libs folder with 3rd party libraries and common methods
import sys
sys.path.insert(0, 'libs')

# Imports
import logging
import webapp2
from django.utils.html import strip_tags
import common
import MySQLdb
import json

VERIFIED_HTML = """\
<html>
  <body>
    <h1>Email Account Verified</h1>
  </body>
</html>
"""

ERROR_HTML = """\
<html>
  <body>
    <h1>ERROR</h1>
  </body>
</html>
"""

class VerifyEmail(webapp2.RequestHandler):
    def get(self):
        user_email = strip_tags(self.request.get('user_email').lower().strip())
        user_activation_hash = strip_tags(self.request.get('user_activation_hash').strip())

        logging.info("User Email = %s", user_email)
        logging.info("User Activation Hash = %s", user_activation_hash)

        # Get the database connection to Google Cloud SQL
        db = common.connect_to_google_cloud_sql()
        db_cursor = db.cursor(MySQLdb.cursors.DictCursor)

        # Check to see if the user already exists
        # Query for the user
        db_query = """SELECT \
            email, activation_hash \
            FROM users WHERE email='%s' AND activation_hash='%s'""" % (user_email, user_activation_hash)
        db_cursor.execute(db_query)

        # If there is one record containing the username, check the activation hash
        if(db_cursor.rowcount == 1):
            db_query = """UPDATE users SET active=%s WHERE email='%s';""" % (1, user_email)
            logging.info("%s" % db_query)
            if(db_cursor.execute(db_query)):
                self.response.write(VERIFIED_HTML)
            else:
                self.response.write(ERROR_HTML)
        else:  # either no user, or the activation_hash doesn't match
            self.response.write(ERROR_HTML)
Connect to Google Cloud SQL
def connect_to_google_cloud_sql():
    # hostname = DEV_DB_HOSTNAME
    # hostname = PROD_DB_HOSTNAME
    db_username = 'dummy_user'  # not real
    db_password = 'dummypassword'  # not real

    # If PROD or deployed testing, use unix_socket
    if(os.getenv('SERVER_SOFTWARE') and os.getenv('SERVER_SOFTWARE').startswith('Google App Engine/')):
        db = MySQLdb.connect(unix_socket='/cloudsql/' + _DATABASE_HOSTNAME, db='dummydbname', user=db_username, passwd=db_password)
    else:  # Local testing uses host
        db = MySQLdb.connect(host=_DATABASE_HOSTNAME, port=3306, db='dummydbname', user=db_username, passwd=db_password)

    logging.info("Got DB Connection")
    return db
Any suggestions? Is it a GAE Cloud SQL privileges issue?
Or maybe because I was using my browser against the local App Engine dev server running on my local IP?
You need to call .commit() on the MySQLdb connection after executing queries. This is why your UPDATE is failing: the row is updated inside the transaction, but when your code ends without committing, the changes to the DB are rolled back, despite the user having been told the update succeeded.
You can also enable autocommit on the connection so that every statement is committed automatically: db.autocommit(True).
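Applied to the handler above, that means committing on the db connection after the UPDATE succeeds; a minimal sketch of just that branch:

if(db_cursor.execute(db_query)):
    db.commit()  # persist the UPDATE; otherwise it is rolled back when the connection goes away
    self.response.write(VERIFIED_HTML)
else:
    self.response.write(ERROR_HTML)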