I have an input field that is basically a comma-delimited string (e.g. something like "deniscm, toms, peters"). That information is sent via AJAX to my Python handler SaveQueryPage. What I want to do is parse this information as a list and then insert each entry into my database. My code is as follows, but unfortunately it doesn't work. Any suggestions?
Python code:
class SaveQueryPage(webapp2.RequestHandler):
    def post(self):
        user = users.get_current_user()
        user_nickname = user.nickname()
        query_name = self.request.get('queryName')
        query_collab = self.request.get('queryCollab')
        query_collaborators = re.split(r'\s*[,]\s*', query_collab.strip())
        query_collaborators = query_collaborators.append(user_nickname)
        query_collaborators = filter(None, query_collaborators)
        conn = rdbms.connect(instance=_INSTANCE_NAME, database='queryInfo')
        cursor = conn.cursor()
        cursor.execute('INSERT INTO queries (userNickname, queryName) VALUES (%s, %s)', (user_nickname, query_name))
        conn.commit()
        for item in query_collaborators:
            cursor = conn.cursor()
            cursor.execute('INSERT INTO collaborators (queryName, userNickname) VALUES (%s, %s)', (query_name, item))
            conn.commit()
        conn.close()
I finally managed to get it working. It looks like the regular expression returned the list items as unicode, which I only caught once I added some logging. I also had an error in how I appended a string to the list. Thanks for the pointers! The code below now works for me:
class SaveQueryPage(webapp2.RequestHandler):
    def post(self):
        user = users.get_current_user()
        user_nickname = user.nickname()
        user_email = user.email()
        query_name = self.request.get('queryName')
        query_description = self.request.get('queryDescription')
        query_collab = self.request.get('queryCollab')
        logging.info('Data read for query_collab is %s', query_collab)
        query_collab_re = re.split(r'\s*[,;]\s*', query_collab.strip())
        logging.info('Data read for query_collab_re is %s', query_collab_re)
        query_collab_decode = []
        for item in query_collab_re:
            item = str(item)
            query_collab_decode.append(item)
        logging.info('Data read for query_collab_decode is %s', query_collab_decode)
        query_collab_decode.append(user_nickname)
        logging.info('Data read for query_collab_append is %s', query_collab_decode)
        query_collab_filter = filter(None, query_collab_decode)
        logging.info('Data read for query_collab_filter is %s', query_collab_filter)
        query_value = self.request.get('queryValue')
        date_created = datetime.today()
        date_lastupdated = datetime.today()
        active_flag = "true"
        random_id = random.randint(1000000000000, 9999999999999)
        unique_query_id = user_nickname + "_" + str(random_id)
        conn = rdbms.connect(instance=_INSTANCE_NAME, database='userPrefs')
        cursor = conn.cursor()
        cursor.execute('INSERT INTO queries (userNickname, queryName, queryDescription, queryValue, dateCreated, dateLastUpdated, activeFlag, uniqueId) VALUES (%s, %s, %s, %s, %s, %s, %s, %s)', (user_nickname, query_name, query_description, query_value, date_created, date_lastupdated, active_flag, unique_query_id))
        conn.commit()
        try:
            for item in query_collab_filter:
                cursor = conn.cursor()
                cursor.execute('INSERT INTO collaborators (uniqueId, userNickname) VALUES (%s, %s)', (unique_query_id, item))
                conn.commit()
        except:
            logging.error('There was an error inserting the values into the collaborators table. query_collab_filter = ' + str(query_collab_filter))
        conn.close()
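For anyone hitting the same thing: the append error comes from the fact that list.append() mutates the list in place and returns None, so assigning its return value back to the variable throws the list away. A minimal illustration (names made up):

collaborators = ['toms', 'peters']

# Wrong: append() returns None, so the list is lost
collaborators = collaborators.append('deniscm')
print(collaborators)  # None

# Right: call append() and keep using the same list
collaborators = ['toms', 'peters']
collaborators.append('deniscm')
print(collaborators)  # ['toms', 'peters', 'deniscm']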
Related
How do I insert the data from the code below?
I have the code below:
latitude1 = -6.208470935786019
longitude1 = 106.81796891087399
new_data = [[latitude1, longitude1]]
preds = model.predict(new_data)
preds

arr = [latitude1, longitude1]
arrcon = np.concatenate((arr, preds))
print(arrcon)  # [-6.208470935786019 106.81796891087399 'Not Categorized']

listarcon = arrcon.tolist()
print(listarcon)  # [-6.208470935786019, 106.81796891087399, 'Not Categorized']

# make the flat list into a multi-dimensional array
singlearcon = np.array(listarcon).reshape(1, 3)
print(singlearcon)  # [['-6.208470935786019' '106.81796891087399' 'Not Categorized']]
This is the insert-into-database code:
mycursor = conn.cursor()
sql = "INSERT INTO traveldata (Latitude,Longitude,Wisata) VALUES (%s, %s, %s)"
val = (listarcon[0],listarcon[1],listarcon[2])
mycursor.execute(sql, val)
How do I insert it into the database? The data doesn't seem to reach the database.
After executing the statement with mycursor.execute(sql, val), you need to commit the change with conn.commit() (commit() is a method of the connection, not the cursor).
Reference for the commit method: https://dev.mysql.com/doc/connector-python/en/connector-python-api-mysqlconnection-commit.html
An example of insert code: https://dev.mysql.com/doc/connector-python/en/connector-python-example-cursor-transaction.html
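A minimal sketch of the insert with the commit added, assuming conn is the existing connection object from your code:

mycursor = conn.cursor()
sql = "INSERT INTO traveldata (Latitude, Longitude, Wisata) VALUES (%s, %s, %s)"
val = (listarcon[0], listarcon[1], listarcon[2])
mycursor.execute(sql, val)
conn.commit()  # persist the insert; without the commit the row is discarded when the connection closes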
I am getting some JSON data from a third-party API. I am trying to add that data to my own database to be used for a website. I loop through each record in the JSON and execute a SQL query to insert that data into my database. However, some fields don't exist in every record of the JSON data, which causes my query to fail. I have set defaults for these fields for this reason, but it still falls over.
The isNonFoilOnly field only appears in some of the records in the JSON data.
models.py
class Set(models.Model):
    code = models.CharField(max_length=100, unique=True)
    keyrune_code = models.CharField(max_length=100)
    name = models.CharField(max_length=100)
    type = models.CharField(max_length=100)
    release_date = models.DateField()
    base_set_size = models.IntegerField()
    total_set_size = models.IntegerField()
    is_online_only = models.BooleanField(default=False)
    is_non_foil_only = models.BooleanField(default=False)
    is_foil_only = models.BooleanField(default=False)
    sale_status = models.BooleanField(default=False)

    def __str__(self):
        return self.name
views.py
response = requests.request("GET", "https://mtgjson.com/api/v5/SetList.json")
data = response.json()["data"]
sorted_obj = sorted(data, key=lambda k: k['releaseDate'], reverse=False)

sql = """
    INSERT INTO dashboard_set
        (code, keyrune_code, name, type, release_date, base_set_size, total_set_size, is_online_only, is_non_foil_only, is_foil_only, sale_status)
    VALUES
        (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
    ON CONFLICT (code) DO UPDATE
    SET keyrune_code = %s,
        name = %s,
        type = %s,
        release_date = %s,
        base_set_size = %s,
        total_set_size = %s,
        is_online_only = %s,
        is_non_foil_only = %s,
        is_foil_only = %s;
"""

conn = None
try:
    params = config()
    conn = psycopg2.connect(**params)
    cur = conn.cursor()
    for entry in sorted_obj:
        cur.execute(sql, (
            entry["code"],
            entry["keyruneCode"],
            entry["name"],
            entry["type"],
            entry["releaseDate"],
            entry["baseSetSize"],
            entry["totalSetSize"],
            entry["isOnlineOnly"],
            entry["isNonFoilOnly"],
            entry["isFoilOnly"],
            False,
            entry["keyruneCode"],
            entry["name"],
            entry["type"],
            entry["releaseDate"],
            entry["baseSetSize"],
            entry["totalSetSize"],
            entry["isOnlineOnly"],
            entry["isNonFoilOnly"],
            entry["isFoilOnly"]
        ))
    conn.commit()
    cur.close()
except (Exception, psycopg2.DatabaseError) as error:
    print(error)
finally:
    if conn is not None:
        conn.close()
return redirect('dashboard:sets')
You seem to be using Django and not using it at the same time. The Django way to do this is:
from yourapp.models import Set

def yourview(request):
    response = requests.request("GET", "https://mtgjson.com/api/v5/SetList.json")
    data = response.json()["data"]
    # Not caring about sort, because why?
    for entry in data:
        code = entry.pop('code', None)
        if not code:
            continue  # or raise
        Set.objects.update_or_create(code=code, defaults=entry)
    return redirect('dashboard:sets')
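One caveat: the API keys are camelCase while the model fields are snake_case, and some keys (e.g. isNonFoilOnly) can be missing, so passing the raw entry as defaults may not line up. A sketch with an explicit mapping and fallbacks, using the field names from the model above:

for entry in data:
    code = entry.pop('code', None)
    if not code:
        continue
    Set.objects.update_or_create(
        code=code,
        defaults={
            'keyrune_code': entry.get('keyruneCode', ''),
            'name': entry.get('name', ''),
            'type': entry.get('type', ''),
            'release_date': entry.get('releaseDate'),
            'base_set_size': entry.get('baseSetSize', 0),
            'total_set_size': entry.get('totalSetSize', 0),
            'is_online_only': entry.get('isOnlineOnly', False),
            'is_non_foil_only': entry.get('isNonFoilOnly', False),  # missing in some records
            'is_foil_only': entry.get('isFoilOnly', False),
            'sale_status': False,
        },
    )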
I am trying to write data in a dictionary back into a SQL Server database table with pymssql.
But I am getting this error:
ValueError: more placeholders in sql than params available
Here is my code:
cursor = conn.cursor()
for key in dictW:
    x = dictW[key]
    sql = 'UPDATE tablename SET col = %s WHERE %s = #url '
    cursor.executemany(sql, (key, x))
conn.commit()
conn.close()
What am I doing wrong here?
You are attempting to execute your queries one by one but are using executemany(). You should consider using a simple execute() instead:
cursor = conn.cursor()
for key in dictW:
    x = dictW[key]
    sql = 'UPDATE tablename SET col = %s WHERE %s = #url '
    cursor.execute(sql, (key, x))
conn.commit()
conn.close()
If you want to use executemany(), you should make a list of tuples like this:
cursor = conn.cursor()
params = [(k, v) for k, v in dictW.items()]
sql = 'UPDATE tablename SET col = %s WHERE %s = #url '
cursor.executemany(sql, params)
conn.commit()
conn.close()
I have a problem storing the values of a Python dictionary in an existing MySQL table.
I tried the code below, but it's not working.
db = mysql.connect(
    host="localhost",
    user="root",
    passwd="123456",
    database="tgdb"
)
cursor = db.cursor()

val = ', '.join("'" + str(x) + "'" for x in dict.values())
sql = "INSERT INTO tgdb.channel(user_name, image_url, name, number_of_members, description, channel_url) VALUES (%s, %s, %s, %s, %s, %s)"
cursor.execute(sql, val)
db.commit()
print(cursor.rowcount, "record inserted.")
"you have an error in your SQL syntax"
As @Torxed wrote, you shouldn't turn the dict into a string; you can just write:
cursor.execute(sql, list(dict.values()))
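A fuller sketch of the same idea, assuming the dictionary holds one value per column in the same order as the column list (the dictionary is called channel here to avoid shadowing the built-in dict):

cursor = db.cursor()
sql = ("INSERT INTO tgdb.channel (user_name, image_url, name, number_of_members, description, channel_url) "
       "VALUES (%s, %s, %s, %s, %s, %s)")
cursor.execute(sql, list(channel.values()))  # pass a sequence of parameters, not a pre-quoted string
db.commit()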
I am currently having major issues writing foods from JSON to my database. The GET method works just fine. However, when I receive JSON, it is not properly written to the database.
What I do is the following:
Write a new meal to the database (this creates a new meal/cart id via autoincrement).
Use the LAST_INSERT_ID() command to write new foods to that meal/cart.
Once I get here, writing one food is fine; however, when there are multiple foods, I can't get the remaining foods from the JSON written to the database.
The JSON I am receiving is as follows:
Endpoint: /meallog
Request params: {
    method: "post",
    headers: headers,
    url: string,
    data: {
        userId: string,
        date: string,
        mealData: {
            mealName: string,
            food: [
                {
                    id: string,
                    foodname: string,
                    numCal: int,
                    servingSize: int,
                    servingSizeUnit: string,
                    totalCalories: int
                },
                {
                    (repeat above)
                }
            ]
        }
    }
}

Response: (JSON object)
{
    code: 200/400,
    message: string
}
The code I currently have for my POST is below:
elif request.method == 'POST':
    jsondata = {}
    code = {}
    user_id = request.json['user_id']
    date = request.json['date']
    mealName = request.json['mealName']
    food_id = request.json['id']
    food_name = request.json['foodname']
    food_cal = request.json['numCal']
    serving_size = request.json['servingSize']
    serving_unit = request.json['servingSizeUnit']
    totalCal = request.json['totalCalories']

    postmeal = conn.cursor()
    INS_meal = "INSERT INTO user_cart (user_id, datetime, cart_cal, cart_name) VALUES (%s, %s, %s, %s);"
    postmeal.execute(INS_meal, (user_id, date, totalCal, mealName))
    conn.commit()

    postfood = conn.cursor()
    INS_food = "INSERT INTO food_log VALUES (LAST_INSERT_ID(), %s, %s, %s, %s, %s);"
    postfood.execute(INS_food, (food_id, food_name, food_cal, serving_size, serving_unit))
    conn.commit()

    if postfood.execute and postmeal.execute:
        code['code'] = '200'
        code['message'] = 'Success! INSERTED values into both food_log and user_cart'
    else:
        code['code'] = '400'
        code['message'] = 'Error Connecting to DB. Cant insert into food_log and-or user_cart'

    jsondata['code'] = code['code']
    jsondata['message'] = code['message']
    return(json.dumps(jsondata))
Essentially, I'd like it to gather every food that comes in and write all of the food objects in the array to the food table seamlessly.
I fixed my issue by using LAST_INSERT_ID() in a more structured way and by properly looping through the incoming foods.
elif request.method == 'POST':
    jsondata = {}
    code = {}
    # with open('inc.json') as json_data:
    #     d = json.load(json_data)
    #     print(d)
    user_id = request.json['userId']
    date = request.json['date']
    mealName = request.json['mealData']['mealName']
    foodlist = request.json['mealData']['food']

    postmeal = conn.cursor()
    INS_meal = "INSERT INTO user_cart (user_id, datetime, cart_name) VALUES (%s, %s, %s);"
    postmeal.execute(INS_meal, (user_id, date, mealName))
    conn.commit()

    postmeal.execute("select LAST_INSERT_ID();")
    SEL_MEAL_ID = postmeal.fetchone()[0]

    food_str = ""
    for food in foodlist:
        print(food)
        food_str += "({}, '{}', '{}', {}, {}, '{}', {}), ".format(SEL_MEAL_ID, food['id'], food['foodname'], food['numCal'], food['servingSize'], food['servingSizeUnit'], food['totalCalories'])

    postfood = conn.cursor()
    INS_food = "INSERT INTO food_log VALUES {};".format(food_str[:-2])  # exclude the trailing comma and space
    postfood.execute(INS_food)
    conn.commit()

    if postfood.execute and postmeal.execute:
        code['code'] = '200'
        code['message'] = 'Success! INSERTED values into both food_log and user_cart'
    else:
        code['code'] = '400'
        code['message'] = 'Error Connecting to DB. Cant insert into food_log and-or user_cart'

    jsondata['code'] = code['code']
    jsondata['message'] = code['message']
    return(json.dumps(jsondata))
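Building the VALUES clause by string formatting works, but it breaks if any field contains a quote and it is open to SQL injection. An alternative sketch, assuming a DB-API driver such as PyMySQL or mysql-connector, is to let executemany() handle the quoting (same column order as food_str above):

postfood = conn.cursor()
INS_food = "INSERT INTO food_log VALUES (%s, %s, %s, %s, %s, %s, %s);"
rows = [
    (SEL_MEAL_ID, food['id'], food['foodname'], food['numCal'],
     food['servingSize'], food['servingSizeUnit'], food['totalCalories'])
    for food in foodlist
]
postfood.executemany(INS_food, rows)  # one parameterized row per food
conn.commit()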