Trying to insert from a JSON file into a database - Python

I'm working on the Yelp API part of a Django web app. I created db() to insert data into the database, but I'm getting the error below and don't know how to fix it. I'm trying to do this without pandas:
Message=string indices must be integers
Source=C:\Users\diggt\OneDrive\College\Rowan\Fall22\10430_computing_and_informatics_capstone\yelp_VSCode\yelp.py
StackTrace:
File "C:\Users\diggt\OneDrive\College\Rowan\Fall22\10430_computing_and_informatics_capstone\yelp_VSCode\yelp.py", line 104, in <genexpr>
keys = (entry[c] for c in columns)
File "C:\Users\diggt\OneDrive\College\Rowan\Fall22\10430_computing_and_informatics_capstone\yelp_VSCode\yelp.py", line 115, in db
cur.executemany(sql, keys)
File "C:\Users\diggt\OneDrive\College\Rowan\Fall22\10430_computing_and_informatics_capstone\yelp_VSCode\yelp.py", line 153, in main
db()
File "C:\Users\diggt\OneDrive\College\Rowan\Fall22\10430_computing_and_informatics_capstone\yelp_VSCode\yelp.py", line 157, in <module> (Current frame)
main()
# -*- coding: utf-8 -*-
from __future__ import print_function
import argparse
import json
import csv
import pprint
import requests
import sys
import sqlite3
#import pandas as pd
from urllib.error import HTTPError
from urllib.parse import quote
API_KEY = 'secret'
# API constants, you shouldn't have to change these.
API_HOST = 'https://api.yelp.com'
SEARCH_PATH = '/v3/businesses/search'
BUSINESS_PATH = '/v3/businesses/' # Business ID will come after slash.
# Defaults
DEFAULT_TERM = 'dinner'
DEFAULT_LOCATION = 'Glassboro, NJ'
SEARCH_LIMIT = 3
OFFSET = 0
def request(host, path, api_key, url_params=None):
url_params = url_params or {}
url = '{0}{1}'.format(host, quote(path.encode('utf8')))
headers = {
'Authorization': 'Bearer %s' % api_key,
}
print(u'Querying {0} ...'.format(url))
response = requests.request('GET', url, headers=headers, params=url_params)
return response.json()
def search(api_key, term, location):
url_params = {
'term': term.replace(' ', '+'),
'location': location.replace(' ', '+'),
'limit': SEARCH_LIMIT,
'offset': OFFSET
}
return request(API_HOST, SEARCH_PATH, api_key, url_params=url_params)
def get_business(api_key, business_id):
business_path = BUSINESS_PATH + business_id
return request(API_HOST, business_path, api_key)
def query_api(term, location):
response = search(API_KEY, term, location)
businesses = response.get('businesses')
if not businesses:
print(u'No businesses for {0} in {1} found.'.format(term, location))
return
business_id = businesses[0]['id']
print(u'{0} businesses found, querying business info ' \
'for the top result "{1}" ...'.format(
len(businesses), business_id))
response = get_business(API_KEY, business_id)
print(u'Result for business "{0}" found:'.format(business_id))
pprint.pprint(response, indent=2)
str_to_write_to_file = json.dumps(response, skipkeys=True, allow_nan=True, indent=4)
with open('yelp.json', 'w') as f:
f.write(str_to_write_to_file)
def db():
with open('yelp.json', 'r') as f:
data = f.readlines()
conn = sqlite3.connect('yelp.db')
cur = conn.cursor()
# Create the table if it doesn't exist.
cur.execute(
"""CREATE TABLE IF NOT EXISTS yelp(
id INTEGER PRIMARY KEY,
alias varchar(100),
location varchar(100),
display_phone varchar(15)
);"""
)
for entry in data:
columns = ["id" "alias", "location", "display_phone"]
keys = (entry[c] for c in columns)
# Execute the command and replace each '?' with the corresponding value
# in 'values'. DO NOT build a string and replace manually;
# the sqlite3 library will handle unsafe strings by doing this.
sql = """INSERT INTO yelp (id, alias, location, display_phone) VALUES(
?,
?,
?,
?
);"""
cur.executemany(sql, keys)
print(f'{entry["alias"]} data inserted successfully')
conn.commit()
conn.close()
with sqlite3.connect("yelp.db") as conn:
cmd = """SELECT * FROM yelp;"""
cur = conn.execute(cmd)
res = cur.fetchall()
for r in res:
print(r)
def main():
parser = argparse.ArgumentParser()
parser.add_argument('-q', '--term', dest='term', default=DEFAULT_TERM,
type=str, help='Search term (default: %(default)s)')
parser.add_argument('-l', '--location', dest='location',
default=DEFAULT_LOCATION, type=str,
help='Search location (default: %(default)s)')
input_values = parser.parse_args()
try:
query_api(input_values.term, input_values.location)
except HTTPError as error:
sys.exit(
'Encountered HTTP error {0} on {1}:\n {2}\nAbort program.'.format(
error.code,
error.url,
error.read(),
)
)
db()
if __name__ == '__main__':
main()
JSON file:
{
"id": "umC69pkiPyk3qY7IB49ZYw",
"alias": "bosphorus-mediterranean-cuisine-glassboro",
"name": "Bosphorus Mediterranean Cuisine",
"image_url": "https://s3-media4.fl.yelpcdn.com/bphoto/G7VCO3tvx8NGPz5g0fSpMw/o.jpg",
"is_claimed": true,
"is_closed": false,
"url": "https://www.yelp.com/biz/bosphorus-mediterranean-cuisine-glassboro?adjust_creative=9aYQmmK21ApZ7TfokeTk1A&utm_campaign=yelp_api_v3&utm_medium=api_v3_business_lookup&utm_source=9aYQmmK21ApZ7TfokeTk1A",
"phone": "+18562432015",
"display_phone": "(856) 243-2015",
"review_count": 14,
"categories": [
{
"alias": "turkish",
"title": "Turkish"
},
{
"alias": "halal",
"title": "Halal"
},
{
"alias": "kebab",
"title": "Kebab"
}
],
"rating": 5.0,
"location": {
"address1": "524 Delsea Drive N",
"address2": null,
"address3": null,
"city": "Glassboro",
"zip_code": "08028",
"country": "US",
"state": "NJ",
"display_address": [
"524 Delsea Drive N",
"Glassboro, NJ 08028"
],
"cross_streets": ""
},
"coordinates": {
"latitude": 39.7150351328115,
"longitude": -75.1118882
},
"photos": [
"https://s3-media4.fl.yelpcdn.com/bphoto/G7VCO3tvx8NGPz5g0fSpMw/o.jpg",
"https://s3-media2.fl.yelpcdn.com/bphoto/HvhYRZO2rOYUBX0DagVE3w/o.jpg",
"https://s3-media2.fl.yelpcdn.com/bphoto/PQHr3upfVULUjwz1M-ILcw/o.jpg"
],
"hours": [
{
"open": [
{
"is_overnight": false,
"start": "1100",
"end": "2200",
"day": 0
},
{
"is_overnight": false,
"start": "1100",
"end": "2200",
"day": 1
},
{
"is_overnight": false,
"start": "1100",
"end": "2200",
"day": 2
},
{
"is_overnight": false,
"start": "1100",
"end": "2200",
"day": 3
},
{
"is_overnight": false,
"start": "1100",
"end": "2200",
"day": 4
},
{
"is_overnight": false,
"start": "1100",
"end": "2200",
"day": 5
},
{
"is_overnight": false,
"start": "1100",
"end": "2200",
"day": 6
}
],
"hours_type": "REGULAR",
"is_open_now": true
}
],
"transactions": [
"pickup",
"delivery"
],
"messaging": {
"url": "https://www.yelp.com/raq/umC69pkiPyk3qY7IB49ZYw?adjust_creative=9aYQmmK21ApZ7TfokeTk1A&utm_campaign=yelp_api_v3&utm_medium=api_v3_business_lookup&utm_source=9aYQmmK21ApZ7TfokeTk1A#popup%3Araq",
"use_case_text": "Message the Business"
}
}

You shouldn't use f.readlines() to read a JSON file, use json.load(f).
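That is also where the "string indices must be integers" message comes from: readlines() returns a list of strings, and indexing a string with a string key raises exactly that TypeError. A quick illustration (just a sketch against the yelp.json file written above):
with open('yelp.json') as f:
    data = f.readlines()   # a list of str, one line per element
entry = data[0]            # e.g. '{\n' -- a string, not a dict
entry['id']                # raises TypeError: string indices must be integers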
There's only one set of values in the JSON, so you don't need a loop or executemany().
def db():
with open('yelp.json', 'r') as f:
data = json.load(f)
conn = sqlite3.connect('yelp.db')
cur = conn.cursor()
# Create the table if it doesn't exist.
cur.execute(
"""CREATE TABLE IF NOT EXISTS yelp(
id INTEGER PRIMARY KEY,
alias varchar(100),
location varchar(100),
display_phone varchar(15)
);"""
)
columns = ["id" "alias", "location", "display_phone"]
keys = [entry[c] for c in columns]
# Execute the command and replace '?' with the each value
# in 'values'. DO NOT build a string and replace manually.
# the sqlite3 library will handle non safe strings by doing this.
sql = """INSERT INTO yelp (id, alias, location, display_phone) VALUES(
?,
?,
?,
?
);"""
cur.execute(sql, keys)
print(f'{data["alias"]} data inserted successfully')
conn.commit()
conn.close()
with sqlite3.connect("yelp.db") as conn:
cmd = """SELECT * FROM yelp;"""
cur = conn.execute(cmd)
res = cur.fetchall()
for r in res:
print(r)

So ultimately I figured it out... pretty much. I used what @Bramar said, but the solution was making the JSON file an array. Then I started getting this error: sqlite3.ProgrammingError: Incorrect number of bindings supplied. The current statement uses 4, and there are 1 supplied. It turned out that one of the entries was stored in the JSON as a dict, so I eliminated it temporarily to see if I could make it work, and it works. This is the code -
print(u'Result for business "{0}" found:'.format(business_id))
str_to_write_to_file = json.dumps([response], indent=4)
with open('yelp.json', 'w') as f:
f.write(str_to_write_to_file)
def db():
with open('yelp.json', 'r') as f:
data = json.load(f)
conn = sqlite3.connect('data/yelp.db')
cur = conn.cursor()
# Create the table if it doesn't exist.
cur.execute(
"""CREATE TABLE IF NOT EXISTS yelp(
id INTEGER PRIMARY KEY,
alias varchar(100),
display_phone varchar(15),
location dictionary
);"""
)
columns = ["alias", "display_phone"]
keys = [data[0][c] for c in columns]
# Execute the command and replace each '?' with the corresponding value
# in 'values'. DO NOT build a string and replace manually;
# the sqlite3 library will handle unsafe strings by doing this.
sql = '''INSERT INTO yelp (alias, display_phone) VALUES(
?,
?
);'''
cur.execute(sql, keys)
conn.commit()
conn.close()
Hopefully this helps someone; this can be very confusing.
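If you want to keep the nested location field instead of dropping it, sqlite3 cannot bind a dict directly, but you can serialize it first. A minimal sketch, assuming the same yelp.json array written above and the location column declared as text:
columns = ["alias", "display_phone"]
keys = [data[0][c] for c in columns]
# store the nested location object as a JSON string, since sqlite3 cannot bind a dict
keys.append(json.dumps(data[0]["location"]))
sql = '''INSERT INTO yelp (alias, display_phone, location) VALUES (?, ?, ?);'''
cur.execute(sql, keys)
When you read the row back, json.loads turns the stored location string back into a dict.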

Related

How to align a table in a Google document using the Google Docs API and Python

I've got a solution for adding a table without borders, suggested by Tanaike, but I'm still facing issues with indexing.
I want to insert data into the document in the following order (function - insert_data(file_id)):
Insert an image in a document (Index = 1)
Insert text in a document (index = 2)
Insert table in a document having invisible borders (index = 3)
Insert text in the document (index = 4)
Insert table in a document again having invisible borders (index = 5)
Insert new line (index = 6)
Insert image in a document (index = 7)
The code I'm trying is-
import io
from gdoctableapppy import gdoctableapp
SERVICE_FILENAME = 'C:/Users/XYZ/Testpython/service_account.json' # set path to service account filename
from googleapiclient.discovery import build
from google.oauth2 import service_account
from googleapiclient.http import MediaIoBaseDownload, MediaFileUpload
credentials = service_account.Credentials.from_service_account_file(SERVICE_FILENAME,
scopes=['https://www.googleapis.com/auth/drive',
'https://www.googleapis.com/auth/documents']
)
docs = build('docs', 'v1', credentials=credentials)
drive = build('drive', 'v3', credentials=credentials)
def create_file(file_name):
file_metadata = {
"title": file_name,
"body": {}
}
file = docs.documents().create(body=file_metadata).execute()
print('File ID: %s' % file.get('documentId'))
file_id = file.get('documentId')
try:
permission = {
"role": "writer",
"type": "user",
'emailAddress': 'xyz@gmail.com'
}
result = drive.permissions().create(fileId=file_id, body=permission).execute()
print(result)
return file_id
except Exception as e:
print('An error occurred:', e)
return None
def insert_data(file_id):
requests = []
values = [['Name of the Client/Organization', 'XYZ'], ['Industry', 'Software']]
requests.append(insert_table_data(file_id, values, index=3))
values2 = [['Country', 'India'], ['State', 'UP']]
requests.append(insert_table_data(file_id, values2, index=5))
requests.append(insert_image(index=1))
requests.append(insert_text(2, '\ntext\n'))
requests.append(insert_text(4, '\nDemo text\n'))
requests.append(insert_text(6, '\n'))
requests.append(insert_image(index=7))
result = docs.documents().batchUpdate(documentId=file_id, body={'requests': requests}).execute()
def insert_image(index):
image_data = {
'insertInlineImage': {
'location': {
'index': index
},
'uri':
'https://www.oberlo.com/media/1603970279-pexels-photo-3.jpg?fit=max&fm=jpg&w=1824',
'objectSize': {
'height': {
'magnitude': 350,
'unit': 'PT'
},
'width': {
'magnitude': 350,
'unit': 'PT'
}
}
}
}
return image_data
def insert_text(index, text):
text_data = {
"insertText":
{
"text": text,
"location":
{
"index": index
}
}
}
return text_data
def insert_table_data(file_id, values, index):
documentId = file_id
resource = {
"oauth2": credentials,
"documentId": documentId,
"rows": len(values),
"columns": len(values[0]),
# "append": True,
"createIndex": index,
"values": values,
}
gdoctableapp.CreateTable(resource)
resource = {
"oauth2": credentials,
"documentId": documentId,
}
res = gdoctableapp.GetTables(resource)
obj = {"color": {"color": {}}, "dashStyle": "SOLID", "width": {"magnitude": 0, "unit": "PT"}}
data = {
"updateTableCellStyle": {
"tableCellStyle": {
"borderBottom": obj,
"borderTop": obj,
"borderLeft": obj,
"borderRight": obj,
},
"tableStartLocation": {
"index": res['tables'][-1]['tablePosition']['startIndex']
},
"fields": "borderBottom,borderTop,borderLeft,borderRight"
}
}
# docs.documents().batchUpdate(documentId=documentId, body={'requests': requests}).execute()
return data
def download_as_docx(file_id):
results = drive.files().get(fileId=file_id, fields="id, name, mimeType, createdTime").execute()
docMimeType = results['mimeType']
mimeTypeMatchup = {
"application/vnd.google-apps.document": {
"exportType": "application/vnd.openxmlformats-officedocument.wordprocessingml.document", "docExt": "docx"
}
}
exportMimeType = mimeTypeMatchup[docMimeType]['exportType']
# docExt = mimeTypeMatchup[docMimeType]['docExt']
docName = results['name']
request = drive.files().export_media(fileId=file_id,
mimeType=exportMimeType) # Export formats : https://developers.google.com/drive/api/v3/ref-export-formats
# fh = io.FileIO(docName + "." + docExt, mode='w')
fh = io.FileIO(docName, mode='w')
downloader = MediaIoBaseDownload(fh, request)
done = False
while done is False:
status, done = downloader.next_chunk()
print("Download %d%%." % int(status.progress() * 100))
def download_as_pdf(file_id, file_name):
request = drive.files().export_media(fileId=file_id,
mimeType='application/pdf')
fh = io.BytesIO()
downloader = MediaIoBaseDownload(fh, request)
done = False
while done is False:
status, done = downloader.next_chunk()
print("Download %d%%." % int(status.progress() * 100))
fh.seek(0)
filename = file_name.split('.docx')[0] + '.pdf'
with open(filename, 'wb') as fx:
fx.write(fh.getvalue())
def delete_gdrive_file(file_id):
"""Deleted file on Google Drive
:param file_id: ID of Google Drive file
"""
response = drive.files().delete(fileId=file_id).execute()
print(response)
if __name__ == '__main__':
file_name = 'Data.docx'
file_id = create_file(file_name)
insert_data(file_id)
download_as_docx(file_id)
download_as_pdf(file_id, file_name)
delete_gdrive_file(file_id)
Error:
returned "Invalid requests[0].insertTable: Index 4 must be less than the end index of the referenced segment, 2.". Details: "Invalid requests[0].insertTable: Index 4
must be less than the end index of the referenced segment, 2.">
I guess the end index of the table goes to 67, but even if I try to insert new data at index 68, it either appends to the last cell of the table or sometimes throws an indexing error.
How should I make the whole data-insertion flow dynamic in the Google Doc?
Modification points:
The library gdoctableapp creates each table in one call. Because of this, when you run the flow from your question, the indexes of the tables shift. I think this is the reason for your issue.
In this case, how about the following modification?
Modified script:
Please modify insert_table_data as follows.
def insert_table_data(file_id, values, index):
documentId = file_id
resource = {
"oauth2": credentials,
"documentId": documentId,
"rows": len(values),
"columns": len(values[0]),
# "append": True,
"createIndex": index,
"values": values,
}
gdoctableapp.CreateTable(resource)
And also, please modify insert_data as follows.
def insert_data(file_id):
# Insert texts and images.
index = 1
requests = []
requests.append(insert_image(index))
index += 1
text1 = '\ntext\n'
requests.append(insert_text(index, text1))
index += len(text1)
table1 = index
text2 = '\nDemo text\n'
requests.append(insert_text(index, text2))
index += len(text2)
table2 = index
text3 = '\n'
requests.append(insert_text(index, text3))
index += len(text3)
requests.append(insert_image(index))
docs.documents().batchUpdate(documentId=file_id, body={'requests': requests}).execute()
# Create tables.
values2 = [['Country', 'India'], ['State', 'UP']]
insert_table_data(file_id, values2, table2)
values1 = [['Name of the Client/Organization', 'XYZ'], ['Industry', 'Software']]
insert_table_data(file_id, values1, table1)
# Remove borders of tables.
resource = {"oauth2": credentials, "documentId": file_id}
res = gdoctableapp.GetTables(resource)
obj = {"color": {"color": {}}, "dashStyle": "SOLID", "width": {"magnitude": 0, "unit": "PT"}}
reqs = []
for e in res['tables']:
data = {
"updateTableCellStyle": {
"tableCellStyle": {
"borderBottom": obj,
"borderTop": obj,
"borderLeft": obj,
"borderRight": obj,
},
"tableStartLocation": {
"index": e['tablePosition']['startIndex']
},
"fields": "borderBottom,borderTop,borderLeft,borderRight"
}
}
reqs.append(data)
docs.documents().batchUpdate(documentId=file_id, body={'requests': reqs}).execute()
In this modification, the texts and images are inserted separately from the tables. This way, the indexes for the tables can be retrieved correctly.
Note:
This modified script is tailored to your question. If your actual situation differs from it, the script might not be directly usable, so please be careful about this.

Django - how can I insert a '.json' file into a SQLite DB?

My '.json' file looks like:
{
"users": [
{
"userId": 1,
"firstName": "AAAAA",
"lastName": "as23",
"phoneNumber": "123456",
"emailAddress": "AAAAA#test.com",
"homepage": "https://amogg.tistory.com/1"
},
{
"userId": 2,
"firstName": "BBBB",
"lastName": "h5jdd",
"phoneNumber": "123456",
"homepage": "https://amogg.tistory.com/2"
},
{
"userId": 3,
...
I searched Google and tried to solve this problem, but it remains unresolved.
So I used pandas and sqlite3:
import sqlite3 as db
import pandas as pd
df = pd.read_json('test.json')
con = db.connect('./test.db')
df.to_sql('test', con=con)
The DB is created, but the .json file data doesn't get saved into the DB.
How can I solve this problem?
You will have to create the table 'test' beforehand, iterate over the pandas dataframe df and insert the records into the table one by one:
import sqlite3 as db
import pandas as pd
df = pd.read_json('test.json', orient='index')
con = db.connect('./test.db')
cursor = con.cursor()
cursor.execute('''create table test (userId int primary key,
firstName text,
lastName text,
phoneNumber text,
emailAddress text,
homePage text)''')
for index, row in df.iterrows():
for element in row.iteritems():
try:
firstName = element[1]['firstName']
except:
firstName = ''
try:
lastName = element[1]['lastName']
except:
lastName = ''
try:
phoneNumber = element[1]['phoneNumber']
except:
phoneNumber = ''
try:
emailAddress = element[1]['emailAddress']
except:
emailAddress = ''
try:
homepage = element[1]['homepage']
except:
homepage = ''
cursor.execute("INSERT INTO test VALUES (?,?,?,?,?,?)", (element[1]['userId'],
firstName,
lastName,
phoneNumber,
emailAddress,
homepage))
con.commit()
con.close()
Since not all the records have the same valid values for all the columns, you will need to validate the existence of each column with a try/except and store an empty string if the column does not exist in the row.
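A more compact alternative (just a sketch; it reads the file with the json module instead of pandas, assumes the same users layout shown in the question, and assumes the test table has already been created as above) avoids the per-column try/except by using dict.get with a default value:
import json
import sqlite3 as db

with open('test.json') as f:
    users = json.load(f)['users']

con = db.connect('./test.db')
cursor = con.cursor()
for user in users:
    # .get() returns '' when an optional key such as emailAddress is missing
    cursor.execute("INSERT INTO test VALUES (?,?,?,?,?,?)",
                   (user['userId'],
                    user.get('firstName', ''),
                    user.get('lastName', ''),
                    user.get('phoneNumber', ''),
                    user.get('emailAddress', ''),
                    user.get('homepage', '')))
con.commit()
con.close()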

How to insert JSON file data into a table

I have a sample JSON file named a.json.
The JSON data in a.json is:
{
"a cappella": {
"word": "a cappella",
"wordset_id": "5feb6f679a",
"meanings": [
{
"id": "492099d426",
"def": "without musical accompaniment",
"example": "they performed a cappella",
"speech_part": "adverb"
},
{
"id": "0bf8d49e2e",
"def": "sung without instrumental accompaniment",
"example": "they sang an a cappella Mass",
"speech_part": "adjective"
}
]
},
"A.D.": {
"word": "A.D.",
"wordset_id": "b7e9d406a0",
"meanings": [
{
"id": "a7482f3e30",
"def": "in the Christian era",
"speech_part": "adverb",
"synonyms": [
"AD"
]
}
]
},.........
}
As suggested in my previous question, I am looking at how to insert this data into the tables:
Word: [word, wordset_id]
Meaning: [word, meaning_id, def, example, speech_part]
Synonym: [word, synonym_word]
I tried reading the file as:
import json
with open('a.json') as f:
d = json.load(f)
when I tried printing all words as:
for word in d:
print(word)
I got all the words, but failed to get the wordset_id for each of them.
How can I insert the word and wordset_id into the Word table for the JSON format above?
DB connection:
from flask import Flask
from flaskext.mysql import MySQL
app = Flask(__name__)
mysql = MySQL()
app.config['MYSQL_DATABASE_USER'] = 'root'
app.config['MYSQL_DATABASE_PASSWORD'] = 'root'
app.config['MYSQL_DATABASE_DB'] = 'wordstoday'
app.config['MYSQL_DATABASE_HOST'] = 'localhost'
mysql.init_app(app)
conn = mysql.connect()
cursor =conn.cursor()
When you try to execute this code:
for word in d:
print(word)
It will only print the keys of the JSON object, not the complete values. Instead, you can try doing something like this:
for word in d:
word_obj = d[word]
wordset_id = word_obj['wordset_id']
sql = "INSERT INTO Word (word, wordset_id) VALUES (%s, %s)"
values = (word, wordset_id)
cursor.execute(sql, values)
meaning_obj_list = d[word]['meanings']
for meaning_obj in meaning_obj_list:
meaning_id = meaning_obj['id']
definition = meaning_obj['def']
example = meaning_obj.get('example', None) # since it is not guaranteed that "example" key will be present in the data, it is safer to extract the value this way
speech_part = meaning_obj['speech_part']
sql = "INSERT INTO Meaning (word, meaning_id, def, example, speech_part) VALUES (%s, %s, %s, %s, %s)"
values = (word, meaning_id, definition, example, speech_part)
cursor.execute(sql, values)
conn.commit()
Also, refrain from using key names such as def, since def is a keyword in Python.
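The question also lists a Synonym table; a sketch for that part, following the same pattern and placed inside the meanings loop (synonyms is optional in the data, so it is read with .get()):
for synonym in meaning_obj.get('synonyms', []):
    sql = "INSERT INTO Synonym (word, synonym_word) VALUES (%s, %s)"
    cursor.execute(sql, (word, synonym))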

Store and use JSON in MSSQL with Python

I'm trying to upload a test JSON string to SQL Server
json_string = """ {
"orderID": 42,
"customerName": "John Smith",
"customerPhoneN": "555-1234",
"orderContents": [
{
"productID": 23,
"productName": "keyboard",
"quantity": 1
},
{
"productID": 13,
"productName": "mouse",
"quantity": 1
}
],
"orderCompleted": true
} """
parsed_string = json.loads(json_string)
cursor.execute("update Table set Status = ? where Name like ? ",(json.dumps(parsed_string), "Blabla"))
cnxn.commit()
How to return and work with this JSON from the database?
cursor.execute("""select Status from Table where Name like ?""", "Blabla")
rows = cursor.fetchall()
How can I print the value of the JSON?
Use the JSON data type that is supported in MySQL. You can find more about it here:
https://dev.mysql.com/doc/refman/5.7/en/json.html
s = json.dumps(DATA)
cursor.execute("update Table set Status = ? where Name like ? ",(s, "Blabla"))
cnxn.commit()
and
cursor.execute("""select Status from Table where Name like ?""", "Blabla")
res = cursor.fetchall()
DATA = json.loads(res[0][0])  # Status is the only column selected
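Once DATA has been parsed with json.loads it is an ordinary Python dict, so you can work with its fields directly. For example, with the test string from the question:
print(DATA['customerName'])              # John Smith
for item in DATA['orderContents']:
    print(item['productName'], item['quantity'])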

inserting JSON object into MySQL using Python

I'm new to Python and trying to work out how to insert some JSON into a MySQL table.
How can I insert my JSON object into MySQL using Python?
Here is the code that I am using
import requests
import urllib.request
import json
import pymysql
con = pymysql.connect(host = 'localhost',user = 'root',passwd = 'root',db = 'micro')
cursor = con.cursor()
url = 'https://api.amazon.com/v1/products(onlineAvailability=true)?pageSize=100&show=upc,sku,salePrice&page=45&callback=JSON_CALLBACK&format=json'
urllib.request.urlopen(url).read()
response = urllib.request.urlopen(url).read()
json_obj = str(response, 'utf-8')
cursor.execute("INSERT INTO bestb (sku, upc, salePrice) VALUES (%s,%s,%s)", (sku, upc, salePrice))
con.commit()
con.close()
print (json_obj)
Here is the JSON that I'm trying to parse:
"products": [
{
"upc": "715187763623",
"sku": 1833591,
"salePrice": 13.99
},
{
"upc": "8809269504036",
"sku": 26220187,
"salePrice": 16.99
}
]
})
Thanks in advance.
Use json.loads(string) to convert a JSON string to a Python object. Then you can use it as a normal dictionary or list.
BTW: you have incorrect JSON in your example.
response = b'''{"products": [
{
"upc": "715187763623",
"sku": 1833591,
"salePrice": 13.99
},
{
"upc": "8809269504036",
"sku": 26220187,
"salePrice": 16.99
}
]
}'''
json_obj = json.loads(response.decode('utf-8'))
#print(json_obj["products"][0]["upc"])
for product in json_obj["products"]:
print("upc:", product["upc"])
print("sku:", product["sku"])
print("salePrice:", product["salePrice"])
print('---')
cursor.execute("INSERT INTO bestb (sku, upc, salePrice) VALUES (%s,%s,%s)", (product["sku"], product["upc"], product["salePrice"]))
