Parsing HTML does not output desired data(tracking info for FedEx) - python

I'm trying to make a script that grabs tracking information from the FedEx website.
I figured that if I just go to the url 'https://www.fedex.com/fedextrack/?tracknumbers=' and paste the tracking number at the end of it, it brings me to the tracking page which has the information i need.
I tried to feed the URL the tracking number and parse the html from the response.
This is what I tried.
# Fetch the FedEx tracking page and dump the raw HTML.
# NOTE(review): the tracking data is rendered client-side by JavaScript,
# so it will not appear in this static HTML (see the answer below).
import urllib.request

url_prefix = 'https://www.fedex.com/fedextrack/?tracknumbers='
tracking_number = '570573906561'
url = url_prefix + tracking_number

# Context manager guarantees the socket is closed even on error
# (the original fused two statements on one line and closed manually).
with urllib.request.urlopen(url) as sock:
    html_source = sock.read()

print(html_source)
this code outputs:
http://freetexthost.com/iy1ma2q1fm
I thought i would just be able to search the text from the output and find the delivery status/date but it is not in this output.
If I go to the tracking page in Chrome and inspect element, I see that the delivery date information has an id of destinationDateTime,
so if i run this in the Chrome Console:
document.getElementById('destinationDateTime')
it returns the output I want (delivery date)
How come my python script doesn't print actual tracking data information or that class in the html output?
I tried searching this question and tried parsing several different ways (Mechanize, Beautiful Soup, html2text) but all of these gave me the same output that does not contain any actual data about the shipment.

The website, like many others, won't work without JavaScript. It sends an HTTP POST request to a certain URL, which then returns the tracking data as a JSON-encoded object.
You'll need to simulate that with Python:
import requests
import json

tracking_number = '570573906561'

# Request body mirroring the XHR the FedEx tracking page itself sends.
track_request = {
    'TrackPackagesRequest': {
        'appType': 'wtrk',
        'uniqueKey': '',
        'processingParameters': {
            'anonymousTransaction': True,
            'clientId': 'WTRK',
            'returnDetailedErrors': True,
            'returnLocalizedDateTime': False
        },
        'trackingInfoList': [{
            'trackNumberInfo': {
                'trackingNumber': tracking_number,
                'trackingQualifier': '',
                'trackingCarrier': ''
            }
        }]
    }
}

# The endpoint expects a form field named 'data' holding the JSON blob.
form_fields = {
    'data': json.dumps(track_request),
    'action': 'trackpackages',
    'locale': 'en_US',
    'format': 'json',
    'version': 99
}

response = requests.post('https://www.fedex.com/trackingCal/track', data=form_fields)
data = response.json()
And then work with the resulting object:
{
"TrackPackagesResponse": {
"successful": true,
"passedLoggedInCheck": false,
"errorList": [{
"code": "0",
"message": "Request was successfully processed.",
"source": null
}],
"packageList": [{
"trackingNbr": "570573906561",
"trackingQualifier": "2456536000\u007e570573906561\u007eFX",
"trackingCarrierCd": "FDXE",
"trackingCarrierDesc": "FedEx Express",
"displayTrackingNbr": "570573906561",
"shipperCmpnyName": "",
"shipperName": "",
"shipperAddr1": "",
"shipperAddr2": "",
"shipperCity": "SEOUL",
"shipperStateCD": "",
"shipperZip": "",
"shipperCntryCD": "KR",
"shipperPhoneNbr": "",
"shippedBy": "",
"recipientCmpnyName": "",
"recipientName": "",
"recipientAddr1": "",
"recipientAddr2": "",
"recipientCity": "CHEK LAP KOK",
"recipientStateCD": "",
"recipientZip": "",
"recipientCntryCD": "HK",
"recipientPhoneNbr": "",
"shippedTo": "",
"keyStatus": "Delivered",
"keyStatusCD": "DL",
"lastScanStatus": "",
"lastScanDateTime": "",
"receivedByNm": ".CHOP",
"subStatus": "Signed for by\u003a .CHOP",
"mainStatus": "",
"statusBarCD": "DL",
"shortStatus": "",
"shortStatusCD": "",
"statusLocationAddr1": "",
"statusLocationAddr2": "",
"statusLocationCity": "CHEK LAP KOK",
"statusLocationStateCD": "",
"statusLocationZip": "",
"statusLocationCntryCD": "HK",
"statusWithDetails": "Delivered\u003a 9\u002f02\u002f2013 11\u003a58 am Signed for by\u003a.CHOP\u003b CHEK LAP KOK, HK",
"shipDt": "2013\u002d08\u002d31T15\u003a00\u003a00\u002b09\u003a00",
"displayShipDt": "8\u002f31\u002f2013",
"displayShipTm": "3\u003a00 pm",
"displayShipDateTime": "8\u002f31\u002f2013 3\u003a00 pm",
"pickupDt": "2013\u002d08\u002d31T15\u003a00\u003a00\u002b09\u003a00",
"displayPickupDt": "8\u002f31\u002f2013",
"displayPickupTm": "3\u003a00 pm",
"displayPickupDateTime": "8\u002f31\u002f2013 3\u003a00 pm",
"estDeliveryDt": "",
"estDeliveryTm": "",
"displayEstDeliveryDt": "",
"displayEstDeliveryTm": "",
"displayEstDeliveryDateTime": "",
"actDeliveryDt": "2013\u002d09\u002d02T11\u003a58\u003a00\u002b08\u003a00",
"displayActDeliveryDt": "9\u002f02\u002f2013",
"displayActDeliveryTm": "11\u003a58 am",
"displayActDeliveryDateTime": "9\u002f02\u002f2013 11\u003a58 am",
"nickName": "",
"note": "",
"matchedAccountList": [""],
"fxfAdvanceETA": "",
"fxfAdvanceReason": "",
"fxfAdvanceStatusCode": "",
"fxfAdvanceStatusDesc": "",
"destLink": "",
"originLink": "",
"hasBillOfLadingImage": false,
"hasBillPresentment": false,
"signatureRequired": 0,
"totalKgsWgt": "3.5",
"displayTotalKgsWgt": "3.5 kgs",
"totalLbsWgt": "7.8",
"displayTotalLbsWgt": "7.8 lbs",
"displayTotalWgt": "7.8 lbs \u002f 3.5 kgs",
"pkgKgsWgt": "3.5",
"displayPkgKgsWgt": "3.5 kgs",
"pkgLbsWgt": "7.8",
"displayPkgLbsWgt": "7.8 lbs",
"displayPkgWgt": "7.8 lbs \u002f 3.5 kgs",
"dimensions": "20x14x14 in.",
"masterTrackingNbr": "",
"masterQualifier": "",
"masterCarrierCD": "",
"originalOutboundTrackingNbr": null,
"originalOutboundQualifier": "",
"originalOutboundCarrierCD": "",
"invoiceNbrList": [""],
"referenceList": [""],
"doorTagNbrList": [""],
"referenceDescList": [""],
"purchaseOrderNbrList": [""],
"billofLadingNbrList": [""],
"shipperRefList": ["PO\u00232612 Proton housing\u005fPlastics"],
"rmaList": [""],
"deptNbrList": [""],
"shipmentIdList": [""],
"tcnList": [""],
"partnerCarrierNbrList": [""],
"hasAssociatedShipments": false,
"hasAssociatedReturnShipments": false,
"assocShpGrp": 0,
"drTgGrp": ["0"],
"associationInfoList": [{
"trackingNumberInfo": {
"trackingNumber": "",
"trackingQualifier": "",
"trackingCarrier": "",
"processingParameters": null
},
"associatedType": ""
}],
"returnReason": "",
"returnRelationship": null,
"skuItemUpcCdList": [""],
"receiveQtyList": [""],
"itemDescList": [""],
"partNbrList": [""],
"serviceCD": "INTERNATIONAL\u005fPRIORITY",
"serviceDesc": "FedEx International Priority",
"serviceShortDesc": "IP",
"packageType": "YOUR\u005fPACKAGING",
"packaging": "Your Packaging",
"clearanceDetailLink": "",
"showClearanceDetailLink": false,
"manufactureCountryCDList": [""],
"commodityCDList": [""],
"commodityDescList": [""],
"cerNbrList": [""],
"cerComplaintCDList": [""],
"cerComplaintDescList": [""],
"cerEventDateList": [""],
"displayCerEventDateList": [""],
"totalPieces": "1",
"specialHandlingServicesList": ["Deliver Weekday", "Weekend Pick\u002dUp"],
"shipmentType": "",
"pkgContentDesc1": "",
"pkgContentDesc2": "",
"docAWBNbr": "",
"originalCharges": "",
"transportationCD": "",
"transportationDesc": "",
"dutiesAndTaxesCD": "",
"dutiesAndTaxesDesc": "",
"origPieceCount": "",
"destPieceCount": "",
"goodsClassificationCD": "",
"receipientAddrQty": "0",
"deliveryAttempt": "0",
"codReturnTrackNbr": "",
"scanEventList": [{
"date": "2013\u002d09\u002d02",
"time": "11\u003a58\u003a00",
"gmtOffset": "\u002b08\u003a00",
"status": "Delivered",
"statusCD": "DL",
"scanLocation": "CHEK LAP KOK HK",
"scanDetails": "",
"scanDetailsHtml": "",
"rtrnShprTrkNbr": "",
"isDelException": false,
"isClearanceDelay": false,
"isException": false,
"isDelivered": true
}, {
"date": "2013\u002d09\u002d02",
"time": "09\u003a36\u003a00",
"gmtOffset": "\u002b08\u003a00",
"status": "On FedEx vehicle for delivery",
"statusCD": "OD",
"scanLocation": "LANTAU ISLAND HK",
"scanDetails": "",
"scanDetailsHtml": "",
"rtrnShprTrkNbr": "",
"isDelException": false,
"isClearanceDelay": false,
"isException": false,
"isDelivered": false
}, {
"date": "2013\u002d09\u002d02",
"time": "08\u003a55\u003a00",
"gmtOffset": "\u002b08\u003a00",
"status": "At local FedEx facility",
"statusCD": "AR",
"scanLocation": "LANTAU ISLAND HK",
"scanDetails": "",
"scanDetailsHtml": "",
"rtrnShprTrkNbr": "",
"isDelException": false,
"isClearanceDelay": false,
"isException": false,
"isDelivered": false
}, {
"date": "2013\u002d09\u002d02",
"time": "07\u003a12\u003a00",
"gmtOffset": "\u002b08\u003a00",
"status": "International shipment release \u002d Import",
"statusCD": "CC",
"scanLocation": "LANTAU ISLAND HK",
"scanDetails": "",
"scanDetailsHtml": "",
"rtrnShprTrkNbr": "",
"isDelException": false,
"isClearanceDelay": false,
"isException": false,
"isDelivered": false
}, {
"date": "2013\u002d09\u002d02",
"time": "04\u003a40\u003a00",
"gmtOffset": "\u002b08\u003a00",
"status": "Shipment exception",
"statusCD": "SE",
"scanLocation": "GUANGZHOU CN",
"scanDetails": "Delay beyond our control",
"scanDetailsHtml": "",
"rtrnShprTrkNbr": "",
"isDelException": false,
"isClearanceDelay": false,
"isException": false,
"isDelivered": false
}, {
"date": "2013\u002d09\u002d02",
"time": "03\u003a45\u003a00",
"gmtOffset": "\u002b08\u003a00",
"status": "Departed FedEx location",
"statusCD": "DP",
"scanLocation": "GUANGZHOU CN",
"scanDetails": "",
"scanDetailsHtml": "",
"rtrnShprTrkNbr": "",
"isDelException": false,
"isClearanceDelay": false,
"isException": false,
"isDelivered": false
}, {
"date": "2013\u002d09\u002d02",
"time": "01\u003a17\u003a00",
"gmtOffset": "\u002b08\u003a00",
"status": "Arrived at FedEx location",
"statusCD": "AR",
"scanLocation": "GUANGZHOU CN",
"scanDetails": "",
"scanDetailsHtml": "",
"rtrnShprTrkNbr": "",
"isDelException": false,
"isClearanceDelay": false,
"isException": false,
"isDelivered": false
}, {
"date": "2013\u002d09\u002d01",
"time": "23\u003a10\u003a00",
"gmtOffset": "\u002b08\u003a00",
"status": "In transit",
"statusCD": "IT",
"scanLocation": "SHANGHAI CN",
"scanDetails": "",
"scanDetailsHtml": "",
"rtrnShprTrkNbr": "",
"isDelException": false,
"isClearanceDelay": false,
"isException": false,
"isDelivered": false
}, {
"date": "2013\u002d09\u002d01",
"time": "17\u003a13\u003a00",
"gmtOffset": "\u002b09\u003a00",
"status": "In transit",
"statusCD": "IT",
"scanLocation": "INCHEON KR",
"scanDetails": "",
"scanDetailsHtml": "",
"rtrnShprTrkNbr": "",
"isDelException": false,
"isClearanceDelay": false,
"isException": false,
"isDelivered": false
}, {
"date": "2013\u002d08\u002d31",
"time": "19\u003a44\u003a00",
"gmtOffset": "\u002b09\u003a00",
"status": "In transit",
"statusCD": "IT",
"scanLocation": "INCHEON KR",
"scanDetails": "",
"scanDetailsHtml": "",
"rtrnShprTrkNbr": "",
"isDelException": false,
"isClearanceDelay": false,
"isException": false,
"isDelivered": false
}, {
"date": "2013\u002d08\u002d31",
"time": "16\u003a27\u003a00",
"gmtOffset": "\u002b09\u003a00",
"status": "Left FedEx origin facility",
"statusCD": "DP",
"scanLocation": "SEOUL KR",
"scanDetails": "",
"scanDetailsHtml": "",
"rtrnShprTrkNbr": "",
"isDelException": false,
"isClearanceDelay": false,
"isException": false,
"isDelivered": false
}, {
"date": "2013\u002d08\u002d31",
"time": "15\u003a00\u003a00",
"gmtOffset": "\u002b09\u003a00",
"status": "Picked up",
"statusCD": "PU",
"scanLocation": "SEOUL KR",
"scanDetails": "",
"scanDetailsHtml": "",
"rtrnShprTrkNbr": "",
"isDelException": false,
"isClearanceDelay": false,
"isException": false,
"isDelivered": false
}, {
"date": "2013\u002d08\u002d30",
"time": "23\u003a58\u003a11",
"gmtOffset": "\u002d05\u003a00",
"status": "Shipment information sent to FedEx",
"statusCD": "OC",
"scanLocation": "",
"scanDetails": "",
"scanDetailsHtml": "",
"rtrnShprTrkNbr": "",
"isDelException": false,
"isClearanceDelay": false,
"isException": false,
"isDelivered": false
}],
"originAddr1": "",
"originAddr2": "",
"originCity": "SEOUL",
"originStateCD": "",
"originZip": "",
"originCntryCD": "KR",
"originLocationID": "",
"originTermCity": "SEOUL",
"originTermStateCD": "",
"destLocationAddr1": "",
"destLocationAddr2": "",
"destLocationCity": "LANTAU ISLAND",
"destLocationStateCD": "",
"destLocationZip": "",
"destLocationCntryCD": "HK",
"destLocationID": "",
"destLocationTermCity": "LANTAU ISLAND",
"destLocationTermStateCD": "",
"destAddr1": "",
"destAddr2": "",
"destCity": "CHEK LAP KOK",
"destStateCD": "",
"destZip": "",
"destCntryCD": "HK",
"halAddr1": "",
"halAddr2": "",
"halCity": "",
"halStateCD": "",
"halZipCD": "",
"halCntryCD": "",
"actualDelAddrCity": "CHEK LAP KOK",
"actualDelAddrStateCD": "",
"actualDelAddrZipCD": "",
"actualDelAddrCntryCD": "HK",
"totalTransitMiles": "",
"excepReasonList": [""],
"excepActionList": [""],
"exceptionReason": "",
"exceptionAction": "",
"statusDetailsList": [""],
"trackErrCD": "",
"destTZ": "\u002b08\u003a00",
"originTZ": "\u002b09\u003a00",
"isMultiStat": "0",
"multiStatList": [{
"multiPiec": "",
"multiTm": "",
"multiDispTm": "",
"multiSta": ""
}],
"maskMessage": "",
"deliveryService": "",
"milestoDestination": "",
"terms": "",
"originUbanizationCode": "",
"originCountryName": "",
"isOriginResidential": false,
"halUrbanizationCD": "",
"halCountryName": "",
"actualDelAddrUrbanizationCD": "",
"actualDelAddrCountryName": "",
"destUrbanizationCD": "",
"destCountryName": "",
"delToDesc": "Shipping\u002fReceiving",
"recpShareID": "",
"shprShareID": "9mbo6hrq0tqxo1i4pr7kp2yp",
"defaultCDOType": "CDO",
"mpstype": "",
"fxfAdvanceNotice": true,
"rthavailableCD": "",
"excepReasonListNoInit": [""],
"excepActionListNoInit": [""],
"statusDetailsListNoInit": [""],
"matched": false,
"isSuccessful": true,
"errorList": [{
"code": "",
"message": "",
"source": null
}],
"isCanceled": false,
"isPrePickup": false,
"isPickup": false,
"isInTransit": false,
"isInProgress": true,
"isDelException": false,
"isClearanceDelay": false,
"isException": false,
"isDelivered": true,
"isHAL": false,
"isOnSchedule": false,
"isDeliveryToday": false,
"isSave": false,
"isWatch": false,
"isHistorical": false,
"isTenderedNotification": false,
"isDeliveredNotification": true,
"isExceptionNotification": false,
"isCurrentStatusNotification": false,
"isAnticipatedShipDtLabel": false,
"isShipPickupDtLabel": true,
"isActualPickupLabel": false,
"isOrderReceivedLabel": false,
"isEstimatedDeliveryDtLabel": true,
"isDeliveryDtLabel": false,
"isActualDeliveryDtLabel": true,
"isOrderCompleteLabel": false,
"isOutboundDirection": false,
"isInboundDirection": false,
"isThirdpartyDirection": false,
"isUnknownDirection": false,
"isFSM": false,
"isReturn": false,
"isOriginalOutBound": false,
"isChildPackage": false,
"isParentPackage": false,
"isReclassifiedAsSingleShipment": false,
"isDuplicate": false,
"isMaskShipper": false,
"isHalEligible": false,
"isFedexOfficeOnlineOrders": false,
"isFedexOfficeInStoreOrders": false,
"isMultipleStop": false,
"isCustomCritical": false,
"isInvalid": false,
"isNotFound": false,
"isFreight": false,
"isSpod": true,
"isSignatureAvailable": false,
"isMPS": false,
"isGMPS": false,
"isResidential": false,
"isDestResidential": true,
"isHALResidential": false,
"isActualDelAddrResidential": false,
"isReqEstDelDt": false,
"isCDOEligible": false,
"CDOInfoList": [{
"spclInstructDesc": "",
"delivOptn": "",
"delivOptnStatus": "",
"reqApptWdw": "",
"reqApptDesc": "",
"rerouteTRKNbr": "",
"beginTm": "",
"endTm": ""
}],
"CDOExists": false,
"isMtchdByRecShrID": false,
"isMtchdByShiprShrID": false
}]
}
}

This is what I ended up with, thanks to @Blender:
import json
from datetime import datetime

import requests
# Days per month (non-leap year).  The original table wrongly listed
# April (4) as having 31 days; April has 30.
daysdict = {1: 31, 2: 28, 3: 31, 4: 30, 5: 31, 6: 30,
            7: 31, 8: 31, 9: 30, 10: 31, 11: 30, 12: 31}


def days_in_month(month):
    """Return the number of days in *month* (1-12), ignoring leap years.

    Raises KeyError for a month outside 1-12; the original silently
    returned an unbound variable in that case, and scanned the whole
    dict with py2-only iteritems() instead of a direct lookup.
    """
    return daysdict[month]
def build_output(tracking_number):
    """POST a tracking query to FedEx and return the decoded JSON response."""
    # Inner JSON document the endpoint expects in the 'data' form field.
    payload = {
        'TrackPackagesRequest': {
            'appType': 'wtrk',
            'uniqueKey': '',
            'processingParameters': {
                'anonymousTransaction': True,
                'clientId': 'WTRK',
                'returnDetailedErrors': True,
                'returnLocalizedDateTime': False
            },
            'trackingInfoList': [{
                'trackNumberInfo': {
                    'trackingNumber': tracking_number,
                    'trackingQualifier': '',
                    'trackingCarrier': ''
                }
            }]
        }
    }
    form = {
        'data': json.dumps(payload),
        'action': 'trackpackages',
        'locale': 'en_US',
        'format': 'json',
        'version': 99
    }
    response = requests.post('https://www.fedex.com/trackingCal/track', data=form)
    return response.json()
# Keys of the packageList entry that hold the ship and delivery dates.
ship_arrival_key = 'displayActDeliveryDateTime'
ship_time_key = 'displayShipDt'


def track(tracking_number):
    """Look up a shipment and return (ship date, delivery date/time, found).

    Returns a 3-tuple: the displayShipDt value, the
    displayActDeliveryDateTime value, and a bool that is True when the
    response actually contained those keys.

    The original looped over every key with py2-only iteritems(),
    shadowed its own loop variables, and left `exists` and the date
    values unbound whenever the keys were missing; this indexes the
    response directly and always returns a well-defined tuple.
    """
    data = build_output(tracking_number)
    try:
        package = data['TrackPackagesResponse']['packageList'][0]
    except (KeyError, IndexError, TypeError):
        return '', '', False
    ship_time_value = package.get(ship_time_key, '')
    ship_arrival_value = package.get(ship_arrival_key, '')
    exists = ship_time_key in package or ship_arrival_key in package
    return ship_time_value, ship_arrival_value, exists
def print_results(tracking_number):
    """Print the ship date, delivery date and transit time for a shipment.

    The original computed the day count by slicing digit characters out
    of the date strings, which breaks for spans crossing months or
    years; datetime arithmetic handles every span correctly.
    """
    shipped, arrived, exists = track(tracking_number)
    if not exists:
        return
    try:
        # displayShipDt looks like '8/31/2013'.
        ship_date = datetime.strptime(shipped, '%m/%d/%Y').date()
        # displayActDeliveryDateTime looks like '9/02/2013 11:58 am';
        # the date is the first whitespace-separated token.
        arrival_date = datetime.strptime(arrived.split()[0], '%m/%d/%Y').date()
    except (ValueError, IndexError):
        print('Invalid Tracking Number')
        return
    ship_days = (arrival_date - ship_date).days
    print('_____________________')
    print('Shipped: ' + shipped)
    print('Arrived: ' + arrived)
    print('_____________________')
    print('\nShipping took:' + ' ' + str(ship_days))
def raw_results(tracking_number):
    """Return the shipment's transit time in whole days, or None if unknown.

    The original raised UnboundLocalError for an invalid tracking number
    (`ship_days` was never assigned before the final return), repeated
    the network call a second time just to fetch the month, and used the
    same broken character-slicing date math as print_results.
    """
    shipped, arrived, exists = track(tracking_number)
    if not exists:
        return None
    try:
        ship_date = datetime.strptime(shipped, '%m/%d/%Y').date()
        arrival_date = datetime.strptime(arrived.split()[0], '%m/%d/%Y').date()
    except (ValueError, IndexError):
        print('Invalid Tracking Number')
        return None
    return (arrival_date - ship_date).days
#print_results(499552080632881)

Related

Retrieve data from web site after 2 POST queries

I am trying to scrape this site to get the list of offers.
The problem is that we need to fill 2 forms (2 POST queries) before receiving the final result.
This is what I have done so far:
import requests as rs
from form_data import form_data1, form_data2

# API root for the Victorian Energy Compare service.
base_url = "https://compare.energy.vic.gov.au/api"

with rs.Session() as s:
    # The first call hands back a serverCacheId that ties the session together.
    r = s.get(f"{base_url}/get-psb-details?serverCacheId=null")
    serverCacheId = r.json()["serverCacheId"]
    # Two form submissions, mimicking the site's wizard steps.
    r = s.post(f"{base_url}/save-form-data", data=form_data1)
    r = s.post(f"{base_url}/save-form-data", data=form_data2)
Then I am trying to retrieve the offers after the second POST query:
# Query parameters for the final offers request.
params = {
    "serverCacheId": str(serverCacheId),
    "loopBack": "false",
    "selectedEnergy": "/offer",
}
r = s.get("https://compare.energy.vic.gov.au/api/get-offers", params=params)
print(r.json())
But unfortunately I get a message indicating a redirection:
{'status': 'redirect', 'message': 'no data'}
The 2 POSTs use the current data:
# Step 1 of the comparison wizard: energy type, address and distributor
# details. (The answer below notes that "solarCapacity"/"feedInTariff"
# are also required and that "loopBack" must be the boolean False.)
form_data1 = {
"showSolarSelection": "true",
"energyType": "Electricity",
"userType": "Residential",
"bill": "no bill",
"postcode": "3000",
"usageProfile": "0",
"averageDailyConsumption": "0",
"skipNMI": "true",
"smartMeter": "1",
"disclaimer": "true",
"hasSolar": "0",
"hasConcession": "0",
"distributor": {
"id": "4",
"name": "Citipower",
"display": "Citipower",
"phone": "1300 301 101 / 13 12 80",
"distribution_zone_id": "11",
"distribution_zone_name": "All"
},
"distributorDerived": "0",
"distributorSubmit": "true",
"pageDataType": "energyConfigData",
"loopBack": "true"
}
and
# Step 2 of the comparison wizard: the household energy-usage profile
# (heating/cooling appliances, occupancy, etc.).
form_data2 = {
"pvCapacity": "0", "pvCapacityCap": "null", "hhSize": "1", "totalRooms": "1", "fridgeCount": "0",
"gasConnection": "4", "poolHeating": "0", "poolHeatingSolar": "false", "poolHeatingGas": "false",
"poolHeatingElectric": "false", "poolHeatingNone": "false", "spaceHeatingElectricDucted": "false",
"spaceHeatingSplitSystem": "false", "spaceHeatingElectricUnderfloor": "false",
"spaceHeatingElectricIndividual": "false", "spaceHeatingGasDucted": "false",
"spaceHeatingGasUnderfloor": "false", "spaceHeatingGasIndividual": "false", "spaceHeatingOther": "false",
"spaceHeatingNone": "true", "spaceCoolingRoomAC": "false", "spaceCoolingSplitSystem": "false",
"spaceCoolingDuctedReverse": "false", "spaceCoolingDuctedEvaporative": "false",
"spaceCoolingPortableRef": "false", "spaceCoolingPortableEvap": "false", "spaceCoolingOther": "false",
"spaceCoolingNone": "true", "seaDistance": "", "clothesDryer": "0", "clothesDryerWeekday": "",
"clothesDryerWeekend": "", "dishwasherWeekday": "", "dishwasherWeekend": "",
"waterHeatingElectric": "false", "waterHeatingElectricSolar": "false", "waterHeatingGasStorage": "false",
"waterHeatingGasInstant": "false", "waterHeatingGasSolar": "false", "waterHeatingOther": "true",
"controlledLoad": "", "tvTotal": "", "turnOffAtPowerShort": "", "ovensElectric": "", "ovensGas": "",
"washingMachineUsage": "", "washingMachineWeekday": "", "washingMachineWeekend": "",
"televisionUsageWeekday": "", "televisionUsageWeekend": "", "heatingUsageMethod": "",
"gasUsageWinter": "0", "hhSize51": "", "energyType": "Electricity", "hasSolar": "0",
"pageDataType": "energyProfileData", "loopBack": "false"
}
Expected result
The expected result is a JSON object containing offers. Here is its structure:
{
"selectedEnergyType": "Electricity",
"energyTypeCount": 1,
"offers": {
"Electricity": {
"offersList": [{...}]
}
}
}
The site has some requirements and restrictions on the form data.
form_data1:
Add required fields "solarCapacity" and "feedInTariff".
"hasSolar": "0",
"solarCapacity": "", # Add this
"hasConcession": "0",
"feedInTariff": "", # Add this
Change "loopBack": "true" to "loopBack": false.
# "loopBack": "true"
"loopBack": False
Set "serverCacheId" and change data= to json=.
# r = s.post(f"{base_url}/save-form-data", data=form_data1)
r = s.post(f"{base_url}/save-form-data", json=dict(form_data1, serverCacheId=str(serverCacheId)))
form_data2:
Set "serverCacheId" and change data= to json=.
# r = s.post(f"{base_url}/save-form-data", data=form_data2)
r = s.post(f"{base_url}/save-form-data", json=dict(form_data2, serverCacheId=str(serverCacheId)))
(Optional, for consistency) Change "loopBack": "false" to "loopBack": false.
# "loopBack": "false"
"loopBack": False
The combined code:
import requests as rs
# Corrected step-1 payload: adds the required "solarCapacity" and
# "feedInTariff" fields and sends "loopBack" as the boolean False.
form_data1 = {
"showSolarSelection": "true",
"energyType": "Electricity",
"userType": "Residential",
"bill": "no bill",
"postcode": "3000",
"usageProfile": "0",
"averageDailyConsumption": "0",
"skipNMI": "true",
"smartMeter": "1",
"disclaimer": "true",
"hasSolar": "0",
"solarCapacity": "",
"hasConcession": "0",
"feedInTariff": "",
"distributor": {
"id": "4",
"name": "Citipower",
"display": "Citipower",
"phone": "1300 301 101 / 13 12 80",
"distribution_zone_id": "11",
"distribution_zone_name": "All"
},
"distributorDerived": "0",
"distributorSubmit": "true",
"pageDataType": "energyConfigData",
"loopBack": False
}
# Corrected step-2 payload: same usage profile as the question, with
# "loopBack" sent as the boolean False for consistency.
form_data2 = {
"pvCapacity": "0", "pvCapacityCap": "null", "hhSize": "1", "totalRooms": "1", "fridgeCount": "0",
"gasConnection": "4", "poolHeating": "0", "poolHeatingSolar": "false", "poolHeatingGas": "false",
"poolHeatingElectric": "false", "poolHeatingNone": "false", "spaceHeatingElectricDucted": "false",
"spaceHeatingSplitSystem": "false", "spaceHeatingElectricUnderfloor": "false",
"spaceHeatingElectricIndividual": "false", "spaceHeatingGasDucted": "false",
"spaceHeatingGasUnderfloor": "false", "spaceHeatingGasIndividual": "false", "spaceHeatingOther": "false",
"spaceHeatingNone": "true", "spaceCoolingRoomAC": "false", "spaceCoolingSplitSystem": "false",
"spaceCoolingDuctedReverse": "false", "spaceCoolingDuctedEvaporative": "false",
"spaceCoolingPortableRef": "false", "spaceCoolingPortableEvap": "false", "spaceCoolingOther": "false",
"spaceCoolingNone": "true", "seaDistance": "", "clothesDryer": "0", "clothesDryerWeekday": "",
"clothesDryerWeekend": "", "dishwasherWeekday": "", "dishwasherWeekend": "",
"waterHeatingElectric": "false", "waterHeatingElectricSolar": "false", "waterHeatingGasStorage": "false",
"waterHeatingGasInstant": "false", "waterHeatingGasSolar": "false", "waterHeatingOther": "true",
"controlledLoad": "", "tvTotal": "", "turnOffAtPowerShort": "", "ovensElectric": "", "ovensGas": "",
"washingMachineUsage": "", "washingMachineWeekday": "", "washingMachineWeekend": "",
"televisionUsageWeekday": "", "televisionUsageWeekend": "", "heatingUsageMethod": "",
"gasUsageWinter": "0", "hhSize51": "", "energyType": "Electricity", "hasSolar": "0",
"pageDataType": "energyProfileData", "loopBack": False
}
base_url = "https://compare.energy.vic.gov.au/api"

with rs.Session() as s:
    # Grab a fresh serverCacheId for this session.
    r = s.get(f"{base_url}/get-psb-details?serverCacheId=null")
    serverCacheId = r.json()["serverCacheId"]
    # Both wizard steps must be sent as JSON and must carry the cache id.
    r = s.post(f"{base_url}/save-form-data", json=dict(form_data1, serverCacheId=str(serverCacheId)))
    r = s.post(f"{base_url}/save-form-data", json=dict(form_data2, serverCacheId=str(serverCacheId)))
    # Finally ask for the offers.
    params = {
        "serverCacheId": str(serverCacheId),
        "loopBack": "false",
        "selectedEnergy": "/offer",
    }
    r = s.get("https://compare.energy.vic.gov.au/api/get-offers", params=params)
    print(r.json())

I am trying to create a database using the data in the json file provided

The program for creating the database, I am trying to do that in python. I have a json file whose contents will be used to populate the db.
In this we need to parse a JSON file, create a relational database dbjson corresponding to the JSON file, and upload the data into the dbjson.
The following is the formatted json file:
{
"cursor": {
"prev": null,
"hasNext": false,
"next": "1542275432008325:1:0",
"hasPrev": false,
"total": null,
"id": "1542275432008325:1:0",
"more": false
},
"code": 0,
"response": [
{
"editableUntil": "2018-11-22T11:20:37",
"dislikes": 0,
"numReports": 0,
"likes": 1,
"message": "<p>A sinking pound, resignations by the bucketful railroading through a so-called agreement nobody wants. For the good of the country Mrs. May 'go and go now'.</p>",
"id": "4196155749",
"createdAt": "2018-11-15T11:20:37",
"author": {
"username": "mnaid-233509073ed3432027d48b1a83f5fbd2",
"about": "",
"name": "baggiebuoy",
"disable3rdPartyTrackers": true,
"isPowerContributor": false,
"joinedAt": "2017-05-03T10:40:52",
"profileUrl": "https://disqus.com/by/mnaid-233509073ed3432027d48b1a83f5fbd2/",
"url": "",
"location": "",
"isPrivate": false,
"signedUrl": "",
"isPrimary": true,
"isAnonymous": false,
"id": "250728493",
"avatar": {
"small": {
"permalink": "https://disqus.com/api/users/avatars/mnaid-233509073ed3432027d48b1a83f5fbd2.jpg",
"cache": "//a.disquscdn.com/1541535443/images/noavatar32.png"
},
"isCustom": false,
"permalink": "https://disqus.com/api/users/avatars/mnaid-233509073ed3432027d48b1a83f5fbd2.jpg",
"cache": "//a.disquscdn.com/1541535443/images/noavatar92.png",
"large": {
"permalink": "https://disqus.com/api/users/avatars/mnaid-233509073ed3432027d48b1a83f5fbd2.jpg",
"cache": "//a.disquscdn.com/1541535443/images/noavatar92.png"
}
}
},
"media": [],
"isSpam": false,
"isDeletedByAuthor": false,
"isDeleted": false,
"parent": null,
"isApproved": true,
"isFlagged": false,
"raw_message": "A sinking pound, resignations by the bucketful railroading through a so-called agreement nobody wants. For the good of the country Mrs. May 'go and go now'.",
"isHighlighted": false,
"canVote": false,
"thread": "7046384220",
"forum": "expressandstar",
"points": 1,
"moderationLabels": [],
"isEdited": true,
"sb": false
},
{
"editableUntil": "2018-11-22T10:37:59",
"dislikes": 0,
"numReports": 0,
"likes": 0,
"message": "<p>This could be heaven, or this could be hell.</p>",
"id": "4196048572",
"createdAt": "2018-11-15T10:37:59",
"author": {
"username": "mnaid-29e1c59be16c852670e3be302e8c303b",
"about": "",
"name": "Mordecai",
"disable3rdPartyTrackers": false,
"isPowerContributor": false,
"joinedAt": "2017-05-03T13:01:02",
"profileUrl": "https://disqus.com/by/mnaid-29e1c59be16c852670e3be302e8c303b/",
"url": "",
"location": "",
"isPrivate": false,
"signedUrl": "",
"isPrimary": true,
"isAnonymous": false,
"id": "250739494",
"avatar": {
"small": {
"permalink": "https://disqus.com/api/users/avatars/mnaid-29e1c59be16c852670e3be302e8c303b.jpg",
"cache": "//a.disquscdn.com/1541535443/images/noavatar32.png"
},
"isCustom": false,
"permalink": "https://disqus.com/api/users/avatars/mnaid-29e1c59be16c852670e3be302e8c303b.jpg",
"cache": "//a.disquscdn.com/1541535443/images/noavatar92.png",
"large": {
"permalink": "https://disqus.com/api/users/avatars/mnaid-29e1c59be16c852670e3be302e8c303b.jpg",
"cache": "//a.disquscdn.com/1541535443/images/noavatar92.png"
}
}
},
"media": [],
"isSpam": false,
"isDeletedByAuthor": false,
"isDeleted": false,
"parent": 4196013534,
"isApproved": true,
"isFlagged": false,
"raw_message": "This could be heaven, or this could be hell.",
"isHighlighted": false,
"canVote": false,
"thread": "7046384220",
"forum": "expressandstar",
"points": 0,
"moderationLabels": [],
"isEdited": false,
"sb": false
},
{
"editableUntil": "2018-11-22T10:36:50",
"dislikes": 0,
"numReports": 0,
"likes": 0,
"message": "<p>The crappest of crap deals.<br>Will never get through Parliament.</p><p>You are the weakest link May, goodbye.</p>",
"id": "4196044068",
"createdAt": "2018-11-15T10:36:50",
"author": {
"username": "mnaid-29e1c59be16c852670e3be302e8c303b",
"about": "",
"name": "Mordecai",
"disable3rdPartyTrackers": false,
"isPowerContributor": false,
"joinedAt": "2017-05-03T13:01:02",
"profileUrl": "https://disqus.com/by/mnaid-29e1c59be16c852670e3be302e8c303b/",
"url": "",
"location": "",
"isPrivate": false,
"signedUrl": "",
"isPrimary": true,
"isAnonymous": false,
"id": "250739494",
"avatar": {
"small": {
"permalink": "https://disqus.com/api/users/avatars/mnaid-29e1c59be16c852670e3be302e8c303b.jpg",
"cache": "//a.disquscdn.com/1541535443/images/noavatar32.png"
},
"isCustom": false,
"permalink": "https://disqus.com/api/users/avatars/mnaid-29e1c59be16c852670e3be302e8c303b.jpg",
"cache": "//a.disquscdn.com/1541535443/images/noavatar92.png",
"large": {
"permalink": "https://disqus.com/api/users/avatars/mnaid-29e1c59be16c852670e3be302e8c303b.jpg",
"cache": "//a.disquscdn.com/1541535443/images/noavatar92.png"
}
}
},
"media": [],
"isSpam": false,
"isDeletedByAuthor": false,
"isDeleted": false,
"parent": null,
"isApproved": true,
"isFlagged": false,
"raw_message": "The crappest of crap deals.\nWill never get through Parliament.\n\nYou are the weakest link May, goodbye.",
"isHighlighted": false,
"canVote": false,
"thread": "7046384220",
"forum": "expressandstar",
"points": 0,
"moderationLabels": [],
"isEdited": false,
"sb": false
},
{
"editableUntil": "2018-11-22T10:28:13",
"dislikes": 0,
"numReports": 0,
"likes": 0,
"message": "<p>We are all just prisoners here of our own device. 😁</p>",
"id": "4196013534",
"createdAt": "2018-11-15T10:28:13",
"author": {
"username": "mnaid-42ba513c42a0fd6558aa44b1de658140",
"about": "",
"name": "chaffwolf",
"disable3rdPartyTrackers": true,
"isPowerContributor": false,
"joinedAt": "2017-12-20T16:45:10",
"profileUrl": "https://disqus.com/by/mnaid-42ba513c42a0fd6558aa44b1de658140/",
"url": "",
"location": "",
"isPrivate": false,
"signedUrl": "",
"isPrimary": true,
"isAnonymous": false,
"id": "274657487",
"avatar": {
"small": {
"permalink": "https://disqus.com/api/users/avatars/mnaid-42ba513c42a0fd6558aa44b1de658140.jpg",
"cache": "//a.disquscdn.com/1541535443/images/noavatar32.png"
},
"isCustom": false,
"permalink": "https://disqus.com/api/users/avatars/mnaid-42ba513c42a0fd6558aa44b1de658140.jpg",
"cache": "//a.disquscdn.com/1541535443/images/noavatar92.png",
"large": {
"permalink": "https://disqus.com/api/users/avatars/mnaid-42ba513c42a0fd6558aa44b1de658140.jpg",
"cache": "//a.disquscdn.com/1541535443/images/noavatar92.png"
}
}
},
"media": [],
"isSpam": false,
"isDeletedByAuthor": false,
"isDeleted": false,
"parent": 4195981074,
"isApproved": true,
"isFlagged": false,
"raw_message": "We are all just prisoners here of our own device. 😁",
"isHighlighted": false,
"canVote": false,
"thread": "7046384220",
"forum": "expressandstar",
"points": 0,
"moderationLabels": [],
"isEdited": false,
"sb": false
},
{
"editableUntil": "2018-11-22T10:27:03",
"dislikes": 0,
"numReports": 0,
"likes": 0,
"message": "<p>Should be no deal end off.<br>Nobody could possibly be the winner so it's simple leave and that's it .<br>Their will always be wannabes who think they can do better in it for only their own powers not for the general country or the people of it .,</p>",
"id": "4196012501",
"createdAt": "2018-11-15T10:27:03",
"author": {
"username": "mnaid-90ecce8d5dad4396f681182cb470872c",
"about": "",
"name": "wanderer in eire",
"disable3rdPartyTrackers": true,
"isPowerContributor": false,
"joinedAt": "2017-05-12T20:20:43",
"profileUrl": "https://disqus.com/by/mnaid-90ecce8d5dad4396f681182cb470872c/",
"url": "",
"location": "",
"isPrivate": false,
"signedUrl": "",
"isPrimary": true,
"isAnonymous": false,
"id": "251694793",
"avatar": {
"small": {
"permalink": "https://disqus.com/api/users/avatars/mnaid-90ecce8d5dad4396f681182cb470872c.jpg",
"cache": "//a.disquscdn.com/1541535443/images/noavatar32.png"
},
"isCustom": false,
"permalink": "https://disqus.com/api/users/avatars/mnaid-90ecce8d5dad4396f681182cb470872c.jpg",
"cache": "//a.disquscdn.com/1541535443/images/noavatar92.png",
"large": {
"permalink": "https://disqus.com/api/users/avatars/mnaid-90ecce8d5dad4396f681182cb470872c.jpg",
"cache": "//a.disquscdn.com/1541535443/images/noavatar92.png"
}
}
},
"media": [],
"isSpam": false,
"isDeletedByAuthor": false,
"isDeleted": false,
"parent": null,
"isApproved": true,
"isFlagged": false,
"raw_message": "Should be no deal end off.\nNobody could possibly be the winner so it's simple leave and that's it .\nTheir will always be wannabes who think they can do better in it for only their own powers not for the general country or the people of it .,",
"isHighlighted": false,
"canVote": false,
"thread": "7046384220",
"forum": "expressandstar",
"points": 0,
"moderationLabels": [],
"isEdited": false,
"sb": false
},
{
"editableUntil": "2018-11-22T10:26:42",
"dislikes": 0,
"numReports": 0,
"likes": 0,
"message": "<p>Damned if she does, damned if she doesn't. All for a cause she does not believe in.<br>She will go down in history whatever happens. <br>The question is: who better to lead the country at such an important juncture??<br>Answers on a postcard!</p>",
"id": "4196012237",
"createdAt": "2018-11-15T10:26:42",
"author": {
"username": "mnaid-42ba513c42a0fd6558aa44b1de658140",
"about": "",
"name": "chaffwolf",
"disable3rdPartyTrackers": true,
"isPowerContributor": false,
"joinedAt": "2017-12-20T16:45:10",
"profileUrl": "https://disqus.com/by/mnaid-42ba513c42a0fd6558aa44b1de658140/",
"url": "",
"location": "",
"isPrivate": false,
"signedUrl": "",
"isPrimary": true,
"isAnonymous": false,
"id": "274657487",
"avatar": {
"small": {
"permalink": "https://disqus.com/api/users/avatars/mnaid-42ba513c42a0fd6558aa44b1de658140.jpg",
"cache": "//a.disquscdn.com/1541535443/images/noavatar32.png"
},
"isCustom": false,
"permalink": "https://disqus.com/api/users/avatars/mnaid-42ba513c42a0fd6558aa44b1de658140.jpg",
"cache": "//a.disquscdn.com/1541535443/images/noavatar92.png",
"large": {
"permalink": "https://disqus.com/api/users/avatars/mnaid-42ba513c42a0fd6558aa44b1de658140.jpg",
"cache": "//a.disquscdn.com/1541535443/images/noavatar92.png"
}
}
},
"media": [],
"isSpam": false,
"isDeletedByAuthor": false,
"isDeleted": false,
"parent": null,
"isApproved": true,
"isFlagged": false,
"raw_message": "Damned if she does, damned if she doesn't. All for a cause she does not believe in.\nShe will go down in history whatever happens. \nThe question is: who better to lead the country at such an important juncture??\nAnswers on a postcard!",
"isHighlighted": false,
"canVote": false,
"thread": "7046384220",
"forum": "expressandstar",
"points": 0,
"moderationLabels": [],
"isEdited": false,
"sb": false
},
{
"editableUntil": "2018-11-22T09:50:32",
"dislikes": 0,
"numReports": 0,
"likes": 0,
"message": "<p>Theresa May is using The Eagles Hotel California lyrics as the inspiration for her Brexit plan.</p><p>You can check out any time you like, but you can never leave.</p>",
"id": "4195981074",
"createdAt": "2018-11-15T09:50:32",
"author": {
"username": "mnaid-e82a88d937e60267fd2c866b01131ada",
"about": "",
"name": "Olly the cat",
"disable3rdPartyTrackers": false,
"isPowerContributor": false,
"joinedAt": "2017-05-03T10:27:23",
"profileUrl": "https://disqus.com/by/mnaid-e82a88d937e60267fd2c866b01131ada/",
"url": "",
"location": "",
"isPrivate": false,
"signedUrl": "",
"isPrimary": true,
"isAnonymous": false,
"id": "250727584",
"avatar": {
"small": {
"permalink": "https://disqus.com/api/users/avatars/mnaid-e82a88d937e60267fd2c866b01131ada.jpg",
"cache": "//a.disquscdn.com/1541535443/images/noavatar32.png"
},
"isCustom": false,
"permalink": "https://disqus.com/api/users/avatars/mnaid-e82a88d937e60267fd2c866b01131ada.jpg",
"cache": "//a.disquscdn.com/1541535443/images/noavatar92.png",
"large": {
"permalink": "https://disqus.com/api/users/avatars/mnaid-e82a88d937e60267fd2c866b01131ada.jpg",
"cache": "//a.disquscdn.com/1541535443/images/noavatar92.png"
}
}
},
"media": [],
"isSpam": false,
"isDeletedByAuthor": false,
"isDeleted": false,
"parent": null,
"isApproved": true,
"isFlagged": false,
"raw_message": "Theresa May is using The Eagles Hotel California lyrics as the inspiration for her Brexit plan.\n\nYou can check out any time you like, but you can never leave.",
"isHighlighted": false,
"canVote": false,
"thread": "7046384220",
"forum": "expressandstar",
"points": 0,
"moderationLabels": [],
"isEdited": false,
"sb": false
}
]
}
This data must be used to populate the database.
The following is the implementation I have worked out:
import csv
import json
import sys


def main():
    """Convert a JSON file holding a list of flat objects into a CSV file.

    Usage: python json2csv.py input.json output.csv

    The first record's keys become the CSV header row; every record's
    values become one data row.
    """
    # sys.argv entries are never None -- a missing argument raises
    # IndexError, so guard on the argument count instead.
    if len(sys.argv) < 3:
        sys.exit("usage: json2csv.py input.json output.csv")

    file_input = sys.argv[1]
    file_output = sys.argv[2]

    # Context managers guarantee the files are closed (the original never
    # closed the output file, so the CSV could be left unflushed).
    with open(file_input) as input_file:
        data = json.load(input_file)

    # newline='' is required by the csv module to avoid blank lines on Windows.
    with open(file_output, 'w', newline='') as output_file:
        writer = csv.writer(output_file)
        writer.writerow(data[0].keys())  # header row from the first record
        for record in data:
            writer.writerow(record.values())


if __name__ == "__main__":
    main()
I tried to parse data using json package in python but have not been able to successfully implement it.
This seems like a similar problem.
Have you tried it? Please edit with your code and errors

Python 3 Trying to access json array keep getting key error

I'm using Python 3 and am trying to access some information from a JSON query. Currently I can access data within payments by looping through. I try to do the same to access order_status and it doesn't work.
Json:
___order details___
{
"message_type_name": "OrderPlaced",
"order": {
"adcode": "",
"adcode_id": 0,
"billing_address": {
"address_line_1": "123 east street",
"address_line_2": "apt one"
},
"campaign_code": "",
"channel": "Online",
"customer": {
"adcode": "",
"adcode_id": 0,
"affiliate_id": 0,
"alternate_phone_number": ""
},
"device": "None",
"discount_total": 0.0,
"discounted_shipping_total": 0.0,
"items": [
{
"admin_comments": "",
"cost": 90.0,
"created_at": "2018-12-04T17:14:55.1128646-06:00"
}
],
"manufacturer_invoice_amount": null,
"manufacturer_invoice_number": "",
"manufacturer_invoice_paid": false,
"order_status": {
"color": "#A4E065",
"created_at": null,
"email_template_id": null,
"id": 6,
"is_cancelled": false,
"is_declined": false,
"is_fully_refunded": false,
"is_open": true,
"is_partially_refunded": false,
"is_quote_status": false,
"is_shipped": false,
"name": "Awaiting Payment",
"updated_at": "2015-10-12T22:07:47.487-05:00"
},
"order_status_id": 6,
"order_status_last_changed_at": "2018-12-04T17:14:55.0503538-06:00",
"order_type": "Order",
"payments": [
{
"amount": 100.00,
"approved_at": "",
"authorization_code": ""
"
}
],
"ppc_keyword": "",
"previous_order_status_id": 6,
"shipments": [],
"shipping_address": {
"comments": "",
"company": "",
"country": "United States"
},
"shipping_total": 0.0,
"source": "Google [organic]"
},
"store_id": 1
}
I have a variable called data: data = json.loads(self.rfile.read( length ).decode('utf-8'))
I have another variable order_payments = data['payments']
I can loop through that and access is_declined
I want to have a variable called order_status = data['order_status'], and then loop through that to access name. I get a key error on order_status = data['order_status'] and I don't know why.

How can you extract data from this json using, beautifulsoup and python?

How can I get the two values utc_last_updated and name from the following JSON?
I used requests to fetch the content, and then BeautifulSoup to get it into the form shown below. But now I just want to extract the two values that I have shown.
"data": [
{
"scm": "hg",
"has_wiki": false,
"last_updated": "2016-03-23T14:05:27.433",
"no_forks": false,
"created_on": "2016-03-18T22:55:52.705",
"owner": "user",
"email_mailinglist": "",
"is_mq": false,
"size": 420034,
"read_only": false,
"fork_of": null,
"mq_of": null,
"state": "available",
"utc_created_on": "2016-03-18 21:55:52+00:00",
"website": "",
"description": "",
"has_issues": false,
"is_fork": false,
"slug": "store",
"is_private": true,
"name": "store",
"language": "python",
"utc_last_updated": "2016-03-23 13:05:27+00:00",
"no_public_forks": true,
"creator": null,
"resource_uri": "/1.0/repositories/my_url"
},
{
"scm": "hg",
"has_wiki": false,
"last_updated": "2016-03-18T12:26:22.261",
"no_forks": false,
"created_on": "2016-03-18T12:19:08.262",
"owner": "user",
"email_mailinglist": "",
"is_mq": false,
"size": 173137,
"read_only": false,
"fork_of": null,
"mq_of": null,
"state": "available",
"utc_created_on": "2016-03-18 11:19:08+00:00",
"website": "",
"description": "",
"has_issues": false,
"is_fork": false,
"name": 'foo'
"is_private": true,,
"language": "python",
"utc_last_updated": "2016-03-18 11:26:22+00:00",
"no_public_forks": true,
"creator": null,
"resource_uri": "/1.0/repositories/my_rl"
},
}
I will appreciate any help.
You've got a JSON response, not HTML - parse it with json module:
import json

# The server returned JSON, not HTML -- decode the string and walk the
# records under the top-level "data" key, printing each repository's
# last-updated timestamp (UTC).
repositories = json.loads(response)
for repo in repositories["data"]:
    print(repo["utc_last_updated"])

Handling multiple JSON data sets for POST request in Python

I have 3 JSON methods which are two query methods and an update method. I would like to parse through this information with and execute a POST request and pass this data into a database using arcpy for GIS use for all three methods. I have a script which works and gets a response, however the problem is locating the keys and values for each object so that I can successfully send my values to to a database.
Additionally, I handle this task with three different methods, all of which I need data from.
For instance,
query 1 would allow me to parse and find the address, lat/lng, etc.
query 2 would allow me to parse and find customer info, type of request, etc.
query 3 would allow me to update a request.
My first question is how do I successfully extract only the data that I want from each output; I have tested Key/Values in POSTman to no avail, the server is expecting an entire JSON file.
My second question is how do I handle 3 different requests; I am assuming 3 different post methods and selecting the data that I want.
Example of JSON one query passed to server
{
"RequestSpecificDetail": {
"ParentSRNumberForLink": ""
},
"MetaData": {
"appVersion": "1.34",
"deviceModel": "x86_64",
"dateAndTime": "01/15/2015 12:46:36",
"deviceToken": "A2C1DD9D-D17D-4031-BA3E-977C250BFD58",
"osVersion": "8.1"
},
"SRData": {
"LoginUser": "User89",
"NewContactEmail": "abc#gmail.com",
"UpdatedDate": "02/05/2015"
}
}
Example of Query 1 Output
{
"status": {
"code": 311,
"message": "Service Request Successfully Queried.",
"cause": ""
},
"Response": {
"NumOutputObjects": "2",
"ListOfServiceRequest": {
"ServiceRequest": [
{
"SRAddress": "1200 W TEMPLE ST, 90026",
"SRNumber": "1-5099871",
"SRType": "Feedback",
"CreatedDate": "02/05/2015 22:55:58",
"UpdatedDate": "02/05/2015 22:55:58",
"Status": "Open",
"imageURL": ""
},
{
"SRAddress": "1200 W TEMPLE ST, 90026",
"SRNumber": "1-5133051",
"SRType": "Feedback",
"CreatedDate": "02/05/2015 23:03:54",
"UpdatedDate": "02/05/2015 23:03:54",
"Status": "Open",
"imageURL": "https://SERVER_END_POINT/portal/docview?id=fe083ae14b52b1af0945b4d756c296a5"
}
]
},
"LastUpdateDate": "02/05/2015"
}
}
Example of Query 2 passed to server
{
"RequestSpecificDetail": {
"ParentSRNumberForLink": ""
},
"MetaData": {
"appVersion": "1.34",
"deviceModel": "x86_64",
"dateAndTime": "01/15/2015 12:46:36",
"deviceToken": "A2C1DD9D-D17D-4031-BA3E-977C250BFD58",
"osVersion": "8.1"
},
"SRData": {
"SRNumber": "1-1080871"
}
}
Query two output
{
"status": {
"code": 311,
"message": "Service Request Successfully Queried.",
"cause": ""
},
"Response": {
"NumOutputObjects": "1",
"ListOfServiceRequest": {
"ServiceRequest": [
{
"AddressVerified": "Y",
"SRNumber": "1-1080871",
"SRType": "Homeless Encampment",
"CreatedDate": "12/31/2014 13:49:23",
"UpdatedDate": "12/31/2014 13:49:23",
"IntegrationId": "1420033765921",
"Status": "Open",
"CreatedByUserLogin": "User89",
"UpdatedByUserLogin": "User89",
"Anonymous": "N",
"Zipcode": "90026",
"Latitude": "34.064937",
"Longitude": "-118.252968",
"CustomerAccessNumber": "",
"LADWPAccountNo": "",
"NewContactFirstName": "",
"NewContactLastName": "",
"NewContactPhone": "",
"NewContactEmail": "",
"ParentSRNumber": "",
"Priority": "Normal",
"Language": "English",
"ReasonCode": "",
"ServiceDate": "12/31/2014 00:00:00",
"Source": "311",
"Email": "user#email.com",
"FirstName": "User",
"HomePhone": "3123123123",
"LastName": "Pp",
"LoginUser": "",
"ResolutionCode": "",
"SRUnitNumber": "",
"MobilOS": "iOS",
"SRAddress": "1200 W TEMPLE ST, 90026",
"SRAddressName": "",
"SRAreaPlanningCommission": "Central APC",
"SRCommunityPoliceStation": "",
"SRCouncilDistrictMember": "Gilbert Cedillo",
"SRCouncilDistrictNo": "1",
"SRDirection": "W",
"SRNeighborhoodCouncilId": "44",
"SRNeighborhoodCouncilName": "GREATER ECHO PARK ELYSIAN NC",
"SRStreetName": "TEMPLE",
"SRSuffix": "ST",
"SRTBColumn": "E",
"SRTBMapGridPage": "634",
"SRTBRow": "2",
"SRXCoordinate": "6485064",
"SRYCoordinate": "1846114",
"AssignTo": "North Central - 104 - IED",
"Assignee": "Siebel Administrator",
"Owner": "BSS",
"ParentSRStatus": "",
"ParentSRType": "",
"ParentSRLinkDate": "",
"ParentSRLinkUser": "",
"SRAreaPlanningCommissionId": "4",
"SRCommunityPoliceStationAPREC": "RAMPART",
"SRCommunityPoliceStationPREC": "2",
"SRCrossStreet": "",
"ActionTaken": "",
"SRCity": "",
"RescheduleCounter": "",
"SRHouseNumber": "",
"ListOfDataBarricadeRemoval": {},
"ListOfDataBulkyItem": {},
"ListOfDataDeadAnimalRemoval": {},
"ListOfDataGraffitiRemoval": {},
"ListOfDataInformationOnly": {},
"ListOfDataMultipleStreetlightIssue": {},
"ListOfDataSingleStreetlightIssue": {},
"ListOfDataSrPhotoId": {
"DataSrPhotoId": []
},
"ListOfDataBusPadLanding": {},
"ListOfDataCurbRepair": {},
"ListOfDataFlooding": {},
"ListOfDataGeneralStreetInspection": {},
"ListOfDataGuardWarningRailMaintenance": {},
"ListOfDataGutterRepair": {},
"ListOfDataLandMudSlide": {},
"ListOfDataPothole": {},
"ListOfDataResurfacing": {},
"ListOfDataSidewalkRepair": {},
"ListOfDataStreetSweeping": {},
"ListOfDataBeesOrBeehive": {},
"ListOfDataMedianIslandMaintenance": {},
"ListOfDataOvergrownVegetationPlants": {},
"ListOfDataPalmFrondsDown": {},
"ListOfDataStreetTreeInspection": {},
"ListOfDataStreetTreeViolations": {},
"ListOfDataTreeEmergency": {},
"ListOfDataTreeObstruction": {},
"ListOfDataTreePermits": {},
"ListOfDataBrushItemsPickup": {},
"ListOfDataContainers": {},
"ListOfDataElectronicWaste": {},
"ListOfDataIllegalDumpingPickup": {},
"ListOfDataManualPickup": {},
"ListOfDataMetalHouseholdAppliancesPickup": {},
"ListOfDataMoveInMoveOut": {},
"ListOfDataHomelessEncampment": {
"DataHomelessEncampment": [
{
"ApprovedBy": "",
"AssignedTo": "",
"CompletedBy": "",
"Contact": "",
"ContactDate": "",
"Crew": "",
"DateCompleted": "12/31/2014 00:00:00",
"InspectedBy": "",
"InspectionDate": "",
"Location": "Alley",
"Type": "Homeless Encampment",
"LastUpdatedBy": "",
"OptionalTrackingCode": "",
"Name": "a5b5b2b9-d2e7-400a-bf75-1138ff013caa"
}
]
},
"ListOfDataIllegalAutoRepair": {},
"ListOfDataIllegalConstruction": {},
"ListOfDataIllegalConstructionFence": {},
"ListOfDataIllegalDischargeOfWater": {},
"ListOfDataIllegalDumpingInProgress": {},
"ListOfDataIllegalExcavation": {},
"ListOfDataIllegalSignRemoval": {},
"ListOfDataIllegalVending": {},
"ListOfDataLeafBlowerViolation": {},
"ListOfDataNewsRackViolation": {},
"ListOfDataObstructions": {},
"ListOfDataTablesAndChairsObstructing": {},
"ListOfDataGisLayer": {
"DataGisLayer": [
{
"A_Call_No": "",
"Area": "",
"Day": "",
"DirectionSuffix": "",
"DistrictAbbr": "",
"DistrictName": "Central",
"DistrictNumber": "104",
"DistrictOffice": "North Central",
"Fraction": "",
"R_Call_No": "",
"SectionId": "5279800",
"ShortDay": "",
"StreetFrom": "BOYLSTON ST",
"StreetTo": "FIRMIN ST",
"StreetLightId": "",
"StreetLightStatus": "",
"Type": "GIS",
"Y_Call_No": "",
"Name": "41572025-3803-49c4-8561-6e7ef41775df",
"CommunityPlanningArea": "Westlake",
"LastUpdatedBy": "",
"BOSRadioHolderName": ""
}
]
},
"ListOfDataServiceRequestNotes": {
"DataServiceRequestNotes": [
{
"CreatedDate": "12/31/2014 13:49:23",
"Comment": "",
"CreatedByUser": "User89",
"IsSrNoAvailable": "N",
"CommentType": "External",
"Notification": "N",
"FeedbackSRType": "",
"IntegrationId": "1420033765921",
"Date1": "",
"Date2": "",
"Date3": "",
"Text1": "",
"ListOfDataSrNotesAuditTrail": {}
}
]
},
"ListOfDataSubscribeDuplicateSr": {
"DataSubscribeDuplicateSr": [
{
"Activeflag": "Y",
"EmailId": "pratik.desai#yoopmail.com",
"Name": "010420150405",
"Type": "Subscription",
"LastUpdatedBy": ""
}
]
},
"ListOfChildServiceRequest": {},
"ListOfDataBillingCsscAdjustment": {},
"ListOfDataBillingEccAdjustment": {},
"ListOfDataBillingRsscAdjustment": {},
"ListOfDataBillingRsscExemption": {},
"ListOfDataSanitationBillingBif": {},
"ListOfDataSanitationBillingCssc": {},
"ListOfDataSanitationBillingEcc": {},
"ListOfDataSanitationBillingInquiry": {},
"ListOfDataSanitationBillingLifeline": {},
"ListOfDataSanitationBillingRssc": {},
"ListOfDataSanitationBillingSrf": {},
"ListOfDataDocumentLog": {},
"ListOfAuditTrailItem2": {},
"ListOfDataGenericBc": {
"DataGenericBc": [
{
"ATTRIB_08": "",
"NAME": "41572025-3803-49c4-8561-6e7ef41775df",
"PAR_ROW_ID": "1-N607",
"ROW_ID": "1-N60A",
"TYPE": "GIS",
"ListOfDataGenericbcAuditTrail": {}
},
{
"ATTRIB_08": "",
"NAME": "a5b5b2b9-d2e7-400a-bf75-1138ff013caa",
"PAR_ROW_ID": "1-N607",
"ROW_ID": "1-N609",
"TYPE": "Homeless Encampment",
"ListOfDataGenericbcAuditTrail": {}
},
{
"ATTRIB_08": "",
"NAME": "010420150405",
"PAR_ROW_ID": "1-N607",
"ROW_ID": "1-RN2D",
"TYPE": "Subscription",
"ListOfDataGenericbcAuditTrail": {}
}
]
},
"ListOfDataServiceNotComplete": {},
"ListOfDataOther": {},
"ListOfDataWeedAbatementForPrivateParcels": {}
}
]
}
}
}
Query 3 input
{
"MetaData": {},
"RequestSpecificDetail": {
"ParentSRNumberForLink": ""
},
"SRData": {
"SRNumber":"1-5968841",
"Anonymous": "N",
"Assignee": "",
"CreatedByUserLogin": "KAHUNA30DEC",
"CustomerAccessNumber": "",
"LADWPAccountNo": "",
"Language": "English",
"ListOfDataGisLayer": {},
"ListOfDataServiceRequestNotes": {
"DataServiceRequestNotes": [
{
"Comment": "description 1245",
"CommentType": "Feedback",
"CreatedByUser": "KAHUNA30DEC",
"FeedbackSRType": "Illegal Dumping in Progress",
"IsSrNoAvailable": "N"
},
{
"Comment": "comments 123568",
"CommentType": "External",
"CreatedByUser": "",
"IsSrNoAvailable": "N"
}
]
},
"LoginUser": "KAHUNA30DEC",
"MobilOS": "Android",
"NewContactEmail": "",
"NewContactFirstName": "",
"NewContactLastName": "",
"NewContactPhone": "",
"Owner": "Other",
"ParentSRNumber": "",
"Priority": "Normal",
"SRCommunityPoliceStation": "RAMPART",
"UpdatedByUserLogin": "KAHUNA30DEC",
"Status": "Open",
"SRType": "Feedback",
"ServiceDate": "02/11/2015",
"Source": "Mobile App"
}
}
Query 3 output
{
"status": {
"code": 311,
"message": "Service Request Successfully Submited",
"cause": ""
},
"Response": {
"PrimaryRowId": "1-3JXL5",
"ListOfServiceRequest": {
"ServiceRequest": [
{
"SRNumber": "1-5968841"
}
]
}
}
}
Python Script responds with query two output
import json

import arcpy      # third-party: Esri ArcPy (GIS toolkit)
import requests   # third-party: HTTP client

# POST a stored JSON query payload to the 311 service-request endpoint and
# pretty-print the decoded response.
# NOTE(review): jsonpickle was unnecessary here -- the file already contains
# plain JSON, so json.load / json.dumps round-trips it with the stdlib alone.
QUERY_URL = "https://myDatatest.lacity.org/myDatarouter/srbe/1/QuerySR"

# Raw string: 'C:\Users\...' is a fatal \U escape on Python 3.
# The context manager guarantees the file is closed.
with open(r'C:\Users\Administrator\Desktop\myData.json') as payload_file:
    payload = json.load(payload_file)

headers = {'Content-type': 'text/plain', 'Accept': '/'}
r = requests.post(QUERY_URL, data=json.dumps(payload), headers=headers)

# WGS84 spatial reference -- presumably used later when writing features
# to the geodatabase; kept for that downstream step. TODO confirm.
sr = arcpy.SpatialReference(4326)

# decoded is a plain dict parsed from the JSON body.
decoded = r.json()
# pretty printing of json-formatted string
print(json.dumps(decoded, sort_keys=True, indent=4))
decoded is a dictionary containing the data you're looking for.
Try printing it to inspect the structure:
print decoded
print decoded.keys()
print decoded.items()

Categories

Resources