I am trying to get coordinates and a driving distance from the Bing Maps API, but I am getting inconsistencies. Is there something I'm doing wrong, or do the responses really vary that much?
Issue: the coordinates I get in the JSON response differ for the same starting address.
My input is an Excel file with the starting and delivery addresses, like so:
df = df[['Starting Address', 'Delivery Address', 'Driving Distance (Miles)', 'Radial Distance (Miles)']]
df = df.reset_index()
for index, row in df.iterrows():
    # Encode the addresses for use in the request URL
    enc_start = urllib.parse.quote(df['Starting Address'][index], safe='')
    print(enc_start)
    enc_del = urllib.parse.quote(df['Delivery Address'][index], safe='')
    print(enc_del)

    # API call
    url = ("http://dev.virtualearth.net/REST/V1/Routes/Driving?wp.0=" + enc_start +
           "&wp.1=" + enc_del + "&distanceUnit=mi" + "&optmz=distance" + "&key=" + bingMapsKey)
    response = requests.get(url).json()  # API response as JSON

    try:
        s_lat = response["resourceSets"][0]["resources"][0]["bbox"][2]  # Starting address latitude
        s_lon = response["resourceSets"][0]["resources"][0]["bbox"][3]  # Starting address longitude
        d_lat = response["resourceSets"][0]["resources"][0]["bbox"][0]  # Delivery address latitude
        d_lon = response["resourceSets"][0]["resources"][0]["bbox"][1]  # Delivery address longitude
        coords_start = (s_lat, s_lon)  # Coords of starting address
        coords_del = (d_lat, d_lon)    # Coords of delivery address
        print("c_str ", coords_start)
        print("c_del ", coords_del)

        # Optimized travel distance
        travelDistance = response["resourceSets"][0]["resources"][0]["travelDistance"]
        print("travel dist ", travelDistance)
        df.loc[[index], 'Driving Distance (Miles)'] = travelDistance
        # folium.Marker(location=[d_lat, d_lon], popup=df["Delivery Address"][index]).add_to(mc)  # Creating output map
    except IndexError:
        pass

    # Radial distance
    radialDistance = haversine(coords_start, coords_del, unit=Unit.MILES)
    # print(radialDistance)
    df.loc[[index], 'Radial Distance (Miles)'] = radialDistance
You are grabbing the bounding box coordinates rather than the actual starting location. The bounding box is just a recommended viewport for the full route, so it will change whenever the start and end points are not identical between queries.
To get the actual start/end points:
s_lat = response["resourceSets"][0]["resources"][0]["routeLegs"][0]["actualStart"]["coordinates"][0]
s_lon = response["resourceSets"][0]["resources"][0]["routeLegs"][0]["actualStart"]["coordinates"][1]
d_lat = response["resourceSets"][0]["resources"][0]["routeLegs"][0]["actualEnd"]["coordinates"][0]
d_lon = response["resourceSets"][0]["resources"][0]["routeLegs"][0]["actualEnd"]["coordinates"][1]
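For context, here is a minimal sketch of how that lookup could replace the bbox indexing inside the original try block (same response structure as above; the helper name is just for illustration):

def extract_endpoints(response):
    # Return ((s_lat, s_lon), (d_lat, d_lon)) from a Bing Routes response,
    # or None if the expected route resource is missing.
    try:
        leg = response["resourceSets"][0]["resources"][0]["routeLegs"][0]
        start = tuple(leg["actualStart"]["coordinates"])  # (lat, lon)
        end = tuple(leg["actualEnd"]["coordinates"])      # (lat, lon)
        return start, end
    except (KeyError, IndexError):
        return None

With that helper, coords_start, coords_del = extract_endpoints(response) replaces the four bbox lookups, and the haversine call stays the same.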
Related
I am using OpenTripPlanner in Python to calculate travel times via transit from a set of ~70 origin points to a set of ~600 destination points. When I calculate the travel times for each individual pair of points, I get one set of times, but the process is very slow (about 5 hours). When I use a surface to find the times, it is much faster (3 minutes), but the times seem much less accurate (documentation here). Many of the destinations show the same time despite having different origins. I am using a graph with GTFS data from all Bay Area agencies. My two functions are below:
import json
import requests

# function to return the itineraries point to point
def return_itineraries(param):
    ox = param[0]
    oy = param[1]
    dx = param[2]
    dy = param[3]
    date_us = param[4]
    hr = param[5]
    minute = param[6]
    # request parameters
    options = {
        'fromPlace': str(oy) + ", " + str(ox),
        'toPlace': str(dy) + ", " + str(dx),
        'time': str(hr) + ':' + str(minute),
        'date': date_us,
        'mode': 'TRANSIT,WALK',
        'maxWalkDistance': 5000,
        'clampInitialWait': 0,
        'wheelchair': False,
        # 'batch': True,
        'numItineraries': 1
    }
    # send to server and get data
    response = requests.get(
        "http://localhost:8080/otp/routers/default/plan",
        params=options
    )
    # return as json
    data = json.loads(response.text)
    return data
# one-to-many function
def return_itineraries_otm(param):
    ox = param[0]
    oy = param[1]
    date_us = param[2]
    hr = param[3]
    minute = param[4]
    # surface parameters
    surface_options = {
        'fromPlace': str(oy) + ", " + str(ox),
        'time': str(hr) + ':' + str(minute),
        'date': date_us,
        'mode': 'TRANSIT,WALK',
        'maxWalkDistance': 5000,
        'clampInitialWait': 0,
        'wheelchair': False,
        'batch': True,
    }
    # create surface
    surf_resp = requests.post(
        "http://localhost:8080/otp/surfaces",
        params=surface_options)
    surf_data = json.loads(surf_resp.text)
    # indicator parameters
    options = {
        'targets': 'destinations',
        'detail': 'true'
    }
    # send to server and get data
    response = requests.get(
        "http://localhost:8080/otp/surfaces/" + str(surf_data['id']) + "/indicator",
        params=options)
    # return as json
    data = json.loads(response.text)
    return data
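For reference, a minimal sketch of how these might be called, assuming a local OTP server on port 8080 and the parameter ordering used above (the coordinates and date string here are only placeholders):

# (lon, lat, dest_lon, dest_lat, date, hour, minute) for the point-to-point call
p2p_param = [-122.4194, 37.7749, -122.2711, 37.8044, '04-15-2019', 8, 30]
itinerary = return_itineraries(p2p_param)

# (lon, lat, date, hour, minute) for the one-to-many surface call
otm_param = [-122.4194, 37.7749, '04-15-2019', 8, 30]
surface_times = return_itineraries_otm(otm_param)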
Is there a way to improve the accuracy of the one-to-many function so that the results are closer to the point-to-point calculation?
Thanks!
I have a Google Earth Engine JavaScript script that detects water pixels in the closest-date SAR imagery. Link to the code: https://code.earthengine.google.com/0a35eea49123a5390b822bac7afc1b0c. I can run it in the GEE Code Editor and it returns exactly what I need (1 if the location is over water and 0 if it is over land).
I have tried to develop the following Python version, but it returns a dictionary rather than the single expected value.
import ee, datetime, csv

ee.Initialize()

waterThresh = -16
angle_threshold_1 = ee.Number(45.4)
angle_threshold_2 = ee.Number(31.66)

class AltimeterExtraction(object):
    def __init__(self, locationfile='./Bin/Location_Data.txt'):
        filecontent = open(locationfile, 'r')
        content = csv.DictReader(filecontent, delimiter='\t')

    def watertestpoint(self, lat=10.55587, lon=89.89789, date1='2019-04-05'):
        lat = ee.Number(lat)
        lon = ee.Number(lon)
        datep = datetime.datetime.strptime(date1, "%Y-%m-%d")
        date2 = datep + datetime.timedelta(days=-10)
        point = ee.Geometry.Point([lon, lat])
        S1 = ee.ImageCollection('COPERNICUS/S1_GRD').filterBounds(point).filterDate(date2, date1)
        S1 = S1.map(self.maskByAngle)
        S1 = S1.select('VV').median().rename('VV')
        S1 = S1.focal_median(50, 'circle', 'meters').rename('VV')
        WaterMask = S1.lt(waterThresh)
        flag = WaterMask.reduceRegion(**{
            'reducer': ee.Reducer.mean(),
            'geometry': point,
            'scale': 10
        })
        return flag.get('VV')

    def maskByAngle(self, img):
        I = ee.Image(img)
        angle = I.select('angle')
        mask1 = angle.lt(angle_threshold_1)
        mask2 = angle.gt(angle_threshold_2)
        I = I.updateMask(mask1)
        return I.updateMask(mask2)

P = AltimeterExtraction()
x = P.watertestpoint()
print(x)
Is there a way to get the single value instead of a dictionary from Python? I need the binary output (0 or 1) from the function.
You should add .getInfo() when printing to get the actual value at that point. Earth Engine processes all of the data on the server side, so you have to explicitly call .getInfo() to bring the result back to the client.
Here is an example I used:
P = AltimeterExtraction()
x = P.watertestpoint(lat=20.5564, lon=94.818, date1='2019-03-30')
print(x.getInfo())
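If you need a guaranteed 0-or-1 integer on the client side (the reduced mean can come back as a float, or as None when no imagery matched the filters), a small post-processing step along these lines should work; the variable names are just illustrative:

value = x.getInfo()          # float, 0.0/1.0, or None if no pixels were reduced
if value is None:
    water_flag = None        # no Sentinel-1 coverage in the date window
else:
    water_flag = int(round(value))
print(water_flag)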
I am trying to use the latitude and longitude returned by geopy to create a shapefile. The shapefile-creation part works fine if I give it a pair of numbers (44.977753, -93.2650108), but it will not work with the returned data lat_long. My thought is that it needs a ",", but I don't know.
from geopy.geocoders import GoogleV3
import csv
import ogr, osr, os

def geopy():
    loc = raw_input("What location? ")
    geolocator = GoogleV3()
    location = geolocator.geocode(loc, exactly_one=True)
    if location != None:
        Address = location.address
        lat_long = location.latitude, location.longitude
        latitude = str(location.latitude)
        longitude = str(location.longitude)
        print Address, latitude, longitude
        print ""
    else:
        print "There is no geographic information to return for the word in input. \n"

    # Input data
    pointCoord = lat_long
    fieldName = 'test'
    fieldType = ogr.OFTString
    fieldValue = 'test'
    outSHPfn = "output file"

    # create the spatial reference, WGS84
    srs = osr.SpatialReference()
    srs.ImportFromEPSG(4326)

    # Create the output shapefile
    shpDriver = ogr.GetDriverByName("ESRI Shapefile")
    if os.path.exists(outSHPfn):
        shpDriver.DeleteDataSource(outSHPfn)
    outDataSource = shpDriver.CreateDataSource(outSHPfn)
    outLayer = outDataSource.CreateLayer(outSHPfn, srs, geom_type=ogr.wkbPoint)

    # create point geometry
    point = ogr.Geometry(ogr.wkbPoint)
    point.AddPoint(pointCoord[0], pointCoord[1])

    # create a field
    idField = ogr.FieldDefn(fieldName, fieldType)
    outLayer.CreateField(idField)

    # Create the feature and set values
    featureDefn = outLayer.GetLayerDefn()
    outFeature = ogr.Feature(featureDefn)
    outFeature.SetGeometry(point)
    outFeature.SetField(fieldName, fieldValue)
    outLayer.CreateFeature(outFeature)

geopy()
You need to add a loop to put the latitude and longitude in a list. The code below will create a point shapefile for any location you give it.
from geopy.geocoders import GoogleV3
import csv
import ogr, osr, os

def geopy():
    """This function takes the word given above and uses GoogleV3
    to search for a location. If a location is found it returns the
    address, latitude and longitude, and passes the coordinates on
    to the shapefile writer."""
    geolocator = GoogleV3()
    loc_input = raw_input("Add the location you would like data back for: ")
    location = geolocator.geocode(loc_input, exactly_one=True)
    if location != None:
        Address = location.address
        lat_lon = location.latitude, location.longitude
        latitude = str(location.latitude)
        longitude = str(location.longitude)
        print Address, latitude, longitude
        print ""
        # Converts lat_lon to a list for use in making the shapefile.
        list_lat = []
        for i in range(1):
            list_lat.append(lat_lon)
        for list_of_lat_lon in list_lat:
            print ""
            # Calls the shapefile function with list_of_lat_lon
            shapefile(list_of_lat_lon)
    # If there is no location data to return it prints the line below and does not create a shapefile
    else:
        print "There is no geographic information to return for the word in input. \n"

def shapefile(list_of_lat_lon):
    """This function uses GDAL to write an ESRI shapefile
    using the latitude and longitude in list_of_lat_lon.
    """
    # Input data
    pointCoord = list_of_lat_lon
    fieldName = 'Lat'
    fieldType = ogr.OFTString
    fieldValue = 'test'
    outSHPfn = "Input file location"

    # create the spatial reference, WGS84
    srs = osr.SpatialReference()
    srs.ImportFromEPSG(4326)

    # Create the output shapefile
    shpDriver = ogr.GetDriverByName("ESRI Shapefile")
    if os.path.exists(outSHPfn):
        shpDriver.DeleteDataSource(outSHPfn)
    outDataSource = shpDriver.CreateDataSource(outSHPfn)
    outLayer = outDataSource.CreateLayer(outSHPfn, srs, geom_type=ogr.wkbPoint)

    # create point geometry, longitude (X) first then latitude (Y)
    point = ogr.Geometry(ogr.wkbPoint)
    point.AddPoint(pointCoord[1], pointCoord[0])

    # create a field
    idField = ogr.FieldDefn(fieldName, fieldType)
    outLayer.CreateField(idField)

    # Create the feature and set values
    featureDefn = outLayer.GetLayerDefn()
    outFeature = ogr.Feature(featureDefn)
    outFeature.SetGeometry(point)
    outFeature.SetField(fieldName, fieldValue)
    outLayer.CreateFeature(outFeature)

geopy()
In the answer to the question "R - plotly - combine bubble and choropleth map", it was described nicely how to combine a bubble and a choropleth map within one map in plotly. I would like to replicate exactly the same example in Python; however, I have not managed to do so based on the information provided in the plotly documentation (https://plot.ly/python/reference/).
The equivalent R code reads as follows:
lon = c(-73.9865812, -118.2427266, -87.6244212, -95.3676974)
pop = c(8287238, 3826423, 2705627, 2129784)
df_cities = data.frame(cities, lat, lon, pop)
state_codes = c("NY", "CA", "IL", "TX")
pop = c(19746227.0, 38802500.0, 12880580.0, 26956958.0)
df_states = data.frame(state_codes, pop)
plot_ly(df_cities, lon=lon, lat=lat,
        text=paste0(df_cities$cities, '<br>Population: ', df_cities$pop),
        marker=list(size = sqrt(pop/10000) + 1), type="scattergeo",
        filename="stackoverflow/choropleth+scattergeo") %>%
  add_trace(z=df_states$pop,
            locations=df_states$state_codes,
            text=paste0(df_states$state_codes, '<br>Population: ', df_states$pop),
            type="choropleth",
            colors = 'Purples',
            locationmode="USA-states") %>%
  layout(geo = list(scope="usa"))
How can this be implemented in Python?
Simply append .py to the graph's URL (e.g. https://plot.ly/~RPlotBot/1735.py) to see the corresponding Python code.
I found that I had to adjust the R code just a little to get it to work:
cities = c('New York', 'Los Angeles', 'Chicago', 'Houston')
lat = c(40.7305991, 34.053717, 41.8755546, 29.7589382)
lon = c(-73.9865812, -118.2427266, -87.6244212, -95.3676974)
pop = c(8287238, 3826423, 2705627, 2129784)
df_cities = data.frame(cities, lat, lon, pop)
state_codes = c("NY", "CA", "IL", "TX")
pop = c(19746227.0, 38802500.0, 12880580.0, 26956958.0)
df_states = data.frame(state_codes, pop)
plot_ly(df_cities, lon=lon, lat=lat,
        text = paste0(df_cities$cities, '<br>Population: ', df_cities$pop),
        marker = list(size = sqrt(pop/10000) + 1), type="scattergeo",
        filename = "stackoverflow/choropleth+scattergeo") %>%
  add_trace(z=df_states$pop,
            locations = df_states$state_codes,
            text = paste0(df_states$state_codes, '<br>Population: ', df_states$pop),
            type = "choropleth",
            colors = 'Purples',
            locationmode = "USA-states") %>%
  layout(geo = list(scope="usa"))
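For completeness, here is a sketch of roughly equivalent Python using the current plotly graph_objects API (this is not the code generated by the .py trick above, so treat the trace options as an approximation of the R example):

import plotly.graph_objects as go

cities = ['New York', 'Los Angeles', 'Chicago', 'Houston']
lat = [40.7305991, 34.053717, 41.8755546, 29.7589382]
lon = [-73.9865812, -118.2427266, -87.6244212, -95.3676974]
city_pop = [8287238, 3826423, 2705627, 2129784]

state_codes = ["NY", "CA", "IL", "TX"]
state_pop = [19746227.0, 38802500.0, 12880580.0, 26956958.0]

fig = go.Figure()

# Choropleth layer: state populations
fig.add_trace(go.Choropleth(
    locations=state_codes,
    z=state_pop,
    locationmode="USA-states",
    colorscale="Purples",
    text=[f"{s}<br>Population: {p:.0f}" for s, p in zip(state_codes, state_pop)],
))

# Bubble layer: city populations, sized like sqrt(pop/10000) + 1 in the R code
fig.add_trace(go.Scattergeo(
    lon=lon,
    lat=lat,
    text=[f"{c}<br>Population: {p}" for c, p in zip(cities, city_pop)],
    marker=dict(size=[(p / 10000) ** 0.5 + 1 for p in city_pop]),
))

fig.update_layout(geo=dict(scope="usa"))
fig.show()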
The purpose of the code is to make a PDF map book that displays all of the large lakes in North America. When I run it, however, it gives me a blank PDF. How can I fix this?
## Import arcpy module
import arcpy
import math
import os
from arcpy import env
arcpy.env.overwriteOutput = True
# Define inputs and outputs - Script arguments
arcpy.env.workspace = r"F:\Geog173\Lab7\Lab7_Data"
Lakes = "NA_Big_Lakes.shp"
Cities = "NA_Cities.shp"
NA = "North_America.shp"
##Python arguments
## Arguments = NA_Big_Lakes.shp NA_Cities.shp New_Lakes.shp Center_Lakes.shp
Lakes= 'NA_Big_Lakes.shp'
NA = 'North_America.shp'
Cities = 'NA_Cities.shp'
##New_Lakes = 'New_Lakes.shp'
##Center_Lakes = 'Center_Lakes.shp'
# Identify the geometry field
desc = arcpy.Describe(Lakes)
shapeName = desc.ShapeFieldName
# Identify the geometry field in Cities shapefile
##desc = arcpy.Describe(Cities)
##shapefieldnameCity = desc.ShapeFieldName
#Get lake cursor
inrows = arcpy.SearchCursor(Lakes)
# Set up variables for output path and PDF file name
outDir = r"F:\Geog173\Lab7\Lab7_Data"
finalMapPDF_filename = outDir + r"\NA_Big_Lake_Mapbook.pdf"
# Check whether the mapbook PDF exists. If it does, delete it.
if os.path.exists(finalMapPDF_filename):
    os.remove(finalMapPDF_filename)
# Create map book PDF
finalMapPDF = arcpy.mapping.PDFDocumentCreate(finalMapPDF_filename)
# Create MapDocument object pointing to specified mxd
mxd = arcpy.mapping.MapDocument(outDir + r"\OriginalMap.mxd")
# Get dataframe
df = arcpy.mapping.ListDataFrames(mxd)[0]
# ----------------------------------------------------------------------------#
# Start appending pages. Title page first.
# ----------------------------------------------------------------------------#
# Find text element with value "test", and replace it with other value
mapText = "A Map Book for North American Large Lakes " + '\n\r' + "Kishore, A., Geog173, Geography, UCLA" + '\n\r' + " Lake number: 18" + '\n\r' + " Total area: 362117 km2" + '\n\r' + " Mean area: 20118 km2"
print mapText
for elm in arcpy.mapping.ListLayoutElements(mxd, "TEXT_ELEMENT"):
    if elm.text == "test":
        elm.text = mapText
arcpy.RefreshTOC()
arcpy.RefreshActiveView()
#df.extent = feature.extent
arcpy.mapping.ExportToPDF(mxd, outDir + r"\TempMapPages.pdf")
# Append multi-page PDF to finalMapPDF
finalMapPDF.appendPages(outDir + r"\TempMapPages.pdf")
#initialize text value, so it can be reused in next iteration
for elm in arcpy.mapping.ListLayoutElements(mxd, "TEXT_ELEMENT"):
    if elm.text == mapText:
        elm.text = "test"
# ----------------------------------------------------------------------------#
# Loop through each lake
# ----------------------------------------------------------------------------#
# Loop through each row/feature
lakecount = 0
for row in inrows:
    lakecount = lakecount + 1
    CITY_NAME = ""
    CNTRY_NAME = ""
    ADMIN_NAME = ""
    POP_CLASS = ""
    DISTANCE = 0
    XY = ""
    # print "shapeName", shapeName
    # Create the geometry object
    feature = row.getValue(shapeName)
    mapText = "Lake FID: " + str(row.FID) + ", Area (km2): " + str(row.Area_km2)
    print mapText
    # Find text element with value "test", and replace it with other value
    for elm in arcpy.mapping.ListLayoutElements(mxd, "TEXT_ELEMENT"):
        if elm.text == "test":
            elm.text = mapText
    arcpy.RefreshTOC()
    arcpy.RefreshActiveView()
    df.extent = feature.extent
    arcpy.mapping.ExportToPDF(mxd, outDir + r"\TempMapPages.pdf")
    # Append multi-page PDF to finalMapPDF
    finalMapPDF.appendPages(outDir + r"\TempMapPages.pdf")
# Set up properties for Adobe Reader and save PDF.
finalMapPDF.updateDocProperties(pdf_open_view = "USE_THUMBS",
pdf_layout = "SINGLE_PAGE")
finalMapPDF.saveAndClose()
# Done. Clean up and let user know the process has finished.
del row, inrows
del mxd, finalMapPDF
print "Map book for lakes in North America is complete!"
First off, you should remove the last lines of your code where you delete the mxd object. Run the code again and inspect the MXD: are the data layers drawing properly? I recommend getting the code working completely before adding cleanup like the del statements, so you can identify potential errors.
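As a minimal sketch of that kind of inspection (assuming the same mxd and df variables as in the script above, and the ArcGIS 10.x Python 2 environment), you could print the layers and the data frame extent before exporting, to confirm something is actually being drawn:

# List the layers the export will draw and flag broken data sources
for lyr in arcpy.mapping.ListLayers(mxd, "", df):
    print lyr.name, "broken source:", lyr.isBroken

# Confirm the data frame extent is not collapsed to an empty area
ext = df.extent
print "extent:", ext.XMin, ext.YMin, ext.XMax, ext.YMax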