I'm looking to compute poleward heat fluxes at a level in the atmosphere, i.e. the mean of (u't'). I'm aware of the covariance function in NumPy, but I cannot seem to implement it. My code is below.
from netCDF4 import Dataset
import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.basemap import Basemap
myfile = '/home/ubuntu/Fluxes_Test/out.nc'
nc = Dataset(myfile, mode='r')
lon = nc.variables['lon'][:] # Longitude
lat = nc.variables['lat'][:] # Latitude
time = nc.variables['time'][:] # Time
lev = nc.variables['lev'][:] # Level
wind = nc.variables['ua'][:] # Zonal wind component
temp = nc.variables['ta'][:] # Air temperature
lon = lon-180 # to shift co-ordinates to -180 to 180.
variable1 = np.squeeze(wind,temp, axis=0) # this call fails: np.squeeze takes a single array
variable2 = np.cov(variable1)
m = Basemap(resolution='l')
lons, lats = np.meshgrid(lon,lat)
X, Y = m(lons, lats)
cs = m.pcolor(X,Y, variable2)
plt.show()
The variables wind and temp, whose flux (covariance) I am trying to compute, both have shape (3960, 64, 128): 3960 time steps on a 64x128 grid (with coordinates).
I tried squeezing both variables together to produce an array of shape (3960, 3960, 64, 128) so that cov could work on the first two axes (the two 3960s, the time series of wind and temp), but this didn't work.
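For reference, the flux can be computed without np.cov by forming the anomalies explicitly and averaging their product over the time axis. A minimal sketch, assuming wind and temp are ordered (time, lat, lon) as described above:
u_prime = wind - wind.mean(axis=0)  # departures from the time mean, u'
t_prime = temp - temp.mean(axis=0)  # departures from the time mean, t'
heat_flux = (u_prime * t_prime).mean(axis=0)  # mean of (u't'), shape (64, 128)
heat_flux then has the (lat, lon) shape that m.pcolor expects in place of variable2.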
I have a 3D numpy array of temperature values on a grid. From this I can compute the gradients using dTdx, dTdy, dTdz = np.gradient(T). Now I'm only interested in the values of the gradients on the isosurface where the temperature is 900. What I want to do is something like (pseudo-codish):
import numpy as np
def regular(x, y, z, q=100, k=175, a=7.1e-5):
    R = np.sqrt(x**2 + y**2 + z**2)
    return q / (2*np.pi*k) * (1/R) * np.exp(-0.5/a*(R+x))  # use the q parameter, not a hard-coded 100
res = 0.05  # grid spacing; left undefined in the original snippet, value assumed here
x = np.arange(-1.5, 0.5+res/2, res)*1e-3
y = np.arange(-1.0, 1.0+res/2, res)*1e-3
z = np.arange(0.0, 0.5+res/2, res)*1e-3
Y, X, Z = np.meshgrid(y, x, z)
T = regular(X, Y, Z)
dTdx, dTdy, dTdz = np.gradient(T)
(xind,yind,zind) = <package>.get_contour_indices(X,Y,Z,T,value=900)
x_gradients_at_isosurface = dTdx[xind,yind,zind]
...
I've tried:
import numpy as np
from skimage import measure
contour_data = measure.find_contours(T[:,:,0],900)
contour_data = np.int_(np.round(contour_data[0]))
xs,ys = contour_data[:,0],contour_data[:,1]
gradients_of_interest = np.array([G[x,y,0] for x,y in zip(xs,ys)])  # G is one of the gradient arrays, e.g. dTdx
This works fine, but only for 2D data. I'm looking for the 3D equivalent. I've found the following:
import plotly.graph_objects as go
surf = go.Isosurface(x=X.flatten(),y=Y.flatten(),z=Z.flatten(),value=T.flatten(),isomin=900,isomax=900)
fig = go.Figure(data=surf)
fig.show()
But I'm not interested in plotting it. I want to know the indices where the temperature is T=900 so I can use it on the gradients. Any ideas?
You need skimage.measure.marching_cubes.
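A minimal sketch of how that could look with the arrays from the question; rounding the fractional vertex coordinates to the nearest grid indices is one simple choice for sampling the gradients, not the only one:
import numpy as np
from skimage import measure
# verts holds fractional (i, j, k) array indices of the level-900 isosurface vertices
verts, faces, normals, values = measure.marching_cubes(T, level=900)
idx = np.round(verts).astype(int)
xind, yind, zind = idx[:, 0], idx[:, 1], idx[:, 2]
x_gradients_at_isosurface = dTdx[xind, yind, zind]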
Using NASA's SRTM data, I've generated a global elevation heatmap.
The problem, however, is that the continents tend to blend into the ocean because of the range of elevation values. Is it possible to change the colorbar's scale so that the edges of the continents are more distinct from the ocean? I've tried different cmaps, but they all seem to suffer from the same problem.
Here is my code. I'm initializing a giant array (with 0s) to hold global elevation data, and then populating it file by file from the SRTM dataset. Each file is 1 degree latitude by 1 degree longitude.
Another question I had was regarding the map itself. For some reason, the Appalachian Mountains seem to have disappeared entirely.
import os
import numpy as np
from .srtm_map import MapGenerator
from ..utils.hgt_parser import HGTParser
from tqdm import tqdm
import cv2
import matplotlib.pyplot as plt
import richdem as rd
class GlobalMapGenerator():
    def __init__(self):
        self.gen = MapGenerator()
        self.base_dir = "data/elevation/"
        self.hgt_files = os.listdir(self.base_dir)
        self.global_elevation_data = None

    def shrink(self, data, rows, cols):
        # integer division so that reshape gets whole-number block sizes
        return data.reshape(rows, data.shape[0] // rows, cols, data.shape[1] // cols).sum(axis=1).sum(axis=2)

    def GenerateGlobalElevationMap(self, stride):
        res = 1201 // stride
        max_N = 59
        max_W = 180
        max_S = 56
        max_E = 179
        # N59 --> N00
        # S01 --> S56
        # E000 --> E179
        # W180 --> W001
        # Initialize the global elevation array
        self.global_elevation_data = np.zeros(( res*(max_S+max_N+1), res*(max_E+max_W+1) ))
        print("Output Image Shape:", self.global_elevation_data.shape)
        for hgt_file in tqdm(self.hgt_files):
            lat_letter = hgt_file[0]
            lon_letter = hgt_file[3]
            lat = int(hgt_file[1:3])
            lon = int(hgt_file[4:7])
            if lat_letter == "S":
                # Shift south down by max_N, but south starts at S01 so we translate up by 1 too
                lat_trans = max_N + lat - 1
            else:
                # Bigger N lat means further up, e.g. N59 is at index 0 and is higher than N00
                lat_trans = max_N - lat
            if lon_letter == "E":
                # Shift east right by max_W
                lon_trans = max_W + lon
            else:
                # Bigger W lon means further left, e.g. W180 is at index 0 and is more left than W001
                lon_trans = max_W - lon
            # load in data from file, resized to res x res
            data = cv2.resize(HGTParser(os.path.join(self.base_dir, hgt_file)), (res, res))
            # generate bounds (x/y --> lon/lat) for this tile within the giant array
            lat_bounds = [res*lat_trans, res*(lat_trans+1)]
            lon_bounds = [res*lon_trans, res*(lon_trans+1)]
            try:
                self.global_elevation_data[ lat_bounds[0]:lat_bounds[1], lon_bounds[0]:lon_bounds[1] ] = data
            except ValueError:
                print("REFERENCE ERROR: " + hgt_file)
                print("lat: ", lat_bounds)
                print("lon: ", lon_bounds)
        # generate the figure; save before plt.show() so the saved image is not blank
        plt.figure(figsize=(20,20))
        plt.imshow(self.global_elevation_data, cmap="rainbow")
        plt.title("Global Elevation Heatmap")
        plt.colorbar()
        np.save("figures/GlobalElevationMap.npy", self.global_elevation_data)
        plt.savefig("figures/GlobalElevationMap.png")
        plt.show()

    def GenerateGlobalSlopeMap(self, stride):
        pass
Use a TwoSlopeNorm for your norm, like the TwoSlopeNorm example in the matplotlib docs.
From the example:
Sometimes we want to have a different colormap on either side of a conceptual center point, and we want those two colormaps to have different linear scales. An example is a topographic map where the land and ocean have a center at zero, but land typically has a greater elevation range than the water has depth range, and they are often represented by a different colormap.
If you set the midpoint at sea level (0), then you can have two very different scalings based on ocean elevation vs land elevation.
Example code (taken from the example linked above):
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.colors as colors
import matplotlib.cbook as cbook
from matplotlib import cm
dem = cbook.get_sample_data('topobathy.npz', np_load=True)
topo = dem['topo']
longitude = dem['longitude']
latitude = dem['latitude']
fig, ax = plt.subplots()
# make a colormap that has land and ocean clearly delineated and of the
# same length (256 + 256)
colors_undersea = plt.cm.terrain(np.linspace(0, 0.17, 256))
colors_land = plt.cm.terrain(np.linspace(0.25, 1, 256))
all_colors = np.vstack((colors_undersea, colors_land))
terrain_map = colors.LinearSegmentedColormap.from_list(
'terrain_map', all_colors)
# make the norm: Note the center is offset so that the land has more
# dynamic range:
divnorm = colors.TwoSlopeNorm(vmin=-500., vcenter=0, vmax=4000)
pcm = ax.pcolormesh(longitude, latitude, topo, rasterized=True, norm=divnorm,
cmap=terrain_map, shading='auto')
# Simple geographic plot, set aspect ratio because distance between lines of
# longitude depends on latitude.
ax.set_aspect(1 / np.cos(np.deg2rad(49)))
ax.set_title('TwoSlopeNorm(x)')
cb = fig.colorbar(pcm, shrink=0.6)
cb.set_ticks([-500, 0, 1000, 2000, 3000, 4000])
plt.show()
See how it scales numbers with this simple usage (from docs):
>>> import matplotlib.colors as mcolors
>>> offset = mcolors.TwoSlopeNorm(vmin=-4000., vcenter=0., vmax=10000)
>>> data = [-4000., -2000., 0., 2500., 5000., 7500., 10000.]
>>> offset(data)
array([0., 0.25, 0.5, 0.625, 0.75, 0.875, 1.0])
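Applied to the heatmap from the question, it could look something like this inside GenerateGlobalElevationMap, replacing the imshow call; the vmin/vmax values are rough guesses for elevation in metres and should be tuned to your data (SRTM has no bathymetry, so the ocean cells sit at the 0 fill value):
import matplotlib.colors as colors
divnorm = colors.TwoSlopeNorm(vmin=-1000., vcenter=0., vmax=6000.)  # vmin/vmax assumed, adjust to your data
plt.imshow(self.global_elevation_data, cmap="terrain", norm=divnorm)
plt.colorbar()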
How can I increase the resolution of NetCDF data for feeding to a CNN in Python?
Is there any function in xarray to do this?
Thanks in advance.
You can use xarray interpolation to achieve what you want. You have to define the new latitude and longitude you'd like to interpolate to and pass them to the xarray interp function. See the example below.
import xarray as xr
import numpy as np
import matplotlib.pyplot as plt
# Create sample data
dx = 0.25
lon = np.arange(0, 360, dx)
lat = np.arange(-90, 90+dx, dx)
data = 10 * np.random.rand(len(lat), len(lon))
data_set = xr.Dataset({"temp": (["lat", "lon"], data)},
coords={"lon": lon,"lat": lat})
# Just checking the datasets are not empty
print(data_set)
# Create new lat and lon
dx_new = 0.125
newlon = np.arange(0, 360, dx_new)
newlat = np.arange(-90, 90+dx_new, dx_new)
# Interpolate
data_set_interp = data_set.interp(lat=newlat, lon=newlon)
# Check output
print(data_set_interp)
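If your data lives in a NetCDF file rather than a Dataset built in memory, the same call applies after opening the file; the filenames here are placeholders:
# open the file, interpolate onto the finer grid, and save the result
ds = xr.open_dataset("input.nc")  # placeholder filename
ds_interp = ds.interp(lat=newlat, lon=newlon)
ds_interp.to_netcdf("input_highres.nc")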
I'm currently trying to get TROPOMI data into GeoTIFF format. I downloaded some data in netCDF4 format, from which I obtain three numpy arrays: one with latitude coordinates, one with longitude coordinates, and one with carbon monoxide values.
So I have a matrix of values for my raster, and for each value I know its longitude and latitude.
With this information how can I construct a georeferenced raster?
I read in the data as follows
import netCDF4
from netCDF4 import Dataset
import numpy as np
file = '/home/daniel/Downloads/S5P_NRTI_L2__CO_____20190430T171319_20190430T171819_08006_01_010301_20190430T175151.nc'
rootgrp = Dataset(file, "r",format="NETCDF4")
lat = rootgrp.groups['PRODUCT']['latitude'][:]
lon = rootgrp.groups['PRODUCT']['longitude'][:]
carbon = rootgrp.groups['PRODUCT']['carbonmonoxide_total_column'][:]
obtaining three matrices with shape (1, 290, 215).
Now I would like to convert this to a Mercator-projected GeoTIFF, but I do not know how to go about it.
The gdal_translate option seems to work, but here is an alternative, explicit way I did it.
#importing packages
import numpy as np
from scipy import interpolate
from netCDF4 import Dataset
from shapely.geometry import Point
import geopandas as gpd
from geopy.distance import geodesic
import rasterio
import matplotlib.pyplot as plt
#load data
file = '/home/daniel/Ellipsis/db/downloaded/rawtropomi/S5P_NRTI_L2__CO_____20190430T171319_20190430T171819_08006_01_010301_20190430T175151.nc'
rootgrp = Dataset(file, "r",format="NETCDF4")
lat = rootgrp.groups['PRODUCT']['latitude'][:]
lon = rootgrp.groups['PRODUCT']['longitude'][:]
carbon = rootgrp.groups['PRODUCT']['carbonmonoxide_total_column'][:]
carbon = carbon.filled(0)
lat = lat.filled(-1000)
lon = lon.filled(-1000)
carbon = carbon.flatten()
lat = lat.flatten()
lon = lon.flatten()
#calculate the real distance between corners and get the width and height in pixels, assuming you want a pixel resolution of at least 7 by 7 kilometers
w = max(geodesic((min(lat),max(lon)), (min(lat),min(lon))).meters/7000 , geodesic((max(lat),max(lon)), (max(lat),min(lon))).meters/14000)
h = geodesic((min(lat),max(lon)), (max(lat),max(lon))).meters/14000
# create a GeoDataFrame with the latitude, longitude and measurement values as its rows, then transform it to the Mercator projection EPSG:3395 (or a projection of your choosing)
points = [Point(xy) for xy in zip(lon, lat)]
crs = 'EPSG:4326'
data = gpd.GeoDataFrame({'value':carbon}, crs=crs, geometry=points)
data = data.to_crs('EPSG:3395')
data['lon'] = data.bounds['maxx'].values
data['lat'] = data.bounds['maxy'].values
#make a grid of coordinates: you need to calculate the coordinate of each pixel in the desired raster
minlon = min(data['lon'])
maxlon = max(data['lon'])
minlat = min(data['lat'])
maxlat = max(data['lat'])
lon_list = np.arange(minlon, maxlon, (maxlon-minlon)/w )
lat_list = np.arange(minlat, maxlat, (maxlat-minlat)/h)
lon_2d, lat_2d = np.meshgrid(lon_list, lat_list)
#use the values in the GeoDataFrame to interpolate values onto the coordinate raster
r = interpolate.griddata(points = (data['lon'].values,data['lat'].values), values = data['value'].values, xi = (lon_2d, lat_2d))
r = np.flip(r, axis = 0)
#check result
plt.imshow(r)
#save raster
transform = rasterio.transform.from_bounds(west = minlon, south = minlat, east = maxlon, north = maxlat, width = r.shape[1], height = r.shape[0])
file_out = 'test.tiff'
new_dataset = rasterio.open(file_out , 'w', driver='GTiff', compress='lzw',
                            height = r.shape[0], width = r.shape[1],
                            count = 1, dtype=str(r.dtype),
                            crs= data.crs,
                            transform= transform)
# r is a 2D array, so write it as band 1
new_dataset.write(r, 1)
new_dataset.close()
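As a quick sanity check, you can reopen the written file and confirm the georeferencing looks sensible:
# reopen the GeoTIFF and inspect its CRS, bounds and shape
with rasterio.open(file_out) as src:
    print(src.crs, src.bounds, src.shape)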
I would suggest looking at this answer using gdal_translate:
Convert NetCDF (.nc) to GEOTIFF
gdal_translate -of GTiff file.nc test.tiff
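If the NetCDF file holds several variables, gdal_translate may need to be pointed at a specific subdataset using GDAL's NETCDF:"file":variable syntax; the variable name below is only an example:
gdal_translate -of GTiff NETCDF:"file.nc":carbonmonoxide_total_column test.tiff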
I'd like to map a distribution of values from an irregular grid onto a regular one.
I've tried the different interpolators, but it seems I'm not able to get it to work.
Here is the code I've written:
import numpy as np
from scipy import interpolate
import matplotlib.pyplot as plt
N = 100
M = 10
lat = ((np.random.rand(M,N))*2)+0.2
lon = ((np.random.rand(M,N))*3)+0.2
theta = ((np.random.rand(M,N))*180)
lat_min = np.min(lat)
lat_max = np.max(lat)
lon_min = np.min(lon)
lon_max = np.max(lon)
dlat = 0.1 # regular step for the lat[rad]
dlon = 0.1 # regular step for the lon[rad]
# Grid dimensions
Nlat = int(np.abs(lat_max-lat_min)/dlat)+1  # np.int was removed in recent NumPy; use the builtin int
Nlon = int(np.abs(lon_max-lon_min)/dlon)+1
# Lat-Lon vector
reg_lat = np.linspace(lat_min, lat_max, Nlat) # regularly spaced latitude vector
reg_lon = np.linspace(lon_min, lon_max, Nlon) # regularly spaced longitude vector
# Lat-Lon regular Grid
reg_lon_mesh, reg_lat_mesh = np.meshgrid(reg_lon, reg_lat)
I've used:
theta2 = interpolate.griddata((lon.ravel(), lat.ravel()), theta.ravel(), (reg_lon_mesh, reg_lat_mesh), method='cubic')
but the interpolation seems wrong
and
f = interpolate.interp2d(lon.ravel(), lat.ravel(), theta.ravel(), kind='cubic')
and it raises the warning:
A theoretically impossible result when finding a smoothing spline
with fp = s. Probably causes: s too small or badly chosen eps.
(abs(fp-s)/s>0.001)
kx,ky=3,3 nx,ny=36,34 m=1000 fp=14832451.907306 s=0.000000
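For what it's worth, here is a sketch of the same regridding with method='linear' plus a nearest-neighbour pass to fill the NaNs that griddata returns outside the convex hull of the input points; this is only a suggestion, since cubic interpolation tends to overshoot on scattered data, which may explain the result looking wrong:
theta2 = interpolate.griddata((lon.ravel(), lat.ravel()), theta.ravel(),
                              (reg_lon_mesh, reg_lat_mesh), method='linear')
# points outside the convex hull are NaN after linear interpolation; fill them
mask = np.isnan(theta2)
theta2[mask] = interpolate.griddata((lon.ravel(), lat.ravel()), theta.ravel(),
                                    (reg_lon_mesh[mask], reg_lat_mesh[mask]), method='nearest')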