From 25267af7a23c1a395bdbac7fac0d8d37f8eeea0a Mon Sep 17 00:00:00 2001 From: Eliot Quon Date: Tue, 11 May 2021 00:01:18 -0600 Subject: [PATCH 01/33] Add option to write points (or not) from BoundaryCoupling.write() --- mmctools/coupling/sowfa.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/mmctools/coupling/sowfa.py b/mmctools/coupling/sowfa.py index 87b0bba..094feab 100755 --- a/mmctools/coupling/sowfa.py +++ b/mmctools/coupling/sowfa.py @@ -394,7 +394,7 @@ def _check_xarray_dataset(self, print('Input is an {:s}-boundary at {:g}'.format(constdim, self.ds.coords[constdim].values)) - def write(self, fields, binary=False, gzip=False): + def write(self, fields, points=True, binary=False, gzip=False): """ Write surface boundary conditions to SOWFA-readable input files for the solver in constant/boundaryData @@ -408,9 +408,13 @@ def write(self, fields, binary=False, gzip=False): field name, values corresponding to dataset data variables; values may be a single variable (scalar) or a list/tuple of variables (vector) + points : bool, optional + Write out points definition for this patch binary : bool, optional Write out actual data (coordinates, scalars, vectors) in binary for faster I/O + gzip : bool, optional + Write out compressed data to save disk space """ # check output options if binary and gzip: @@ -426,7 +430,8 @@ def write(self, fields, binary=False, gzip=False): self.bndry_dims = [dim for dim in ['x','y','height'] if dim in dims] assert (len(self.bndry_dims) == 2) # write out patch/points - self._write_points(binary=binary, gzip=gzip) + if points: + self._write_points(binary=binary, gzip=gzip) # write out patch/*/field for fieldname,dvars in fields.items(): if isinstance(dvars, (list,tuple)): From fe38455144acb8ead0e2123f6d928d1434af17ed Mon Sep 17 00:00:00 2001 From: Eliot Quon Date: Tue, 11 May 2021 01:48:56 -0600 Subject: [PATCH 02/33] Load tslist data with dimension coordinate 'height' --- mmctools/wrf/ts.py | 6 ++---- 1 file 
changed, 2 insertions(+), 4 deletions(-) diff --git a/mmctools/wrf/ts.py b/mmctools/wrf/ts.py index 289fb9f..e9e1071 100644 --- a/mmctools/wrf/ts.py +++ b/mmctools/wrf/ts.py @@ -188,7 +188,8 @@ def _read_towers(self): agl=True, verbose=self.verbose ) - self.ds = self.ds.swap_dims({'nz':'height'}) # to facilitate ds.interp() + self.ds = self.ds.swap_dims({'nz':'z'}) # dimension coordinate to enable ds.interp() + self.ds = self.ds.rename({'z':'height'}) if self.verbose: print('... done reading ts outputs') @@ -281,9 +282,6 @@ def _create_dataset_from_list(self,i,j,k,allpts,dslist): elif (j is not None): mydim = 'y' idx = j - elif (k is not None): - mydim = 'height' - idx = k if ((i is not None) or (j is not None)) and allpts: # if allpts, interpolate side boundary profiles to exact domain heights ds = ds.interp(height=self.domain.z) From b138918b6f2541cda83c0ecf23fcd21ea31c6f82 Mon Sep 17 00:00:00 2001 From: Eliot Quon Date: Tue, 11 May 2021 01:52:27 -0600 Subject: [PATCH 03/33] Drop x,y coord in returned ds from interp_to_latlon These coordinates are not interpolated and meaningless in this context --- mmctools/wrf/ts.py | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/mmctools/wrf/ts.py b/mmctools/wrf/ts.py index e9e1071..25b97de 100644 --- a/mmctools/wrf/ts.py +++ b/mmctools/wrf/ts.py @@ -230,18 +230,17 @@ def interp_to_latlon(self,latlon): assert (tgtlat >= wrflat[i,j]) and (tgtlat < wrflat[i,j+1]) assert (tgtlon >= wrflon[i,j]) and (tgtlon < wrflon[i+1,j]) # bilinear interpolation - f00 = self.ds.sel(nx=i ,ny=j) - f10 = self.ds.sel(nx=i+1,ny=j) - f01 = self.ds.sel(nx=i ,ny=j+1) - f11 = self.ds.sel(nx=i+1,ny=j+1) - finterp = f00 * (wrflon[i+1,j] - tgtlon ) * (wrflat[i,j+1] - tgtlat ) + \ - f10 * (tgtlon - wrflon[i,j]) * (wrflat[i,j+1] - tgtlat ) + \ - f01 * (wrflon[i+1,j] - tgtlon ) * (tgtlat - wrflat[i,j]) + \ - f11 * (tgtlon - wrflon[i,j]) * (tgtlat - wrflat[i,j]) + f00 = self.ds.sel(nx=i ,ny=j).drop_vars(['x','y']) + f10 = 
self.ds.sel(nx=i+1,ny=j).drop_vars(['x','y']) + f01 = self.ds.sel(nx=i ,ny=j+1).drop_vars(['x','y']) + f11 = self.ds.sel(nx=i+1,ny=j+1).drop_vars(['x','y']) + finterp = f00 * (wrflon[i+1,j] - tgtlon ) * (wrflat[i,j+1] - tgtlat ) \ + + f10 * (tgtlon - wrflon[i,j]) * (wrflat[i,j+1] - tgtlat ) \ + + f01 * (wrflon[i+1,j] - tgtlon ) * (tgtlat - wrflat[i,j]) \ + + f11 * (tgtlon - wrflon[i,j]) * (tgtlat - wrflat[i,j]) finterp = finterp / ((wrflon[i+1,j] - wrflon[i,j]) * (wrflat[i,j+1] - wrflat[i,j])) - # note: y and z coordinates don't get interpolated finterp = finterp.assign_coords({'lon':tgtlon,'lat':tgtlat}) - return finterp.drop_vars(['y','z']) + return finterp def map_to_boundary(self,i=None,j=None,k=None,allpts=False): From e650d191662cf0247460b5cd9949b1a72f6fbfa8 Mon Sep 17 00:00:00 2001 From: Eliot Quon Date: Tue, 11 May 2021 01:54:06 -0600 Subject: [PATCH 04/33] Update assertions --- mmctools/coupling/sowfa.py | 24 +++++++++++++----------- 1 file changed, 13 insertions(+), 11 deletions(-) diff --git a/mmctools/coupling/sowfa.py b/mmctools/coupling/sowfa.py index 094feab..b3196b2 100755 --- a/mmctools/coupling/sowfa.py +++ b/mmctools/coupling/sowfa.py @@ -381,18 +381,17 @@ def _check_xarray_dataset(self, """Do all sanity checks here""" for dim in self.ds.dims: # dimension coordinates - assert dim in expected_dims + assert dim in expected_dims, f'missing dim {dim}' coord = self.ds.coords[dim] - assert (coord.dims[0] == dim) and (len(coord.dims) == 1) + assert (coord.dims[0] == dim) and (len(coord.dims) == 1), \ + f'{dim} is not a dimension coordinate' # Only handle a single boundary plane at a time; boundaries # should be aligned with the Cartesian axes - dims = expected_dims.copy() - for dim in self.ds.dims: - dims.remove(dim) - assert (len(dims) == 1) - constdim = dims[0] - print('Input is an {:s}-boundary at {:g}'.format(constdim, - self.ds.coords[constdim].values)) + constdims = [dim for dim in self.ds.dims if self.ds.dims[dim]==1] + assert (len(constdims) == 
1), 'more than one constant dim' + constdim = constdims[0] + print('Input is a {:s}-boundary at {:g}'.format(constdim, + float(self.ds.coords[constdim]))) def write(self, fields, points=True, binary=False, gzip=False): """ @@ -427,8 +426,11 @@ def write(self, fields, points=True, binary=False, gzip=False): # make sure ordering of bnd_dims is correct dims = list(self.ds.dims) dims.remove('datetime') - self.bndry_dims = [dim for dim in ['x','y','height'] if dim in dims] - assert (len(self.bndry_dims) == 2) + self.bndry_dims = [ + dim for dim in ['x','y','height'] + if (dim in dims) and self.ds.dims[dim] > 1 + ] + assert (len(self.bndry_dims) == 2), f'boundary patch dims: {str(self.bndry_dims)}' # write out patch/points if points: self._write_points(binary=binary, gzip=gzip) From b05684691182c9745edb96dfedfb91be0c9d7f39 Mon Sep 17 00:00:00 2001 From: Eliot Quon Date: Tue, 11 May 2021 02:05:04 -0600 Subject: [PATCH 05/33] Properly handle stored constant dim coord --- mmctools/coupling/sowfa.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/mmctools/coupling/sowfa.py b/mmctools/coupling/sowfa.py index b3196b2..1ee0eb3 100755 --- a/mmctools/coupling/sowfa.py +++ b/mmctools/coupling/sowfa.py @@ -392,6 +392,7 @@ def _check_xarray_dataset(self, constdim = constdims[0] print('Input is a {:s}-boundary at {:g}'.format(constdim, float(self.ds.coords[constdim]))) + self.constdim = constdim def write(self, fields, points=True, binary=False, gzip=False): """ @@ -480,7 +481,7 @@ def _write_points(self,fname='points',binary=False,gzip=False): print('Wrote',N,'points to',fpath) def _write_boundary_vector(self,fname,components,binary=False,gzip=False): - ds = self.ds.copy() + ds = self.ds.isel({self.constdim:0}) # add missing dimensions, if any for dim in self.bndry_dims: for var in components: From 9dae1207916ae55a658b362a311ca47c9c563e10 Mon Sep 17 00:00:00 2001 From: Eliot Quon Date: Tue, 11 May 2021 05:00:46 -0600 Subject: [PATCH 06/33] Properly handle const 
dim when writing out scalar field --- mmctools/coupling/sowfa.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mmctools/coupling/sowfa.py b/mmctools/coupling/sowfa.py index 1ee0eb3..4a63267 100755 --- a/mmctools/coupling/sowfa.py +++ b/mmctools/coupling/sowfa.py @@ -527,7 +527,7 @@ def _write_boundary_vector(self,fname,components,binary=False,gzip=False): print('Wrote',N,'vectors to',fpath,'at',str(tstamp)) def _write_boundary_scalar(self,fname,var,binary=False,gzip=False): - ds = self.ds.copy() + ds = self.ds.isel({self.constdim:0}) # add missing dimensions, if any for dim in self.bndry_dims: if dim not in ds[var].dims: From 16dc40cf9bfb31c90edbae5727176d2f438b0000 Mon Sep 17 00:00:00 2001 From: Eliot Quon Date: Wed, 12 May 2021 15:47:44 -0600 Subject: [PATCH 07/33] Rewrite bilinear interp --> 20% speedup --- mmctools/wrf/ts.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/mmctools/wrf/ts.py b/mmctools/wrf/ts.py index 25b97de..c01f5df 100644 --- a/mmctools/wrf/ts.py +++ b/mmctools/wrf/ts.py @@ -234,10 +234,11 @@ def interp_to_latlon(self,latlon): f10 = self.ds.sel(nx=i+1,ny=j).drop_vars(['x','y']) f01 = self.ds.sel(nx=i ,ny=j+1).drop_vars(['x','y']) f11 = self.ds.sel(nx=i+1,ny=j+1).drop_vars(['x','y']) - finterp = f00 * (wrflon[i+1,j] - tgtlon ) * (wrflat[i,j+1] - tgtlat ) \ - + f10 * (tgtlon - wrflon[i,j]) * (wrflat[i,j+1] - tgtlat ) \ - + f01 * (wrflon[i+1,j] - tgtlon ) * (tgtlat - wrflat[i,j]) \ - + f11 * (tgtlon - wrflon[i,j]) * (tgtlat - wrflat[i,j]) + coef00 = (wrflon[i+1,j] - tgtlon ) * (wrflat[i,j+1] - tgtlat ) + coef10 = (tgtlon - wrflon[i,j]) * (wrflat[i,j+1] - tgtlat ) + coef01 = (wrflon[i+1,j] - tgtlon ) * (tgtlat - wrflat[i,j]) + coef11 = (tgtlon - wrflon[i,j]) * (tgtlat - wrflat[i,j]) + finterp = coef00*f00 + coef10*f10 + coef01*f01 + coef11*f11 finterp = finterp / ((wrflon[i+1,j] - wrflon[i,j]) * (wrflat[i,j+1] - wrflat[i,j])) finterp = finterp.assign_coords({'lon':tgtlon,'lat':tgtlat}) return finterp 
From 6dbedb891ddc6f240354531eca98f96b5d02afba Mon Sep 17 00:00:00 2001 From: Eliot Quon Date: Wed, 12 May 2021 21:48:58 -0600 Subject: [PATCH 08/33] Handle dask chunked dataset in Toof --- mmctools/wrf/ts.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/mmctools/wrf/ts.py b/mmctools/wrf/ts.py index c01f5df..b22c3ee 100644 --- a/mmctools/wrf/ts.py +++ b/mmctools/wrf/ts.py @@ -260,13 +260,18 @@ def map_to_boundary(self,i=None,j=None,k=None,allpts=False): if self.verbose: print('selected lat:',selected_lat) print('selected lon:',selected_lon) + # get all selected profiles dslist = self._get_datasets_at_locations( selected_x,selected_y,selected_lat,selected_lon) # combine all interpolated profiles boundarydata = self._create_dataset_from_list(i,j,k,allpts,dslist) + if boundarydata.chunks: + from dask.diagnostics import ProgressBar + with ProgressBar(): + boundarydata = boundarydata.compute() return boundarydata - def _get_datasets_at_locations(self,selected_x, selected_y, selected_lat, selected_lon): + def _get_datasets_at_locations(self, selected_x, selected_y, selected_lat, selected_lon): dslist = [] for x,y,lat,lon in zip(selected_x, selected_y, selected_lat, selected_lon): ds = self.interp_to_latlon((lat,lon)) @@ -283,10 +288,11 @@ def _create_dataset_from_list(self,i,j,k,allpts,dslist): mydim = 'y' idx = j if ((i is not None) or (j is not None)) and allpts: - # if allpts, interpolate side boundary profiles to exact domain heights + # lateral boundaries: if allpts, interpolate side boundary + # profiles to exact domain heights ds = ds.interp(height=self.domain.z) elif k is not None: - # if horizontal boundary, interpolate to constant z + # horizontal boundaries: interpolate to constant z if self.verbose: print('interpolating to',self.domain.z[k]) ds = ds.interp(height=self.domain.z[k]) From 841b53d6cc2777ddd09e5146920c59f9266185f2 Mon Sep 17 00:00:00 2001 From: Eliot Quon Date: Wed, 12 May 2021 23:00:50 -0600 Subject: [PATCH 09/33] 
Add verbose option --- mmctools/coupling/sowfa.py | 13 +++++++++---- mmctools/wrf/ts.py | 2 +- 2 files changed, 10 insertions(+), 5 deletions(-) diff --git a/mmctools/coupling/sowfa.py b/mmctools/coupling/sowfa.py index 4a63267..259a0be 100755 --- a/mmctools/coupling/sowfa.py +++ b/mmctools/coupling/sowfa.py @@ -316,7 +316,8 @@ def __init__(self, name='patch', dateref=None, datefrom=None, - dateto=None): + dateto=None, + verbose=True): """ Initialize SOWFA input object. This should be called for _each_ inflow/outflow boundary. @@ -344,6 +345,7 @@ def __init__(self, with the last timestamp in df; only used if dateref is specified """ + self.verbose = verbose self.name = name self.dpath = os.path.join(dpath, name) # Create folder dpath if needed @@ -478,7 +480,8 @@ def _write_points(self,fname='points',binary=False,gzip=False): else: with self._open(fpath, 'w', gzip=gzip) as f: np.savetxt(f, pts, fmt='(%g %g %g)', header=header, footer=')', comments='') - print('Wrote',N,'points to',fpath) + if self.verbose: + print('Wrote',N,'points to',fpath) def _write_boundary_vector(self,fname,components,binary=False,gzip=False): ds = self.ds.isel({self.constdim:0}) @@ -524,7 +527,8 @@ def _write_boundary_vector(self,fname,components,binary=False,gzip=False): f.write(b'\n(0 0 0) // average value') else: f.write('\n(0 0 0) // average value') - print('Wrote',N,'vectors to',fpath,'at',str(tstamp)) + if self.verbose: + print('Wrote',N,'vectors to',fpath,'at',str(tstamp)) def _write_boundary_scalar(self,fname,var,binary=False,gzip=False): ds = self.ds.isel({self.constdim:0}) @@ -562,5 +566,6 @@ def _write_boundary_scalar(self,fname,var,binary=False,gzip=False): f.write(b'\n0 // average value') else: f.write('\n0 // average value') - print('Wrote',N,'scalars to',fpath,'at',str(tstamp)) + if self.verbose: + print('Wrote',N,'scalars to',fpath,'at',str(tstamp)) diff --git a/mmctools/wrf/ts.py b/mmctools/wrf/ts.py index b22c3ee..4e1ce14 100644 --- a/mmctools/wrf/ts.py +++ 
b/mmctools/wrf/ts.py @@ -253,7 +253,7 @@ def map_to_boundary(self,i=None,j=None,k=None,allpts=False): assert np.count_nonzero([idx is not None for idx in [i,j,k]])==1, \ 'Specify i, j, or k' if allpts: - print('WARNING: current implementation of allpts is likely to result in extreme memory usage and may crash') + print('WARNING: Current implementation of allpts can result in extreme memory usage') # interpolate to selected lat/lon selected_x, selected_y, selected_lat, selected_lon = \ self._select_boundary_latlon(i,j,k,allpts) From a5c31a15b489f6701a1660cd25b57d60eb796cee Mon Sep 17 00:00:00 2001 From: William Lassman Date: Fri, 28 May 2021 09:11:48 -0700 Subject: [PATCH 10/33] add wrfout_slices_seriesReader function --- mmctools/wrf/utils.py | 147 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 147 insertions(+) diff --git a/mmctools/wrf/utils.py b/mmctools/wrf/utils.py index e351ac8..89ff87b 100644 --- a/mmctools/wrf/utils.py +++ b/mmctools/wrf/utils.py @@ -1304,6 +1304,153 @@ def wrfout_seriesReader(wrf_path,wrf_file_filter, return ds_subset + +def wrfout_slices_seriesReader(wrf_path, wrf_file_filter, + specified_height = None, + do_slice_vars = True, + do_surf_vars = False, + vlist = None ): + """ + Construct an a2e-mmc standard, xarrays-based, data structure from a + series of WRF slice outpput fies + + Note: Base state theta= 300.0 K is assumed by convention in WRF, + this function follow this convention. + + Usage + ==== + wrfpath : string + The path to directory containing wrfout files to be processed + wrf_file_filter : string-glob expression + A string-glob expression to filter a set of 4-dimensional WRF + output files. + specified_height : list-like, optional + If not None, then a list of static heights to which all data + variables should be interpolated. Note that this significantly + increases the data read time. 
+ do_slice_vars: Logical (default True), optional + If true, then the slice variables (SLICES_U, SLICES_V, SLICES_W, SLICES_T) are read for the specified height + (or for all heights if 'specified_height = None') + do_surf_vars: Logical (default False), optional + If true, then the surface variables (UST, HFX, QFX, SST, SSTK) will be added to the file + vlist: List-like, default None (optional). + If not none, then do_slice_vars and do_surf_vars set to False, and only variables in the list 'vlist' are read + """ + TH0 = 300.0 #WRF convention base-state theta = 300.0 K + dims_dict = { + 'Time':'datetime', + 'num_slices':'nz_slice', + 'south_north': 'ny', + 'west_east':'nx', + } + + ds = xr.open_mfdataset(os.path.join(wrf_path,wrf_file_filter), + chunks={'Time': 10}, + combine='nested', + concat_dim='Time') + + ds = ds.assign_coords({"SLICES_Z": ds.SLICES_Z.isel(Time=1)}) + ds = ds.swap_dims({'num_slices': 'SLICES_Z'}) + + dim_keys = ["Time","bottom_top","south_north","west_east"] + horiz_dim_keys = ["south_north","west_east"] + print('Finished opening/concatenating datasets...') + #print(ds.dims) + ds_subset = ds[['Time']] + print('Establishing coordinate variables, x,y,z, zSurface...') + ycoord = ds.DY * (0.5 + np.arange(ds.dims['south_north'])) + xcoord = ds.DX * (0.5 + np.arange(ds.dims['west_east'])) + ds_subset['z'] = xr.DataArray(specified_height, dims='num_slices') + + ds_subset['y'] = xr.DataArray(ycoord, dims='south_north') + ds_subset['x'] = xr.DataArray(xcoord, dims='west_east') + + + + if vlist not None: + print("Vlist not nne, setting do_slice_vars andd do_surf_vars to False") + print("Does not support specified_height argument, grabing all available heights") + do_slice_vars = False + do_surf_vars = False + print("Extracting variables") + for vv in vlist: + print(vv) + ds_subset[vv] = ds[vv] + + + if do_slice_vars: + print("Doing slice variables") + print('Grabbing u, v, w, T') + if specified_height is not None: + if len(specified_height) == 1: + 
print("One height") + #print(ds.dims) + #print(ds.coords) + ds_subset['u'] = ds['SLICES_U'].sel( SLICES_Z = specified_height ) + + ds_subset['v'] = ds['SLICES_V'].sel( SLICES_Z = specified_height ) + + ds_subset['w'] = ds['SLICES_W'].sel( SLICES_Z = specified_height ) + + ds_subset['T'] = ds['SLICES_T'].sel( SLICES_Z = specified_height ) + else: + print("Multiple heights") + ds_subset['u'] = ds['SLICES_U'].sel( SLICES_Z = specified_height ) + + ds_subset['v'] = ds['SLICES_V'].sel( SLICES_Z = specified_height ) + + ds_subset['w'] = ds['SLICES_W'].sel( SLICES_Z = specified_height ) + + ds_subset['T'] = ds['SLICES_T'].sel( SLICES_Z = specified_height ) + else: + + ds_subset['u'] = ds['SLICES_U'] + + ds_subset['v'] = ds['SLICES_V'] + + ds_subset['w'] = ds['SLICES_W'] + + ds_subset['T'] = ds['SLICES_T'] + print('Calculating derived data variables, wspd, wdir...') + #print( (ds_subset['u'].ufuncs.square()).values ) + ds_subset['wspd'] = xr.DataArray(np.sqrt(ds_subset['u'].values**2 + ds_subset['v'].values**2), + dims=dim_keys) + ds_subset['wdir'] = xr.DataArray(180. 
+ np.arctan2(ds_subset['u'].values,ds_subset['v'].values)*180./np.pi, + dims=dim_keys) + + + + + if do_surf_vars: + print('Extracting 2-D variables (UST, HFX, QFX, SST, SSTSK)') + ds_subset['UST'] = ds['UST'] + ds_subset['HFX'] = ds['HFX'] + ds_subset['QFX'] = ds['QFX'] + ds_subset['SST'] = ds['SST'] + ds_subset['SSTK'] = ds['SSTK'] + else: + print("Skipping 2-D variables") + + + + # assign rename coord variable for time, and assign ccordinates + + if specified_height is None: + ds_subset = ds_subset.assign_coords(z=ds_subset['SLICES_Z']) + ds_subset = ds_subset.assign_coords(y=ds_subset['y']) + ds_subset = ds_subset.assign_coords(x=ds_subset['x']) + + + + print(ds_subset.dims) + ds_subset = ds_subset.rename_dims(dims_dict) + + return ds_subset + + + + + def write_tslist_file(fname,lat=None,lon=None,i=None,j=None,twr_names=None,twr_abbr=None): """ Write a list of lat/lon or i/j locations to a tslist file that is From 121d1c40aa3d8451bf3b447295acc0a464bdb5fe Mon Sep 17 00:00:00 2001 From: Eliot Quon Date: Fri, 28 May 2021 12:30:08 -0600 Subject: [PATCH 11/33] Rename specified_height to specified_heights for consistency with existing wrfout_seriesReader() function --- mmctools/wrf/utils.py | 32 ++++++++++++++++---------------- 1 file changed, 16 insertions(+), 16 deletions(-) diff --git a/mmctools/wrf/utils.py b/mmctools/wrf/utils.py index 89ff87b..dbbcac4 100644 --- a/mmctools/wrf/utils.py +++ b/mmctools/wrf/utils.py @@ -1306,7 +1306,7 @@ def wrfout_seriesReader(wrf_path,wrf_file_filter, def wrfout_slices_seriesReader(wrf_path, wrf_file_filter, - specified_height = None, + specified_heights = None, do_slice_vars = True, do_surf_vars = False, vlist = None ): @@ -1324,13 +1324,13 @@ def wrfout_slices_seriesReader(wrf_path, wrf_file_filter, wrf_file_filter : string-glob expression A string-glob expression to filter a set of 4-dimensional WRF output files. 
- specified_height : list-like, optional + specified_heights : list-like, optional If not None, then a list of static heights to which all data variables should be interpolated. Note that this significantly increases the data read time. do_slice_vars: Logical (default True), optional If true, then the slice variables (SLICES_U, SLICES_V, SLICES_W, SLICES_T) are read for the specified height - (or for all heights if 'specified_height = None') + (or for all heights if 'specified_heights = None') do_surf_vars: Logical (default False), optional If true, then the surface variables (UST, HFX, QFX, SST, SSTK) will be added to the file vlist: List-like, default None (optional). @@ -1360,7 +1360,7 @@ def wrfout_slices_seriesReader(wrf_path, wrf_file_filter, print('Establishing coordinate variables, x,y,z, zSurface...') ycoord = ds.DY * (0.5 + np.arange(ds.dims['south_north'])) xcoord = ds.DX * (0.5 + np.arange(ds.dims['west_east'])) - ds_subset['z'] = xr.DataArray(specified_height, dims='num_slices') + ds_subset['z'] = xr.DataArray(specified_heights, dims='num_slices') ds_subset['y'] = xr.DataArray(ycoord, dims='south_north') ds_subset['x'] = xr.DataArray(xcoord, dims='west_east') @@ -1369,7 +1369,7 @@ def wrfout_slices_seriesReader(wrf_path, wrf_file_filter, if vlist not None: print("Vlist not nne, setting do_slice_vars andd do_surf_vars to False") - print("Does not support specified_height argument, grabing all available heights") + print("Does not support specified_heights argument, grabing all available heights") do_slice_vars = False do_surf_vars = False print("Extracting variables") @@ -1381,27 +1381,27 @@ def wrfout_slices_seriesReader(wrf_path, wrf_file_filter, if do_slice_vars: print("Doing slice variables") print('Grabbing u, v, w, T') - if specified_height is not None: - if len(specified_height) == 1: + if specified_heights is not None: + if len(specified_heights) == 1: print("One height") #print(ds.dims) #print(ds.coords) - ds_subset['u'] = ds['SLICES_U'].sel( 
SLICES_Z = specified_height ) + ds_subset['u'] = ds['SLICES_U'].sel( SLICES_Z = specified_heights ) - ds_subset['v'] = ds['SLICES_V'].sel( SLICES_Z = specified_height ) + ds_subset['v'] = ds['SLICES_V'].sel( SLICES_Z = specified_heights ) - ds_subset['w'] = ds['SLICES_W'].sel( SLICES_Z = specified_height ) + ds_subset['w'] = ds['SLICES_W'].sel( SLICES_Z = specified_heights ) - ds_subset['T'] = ds['SLICES_T'].sel( SLICES_Z = specified_height ) + ds_subset['T'] = ds['SLICES_T'].sel( SLICES_Z = specified_heights ) else: print("Multiple heights") - ds_subset['u'] = ds['SLICES_U'].sel( SLICES_Z = specified_height ) + ds_subset['u'] = ds['SLICES_U'].sel( SLICES_Z = specified_heights ) - ds_subset['v'] = ds['SLICES_V'].sel( SLICES_Z = specified_height ) + ds_subset['v'] = ds['SLICES_V'].sel( SLICES_Z = specified_heights ) - ds_subset['w'] = ds['SLICES_W'].sel( SLICES_Z = specified_height ) + ds_subset['w'] = ds['SLICES_W'].sel( SLICES_Z = specified_heights ) - ds_subset['T'] = ds['SLICES_T'].sel( SLICES_Z = specified_height ) + ds_subset['T'] = ds['SLICES_T'].sel( SLICES_Z = specified_heights ) else: ds_subset['u'] = ds['SLICES_U'] @@ -1435,7 +1435,7 @@ def wrfout_slices_seriesReader(wrf_path, wrf_file_filter, # assign rename coord variable for time, and assign ccordinates - if specified_height is None: + if specified_heights is None: ds_subset = ds_subset.assign_coords(z=ds_subset['SLICES_Z']) ds_subset = ds_subset.assign_coords(y=ds_subset['y']) ds_subset = ds_subset.assign_coords(x=ds_subset['x']) From 74795017fedc7f3537a8378fb339246281a00451 Mon Sep 17 00:00:00 2001 From: Eliot Quon Date: Fri, 28 May 2021 12:50:22 -0600 Subject: [PATCH 12/33] Cleanup docstring --- mmctools/wrf/utils.py | 61 ++++++++++++++++++++++--------------------- 1 file changed, 31 insertions(+), 30 deletions(-) diff --git a/mmctools/wrf/utils.py b/mmctools/wrf/utils.py index dbbcac4..89d1c9d 100644 --- a/mmctools/wrf/utils.py +++ b/mmctools/wrf/utils.py @@ -1304,37 +1304,38 @@ def 
wrfout_seriesReader(wrf_path,wrf_file_filter, return ds_subset - def wrfout_slices_seriesReader(wrf_path, wrf_file_filter, - specified_heights = None, - do_slice_vars = True, - do_surf_vars = False, - vlist = None ): - """ - Construct an a2e-mmc standard, xarrays-based, data structure from a - series of WRF slice outpput fies - - Note: Base state theta= 300.0 K is assumed by convention in WRF, - this function follow this convention. - - Usage - ==== - wrfpath : string - The path to directory containing wrfout files to be processed - wrf_file_filter : string-glob expression - A string-glob expression to filter a set of 4-dimensional WRF - output files. - specified_heights : list-like, optional - If not None, then a list of static heights to which all data - variables should be interpolated. Note that this significantly - increases the data read time. - do_slice_vars: Logical (default True), optional - If true, then the slice variables (SLICES_U, SLICES_V, SLICES_W, SLICES_T) are read for the specified height - (or for all heights if 'specified_heights = None') - do_surf_vars: Logical (default False), optional - If true, then the surface variables (UST, HFX, QFX, SST, SSTK) will be added to the file - vlist: List-like, default None (optional). - If not none, then do_slice_vars and do_surf_vars set to False, and only variables in the list 'vlist' are read + specified_heights=None, + do_slice_vars=True, + do_surf_vars=False, + vlist=None): + """ + Construct an a2e-mmc standard, xarrays-based, data structure from a + series of WRF slice output files + + Note: Base state theta= 300.0 K is assumed by convention in WRF, + and this function follows this convention. + Usage + ==== + wrfpath : string + The path to directory containing wrfout files to be processed + wrf_file_filter : string-glob expression + A string-glob expression to filter a set of 4-dimensional WRF + output files. 
+ specified_heights : list-like, optional + If not None, then a list of static heights to which all data + variables should be interpolated. Note that this significantly + increases the data read time. + do_slice_vars: Logical (default True), optional + If true, then the slice variables (SLICES_U, SLICES_V, SLICES_W, + SLICES_T) are read for the specified height (or for all heights + if 'specified_heights = None') + do_surf_vars: Logical (default False), optional + If true, then the surface variables (UST, HFX, QFX, SST, SSTK) + will be added to the file + vlist: List-like, default None (optional) + If not none, then do_slice_vars and do_surf_vars set to False, + and only variables in the list 'vlist' are read """ TH0 = 300.0 #WRF convention base-state theta = 300.0 K dims_dict = { From 0b052ab5a462b03ee5a50b2bfb14672807214b16 Mon Sep 17 00:00:00 2001 From: Eliot Quon Date: Fri, 28 May 2021 12:52:05 -0600 Subject: [PATCH 13/33] Clarify vlist option behavior --- mmctools/wrf/utils.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/mmctools/wrf/utils.py b/mmctools/wrf/utils.py index 89d1c9d..27cb841 100644 --- a/mmctools/wrf/utils.py +++ b/mmctools/wrf/utils.py @@ -1334,7 +1334,7 @@ def wrfout_slices_seriesReader(wrf_path, wrf_file_filter, If true, then the surface variables (UST, HFX, QFX, SST, SSTK) will be added to the file vlist: List-like, default None (optional) - If not none, then do_slice_vars and do_surf_vars set to False, + If not none, then set do_slice_vars and do_surf_vars to False, and only variables in the list 'vlist' are read """ TH0 = 300.0 #WRF convention base-state theta = 300.0 K @@ -1366,10 +1366,8 @@ def wrfout_slices_seriesReader(wrf_path, wrf_file_filter, ds_subset['y'] = xr.DataArray(ycoord, dims='south_north') ds_subset['x'] = xr.DataArray(xcoord, dims='west_east') - - if vlist not None: - print("Vlist not nne, setting do_slice_vars andd do_surf_vars to False") + print("vlist not None, setting do_slice_vars and 
do_surf_vars to False") print("Does not support specified_heights argument, grabing all available heights") do_slice_vars = False do_surf_vars = False @@ -1378,7 +1376,6 @@ def wrfout_slices_seriesReader(wrf_path, wrf_file_filter, print(vv) ds_subset[vv] = ds[vv] - if do_slice_vars: print("Doing slice variables") print('Grabbing u, v, w, T') From 0c94ff183fadfa129873f3ae4df6567e7ab83ce7 Mon Sep 17 00:00:00 2001 From: Eliot Quon Date: Fri, 28 May 2021 12:54:25 -0600 Subject: [PATCH 14/33] Clean up formatting/style --- mmctools/wrf/utils.py | 57 +++++++++++++++---------------------------- 1 file changed, 19 insertions(+), 38 deletions(-) diff --git a/mmctools/wrf/utils.py b/mmctools/wrf/utils.py index 27cb841..40544a3 100644 --- a/mmctools/wrf/utils.py +++ b/mmctools/wrf/utils.py @@ -1382,42 +1382,32 @@ def wrfout_slices_seriesReader(wrf_path, wrf_file_filter, if specified_heights is not None: if len(specified_heights) == 1: print("One height") - #print(ds.dims) - #print(ds.coords) - ds_subset['u'] = ds['SLICES_U'].sel( SLICES_Z = specified_heights ) - - ds_subset['v'] = ds['SLICES_V'].sel( SLICES_Z = specified_heights ) - - ds_subset['w'] = ds['SLICES_W'].sel( SLICES_Z = specified_heights ) - - ds_subset['T'] = ds['SLICES_T'].sel( SLICES_Z = specified_heights ) + #print(ds.dims) + #print(ds.coords) + ds_subset['u'] = ds['SLICES_U'].sel(SLICES_Z=specified_heights) + ds_subset['v'] = ds['SLICES_V'].sel(SLICES_Z=specified_heights) + ds_subset['w'] = ds['SLICES_W'].sel(SLICES_Z=specified_heights) + ds_subset['T'] = ds['SLICES_T'].sel(SLICES_Z=specified_heights) else: print("Multiple heights") - ds_subset['u'] = ds['SLICES_U'].sel( SLICES_Z = specified_heights ) - - ds_subset['v'] = ds['SLICES_V'].sel( SLICES_Z = specified_heights ) - - ds_subset['w'] = ds['SLICES_W'].sel( SLICES_Z = specified_heights ) - - ds_subset['T'] = ds['SLICES_T'].sel( SLICES_Z = specified_heights ) + ds_subset['u'] = ds['SLICES_U'].sel(SLICES_Z=specified_heights) + ds_subset['v'] = 
ds['SLICES_V'].sel(SLICES_Z=specified_heights) + ds_subset['w'] = ds['SLICES_W'].sel(SLICES_Z=specified_heights) + ds_subset['T'] = ds['SLICES_T'].sel(SLICES_Z=specified_heights) else: - ds_subset['u'] = ds['SLICES_U'] - ds_subset['v'] = ds['SLICES_V'] - ds_subset['w'] = ds['SLICES_W'] - ds_subset['T'] = ds['SLICES_T'] - print('Calculating derived data variables, wspd, wdir...') - #print( (ds_subset['u'].ufuncs.square()).values ) - ds_subset['wspd'] = xr.DataArray(np.sqrt(ds_subset['u'].values**2 + ds_subset['v'].values**2), - dims=dim_keys) - ds_subset['wdir'] = xr.DataArray(180. + np.arctan2(ds_subset['u'].values,ds_subset['v'].values)*180./np.pi, - dims=dim_keys) - - + print('Calculating derived data variables, wspd, wdir...') + #print((ds_subset['u'].ufuncs.square()).values) + ds_subset['wspd'] = xr.DataArray( + np.sqrt(ds_subset['u'].values**2 + ds_subset['v'].values**2), + dims=dim_keys) + ds_subset['wdir'] = xr.DataArray( + 180. + np.arctan2(ds_subset['u'].values,ds_subset['v'].values)*180./np.pi, + dims=dim_keys) if do_surf_vars: print('Extracting 2-D variables (UST, HFX, QFX, SST, SSTSK)') @@ -1429,26 +1419,17 @@ def wrfout_slices_seriesReader(wrf_path, wrf_file_filter, else: print("Skipping 2-D variables") - - - # assign rename coord variable for time, and assign ccordinates - + # assign rename coord variable for time, and assign coordinates if specified_heights is None: ds_subset = ds_subset.assign_coords(z=ds_subset['SLICES_Z']) ds_subset = ds_subset.assign_coords(y=ds_subset['y']) ds_subset = ds_subset.assign_coords(x=ds_subset['x']) - - - print(ds_subset.dims) ds_subset = ds_subset.rename_dims(dims_dict) return ds_subset - - - def write_tslist_file(fname,lat=None,lon=None,i=None,j=None,twr_names=None,twr_abbr=None): """ Write a list of lat/lon or i/j locations to a tslist file that is From 91b8073ab3922846176ff2ca3cac53144b6e5ee9 Mon Sep 17 00:00:00 2001 From: Eliot Quon Date: Fri, 28 May 2021 13:00:40 -0600 Subject: [PATCH 15/33] Fix syntax and 
inconsistent tabs --- mmctools/wrf/utils.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/mmctools/wrf/utils.py b/mmctools/wrf/utils.py index 40544a3..60c672a 100644 --- a/mmctools/wrf/utils.py +++ b/mmctools/wrf/utils.py @@ -1366,10 +1366,10 @@ def wrfout_slices_seriesReader(wrf_path, wrf_file_filter, ds_subset['y'] = xr.DataArray(ycoord, dims='south_north') ds_subset['x'] = xr.DataArray(xcoord, dims='west_east') - if vlist not None: - print("vlist not None, setting do_slice_vars and do_surf_vars to False") - print("Does not support specified_heights argument, grabing all available heights") - do_slice_vars = False + if vlist is not None: + print("vlist not None, setting do_slice_vars and do_surf_vars to False") + print("Does not support specified_heights argument, grabing all available heights") + do_slice_vars = False do_surf_vars = False print("Extracting variables") for vv in vlist: @@ -1430,6 +1430,10 @@ def wrfout_slices_seriesReader(wrf_path, wrf_file_filter, return ds_subset +def test(blerg): + TH0 = 300.0 #WRF convention base-state theta = 300.0 K + print(TH0) + def write_tslist_file(fname,lat=None,lon=None,i=None,j=None,twr_names=None,twr_abbr=None): """ Write a list of lat/lon or i/j locations to a tslist file that is From d5e43e93688ea12925552cab8595549f27fce93c Mon Sep 17 00:00:00 2001 From: Eliot Quon Date: Fri, 28 May 2021 13:03:44 -0600 Subject: [PATCH 16/33] Define module-level TH0 = 300.0 --- mmctools/wrf/utils.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/mmctools/wrf/utils.py b/mmctools/wrf/utils.py index 60c672a..df90502 100644 --- a/mmctools/wrf/utils.py +++ b/mmctools/wrf/utils.py @@ -60,6 +60,8 @@ 'UST', # u* from M-O ] +TH0 = 300.0 # [K] base-state potential temperature by WRF convention + def _get_dim(wrfdata,dimname): """Returns the specified dimension, with support for both netCDF4 and xarray @@ -370,7 +372,7 @@ def 
_create_datadict(self,varns,unstagger=False,staggered_vars=['ph']): datadict[varn] = ((tsdata[:,1:] + tsdata[:,:-1]) / 2).ravel() elif varn == 'th': # theta is a special case - #assert np.all(tsdata[:,-1] == 300), 'Unexpected nonzero value for theta' + #assert np.all(tsdata[:,-1] == TH0), 'Unexpected nonzero value for theta' # drop the trailing 0 for already unstaggered quantities datadict[varn] = tsdata[:,:-1].ravel() else: @@ -564,7 +566,7 @@ def interp_to_heights(df): tsdata = getattr(self,varn) #if varn == 'th': # # theta is a special case - # assert np.all(tsdata[:,-1] == 300) + # assert np.all(tsdata[:,-1] == TH0) #elif not varn == 'ww': # # if w has already been destaggered by wrf # assert np.all(tsdata[:,-1] == 0) @@ -744,7 +746,7 @@ def add_surface_plane(var,plane=None): def extract_column_from_wrfdata(fpath, coords, Ztop=2000., Vres=5.0, - T0=300., + T0=TH0, spatial_filter='interpolate',L_filter=0.0, additional_fields=[], verbose=False, @@ -1189,7 +1191,6 @@ def wrfout_seriesReader(wrf_path,wrf_file_filter, dimension to facilitate and expedite xarray operations """ import wrf as wrfpy - TH0 = 300.0 #WRF convention base-state theta = 300.0 K dims_dict = { 'Time':'datetime', 'bottom_top':'nz', @@ -1337,7 +1338,6 @@ def wrfout_slices_seriesReader(wrf_path, wrf_file_filter, If not none, then set do_slice_vars and do_surf_vars to False, and only variables in the list 'vlist' are read """ - TH0 = 300.0 #WRF convention base-state theta = 300.0 K dims_dict = { 'Time':'datetime', 'num_slices':'nz_slice', @@ -1431,7 +1431,6 @@ def wrfout_slices_seriesReader(wrf_path, wrf_file_filter, def test(blerg): - TH0 = 300.0 #WRF convention base-state theta = 300.0 K print(TH0) def write_tslist_file(fname,lat=None,lon=None,i=None,j=None,twr_names=None,twr_abbr=None): From e220ab1d21c5ea480bb71b3a8f1c93473129b85c Mon Sep 17 00:00:00 2001 From: Eliot Quon Date: Fri, 28 May 2021 13:09:15 -0600 Subject: [PATCH 17/33] Forgot to remove test function --- mmctools/wrf/utils.py | 3 
--- 1 file changed, 3 deletions(-) diff --git a/mmctools/wrf/utils.py b/mmctools/wrf/utils.py index df90502..9bc69cc 100644 --- a/mmctools/wrf/utils.py +++ b/mmctools/wrf/utils.py @@ -1430,9 +1430,6 @@ def wrfout_slices_seriesReader(wrf_path, wrf_file_filter, return ds_subset -def test(blerg): - print(TH0) - def write_tslist_file(fname,lat=None,lon=None,i=None,j=None,twr_names=None,twr_abbr=None): """ Write a list of lat/lon or i/j locations to a tslist file that is From f3d49856f6fb87ace2a11b153d1d9a368ea8ed2c Mon Sep 17 00:00:00 2001 From: Eliot Quon Date: Mon, 12 Jul 2021 23:28:06 -0600 Subject: [PATCH 18/33] Update error checking when initializing CDSDataset --- mmctools/wrf/preprocessing.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/mmctools/wrf/preprocessing.py b/mmctools/wrf/preprocessing.py index ad7d63f..8e7fd20 100644 --- a/mmctools/wrf/preprocessing.py +++ b/mmctools/wrf/preprocessing.py @@ -250,10 +250,15 @@ class CDSDataset(object): def __init__(self): if not os.path.isfile(self.api_rc): - print('WARNING: '+self.api_rc+' not found') - print('Go to https://cds.climate.copernicus.eu/api-how-to for more information') - import cdsapi - self.client = cdsapi.Client() + raise FileNotFoundError(f"""Expected CDS API key in {self.api_rc} +Go to https://cds.climate.copernicus.eu/api-how-to for more information""") + try: + import cdsapi + except ImportError: + raise ModuleNotFoundError("""Need CDS API client +Run `conda install -c conda-forge cdsapi`""") + else: + self.client = cdsapi.Client() def download(self,datetimes,product,prefix=None, variables=[], From edf11d43d1a40d018f3f8f2ce06cc1f355dd8553 Mon Sep 17 00:00:00 2001 From: Eliot Quon Date: Wed, 14 Jul 2021 14:32:39 -0600 Subject: [PATCH 19/33] Add combine_request kwarg For retrieving data at many output times, the previous approach (which creates one request per output time) can be rather slow; this combines all output datetimes into a single request (and output file). 
It's up to the user to decide whether to request days, weeks, months, or years at a time. --- mmctools/wrf/preprocessing.py | 44 ++++++++++++++++++++++++++--------- 1 file changed, 33 insertions(+), 11 deletions(-) diff --git a/mmctools/wrf/preprocessing.py b/mmctools/wrf/preprocessing.py index 8e7fd20..6f8e38f 100644 --- a/mmctools/wrf/preprocessing.py +++ b/mmctools/wrf/preprocessing.py @@ -263,7 +263,8 @@ def __init__(self): def download(self,datetimes,product,prefix=None, variables=[], area=[], - pressure_levels=None): + pressure_levels=None, + combine_request=False): """Download data at specified datetimes. Usage @@ -283,6 +284,11 @@ def download(self,datetimes,product,prefix=None, North/west/south/east lat/long limits pressure_levels : list, optional List of pressure levels + combine_request : bool, optional + Aggregate requested dates into lists of years, months, days, + and hours--note that this may return additional time steps + because the request selects all permutations of + year/month/day/hour; should be False for WRF WPS """ if prefix is None: prefix = os.path.join('.',product) @@ -297,14 +303,23 @@ def download(self,datetimes,product,prefix=None, if pressure_levels is not None: req['pressure_level'] = pressure_levels print('Requesting',len(pressure_levels),'pressure levels') - for datetime in datetimes: - req['year'] = datetime.strftime('%Y') - req['month'] = datetime.strftime('%m') - req['day'] = datetime.strftime('%d') - req['time'] = datetime.strftime('%H:%M') - target = datetime.strftime('{:s}_%Y_%m_%d_%H.grib'.format(prefix)) - #print(datetime,req,target) + if combine_request: + print('Combining all datetimes into a single request') + req['year'] = sorted(list(set([datetime.strftime('%Y') for datetime in datetimes]))) + req['month'] = sorted(list(set([datetime.strftime('%m') for datetime in datetimes]))) + req['day'] = sorted(list(set([datetime.strftime('%d') for datetime in datetimes]))) + req['time'] = 
sorted(list(set([datetime.strftime('%H:%M') for datetime in datetimes]))) + target = datetimes[0].strftime('{:s}_from_%Y_%m_%d_%H.grib'.format(prefix)) self.client.retrieve(product, req, target) + else: + for datetime in datetimes: + req['year'] = datetime.strftime('%Y') + req['month'] = datetime.strftime('%m') + req['day'] = datetime.strftime('%d') + req['time'] = datetime.strftime('%H:%M') + target = datetime.strftime('{:s}_%Y_%m_%d_%H.grib'.format(prefix)) + #print(datetime,req,target) + self.client.retrieve(product, req, target) class ERA5(CDSDataset): @@ -324,7 +339,7 @@ class ERA5(CDSDataset): Ref: https://confluence.ecmwf.int/pages/viewpage.action?pageId=74764925 """ - def download(self,datetimes,path=None,bounds={}): + def download(self,datetimes,path=None,bounds={},combine_request=False): """Download data at specified datetimes. Descriptions: @@ -344,6 +359,11 @@ def download(self,datetimes,path=None,bounds={}): includes all of US and Central America, most of Alaska and Canada (up to 60deg latitude), and parts of South America that lie north of the equator. + combine_request : bool, optional + Aggregate requested dates into lists of years, months, days, + and hours--note that this may return additional time steps + because the request selects all permutations of + year/month/day/hour; should be False for WRF WPS """ if path is None: path = '.' 
@@ -376,7 +396,8 @@ def download(self,datetimes,path=None,bounds={}): '600','650','700','750','775','800','825','850','875','900', '925','950','975','1000' ], - area=area + area=area, + combine_request=combine_request, ) super().download( datetimes, @@ -402,7 +423,8 @@ def download(self,datetimes,path=None,bounds={}): 'volumetric_soil_water_layer_1','volumetric_soil_water_layer_2', 'volumetric_soil_water_layer_3','volumetric_soil_water_layer_4' ], - area=area + area=area, + combine_request=combine_request, ) From f9098a7247d5be7fe7765a2a1b1736189469d159 Mon Sep 17 00:00:00 2001 From: Eliot Quon Date: Thu, 15 Jul 2021 10:34:48 -0600 Subject: [PATCH 20/33] Update install instructions --- README.md | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 2d1fb88..ef9871c 100644 --- a/README.md +++ b/README.md @@ -82,8 +82,18 @@ df = read_dir(dpath, file_filter='*_w*', reader=profiler) ## Installation -To install, run `pip install -e mmctools` after cloning the repository (or `pip install -e .` from inside a2e-mmc/mmctools). 
+The recommended approach is to first create a new conda environment: +``` +conda create -n mmc python=3.7 +conda activate mmc +conda install -y -c conda-forge jupyterlab matplotlib scipy xarray dask pyarrow gdal rasterio elevation pyyaml netcdf4 wrf-python cdsapi cfgrib +``` +Then create an "editable" installation of the mmctools repository: +``` +cd /path/to/a2e-mmc/mmctools +pip install -e .` +``` ## Code Development Principles From ac32d1a5fb77cf31d05a862d12097cedc44ed9a0 Mon Sep 17 00:00:00 2001 From: Eliot Quon Date: Thu, 15 Jul 2021 11:02:58 -0600 Subject: [PATCH 21/33] Add details about dependencies --- README.md | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/README.md b/README.md index ef9871c..81372d8 100644 --- a/README.md +++ b/README.md @@ -88,6 +88,17 @@ conda create -n mmc python=3.7 conda activate mmc conda install -y -c conda-forge jupyterlab matplotlib scipy xarray dask pyarrow gdal rasterio elevation pyyaml netcdf4 wrf-python cdsapi cfgrib ``` +Note: All packages after `xarray` are optional: +- `dask` makes netcdf data processing more efficient +- `pyarrow` is a dependency for the "feather" data format, an *extremely* efficient + way to save dataframe data (in terms file I/O time and file size) +- `gdal`, `rasterio`, and `elevation` are required for processing terrain data +- `netcdf4` and `wrf-python` are for the NCAR-provided WRF utilities, which are + useful for interpolating and slicing data +- `cdsapi` is needed for `wrf.preprocessing` to retrieve Copernicus ERA5 + reanalysis data +- `cfgrib` enables xarray to load grib files + Then create an "editable" installation of the mmctools repository: ``` From 77407d82b7863f7d9e596c231edfc8f5f595c23b Mon Sep 17 00:00:00 2001 From: Eliot Quon Date: Thu, 15 Jul 2021 11:04:50 -0600 Subject: [PATCH 22/33] Fix typo --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 81372d8..e352225 100644 --- a/README.md +++ b/README.md @@ -103,7 
+103,7 @@ Note: All packages after `xarray` are optional: Then create an "editable" installation of the mmctools repository: ``` cd /path/to/a2e-mmc/mmctools -pip install -e .` +pip install -e . ``` ## Code Development Principles From e5641d266d8899913acfa0151166e179261e6362 Mon Sep 17 00:00:00 2001 From: Eliot Quon Date: Thu, 15 Jul 2021 13:25:43 -0600 Subject: [PATCH 23/33] Add kwarg to select output slices and quantities of interest Added default variables (at pressure-levels or single/surface level) and default pressure levels --- mmctools/wrf/preprocessing.py | 128 +++++++++++++++++++++------------- 1 file changed, 78 insertions(+), 50 deletions(-) diff --git a/mmctools/wrf/preprocessing.py b/mmctools/wrf/preprocessing.py index 6f8e38f..caf1c61 100644 --- a/mmctools/wrf/preprocessing.py +++ b/mmctools/wrf/preprocessing.py @@ -339,7 +339,49 @@ class ERA5(CDSDataset): Ref: https://confluence.ecmwf.int/pages/viewpage.action?pageId=74764925 """ - def download(self,datetimes,path=None,bounds={},combine_request=False): + default_single_level_vars = [ + '10m_u_component_of_wind','10m_v_component_of_wind', + '2m_dewpoint_temperature','2m_temperature', + 'convective_snowfall','convective_snowfall_rate_water_equivalent', + 'ice_temperature_layer_1','ice_temperature_layer_2', + 'ice_temperature_layer_3','ice_temperature_layer_4', + 'land_sea_mask','large_scale_snowfall', + 'large_scale_snowfall_rate_water_equivalent', + 'maximum_2m_temperature_since_previous_post_processing', + 'mean_sea_level_pressure', + 'minimum_2m_temperature_since_previous_post_processing', + 'sea_ice_cover','sea_surface_temperature','skin_temperature', + 'snow_albedo','snow_density','snow_depth','snow_evaporation', + 'snowfall','snowmelt','soil_temperature_level_1', + 'soil_temperature_level_2','soil_temperature_level_3', + 'soil_temperature_level_4','soil_type','surface_pressure', + 'temperature_of_snow_layer','total_column_snow_water', + 
'volumetric_soil_water_layer_1','volumetric_soil_water_layer_2', + 'volumetric_soil_water_layer_3','volumetric_soil_water_layer_4' + ] + + default_pressure_level_vars = [ + 'divergence','fraction_of_cloud_cover','geopotential', + 'ozone_mass_mixing_ratio','potential_vorticity', + 'relative_humidity','specific_cloud_ice_water_content', + 'specific_cloud_liquid_water_content','specific_humidity', + 'specific_rain_water_content','specific_snow_water_content', + 'temperature','u_component_of_wind','v_component_of_wind', + 'vertical_velocity','vorticity' + ] + + default_pressure_levels = [ + '1','2','3','5','7','10','20','30','50','70','100','125','150', + '175','200','225','250','300','350','400','450','500','550', + '600','650','700','750','775','800','825','850','875','900', + '925','950','975','1000' + ] + + def download(self,datetimes,path=None, + pressure_level_vars='default', pressure_levels='default', + single_level_vars='default', + bounds={}, + combine_request=False): """Download data at specified datetimes. Descriptions: @@ -359,6 +401,15 @@ def download(self,datetimes,path=None,bounds={},combine_request=False): includes all of US and Central America, most of Alaska and Canada (up to 60deg latitude), and parts of South America that lie north of the equator. 
+ pressure_level_vars : list, optional + Variables to retrieve at the specified pressure levels; if + set to 'default', then use `default_pressure_level_vars` + pressure_levels : list, optional + Pressure levels from which 4D data are constructed; if set + to 'default', then use `default_pressure_levels` + single_level_vars : list, optional + Variables to retrieve at the specified pressure levels; if + set to 'default', then use `default_single_level_vars` combine_request : bool, optional Aggregate requested dates into lists of years, months, days, and hours--note that this may return additional time steps @@ -374,58 +425,35 @@ def download(self,datetimes,path=None,bounds={},combine_request=False): S_bound = bounds.get('S', 0) W_bound = bounds.get('W', -169) E_bound = bounds.get('E', -47) + + if single_level_vars == 'default': + single_level_vars = self.default_single_level_vars + if pressure_level_vars == 'default': + pressure_level_vars = self.default_pressure_level_vars + if pressure_levels == 'default': + pressure_levels = self.default_pressure_levels area = [N_bound, W_bound, S_bound, E_bound] - super().download( - datetimes, - 'reanalysis-era5-pressure-levels', - prefix=os.path.join(path,'era5_pressure'), - variables=[ - 'divergence','fraction_of_cloud_cover','geopotential', - 'ozone_mass_mixing_ratio','potential_vorticity', - 'relative_humidity','specific_cloud_ice_water_content', - 'specific_cloud_liquid_water_content','specific_humidity', - 'specific_rain_water_content','specific_snow_water_content', - 'temperature','u_component_of_wind','v_component_of_wind', - 'vertical_velocity','vorticity' - ], - pressure_levels=[ - '1','2','3','5','7','10','20','30','50','70','100','125','150', - '175','200','225','250','300','350','400','450','500','550', - '600','650','700','750','775','800','825','850','875','900', - '925','950','975','1000' - ], - area=area, - combine_request=combine_request, - ) - super().download( - datetimes, - 'reanalysis-era5-single-levels', 
- prefix=os.path.join(path,'era5_surface'), - variables=[ - '10m_u_component_of_wind','10m_v_component_of_wind', - '2m_dewpoint_temperature','2m_temperature', - 'convective_snowfall','convective_snowfall_rate_water_equivalent', - 'ice_temperature_layer_1','ice_temperature_layer_2', - 'ice_temperature_layer_3','ice_temperature_layer_4', - 'land_sea_mask','large_scale_snowfall', - 'large_scale_snowfall_rate_water_equivalent', - 'maximum_2m_temperature_since_previous_post_processing', - 'mean_sea_level_pressure', - 'minimum_2m_temperature_since_previous_post_processing', - 'sea_ice_cover','sea_surface_temperature','skin_temperature', - 'snow_albedo','snow_density','snow_depth','snow_evaporation', - 'snowfall','snowmelt','soil_temperature_level_1', - 'soil_temperature_level_2','soil_temperature_level_3', - 'soil_temperature_level_4','soil_type','surface_pressure', - 'temperature_of_snow_layer','total_column_snow_water', - 'volumetric_soil_water_layer_1','volumetric_soil_water_layer_2', - 'volumetric_soil_water_layer_3','volumetric_soil_water_layer_4' - ], - area=area, - combine_request=combine_request, - ) + if pressure_level_vars: + super().download( + datetimes, + 'reanalysis-era5-pressure-levels', + prefix=os.path.join(path,'era5_pressure'), + variables=pressure_level_vars, + pressure_levels=pressure_levels, + area=area, + combine_request=combine_request, + ) + if single_level_vars: + super().download( + datetimes, + 'reanalysis-era5-single-levels', + prefix=os.path.join(path,'era5_surface'), + variables=single_level_vars, + area=area, + combine_request=combine_request, + ) class SetupWRF(): From c8d3ad9a488b52d0399f3eee924dc4eac5cf95ed Mon Sep 17 00:00:00 2001 From: Regis Thedin Date: Wed, 21 Jul 2021 11:33:21 -0600 Subject: [PATCH 24/33] Fix bug on slope and vector ruggedness measure The underlying resolution of the height array wasn't being taken into account, which resulted in wrong computation of slope, rise-run, and VRM.
The `richdem` call for slope also had `riserun` which is incorrect for this case. --- mmctools/helper_functions.py | 26 +++++++++++++++++--------- 1 file changed, 17 insertions(+), 9 deletions(-) diff --git a/mmctools/helper_functions.py b/mmctools/helper_functions.py index 9b036f1..aaf084e 100644 --- a/mmctools/helper_functions.py +++ b/mmctools/helper_functions.py @@ -1323,7 +1323,7 @@ def tri_filt(x): return tri -def calcVRM(hgt,window=None,footprint=None,slope_zscale=1.0,return_slope=False): +def calcVRM(hgt,res,window=None,footprint=None,fill_depressions=True,return_slope_aspect=False): ''' Vector Ruggedness Measure Sappington, J. M., Longshore, K. M., & Thompson, D. B. (2007). @@ -1333,6 +1333,9 @@ def calcVRM(hgt,window=None,footprint=None,slope_zscale=1.0,return_slope=False): hgt : array Array of heights over which TRI will be calculated + res : int or float + Resolution of the underlying hgt array. Should be constant in x and y + Needed for proper slope calculation window : int Length of window in x and y direction. Must be odd. ''' @@ -1353,18 +1356,22 @@ def calcVRM(hgt,window=None,footprint=None,slope_zscale=1.0,return_slope=False): assert len(np.shape(hgt)) == 2, 'hgt must be 2-dimensional. 
Currently has {} dimensions'.format(len(np.shape(hgt))) ny,nx = np.shape(hgt) + # Determine scale based on resolution of hgt array + zscale = 1/res + # Get slope and aspect: hgt_rd = rd.rdarray(hgt, no_data=-9999) - rd.FillDepressions(hgt_rd, in_place=True) - slope = rd.TerrainAttribute(hgt_rd, attrib='slope_riserun', zscale=slope_zscale) + if fill_depressions: + rd.FillDepressions(hgt_rd, in_place=True) + slope = rd.TerrainAttribute(hgt_rd, attrib='slope_degrees',zscale=zscale) aspect = rd.TerrainAttribute(hgt_rd, attrib='aspect') # Calculate vectors: vrm = np.zeros((ny,nx)) - rugz = np.cos(slope*np.pi/180.0) - rugdxy = np.sin(slope*np.pi/180.0) - rugx = rugdxy*np.cos(aspect*np.pi/180.0) - rugy = rugdxy*np.sin(aspect*np.pi/180.0) + rugz = np.cos(np.deg2rad(slope)) + rugdxy = np.sin(np.deg2rad(slope)) + rugx = rugdxy*np.cos(np.deg2rad(aspect)) + rugy = rugdxy*np.sin(np.deg2rad(aspect)) def vrm_filt(x): return(sum(x)**2) @@ -1384,7 +1391,8 @@ def vrm_filt(x): else: num_points = float(window**2) vrm = 1.0 - np.sqrt(vrmX + vrmY + vrmZ)/num_points - if return_slope: - return vrm,slope + if return_slope_aspect: + return vrm,slope,aspect else: return vrm + From 3c37ff56e2b0c2d871033ae9c44065a651f98a14 Mon Sep 17 00:00:00 2001 From: Regis Thedin Date: Fri, 23 Jul 2021 09:48:29 -0600 Subject: [PATCH 25/33] Move TRI and VRM functions from helper to coupling.terrain --- mmctools/coupling/terrain.py | 116 +++++++++++++++++++++++++++++++++++ mmctools/helper_functions.py | 115 ---------------------------------- 2 files changed, 116 insertions(+), 115 deletions(-) diff --git a/mmctools/coupling/terrain.py b/mmctools/coupling/terrain.py index 4d93d1b..c3f9a65 100644 --- a/mmctools/coupling/terrain.py +++ b/mmctools/coupling/terrain.py @@ -506,3 +506,119 @@ def calc_slope(x,y,z): rise_run = np.sqrt(dz_dx**2 + dz_dy**2) slope[1:-1,1:-1] = np.degrees(np.arctan(rise_run)) return slope + + +def calcTRI(hgt,window=None,footprint=None): + ''' + Terrain Ruggedness Index + Riley, S. 
J., DeGloria, S. D., & Elliot, R. (1999). Index that + quantifies topographic heterogeneity. intermountain Journal + of sciences, 5(1-4), 23-27. + + hgt : array + Array of heights over which TRI will be calculated + window : int + Length of window in x and y direction. Must be odd. + ''' + from scipy.ndimage.filters import generic_filter + + # Window setup: + if footprint is not None: + assert window is None, 'Must specify either window or footprint' + window = np.shape(footprint)[0] + + assert (window/2.0) - np.floor(window/2.0) != 0.0, 'window must be odd...' + Hwindow = int(np.floor(window/2)) + + # Type and dimension check: + if isinstance(hgt,(xr.Dataset,xr.DataArray,xr.Variable)): + hgt = hgt.data + assert len(np.shape(hgt)) == 2, 'hgt must be 2-dimensional. Currently has {} dimensions'.format(len(np.shape(hgt))) + + ny,nx = np.shape(hgt) + + def tri_filt(x): + middle_ind = int(len(x)/2) + return((sum((x - x[middle_ind])**2.0))**0.5) + + if footprint is None: + tri = generic_filter(hgt,tri_filt, size = (window,window)) + else: + tri = generic_filter(hgt,tri_filt, footprint=footprint) + + return tri + + +def calcVRM(hgt,res,window=None,footprint=None,fill_depressions=True,return_slope_aspect=False): + ''' + Vector Ruggedness Measure + Sappington, J. M., Longshore, K. M., & Thompson, D. B. (2007). + Quantifying landscape ruggedness for animal habitat analysis: + a case study using bighorn sheep in the Mojave Desert. The + Journal of wildlife management, 71(5), 1419-1426. + + hgt : array + Array of heights over which TRI will be calculated + res : int or float + Resolution of the underlying hgt array. Should be constant in x and y + Needed for proper slope calculation + window : int + Length of window in x and y direction. Must be odd. 
+ ''' + import richdem as rd + from scipy.ndimage.filters import generic_filter + + # Window setup: + if footprint is not None: + assert window is None, 'Must specify either window or footprint' + window = np.shape(footprint)[0] + + assert (window/2.0) - np.floor(window/2.0) != 0.0, 'window must be odd...' + Hwndw = int(np.floor(window/2)) + + # Type and dimension check: + if isinstance(hgt,(xr.Dataset,xr.DataArray,xr.Variable)): + hgt = hgt.data + assert len(np.shape(hgt)) == 2, 'hgt must be 2-dimensional. Currently has {} dimensions'.format(len(np.shape(hgt))) + ny,nx = np.shape(hgt) + + # Determine scale based on resolution of hgt array + zscale = 1/res + + # Get slope and aspect: + hgt_rd = rd.rdarray(hgt, no_data=-9999) + if fill_depressions: + rd.FillDepressions(hgt_rd, in_place=True) + slope = rd.TerrainAttribute(hgt_rd, attrib='slope_degrees',zscale=zscale) + aspect = rd.TerrainAttribute(hgt_rd, attrib='aspect') + + # Calculate vectors: + vrm = np.zeros((ny,nx)) + rugz = np.cos(np.deg2rad(slope)) + rugdxy = np.sin(np.deg2rad(slope)) + rugx = rugdxy*np.cos(np.deg2rad(aspect)) + rugy = rugdxy*np.sin(np.deg2rad(aspect)) + + def vrm_filt(x): + return(sum(x)**2) + + if footprint is None: + vrmX = generic_filter(rugx,vrm_filt, size = (window,window)) + vrmY = generic_filter(rugy,vrm_filt, size = (window,window)) + vrmZ = generic_filter(rugz,vrm_filt, size = (window,window)) + else: + vrmX = generic_filter(rugx,vrm_filt, footprint=footprint) + vrmY = generic_filter(rugy,vrm_filt, footprint=footprint) + vrmZ = generic_filter(rugz,vrm_filt, footprint=footprint) + + + if footprint is not None: + num_points = len(footprint[footprint != 0.0]) + else: + num_points = float(window**2) + vrm = 1.0 - np.sqrt(vrmX + vrmY + vrmZ)/num_points + if return_slope_aspect: + return vrm,slope,aspect + else: + return vrm + diff --git a/mmctools/helper_functions.py b/mmctools/helper_functions.py index aaf084e..f3c75f2 100644 --- a/mmctools/helper_functions.py +++ 
b/mmctools/helper_functions.py @@ -1281,118 +1281,3 @@ def calc_spectra(data, psd_f = psd_level.combine_first(psd_f) return(psd_f) - -def calcTRI(hgt,window=None,footprint=None): - ''' - Terrain Ruggedness Index - Riley, S. J., DeGloria, S. D., & Elliot, R. (1999). Index that - quantifies topographic heterogeneity. intermountain Journal - of sciences, 5(1-4), 23-27. - - hgt : array - Array of heights over which TRI will be calculated - window : int - Length of window in x and y direction. Must be odd. - ''' - from scipy.ndimage.filters import generic_filter - - # Window setup: - if footprint is not None: - assert window is None, 'Must specify either window or footprint' - window = np.shape(footprint)[0] - - assert (window/2.0) - np.floor(window/2.0) != 0.0, 'window must be odd...' - Hwindow = int(np.floor(window/2)) - - # Type and dimension check: - if isinstance(hgt,(xr.Dataset,xr.DataArray,xr.Variable)): - hgt = hgt.data - assert len(np.shape(hgt)) == 2, 'hgt must be 2-dimensional. Currently has {} dimensions'.format(len(np.shape(hgt))) - - ny,nx = np.shape(hgt) - - def tri_filt(x): - middle_ind = int(len(x)/2) - return((sum((x - x[middle_ind])**2.0))**0.5) - - if footprint is None: - tri = generic_filter(hgt,tri_filt, size = (window,window)) - else: - tri = generic_filter(hgt,tri_filt, footprint=footprint) - - return tri - - -def calcVRM(hgt,res,window=None,footprint=None,fill_depressions=True,return_slope_aspect=False): - ''' - Vector Ruggedness Measure - Sappington, J. M., Longshore, K. M., & Thompson, D. B. (2007). - Quantifying landscape ruggedness for animal habitat analysis: - a case study using bighorn sheep in the Mojave Desert. The - Journal of wildlife management, 71(5), 1419-1426. - - hgt : array - Array of heights over which TRI will be calculated - res : int or float - Resolution of the underlying hgt array. Should be constant in x and y - Needed for proper slope calculation - window : int - Length of window in x and y direction. Must be odd. 
- ''' - import richdem as rd - from scipy.ndimage.filters import generic_filter - - # Window setup: - if footprint is not None: - assert window is None, 'Must specify either window or footprint' - window = np.shape(footprint)[0] - - assert (window/2.0) - np.floor(window/2.0) != 0.0, 'window must be odd...' - Hwndw = int(np.floor(window/2)) - - # Type and dimension check: - if isinstance(hgt,(xr.Dataset,xr.DataArray,xr.Variable)): - hgt = hgt.data - assert len(np.shape(hgt)) == 2, 'hgt must be 2-dimensional. Currently has {} dimensions'.format(len(np.shape(hgt))) - ny,nx = np.shape(hgt) - - # Determine scale based on resolution of hgt array - zscale = 1/res - - # Get slope and aspect: - hgt_rd = rd.rdarray(hgt, no_data=-9999) - if fill_depressions: - rd.FillDepressions(hgt_rd, in_place=True) - slope = rd.TerrainAttribute(hgt_rd, attrib='slope_degrees',zscale=zscale) - aspect = rd.TerrainAttribute(hgt_rd, attrib='aspect') - - # Calculate vectors: - vrm = np.zeros((ny,nx)) - rugz = np.cos(np.deg2rad(slope)) - rugdxy = np.sin(np.deg2rad(slope)) - rugx = rugdxy*np.cos(np.deg2rad(aspect)) - rugy = rugdxy*np.sin(np.deg2rad(aspect)) - - def vrm_filt(x): - return(sum(x)**2) - - if footprint is None: - vrmX = generic_filter(rugx,vrm_filt, size = (window,window)) - vrmY = generic_filter(rugy,vrm_filt, size = (window,window)) - vrmZ = generic_filter(rugz,vrm_filt, size = (window,window)) - else: - vrmX = generic_filter(rugx,vrm_filt, footprint=footprint) - vrmY = generic_filter(rugy,vrm_filt, footprint=footprint) - vrmZ = generic_filter(rugz,vrm_filt, footprint=footprint) - - - if footprint is not None: - num_points = len(footprint[footprint != 0.0]) - else: - num_points = float(window**2) - vrm = 1.0 - np.sqrt(vrmX + vrmY + vrmZ)/num_points - if return_slope_aspect: - return vrm,slope,aspect - else: - return vrm - From 5dcd9bf4a087242e04c8d3c47a4ae182c6c9d963 Mon Sep 17 00:00:00 2001 From: Regis Thedin Date: Fri, 23 Jul 2021 10:09:22 -0600 Subject: [PATCH 26/33] Add xarray 
package for terrain functions --- mmctools/coupling/terrain.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/mmctools/coupling/terrain.py b/mmctools/coupling/terrain.py index c3f9a65..899752c 100644 --- a/mmctools/coupling/terrain.py +++ b/mmctools/coupling/terrain.py @@ -520,6 +520,7 @@ def calcTRI(hgt,window=None,footprint=None): window : int Length of window in x and y direction. Must be odd. ''' + import xarray as xr from scipy.ndimage.filters import generic_filter # Window setup: @@ -566,6 +567,7 @@ def calcVRM(hgt,res,window=None,footprint=None,fill_depressions=True,return_slop Length of window in x and y direction. Must be odd. ''' import richdem as rd + import xarray as xr from scipy.ndimage.filters import generic_filter # Window setup: From 04e1c284737e2af003ed036524905aca82a9ee1e Mon Sep 17 00:00:00 2001 From: Regis Thedin Date: Wed, 4 Aug 2021 18:39:43 -0600 Subject: [PATCH 27/33] Add calculation of topographic shelter Sx --- mmctools/coupling/terrain.py | 80 ++++++++++++++++++++++++++++++++++++ 1 file changed, 80 insertions(+) diff --git a/mmctools/coupling/terrain.py b/mmctools/coupling/terrain.py index 899752c..ec17a2c 100644 --- a/mmctools/coupling/terrain.py +++ b/mmctools/coupling/terrain.py @@ -624,3 +624,83 @@ def vrm_filt(x): else: return vrm + +def calcSx (xx, yy, zagl, A, dmax, method='nearest', propagateNaN=False, verbose=False): + ''' + Sx is a measure of topographic shelter or exposure relative to a particular + wind direction. Calculates a whole map for all points (xi, yi) in the domain. + For each (xi, yi) pair, it uses all v points (xv, yv) upwind of (xi, yi) in + the A wind direction, up to dmax. + + Winstral, A., Marks D. "Simulating wind fields and snow redistribution using + terrain-based parameters to model snow accumulation and melt over a semi- + arid mountain catchment" Hydrol. Process. 16, 3585–3603 (2002) + + Usage + ===== + xx, yy : array + meshgrid arrays of the region extent coordinates. 
+ zagl: array + Elevation map of the region + A: float + Wind direction (deg, wind direction convention) + dmax: float + Upwind extent of the search + method: string + griddata interpolation method. Options are 'nearest', 'linear', 'cubic'. + Function is slow if not `nearest`. + propagateNaN: bool + If method != nearest, upwind posititions that lie outside the domain bounds receive NaN + ''' + + from scipy import interpolate + + # create empty output Sx array + Sx = np.empty(np.shape(zagl)); Sx[:,:] = np.nan + + # get resolution (assumes uniform resolution) + res = xx[1,0] - xx[0,0] + npoints = 1+int(dmax/res) + if dmax < res: + raise ValueError('dmax needs to be larger or equal to the resolution of the grid') + + # change angle notation + ang = np.deg2rad(270-A) + + # array for interpolation using griddata + points = np.array( (xx.flatten(), yy.flatten()) ).T + values = zagl.flatten() + + for i, xi in enumerate(xx[:,0]): + print(f'Processing row {i+1}/{len(xx)} ', end='\r') + for j, yi in enumerate(yy[0,:]): + + # limits of the line where Sx will be calculated on (minus bc it's upwind) + xf = xi - dmax*np.cos(ang) + yf = yi - dmax*np.sin(ang) + xline = np.around(np.linspace(xi, xf, num=npoints), decimals=4) + yline = np.around(np.linspace(yi, yf, num=npoints), decimals=4) + + # interpolate points upstream (xi, yi) along angle ang + elev = interpolate.griddata( points, values, (xline,yline), method=method ) + + # elevation of (xi, yi), for convenience + elevi = elev[0] + + if propagateNaN: + Sx[i,j] = np.amax(np.rad2deg( np.arctan( (elev[1:] - elevi)/(((xline[1:]-xi)**2 + (yline[1:]-yi)**2)**0.5) ) )) + else: + Sx[i,j] = np.nanmax(np.rad2deg( np.arctan( (elev[1:] - elevi)/(((xline[1:]-xi)**2 + (yline[1:]-yi)**2)**0.5) ) )) + + if verbose: print(f'Max angle is {Sx:.4f} degrees') + + return Sx + +def calcSxmean (xx, yy, zagl, A, dmax, method='nearest', verbose=False): + + Asweep = np.linspace(A-15, A+15, 7)%360 + Sxmean = np.mean([calcSx(xx, yy, zagl, a, dmax, 
method, verbose=verbose) for a in Asweep ], axis=0) + + return Sxmean + + From 87d4a8f7b014f6a0f6e055115358cb42780f9b3f Mon Sep 17 00:00:00 2001 From: Regis Thedin Date: Thu, 5 Aug 2021 08:34:32 -0600 Subject: [PATCH 28/33] Add Sb calculation --- mmctools/coupling/terrain.py | 47 ++++++++++++++++++++++++++++++++++++ 1 file changed, 47 insertions(+) diff --git a/mmctools/coupling/terrain.py b/mmctools/coupling/terrain.py index ec17a2c..294083d 100644 --- a/mmctools/coupling/terrain.py +++ b/mmctools/coupling/terrain.py @@ -704,3 +704,50 @@ def calcSxmean (xx, yy, zagl, A, dmax, method='nearest', verbose=False): return Sxmean +def calcSb (xx, yy, zagl, A, sepdist=60): + ''' + Sb is a measure of upwind slope break and can be used to delineate zones of + possible flow separation. This function follows the definition of Sx0 from + the reference listed below and uses 1000m separation. + + Winstral, A., Marks D. "Simulating wind fields and snow redistribution using + terrain-based parameters to model snow accumulation and melt over a semi- + arid mountain catchment" Hydrol. Process. 16, 3585–3603 (2002) + + Usage + ===== + xx, yy : array + meshgrid arrays of the region extent coordinates. + zagl: array + Elevation map of the region + A: float + Wind direction (deg, wind direction convention) + sepdist : float, default 60 + Separation between two regional Sx calculations. + Suggested value: 60 m. + ''' + + from scipy import interpolate + + # local Sx + Sx1 = calcSx(xx, yy, zagl, A, dmax=sepdist) + + # outlying Sx. 
Computing it at (xo, yo), and not at (xi, yi) + xxo = xx - sepdist*np.cos(np.deg2rad(270-A)) + yyo = yy - sepdist*np.sin(np.deg2rad(270-A)) + points = np.array( (xx.flatten(), yy.flatten()) ).T + values = zagl.flatten() + zaglo = interpolate.griddata( points, values, (xxo,yyo), method='linear' ) + Sx0 = calcSx(xxo, yyo, zaglo, A, dmax=1000) + + Sb = Sx1 - Sx0 + + return Sb + +def calcSbmean (xx, yy, zagl, A, sepdist): + + Asweep = np.linspace(A-15, A+15, 7)%360 + Sbmean = np.mean([calcSb(xx, yy, zagl, a, sepdist) for a in Asweep ], axis=0) + + return Sbmean + From fedeb3cb61d2c4e181467ec6e996b43e648f1faf Mon Sep 17 00:00:00 2001 From: Regis Thedin Date: Thu, 5 Aug 2021 08:35:55 -0600 Subject: [PATCH 29/33] Add topographic position index calculation --- mmctools/coupling/terrain.py | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/mmctools/coupling/terrain.py b/mmctools/coupling/terrain.py index 294083d..4327075 100644 --- a/mmctools/coupling/terrain.py +++ b/mmctools/coupling/terrain.py @@ -751,3 +751,27 @@ def calcSbmean (xx, yy, zagl, A, sepdist): return Sbmean + +def calcTPI (xx, yy, zagl, r): + ''' + Topographic Position Index + + Reu, J, et al. Application of the topographic position index to heterogeneous + landscapes. 
Geomorphology, 186, 39-49 (2013) + ''' + from scipy.signal import convolve2d + + # get resolution (assumes uniform resolution) + res = xx[1,0] - xx[0,0] + rpoints = int(r/res) + if r < res: + raise ValueError('Averaging radium needs to be larger the resolution of the grid') + + y,x = np.ogrid[-rpoints:rpoints+1, -rpoints:rpoints+1] + kernel = x**2+y**2 <= rpoints**2 + + zaglmean = convolve2d(zagl, kernel*1, mode='same', boundary='fill', fillvalue=0) + zaglmean = zaglmean/np.sum(kernel*1) + + return zagl - zaglmean + From fce5deac3ead47d6a85de7cc08437899d8210881 Mon Sep 17 00:00:00 2001 From: ewquon Date: Mon, 9 Aug 2021 12:35:01 -0600 Subject: [PATCH 30/33] Minor cleanup --- mmctools/coupling/terrain.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/mmctools/coupling/terrain.py b/mmctools/coupling/terrain.py index 4327075..f2aebfa 100644 --- a/mmctools/coupling/terrain.py +++ b/mmctools/coupling/terrain.py @@ -625,7 +625,7 @@ def vrm_filt(x): return vrm -def calcSx (xx, yy, zagl, A, dmax, method='nearest', propagateNaN=False, verbose=False): +def calcSx(xx, yy, zagl, A, dmax, method='nearest', propagateNaN=False, verbose=False): ''' Sx is a measure of topographic shelter or exposure relative to a particular wind direction. Calculates a whole map for all points (xi, yi) in the domain. @@ -650,7 +650,7 @@ def calcSx (xx, yy, zagl, A, dmax, method='nearest', propagateNaN=False, verbos griddata interpolation method. Options are 'nearest', 'linear', 'cubic'. Function is slow if not `nearest`. 
propagateNaN: bool - If method != nearest, upwind posititions that lie outside the domain bounds receive NaN + If method != nearest, upwind positions that lie outside the domain bounds receive NaN ''' from scipy import interpolate @@ -696,7 +696,7 @@ def calcSx (xx, yy, zagl, A, dmax, method='nearest', propagateNaN=False, verbos return Sx -def calcSxmean (xx, yy, zagl, A, dmax, method='nearest', verbose=False): +def calcSxmean(xx, yy, zagl, A, dmax, method='nearest', verbose=False): Asweep = np.linspace(A-15, A+15, 7)%360 Sxmean = np.mean([calcSx(xx, yy, zagl, a, dmax, method, verbose=verbose) for a in Asweep ], axis=0) @@ -704,7 +704,7 @@ def calcSxmean (xx, yy, zagl, A, dmax, method='nearest', verbose=False): return Sxmean -def calcSb (xx, yy, zagl, A, sepdist=60): +def calcSb(xx, yy, zagl, A, sepdist=60): ''' Sb is a measure of upwind slope break and can be used to delineate zones of possible flow separation. This function follows the definition of Sx0 from @@ -744,7 +744,7 @@ def calcSb (xx, yy, zagl, A, sepdist=60): return Sb -def calcSbmean (xx, yy, zagl, A, sepdist): +def calcSbmean(xx, yy, zagl, A, sepdist): Asweep = np.linspace(A-15, A+15, 7)%360 Sbmean = np.mean([calcSb(xx, yy, zagl, a, sepdist) for a in Asweep ], axis=0) @@ -752,7 +752,7 @@ def calcSbmean (xx, yy, zagl, A, sepdist): return Sbmean -def calcTPI (xx, yy, zagl, r): +def calcTPI(xx, yy, zagl, r): ''' Topographic Position Index From 79b57dd31492a024f1a7de1e71736e3eb2551991 Mon Sep 17 00:00:00 2001 From: Eliot Quon Date: Wed, 8 Sep 2021 01:14:25 -0600 Subject: [PATCH 31/33] Fix height coordinate from interp_to_latlon --- mmctools/wrf/ts.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/mmctools/wrf/ts.py b/mmctools/wrf/ts.py index 4e1ce14..9c37ff1 100644 --- a/mmctools/wrf/ts.py +++ b/mmctools/wrf/ts.py @@ -241,6 +241,12 @@ def interp_to_latlon(self,latlon): finterp = coef00*f00 + coef10*f10 + coef01*f01 + coef11*f11 finterp = finterp / ((wrflon[i+1,j] - wrflon[i,j]) * (wrflat[i,j+1] - 
wrflat[i,j])) finterp = finterp.assign_coords({'lon':tgtlon,'lat':tgtlat}) + # fix height coordinate if needed + if 'height' not in finterp.coords: + if 'z' in finterp.coords: + finterp = finterp.rename_vars({'z':'height'}) + else: + print('WARNING: no z or height coordinate found') return finterp From eebfc7e79af296c010aa5a1a7f253a7ecb5980c4 Mon Sep 17 00:00:00 2001 From: Eliot Quon Date: Wed, 8 Sep 2021 00:48:51 -0600 Subject: [PATCH 32/33] Add kwargs for xr.open_dataset when called from extract_column_from_wrfdata --- mmctools/wrf/utils.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mmctools/wrf/utils.py b/mmctools/wrf/utils.py index 9bc69cc..c1cd531 100644 --- a/mmctools/wrf/utils.py +++ b/mmctools/wrf/utils.py @@ -750,7 +750,7 @@ def extract_column_from_wrfdata(fpath, coords, spatial_filter='interpolate',L_filter=0.0, additional_fields=[], verbose=False, - ): + **kwargs): """ Extract a column of time-height data for a specific site from a 4-dimensional WRF output file @@ -786,7 +786,7 @@ def extract_column_from_wrfdata(fpath, coords, 'Spatial filtering type "'+spatial_filter+'" not recognised' # Load WRF data - ds = xr.open_dataset(fpath) + ds = xr.open_dataset(fpath,**kwargs) tdim, zdim, ydim, xdim = get_wrf_dims(ds) From d6191c35e9ab77345934f57f14611fe6c811ce8b Mon Sep 17 00:00:00 2001 From: Eliot Quon Date: Wed, 8 Sep 2021 01:44:29 -0600 Subject: [PATCH 33/33] Handle boundary planes with and without const (dim=1) coordinate E.g., map_to_boundary(i=...) used to produce a dataslice with dimension coordinate 'x' and dim['x']==1. Now, we also handle dataslices with a preselected x, which results in a dimensionless coordinate 'x'. 
--- mmctools/coupling/sowfa.py | 21 +++++++++++++++++---- 1 file changed, 17 insertions(+), 4 deletions(-) diff --git a/mmctools/coupling/sowfa.py b/mmctools/coupling/sowfa.py index 259a0be..a66509d 100755 --- a/mmctools/coupling/sowfa.py +++ b/mmctools/coupling/sowfa.py @@ -390,8 +390,13 @@ def _check_xarray_dataset(self, # Only handle a single boundary plane at a time; boundaries # should be aligned with the Cartesian axes constdims = [dim for dim in self.ds.dims if self.ds.dims[dim]==1] - assert (len(constdims) == 1), 'more than one constant dim' - constdim = constdims[0] + if len(constdims) > 0: + assert (len(constdims) == 1), 'more than one constant dim' + constdim = constdims[0] + else: + nodimcoords = [coord for coord in self.ds.coords if len(self.ds.coords[coord].dims)==0] + assert (len(nodimcoords) == 1), 'more than one selected dim' + constdim = nodimcoords[0] print('Input is a {:s}-boundary at {:g}'.format(constdim, float(self.ds.coords[constdim]))) self.constdim = constdim @@ -484,7 +489,11 @@ def _write_points(self,fname='points',binary=False,gzip=False): print('Wrote',N,'points to',fpath) def _write_boundary_vector(self,fname,components,binary=False,gzip=False): - ds = self.ds.isel({self.constdim:0}) + if self.constdim in self.ds.dims: + assert self.ds.dims[self.constdim] == 1 + ds = self.ds.isel({self.constdim:0}) + else: + ds = self.ds # add missing dimensions, if any for dim in self.bndry_dims: for var in components: @@ -531,7 +540,11 @@ def _write_boundary_vector(self,fname,components,binary=False,gzip=False): print('Wrote',N,'vectors to',fpath,'at',str(tstamp)) def _write_boundary_scalar(self,fname,var,binary=False,gzip=False): - ds = self.ds.isel({self.constdim:0}) + if self.constdim in self.ds.dims: + assert self.ds.dims[self.constdim] == 1 + ds = self.ds.isel({self.constdim:0}) + else: + ds = self.ds # add missing dimensions, if any for dim in self.bndry_dims: if dim not in ds[var].dims: