diff --git a/lib/examples.py b/lib/examples.py
index e45225d..9c773ae 100644
--- a/lib/examples.py
+++ b/lib/examples.py
@@ -9,6 +9,7 @@
 from s3_catch import s3_evaluate
 import geopandas as gpd
 import os
+from datetime import datetime, timedelta
 
 if __name__ == '__main__':
 
@@ -22,7 +23,7 @@
 
     # First preliminary step: Extract water mask subsets
    # Define your raster for the entire AOI
-    full_raster = r'C:\users\ceki\Downloads\occurrence_zambezi.tif'
+    full_raster = r'D:\OneDrive\Sentinel_3_GPOD\occurrence_zambezi.tif'
     # where to save the subsets
     dest_folder = r'C:\test\Water_Mask'
     if not os.path.exists(dest_folder):
@@ -48,7 +49,7 @@
     extent = [19, -9, 36, -20] #upper left x, upper left y, lower right x, lower right y
 
     # Subset netcdf files:
-    s3_subset_ncdf.subset_scihub_netcdf(download_dir, dest_dir, extent, file_id=r'enhanced_measurement.nc')
+    # s3_subset_ncdf.subset_scihub_netcdf(download_dir, dest_dir, extent, file_id=r'enhanced_measurement.nc')
 
 
     # First processing step:
@@ -60,7 +61,7 @@
     # s3b_folder_g3 = r'..\..\test\GPOD_subset\s3b_3bin'
 
     # SciHub folders:
-    s3a_folder_s = r'C:\test\SciHub_subset\s3a'
+    s3a_folder_s = r'C:\Users\ceki\Desktop\New folder'
 
     # GPOD folders with Level-1b data.
     # s3a_folder_stack = r'..\..\..\test\GPOD_subset\s3a_stacks'
@@ -76,13 +77,16 @@
     wm_folder_S3A = r'..\..\test\New_VS_Nature'
     wm_folder_S3B = r'..\..\test\New_VS_Nature'
+
+    s3_folder_S3A = r'D:\OneDrive - Danmarks Tekniske Universitet\Sentinel_3_ESA\Zambezi\S3A\Enh'
+    s3_folder_S3B = r'D:\OneDrive - Danmarks Tekniske Universitet\Sentinel_3_ESA\Zambezi\S3B\Enh'
 
 
     # Second step: Process netcdf files - extracts all data for all VS
     # Returns two dictionaries:
     # VS - contains the virtual stations in dictionary form
     # outliers - contains information about the removed points for each VS
-    vs_s3a_s, outliers_s3a_s = s3_preprocessing.read_s3_nc(s3a_folder_s, vs_coords=vs_s3a,
+    vs_s3b_s, outliers_s3b_s = s3_preprocessing.read_s3_nc(s3_folder_S3B, vs_coords=vs_s3b,
                                                            wm_folder=dest_folder, source='SciHub',
                                                            dem_file=r'C:\test\merit_egm2008.tif',
                                                            sigma_thresh=30, dem_thresh=30,
                                                            vs_buffer=0.015, rip_thresh=1e-13,
@@ -90,7 +94,7 @@
 
     # Third step: Create time series - calculates along-track means to produce
     # daily WSE observations
-    vs_s3a_s_d = s3_preprocessing.create_vs_ts(vs_s3a_s, subset_vs=False, sigma_thresh=30, source='SciHub')
+    vs_s3b_s_d = s3_preprocessing.create_vs_ts(vs_s3b_s, subset_vs=False, sigma_thresh=30, source='SciHub')
 
 
     # Fourth step: Write text files with the observations at each VS
@@ -107,7 +111,7 @@
     # mostdata: at least 80% of expected observations
     # windext: improvement between 2x and 3x extension on GPOD
     # postoltc: improvement after S3A OLTC update
-    mostdata, windext, postoltc = s3_evaluate.sort_l2(vs_s3a_s, outliers_s3a_s, vsd, oltc=False, oltc_date=datetime(2019,3,1).date(),
+    mostdata, windext, postoltc = s3_evaluate.sort_l2(vs_s3a_s, outliers_s3a_s, vs_s3a_s_d, oltc=False, oltc_date=datetime(2019,3,1).date(),
                                                       vs3=None, outliers3=None, vsd3=None)
     # Level-1b evaluation.
     # also divided through total (s3-valid), 3x extension (s3_valid_3)
diff --git a/lib/s3_catch/s3_preprocessing.py b/lib/s3_catch/s3_preprocessing.py
index 404aeca..a4cf418 100644
--- a/lib/s3_catch/s3_preprocessing.py
+++ b/lib/s3_catch/s3_preprocessing.py
@@ -92,7 +92,7 @@ def read_s3_nc(s3_folder, vs_coords, wm_folder, source,
     start = datetime.now()
     for f in os.listdir(s3_folder):
         s3_file = os.path.join(s3_folder, f)
-        if s3_file.endswith('.nc'):
+        if (s3_file.endswith('NT_003.nc') or s3_file.endswith('NT_004.nc')) and f.split('_')[8][:6] < '202002':
             nc = netCDF4.Dataset(s3_file)
             for p in zip(vs_coords['xcoord'], vs_coords['ycoord']):
                 (x, y) = p
@@ -117,7 +117,8 @@ def read_s3_nc(s3_folder, vs_coords, wm_folder, source,
                 geoid = np.interp(nc.variables[lat][:].filled(), lat_01, geoid_01)
                 if lat_01[0] > lat_01[1]:
                     geoid = np.interp(nc.variables[lat][:].filled(),
-                                      np.flip(lat_01), np.flip(geoid_01)) #np.interp sorts to ascending values - if descending, this will have to be corrected.
+                                      np.flip(lat_01), np.flip(geoid_01))
+                                      #np.interp sorts to ascending values - if descending, this will have to be corrected.
                 # Get retracked WSE
                 height = (nc[elev][:].filled() - geoid)[selected]
                 src_ds=gdal.Open(dem_file)
@@ -206,7 +207,8 @@ def read_s3_nc(s3_folder, vs_coords, wm_folder, source,
                 filetracker[p][root] = np.concatenate([filetracker[p][root],
                                                        nc.variables['Meas_Index_20Hz'][selected][dem_filter][~np.isnan(mask)].filled()])
                 if source == 'SciHub':
-                    vs[p]['sat_path'] = np.concatenate([vs[p]['sat_path'], np.repeat('descending' if nc.getncattr('first_meas_lat')-nc.getncattr('last_meas_lat')<0 else 'ascending',
+                    vs[p]['sat_path'] = np.concatenate([vs[p]['sat_path'], np.repeat('descending' if
+                                                        nc.getncattr('first_meas_lat')-nc.getncattr('last_meas_lat') > 0 else 'ascending',
                                                         len(dem[dem_filter][~np.isnan(mask)]))])
                     vs[p]['pass'] = np.concatenate([vs[p]['pass'], np.repeat(nc.getncattr('pass_number'),
                                                                              len(dem[dem_filter][~np.isnan(mask)]))])
diff --git a/lib/s3_catch/s3_utils.py b/lib/s3_catch/s3_utils.py
index 4fa53f3..86adfce 100644
--- a/lib/s3_catch/s3_utils.py
+++ b/lib/s3_catch/s3_utils.py
@@ -121,7 +121,6 @@ def outlier_filter(p, nc, wm_folder, selected, dem_filter, lat, lon, sigma0, sig
     # Get water mask coordinates
     wm_coords = [(float(fn.split('_')[1]), float(fn.split('_')[2].split('.tif')[0])) for fn in os.listdir(wm_folder)]
     (x, y) = sorted([(dist(p1, p), p1) for p1 in wm_coords])[0][1]
-    print(x,y)
     src_filename = glob.glob(os.path.join(wm_folder, '*' + '_' + str(np.round(x, 1)) + '_' + str(np.round(y, 1)) + '.tif'))[0]
     src_ds=gdal.Open(src_filename)
     rb=src_ds.GetRasterBand(1)
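
A note on the narrowed file filter in read_s3_nc (first hunk of s3_preprocessing.py above): instead of accepting every *.nc file, the loop now keeps only non-time-critical (NT) products from processing baselines 003/004 sensed before February 2020. The two endswith() checks have to be evaluated as a unit, because Python's 'and' binds tighter than 'or'; unparenthesized, NT_003 files would bypass the date cut. Below is a minimal standalone sketch of that predicate, assuming the underscore-separated Sentinel-3 product naming in which field index 8 carries the sensing start timestamp (inferred from the diff); the helper name keep_file and the example file name are hypothetical.

    # Standalone sketch of the filename filter introduced in read_s3_nc.
    # Assumption (inferred from the diff): field 8 of the underscore-split
    # name is a zero-padded YYYYMMDDTHHMMSS sensing start timestamp.
    def keep_file(fname):
        # Non-time-critical products, processing baseline 003 or 004 only.
        is_nt = fname.endswith('NT_003.nc') or fname.endswith('NT_004.nc')
        # Sensed before February 2020; plain string comparison is safe on a
        # zero-padded YYYYMM prefix.
        pre_feb_2020 = fname.split('_')[8][:6] < '202002'
        # Combining the two named booleans keeps the precedence explicit.
        return is_nt and pre_feb_2020

    # Hypothetical file name, for illustration only:
    print(keep_file('S3A_SR_2_LAN_x_x_x_x_20190301T071237_x_NT_003.nc'))  # True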