Commit c9b0f34

Updates to example and processing (remove print etc.)
Cecile Marie Margaretha Kittel committed Oct 28, 2020
1 parent 38e7c07 commit c9b0f34
Showing 3 changed files with 15 additions and 10 deletions.
16 changes: 10 additions & 6 deletions lib/examples.py
@@ -9,6 +9,7 @@
from s3_catch import s3_evaluate
import geopandas as gpd
import os
+from datetime import datetime, timedelta

if __name__ == '__main__':

@@ -22,7 +23,7 @@

# First preliminary step: Extract water mask subsets
# Define your raster for the entire AOI
-full_raster = r'C:\users\ceki\Downloads\occurrence_zambezi.tif'
+full_raster = r'D:\OneDrive\Sentinel_3_GPOD\occurrence_zambezi.tif'
# where to save the subsets
dest_folder = r'C:\test\Water_Mask'
if not os.path.exists(dest_folder):
@@ -48,7 +49,7 @@
extent = [19, -9, 36, -20] #upper left x, upper left y, lower right x, lower right y

# Subset netcdf files:
-s3_subset_ncdf.subset_scihub_netcdf(download_dir, dest_dir, extent, file_id=r'enhanced_measurement.nc')
+# s3_subset_ncdf.subset_scihub_netcdf(download_dir, dest_dir, extent, file_id=r'enhanced_measurement.nc')
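For orientation, the extent list above is ordered [upper-left x, upper-left y, lower-right x, lower-right y] in degrees, so the first y value lies north of the second. A minimal point-in-extent check under that reading (the helper is hypothetical, not part of s3_subset_ncdf):

def in_extent(lon, lat, extent):
    # extent = [ulx, uly, lrx, lry]; uly is the northern edge, lry the southern
    ulx, uly, lrx, lry = extent
    return ulx <= lon <= lrx and lry <= lat <= uly

in_extent(25.0, -15.0, [19, -9, 36, -20])  # True: inside the Zambezi AOI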


# First processing step:
@@ -60,7 +61,7 @@
# s3b_folder_g3 = r'..\..\test\GPOD_subset\s3b_3bin'

# SciHub folders:
-s3a_folder_s = r'C:\test\SciHub_subset\s3a'
+s3a_folder_s = r'C:\Users\ceki\Desktop\New folder'

# GPOD folders with Level-1b data.
# s3a_folder_stack = r'..\..\..\test\GPOD_subset\s3a_stacks'
@@ -76,21 +77,24 @@

wm_folder_S3A = r'..\..\test\New_VS_Nature'
wm_folder_S3B = r'..\..\test\New_VS_Nature'
+
+s3_folder_S3A = r'D:\OneDrive - Danmarks Tekniske Universitet\Sentinel_3_ESA\Zambezi\S3A\Enh'
+s3_folder_S3B = r'D:\OneDrive - Danmarks Tekniske Universitet\Sentinel_3_ESA\Zambezi\S3B\Enh'


# Second step: Process netcdf files - extracts all data for all VS
# Returns two dictionaries:
# VS - contains the virtual stations in dictionary form
# outliers - contains information about the removed points for each VS
-vs_s3a_s, outliers_s3a_s = s3_preprocessing.read_s3_nc(s3a_folder_s, vs_coords=vs_s3a,
+vs_s3b_s, outliers_s3b_s = s3_preprocessing.read_s3_nc(s3_folder_S3B, vs_coords=vs_s3b,
wm_folder=dest_folder, source='SciHub',
dem_file=r'C:\test\merit_egm2008.tif',
sigma_thresh=30, dem_thresh=30, vs_buffer=0.015, rip_thresh=1e-13,
stack=False, stack_folder=None)

# Third step: Create time series - calculates along-track means to produce
# daily WSE observations
-vs_s3a_s_d = s3_preprocessing.create_vs_ts(vs_s3a_s, subset_vs=False, sigma_thresh=30, source='SciHub')
+vs_s3b_s_d = s3_preprocessing.create_vs_ts(vs_s3b_s, subset_vs=False, sigma_thresh=30, source='SciHub')
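The VS dictionaries returned by the second step are keyed by (x, y) coordinate tuples, as the vs[p][...] accesses in s3_preprocessing.py further down suggest. A hedged sketch of inspecting the daily series, where the 'wse' field name is an assumption for illustration, not confirmed by this diff:

for (x, y), station in vs_s3b_s_d.items():
    # 'wse' is an assumed field name holding the daily water surface elevations
    print(f"VS ({x:.3f}, {y:.3f}): {len(station['wse'])} daily WSE observations")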


# Fourth step: Write text files with the observations at each VS
@@ -107,7 +111,7 @@
# mostdata: at least 80% of expected observations
# windext: improvement between 2x and 3x extension on GPOD
# postoltc: improvement after S3A OLTC update
-mostdata, windext, postoltc = s3_evaluate.sort_l2(vs_s3a_s, outliers_s3a_s, vsd, oltc=False, oltc_date=datetime(2019,3,1).date(),
+mostdata, windext, postoltc = s3_evaluate.sort_l2(vs_s3a_s, outliers_s3a_s, vs_s3a_s_d, oltc=False, oltc_date=datetime(2019,3,1).date(),
vs3=None, outliers3=None, vsd3=None)
# Level-1b evaluation.
# also divided through total (s3-valid), 3x extension (s3_valid_3)
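The oltc_date argument is what the new datetime import at the top of examples.py is for; datetime(2019, 3, 1).date() simply converts the S3A OLTC update timestamp into a date object:

from datetime import datetime

oltc_date = datetime(2019, 3, 1).date()  # datetime.date(2019, 3, 1)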
8 changes: 5 additions & 3 deletions lib/s3_catch/s3_preprocessing.py
@@ -92,7 +92,7 @@ def read_s3_nc(s3_folder, vs_coords, wm_folder, source,
start = datetime.now()
for f in os.listdir(s3_folder):
s3_file = os.path.join(s3_folder, f)
-if s3_file.endswith('.nc'):
+if s3_file.endswith('NT_003.nc') or s3_file.endswith('NT_004.nc') and f.split('_')[8][:6] < '202002':
nc = netCDF4.Dataset(s3_file)
for p in zip(vs_coords['xcoord'], vs_coords['ycoord']):
(x, y) = p
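Note that Python's and binds tighter than or, so the new condition parses as NT_003 or (NT_004 and pre-February-2020): the date cutoff only applies to NT_004 files. If the cutoff were meant to cover both baselines, explicit grouping would be needed; a sketch of that alternative reading (an assumption about intent, not the committed behaviour):

# assumes f.split('_')[8][:6] is the zero-padded YYYYMM sensing date,
# so lexicographic comparison against '202002' matches chronological order
is_nt = s3_file.endswith('NT_003.nc') or s3_file.endswith('NT_004.nc')
if is_nt and f.split('_')[8][:6] < '202002':
    nc = netCDF4.Dataset(s3_file)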
@@ -117,7 +117,8 @@
geoid = np.interp(nc.variables[lat][:].filled(), lat_01, geoid_01)
if lat_01[0] > lat_01[1]:
geoid = np.interp(nc.variables[lat][:].filled(),
-np.flip(lat_01), np.flip(geoid_01)) #np.interp sorts to ascending values - if descending, this will have to be corrected.
+np.flip(lat_01), np.flip(geoid_01))
+#np.interp sorts to ascending values - if descending, this will have to be corrected.
# Get retracked WSE
height = (nc[elev][:].filled() - geoid)[selected]
src_ds=gdal.Open(dem_file)
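The flip matters because numpy.interp requires its x-coordinates to be monotonically increasing; with a descending lat_01 it silently returns wrong values rather than raising an error. A minimal self-contained illustration:

import numpy as np

lat_desc = np.array([10.0, 5.0, 0.0])    # descending latitudes
geoid_vals = np.array([30.0, 20.0, 10.0])

np.interp(5.0, lat_desc, geoid_vals)                    # wrong: xp not ascending
np.interp(5.0, np.flip(lat_desc), np.flip(geoid_vals))  # 20.0, as expected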
@@ -206,7 +207,8 @@ def read_s3_nc(s3_folder, vs_coords, wm_folder, source,
filetracker[p][root] = np.concatenate([filetracker[p][root],
nc.variables['Meas_Index_20Hz'][selected][dem_filter][~np.isnan(mask)].filled()])
if source == 'SciHub':
-vs[p]['sat_path'] = np.concatenate([vs[p]['sat_path'], np.repeat('descending' if nc.getncattr('first_meas_lat')-nc.getncattr('last_meas_lat')<0 else 'ascending',
+vs[p]['sat_path'] = np.concatenate([vs[p]['sat_path'], np.repeat('descending' if
+nc.getncattr('first_meas_lat')-nc.getncattr('last_meas_lat') > 0 else 'ascending',
len(dem[dem_filter][~np.isnan(mask)]))])
vs[p]['pass'] = np.concatenate([vs[p]['pass'], np.repeat(nc.getncattr('pass_number'),
len(dem[dem_filter][~np.isnan(mask)]))])
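The sign fix above means a pass is now labelled descending when the first measured latitude is higher than the last, i.e. the satellite is moving from north to south. Equivalently (helper name hypothetical):

def pass_direction(first_meas_lat, last_meas_lat):
    # descending passes travel from high to low latitude
    return 'descending' if first_meas_lat > last_meas_lat else 'ascending'

pass_direction(10.0, -5.0)  # 'descending'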
1 change: 0 additions & 1 deletion lib/s3_catch/s3_utils.py
@@ -121,7 +121,6 @@ def outlier_filter(p, nc, wm_folder, selected, dem_filter, lat, lon, sigma0, sig
# Get water mask coordinates
wm_coords = [(float(fn.split('_')[1]), float(fn.split('_')[2].split('.tif')[0])) for fn in os.listdir(wm_folder)]
(x, y) = sorted([(dist(p1, p), p1) for p1 in wm_coords])[0][1]
-print(x,y)
src_filename = glob.glob(os.path.join(wm_folder, '*' + '_' + str(np.round(x, 1)) + '_' + str(np.round(y, 1)) + '.tif'))[0]
src_ds=gdal.Open(src_filename)
rb=src_ds.GetRasterBand(1)
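As an aside, the nearest-tile lookup on the sorted(...) line above could be written with min and a key function, which avoids sorting the whole list; a behaviour-preserving alternative (dist being the module's existing distance helper):

(x, y) = min(wm_coords, key=lambda p1: dist(p1, p))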
