From dc986bac9e9cb392b0f6aae722200c7b141467b0 Mon Sep 17 00:00:00 2001 From: Andrew Johnston Date: Tue, 30 Apr 2024 11:00:30 -0800 Subject: [PATCH 01/13] Update dependabot.yml to weekly --- .github/dependabot.yml | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/.github/dependabot.yml b/.github/dependabot.yml index f2494b1b..82990ed2 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -1,8 +1,13 @@ +# To get started with Dependabot version updates, you'll need to specify which +# package ecosystems to update and where the package manifests are located. +# Please see the documentation for all configuration options: +# https://docs.github.com/en/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file + version: 2 updates: - - package-ecosystem: "github-actions" - directory: "/" + - package-ecosystem: github-actions + directory: / schedule: - interval: "daily" + interval: weekly labels: - - "bumpless" + - bumpless From 9e07482b55edd03af984d7254279424d60bd008d Mon Sep 17 00:00:00 2001 From: Joseph H Kennedy Date: Fri, 3 May 2024 16:35:51 -0800 Subject: [PATCH 02/13] Don't crop when there is no data --- src/hyp3_autorift/process.py | 29 ++++++++++++----------------- 1 file changed, 12 insertions(+), 17 deletions(-) diff --git a/src/hyp3_autorift/process.py b/src/hyp3_autorift/process.py index de807959..3f6a56a9 100644 --- a/src/hyp3_autorift/process.py +++ b/src/hyp3_autorift/process.py @@ -11,7 +11,7 @@ from datetime import datetime from pathlib import Path from secrets import token_hex -from typing import Callable, Optional, Tuple +from typing import Callable, Literal, Optional, Tuple import boto3 import botocore.exceptions @@ -370,12 +370,11 @@ def get_opendata_prefix(file: Path): region ]) - def process( reference: str, secondary: str, parameter_file: str = DEFAULT_PARAMETER_FILE, - naming_scheme: str = 'ITS_LIVE_OD', + naming_scheme: Literal['ITS_LIVE_OD', 'ITS_LIVE_PROD'] = 'ITS_LIVE_OD', esa_username: Optional[str] = None, esa_password: Optional[str] = None, ) -> Tuple[Path, Path]: @@ -540,21 +539,17 @@ def process( raise Exception('Processing failed! Output netCDF file not found') netcdf_file = Path(netcdf_file) - cropped_file = crop_netcdf_product(netcdf_file) - netcdf_file.unlink() - - if naming_scheme == 'ITS_LIVE_PROD': - product_file = netcdf_file - elif naming_scheme == 'ASF': - product_name = get_product_name( - reference, secondary, orbit_files=(reference_state_vec, secondary_state_vec), - pixel_spacing=parameter_info['xsize'], - ) - product_file = Path(f'{product_name}.nc') - else: + if naming_scheme == 'ITS_LIVE_OD': product_file = netcdf_file.with_stem(f'{netcdf_file.stem}_IL_ASF_OD') + else: + product_file = netcdf_file - shutil.move(cropped_file, str(product_file)) + if not netcdf_file.name.endswith('_P000.nc'): + cropped_file = crop_netcdf_product(netcdf_file) + netcdf_file.unlink() + shutil.move(cropped_file, str(product_file)) + else: + shutil.move(netcdf_file, str(product_file)) with Dataset(product_file) as nc: velocity = nc.variables['v'] @@ -580,7 +575,7 @@ def main(): parser.add_argument('--parameter-file', default=DEFAULT_PARAMETER_FILE, help='Shapefile for determining the correct search parameters by geographic location. 
' 'Path to shapefile must be understood by GDAL') - parser.add_argument('--naming-scheme', default='ITS_LIVE_OD', choices=['ITS_LIVE_OD', 'ITS_LIVE_PROD', 'ASF'], + parser.add_argument('--naming-scheme', default='ITS_LIVE_OD', choices=['ITS_LIVE_OD', 'ITS_LIVE_PROD'], help='Naming scheme to use for product files') parser.add_argument('granules', type=str.split, nargs='+', help='Granule pair to process') From 7e7a0004fd17a6474c26601faf2a9f3fdce896cd Mon Sep 17 00:00:00 2001 From: Joseph H Kennedy Date: Fri, 3 May 2024 16:40:20 -0800 Subject: [PATCH 03/13] Drop product name function and associated tests --- src/hyp3_autorift/process.py | 26 ----------------- tests/test_process.py | 55 ------------------------------------ 2 files changed, 81 deletions(-) diff --git a/src/hyp3_autorift/process.py b/src/hyp3_autorift/process.py index 3f6a56a9..d35bb43d 100644 --- a/src/hyp3_autorift/process.py +++ b/src/hyp3_autorift/process.py @@ -10,7 +10,6 @@ import xml.etree.ElementTree as ET from datetime import datetime from pathlib import Path -from secrets import token_hex from typing import Callable, Literal, Optional, Tuple import boto3 @@ -198,31 +197,6 @@ def get_datetime(scene_name): raise ValueError(f'Unsupported scene format: {scene_name}') -def get_product_name(reference_name, secondary_name, orbit_files=None, pixel_spacing=240): - mission = reference_name[0:2] - plat1 = reference_name.split('_')[0][-1] - plat2 = secondary_name.split('_')[0][-1] - - ref_datetime = get_datetime(reference_name) - sec_datetime = get_datetime(secondary_name) - days = abs((ref_datetime - sec_datetime).days) - - datetime1 = ref_datetime.strftime('%Y%m%dT%H%M%S') - datetime2 = sec_datetime.strftime('%Y%m%dT%H%M%S') - - if reference_name.startswith('S1'): - polarization1 = reference_name[15:16] - polarization2 = secondary_name[15:16] - orbit = least_precise_orbit_of(orbit_files) - misc = polarization1 + polarization2 + orbit - else: - misc = 'B08' - - product_id = token_hex(2).upper() - - return f'{mission}{plat1}{plat2}_{datetime1}_{datetime2}_{misc}{days:03}_VEL{pixel_spacing}_A_{product_id}' - - def get_platform(scene: str) -> str: if scene.startswith('S1') or scene.startswith('S2'): return scene[0:2] diff --git a/tests/test_process.py b/tests/test_process.py index d9040e33..58e02a08 100644 --- a/tests/test_process.py +++ b/tests/test_process.py @@ -1,7 +1,6 @@ import io from datetime import datetime from pathlib import Path -from re import match from unittest import mock from unittest.mock import MagicMock, patch @@ -287,60 +286,6 @@ def test_get_datetime(): process.get_datetime('S3_adsflafjladsf') -def test_get_product_name(): - payload = { - 'reference_name': 'S1A_IW_SLC__1SSV_20160527T014319_20160527T014346_011438_011694_26B0', - 'secondary_name': 'S1A_IW_SLC__1SSV_20160714T014322_20160714T014349_012138_012CE7_96A0', - 'orbit_files': [ - 'S1A_OPER_AUX_POEORB_OPOD_20160616T121500_V20160526T225943_20160528T005943.EOF', - 'S1A_OPER_AUX_POEORB_OPOD_20160616T121500_V20160526T225943_20160528T005943.EOF', - ], - 'pixel_spacing': 240, - } - name = process.get_product_name(**payload) - assert match(r'S1AA_20160527T014319_20160714T014322_VVP049_VEL240_A_[0-9A-F]{4}$', name) - - payload = { - 'reference_name': 'S1B_IW_SLC__1SDH_20200918T073646_20200918T073716_023426_02C7FC_6374', - 'secondary_name': 'S1A_IW_SLC__1SDH_20200906T073646_20200906T073716_023251_02C278_AE75', - 'orbit_files': [ - 'S1B_OPER_AUX_RESORB_OPOD_20200907T115242_V20200906T042511_20200906T074241.EOF', - 
'S1A_OPER_AUX_POEORB_OPOD_20160616T121500_V20160526T225943_20160528T005943.EOF', - ], - 'pixel_spacing': 40 - } - name = process.get_product_name(**payload) - assert match(r'S1BA_20200918T073646_20200906T073646_HHR012_VEL40_A_[0-9A-F]{4}$', name) - - payload = { - 'reference_name': 'S1A_IW_SLC__1SSV_20150101T230038_20150101T230114_003984_004CC1_0481', - 'secondary_name': 'S1B_IW_SLC__1SDV_20200924T005722_20200924T005750_023510_02CA91_4873', - 'orbit_files': [ - 'S1B_OPER_AUX_RESORB_OPOD_20200907T115242_V20200906T042511_20200906T074241.EOF', - None, - ], - 'pixel_spacing': 40 - } - name = process.get_product_name(**payload) - assert match(r'S1AB_20150101T230038_20200924T005722_VVO2093_VEL40_A_[0-9A-F]{4}$', name) - - payload = { - 'reference_name': 'S2B_MSIL2A_20200903T151809_N0214_R068_T22WEB_20200903T194353', - 'secondary_name': 'S2B_MSIL2A_20200913T151809_N0214_R068_T22WEB_20200913T180530', - 'pixel_spacing': 40, - } - name = process.get_product_name(**payload) - assert match(r'S2BB_20200903T151809_20200913T151809_B08010_VEL40_A_[0-9A-F]{4}$', name) - - payload = { - 'reference_name': 'LC08_L1TP_009011_20200703_20200913_02_T1', - 'secondary_name': 'LC08_L1TP_009011_20200820_20200905_02_T1', - 'pixel_spacing': 40, - } - name = process.get_product_name(**payload) - assert match(r'LC88_20200703T000000_20200820T000000_B08048_VEL40_A_[0-9A-F]{4}$', name) - - def test_get_s1_primary_polarization(): assert process.get_s1_primary_polarization( 'S1B_WV_SLC__1SSV_20200923T184541_20200923T185150_023506_02CA71_AABB') == 'vv' From 65af7e0fdb01ae1b273a49ffe5e5c33bf232f259 Mon Sep 17 00:00:00 2001 From: Joseph H Kennedy Date: Fri, 3 May 2024 16:45:38 -0800 Subject: [PATCH 04/13] changelog --- CHANGELOG.md | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index c8f67e6a..2e66c5f5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,14 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [PEP 440](https://www.python.org/dev/peps/pep-0440/) and uses [Semantic Versioning](https://semver.org/spec/v2.0.0.html). + +## [0.15.1] +### Fixed +* `hyp3_autorift` will no longer attempt to crop files with no valid data + +### Removed +* The unused `ASF` naming scheme has been removed from the `hyp3_autorift` CLI and the `hyp3_autorift.process` function + ## [0.15.0] ### Added * `--publish-bucket` option has been added to the HyP3 entry point to additionally publish products an AWS bucket, such as the ITS_LIVE AWS Open Data bucket, `s3://its-live-data`. 
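The cropping fix recorded above (from PATCH 02) keys off the product name: below is a minimal sketch of the finalization logic, assuming the trailing `_PNNN` field of an ITS_LIVE product name gives the percentage of valid pixels, so `_P000.nc` marks a product with no valid data and nothing to crop to. `finalize_product` is a hypothetical helper name; `crop_netcdf_product` is the real function the patch imports from `hyp3_autorift.crop`:

    import shutil
    from pathlib import Path

    from hyp3_autorift.crop import crop_netcdf_product


    def finalize_product(netcdf_file: Path, product_file: Path) -> Path:
        # hypothetical helper mirroring the guard added in PATCH 02
        if netcdf_file.name.endswith('_P000.nc'):
            # no valid data, so there is no extent to crop to; ship the file as-is
            shutil.move(str(netcdf_file), str(product_file))
        else:
            cropped_file = crop_netcdf_product(netcdf_file)
            netcdf_file.unlink()
            shutil.move(str(cropped_file), str(product_file))
        return product_file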
From afacc3131bab606877d43c0f1ce8b3ddf3a7b041 Mon Sep 17 00:00:00 2001 From: Joseph H Kennedy Date: Fri, 3 May 2024 16:47:03 -0800 Subject: [PATCH 05/13] fix whitespace --- src/hyp3_autorift/process.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/hyp3_autorift/process.py b/src/hyp3_autorift/process.py index d35bb43d..619cc0f9 100644 --- a/src/hyp3_autorift/process.py +++ b/src/hyp3_autorift/process.py @@ -344,6 +344,7 @@ def get_opendata_prefix(file: Path): region ]) + def process( reference: str, secondary: str, From 3ebe930a78eac977beeb3e7d88f7746630e3683b Mon Sep 17 00:00:00 2001 From: Joseph H Kennedy Date: Fri, 3 May 2024 17:01:32 -0800 Subject: [PATCH 06/13] Rename hyp3_auorift.io to prevent shadowing builtin io module --- src/hyp3_autorift/io.py | 215 -------------------- src/hyp3_autorift/process.py | 25 ++- src/hyp3_autorift/s1_correction.py | 6 +- src/hyp3_autorift/utils.py | 211 ++++++++++++++++++- src/hyp3_autorift/vend/CHANGES.diff | 4 +- src/hyp3_autorift/vend/README.md | 2 +- src/hyp3_autorift/vend/testautoRIFT.py | 2 +- src/hyp3_autorift/vend/testautoRIFT_ISCE.py | 2 +- tests/test_io.py | 105 ---------- tests/test_utils.py | 121 ++++++++++- 10 files changed, 342 insertions(+), 351 deletions(-) delete mode 100644 src/hyp3_autorift/io.py delete mode 100644 tests/test_io.py diff --git a/src/hyp3_autorift/io.py b/src/hyp3_autorift/io.py deleted file mode 100644 index 985aae24..00000000 --- a/src/hyp3_autorift/io.py +++ /dev/null @@ -1,215 +0,0 @@ -"""Helper io utilities for autoRIFT""" - -import logging -import sys -import textwrap -from pathlib import Path -from typing import Tuple, Union - -from hyp3lib import DemError -from isce.applications.topsApp import TopsInSAR -from osgeo import gdal -from osgeo import ogr -from osgeo import osr - -from hyp3_autorift.geometry import fix_point_for_antimeridian, flip_point_coordinates - -log = logging.getLogger(__name__) - - -def find_jpl_parameter_info(polygon: ogr.Geometry, parameter_file: str) -> dict: - driver = ogr.GetDriverByName('ESRI Shapefile') - shapes = driver.Open(parameter_file, gdal.GA_ReadOnly) - - parameter_info = None - centroid = flip_point_coordinates(polygon.Centroid()) - centroid = fix_point_for_antimeridian(centroid) - for feature in shapes.GetLayer(0): - if feature.geometry().Contains(centroid): - parameter_info = { - 'name': f'{feature["name"]}', - 'epsg': feature['epsg'], - 'geogrid': { - 'dem': f"/vsicurl/{feature['h']}", - 'ssm': f"/vsicurl/{feature['StableSurfa']}", - 'dhdx': f"/vsicurl/{feature['dhdx']}", - 'dhdy': f"/vsicurl/{feature['dhdy']}", - 'vx': f"/vsicurl/{feature['vx0']}", - 'vy': f"/vsicurl/{feature['vy0']}", - 'srx': f"/vsicurl/{feature['vxSearchRan']}", - 'sry': f"/vsicurl/{feature['vySearchRan']}", - 'csminx': f"/vsicurl/{feature['xMinChipSiz']}", - 'csminy': f"/vsicurl/{feature['yMinChipSiz']}", - 'csmaxx': f"/vsicurl/{feature['xMaxChipSiz']}", - 'csmaxy': f"/vsicurl/{feature['yMaxChipSiz']}", - 'sp': f"/vsicurl/{feature['sp']}", - 'dhdxs': f"/vsicurl/{feature['dhdxs']}", - 'dhdys': f"/vsicurl/{feature['dhdys']}", - }, - 'autorift': { - 'grid_location': 'window_location.tif', - 'init_offset': 'window_offset.tif', - 'search_range': 'window_search_range.tif', - 'chip_size_min': 'window_chip_size_min.tif', - 'chip_size_max': 'window_chip_size_max.tif', - 'offset2vx': 'window_rdr_off2vel_x_vec.tif', - 'offset2vy': 'window_rdr_off2vel_y_vec.tif', - 'stable_surface_mask': 'window_stable_surface_mask.tif', - 'scale_factor': 'window_scale_factor.tif', - 'mpflag': 0, - } - } - break - - if 
parameter_info is None: - raise DemError('Could not determine appropriate DEM for:\n' - f' centroid: {centroid}' - f' using: {parameter_file}') - - dem_geotransform = gdal.Info(parameter_info['geogrid']['dem'], format='json')['geoTransform'] - parameter_info['xsize'] = abs(dem_geotransform[1]) - parameter_info['ysize'] = abs(dem_geotransform[5]) - - return parameter_info - - -def format_tops_xml(reference, secondary, polarization, dem, orbits, xml_file='topsApp.xml'): - xml_template = f""" - - - - {orbits} - {orbits} - reference - ['{reference}.zip'] - {polarization} - - - {orbits} - {orbits} - secondary - ['{secondary}.zip'] - {polarization} - - {dem} - False - True - False - False - False - 32 - 32 - 51 - 51 - 32 - 32 - - - """ - - with open(xml_file, 'w') as f: - f.write(textwrap.dedent(xml_template)) - - -class SysArgvManager: - """Context manager to clear and reset sys.argv - - A bug in the ISCE2 Application class causes sys.argv to always be parsed when - no options are proved, even when setting `cmdline=[]`, preventing programmatic use. - """ - def __init__(self): - self.argv = sys.argv.copy() - - def __enter__(self): - sys.argv = sys.argv[:1] - - def __exit__(self, exc_type, exc_val, exc_tb): - sys.argv = self.argv - - -def get_topsinsar_config(): - with SysArgvManager(): - insar = TopsInSAR(name="topsApp") - insar.configure() - - config_data = {} - for name in ['reference', 'secondary']: - scene = insar.__getattribute__(name) - - sensing_times = [] - for swath in range(1, 4): - scene.configure() - scene.swathNumber = swath - scene.parse() - sensing_times.append( - (scene.product.sensingStart, scene.product.sensingStop) - ) - - sensing_start = min([sensing_time[0] for sensing_time in sensing_times]) - sensing_stop = max([sensing_time[1] for sensing_time in sensing_times]) - - sensing_dt = (sensing_stop - sensing_start) / 2 + sensing_start - - config_data[f'{name}_filename'] = Path(scene.safe[0]).name - config_data[f'{name}_dt'] = sensing_dt.strftime("%Y%m%dT%H:%M:%S.%f").rstrip('0') - - return config_data - - -def load_geospatial(infile: str, band: int = 1): - ds = gdal.Open(infile, gdal.GA_ReadOnly) - - data = ds.GetRasterBand(band).ReadAsArray() - nodata = ds.GetRasterBand(band).GetNoDataValue() - projection = ds.GetProjection() - transform = ds.GetGeoTransform() - del ds - return data, transform, projection, nodata - - -def write_geospatial(outfile: str, data, transform, projection, nodata, - driver: str = 'GTiff', dtype: int = gdal.GDT_Float64) -> str: - driver = gdal.GetDriverByName(driver) - - rows, cols = data.shape - ds = driver.Create(outfile, cols, rows, 1, dtype) - ds.SetGeoTransform(transform) - ds.SetProjection(projection) - - if nodata is not None: - ds.GetRasterBand(1).SetNoDataValue(nodata) - ds.GetRasterBand(1).WriteArray(data) - del ds - return outfile - - -def get_epsg_code(info: dict) -> int: - """Get the EPSG code from a GDAL Info dictionary - Args: - info: The dictionary returned by a gdal.Info call - Returns: - epsg_code: The integer EPSG code - """ - proj = osr.SpatialReference(info['coordinateSystem']['wkt']) - epsg_code = int(proj.GetAttrValue('AUTHORITY', 1)) - return epsg_code - - -def ensure_same_projection(reference_path: Union[str, Path], secondary_path: Union[str, Path]) -> Tuple[str, str]: - reprojection_dir = Path('reprojected') - reprojection_dir.mkdir(exist_ok=True) - - ref_info = gdal.Info(str(reference_path), format='json') - ref_epsg = get_epsg_code(ref_info) - - reprojected_reference = str(reprojection_dir / Path(reference_path).name) - 
reprojected_secondary = str(reprojection_dir / Path(secondary_path).name) - - gdal.Warp(reprojected_reference, str(reference_path), dstSRS=f'EPSG:{ref_epsg}', - xRes=ref_info['geoTransform'][1], yRes=ref_info['geoTransform'][5], - resampleAlg='lanczos', targetAlignedPixels=True) - gdal.Warp(reprojected_secondary, str(secondary_path), dstSRS=f'EPSG:{ref_epsg}', - xRes=ref_info['geoTransform'][1], yRes=ref_info['geoTransform'][5], - resampleAlg='lanczos', targetAlignedPixels=True) - - return reprojected_reference, reprojected_secondary diff --git a/src/hyp3_autorift/process.py b/src/hyp3_autorift/process.py index 619cc0f9..1fac5e31 100644 --- a/src/hyp3_autorift/process.py +++ b/src/hyp3_autorift/process.py @@ -24,9 +24,8 @@ from netCDF4 import Dataset from osgeo import gdal -from hyp3_autorift import geometry, image, io +from hyp3_autorift import geometry, image, utils from hyp3_autorift.crop import crop_netcdf_product -from hyp3_autorift.utils import get_esa_credentials, upload_file_to_s3_with_publish_access_keys log = logging.getLogger(__name__) @@ -255,19 +254,19 @@ def apply_wallis_nodata_fill_filter(array: np.ndarray, nodata: int) -> Tuple[np. def _apply_filter_function(image_path: str, filter_function: Callable) -> Tuple[str, Optional[str]]: - image_array, image_transform, image_projection, image_nodata = io.load_geospatial(image_path) + image_array, image_transform, image_projection, image_nodata = utils.load_geospatial(image_path) image_array = image_array.astype(np.float32) image_filtered, zero_mask = filter_function(image_array, image_nodata) image_new_path = create_filtered_filepath(image_path) - _ = io.write_geospatial(image_new_path, image_filtered, image_transform, image_projection, + _ = utils.write_geospatial(image_new_path, image_filtered, image_transform, image_projection, nodata=None, dtype=gdal.GDT_Float32) zero_path = None if zero_mask is not None: zero_path = create_filtered_filepath(f'{Path(image_new_path).stem}_zeroMask{Path(image_new_path).suffix}') - _ = io.write_geospatial(zero_path, zero_mask, image_transform, image_projection, + _ = utils.write_geospatial(zero_path, zero_mask, image_transform, image_projection, nodata=np.iinfo(np.uint8).max, dtype=gdal.GDT_Byte) return image_new_path, zero_path @@ -383,7 +382,7 @@ def process( orbits.mkdir(parents=True, exist_ok=True) if (esa_username is None) or (esa_password is None): - esa_username, esa_password = get_esa_credentials() + esa_username, esa_password = utils.get_esa_credentials() reference_state_vec, reference_provider = downloadSentinelOrbitFile( reference, directory=str(orbits), esa_credentials=(esa_username, esa_password) @@ -443,9 +442,9 @@ def process( # Reproject zero masks if necessary if reference_zero_path and secondary_zero_path: - _, _ = io.ensure_same_projection(reference_zero_path, secondary_zero_path) + _, _ = utils.ensure_same_projection(reference_zero_path, secondary_zero_path) - reference_path, secondary_path = io.ensure_same_projection(reference_path, secondary_path) + reference_path, secondary_path = utils.ensure_same_projection(reference_path, secondary_path) bbox = reference_metadata['bbox'] lat_limits = (bbox[1], bbox[3]) @@ -455,12 +454,12 @@ def process( log.info(f'Secondary scene path: {secondary_path}') scene_poly = geometry.polygon_from_bbox(x_limits=lat_limits, y_limits=lon_limits) - parameter_info = io.find_jpl_parameter_info(scene_poly, parameter_file) + parameter_info = utils.find_jpl_parameter_info(scene_poly, parameter_file) if platform == 'S1': isce_dem = 
geometry.prep_isce_dem(parameter_info['geogrid']['dem'], lat_limits, lon_limits) - io.format_tops_xml(reference, secondary, polarization, isce_dem, orbits) + utils.format_tops_xml(reference, secondary, polarization, isce_dem, orbits) import isce # noqa from topsApp import TopsInSAR @@ -572,6 +571,6 @@ def main(): if args.publish_bucket: prefix = get_opendata_prefix(product_file) - upload_file_to_s3_with_publish_access_keys(product_file, args.publish_bucket, prefix) - upload_file_to_s3_with_publish_access_keys(browse_file, args.publish_bucket, prefix) - upload_file_to_s3_with_publish_access_keys(thumbnail_file, args.publish_bucket, prefix) + utils.upload_file_to_s3_with_publish_access_keys(product_file, args.publish_bucket, prefix) + utils.upload_file_to_s3_with_publish_access_keys(browse_file, args.publish_bucket, prefix) + utils.upload_file_to_s3_with_publish_access_keys(thumbnail_file, args.publish_bucket, prefix) diff --git a/src/hyp3_autorift/s1_correction.py b/src/hyp3_autorift/s1_correction.py index e56d8b36..bb7d6c98 100644 --- a/src/hyp3_autorift/s1_correction.py +++ b/src/hyp3_autorift/s1_correction.py @@ -10,7 +10,7 @@ from hyp3lib.get_orb import downloadSentinelOrbitFile from hyp3lib.scene import get_download_url -from hyp3_autorift import geometry, io +from hyp3_autorift import geometry, utils from hyp3_autorift.process import DEFAULT_PARAMETER_FILE, get_s1_primary_polarization from hyp3_autorift.utils import get_esa_credentials from hyp3_autorift.vend.testGeogrid_ISCE import loadParsedata, runGeogrid @@ -44,10 +44,10 @@ def generate_correction_data( lat_limits, lon_limits = geometry.bounding_box(f'{scene}.zip', polarization=polarization, orbits=orbits) scene_poly = geometry.polygon_from_bbox(x_limits=lat_limits, y_limits=lon_limits) - parameter_info = io.find_jpl_parameter_info(scene_poly, parameter_file) + parameter_info = utils.find_jpl_parameter_info(scene_poly, parameter_file) isce_dem = geometry.prep_isce_dem(parameter_info['geogrid']['dem'], lat_limits, lon_limits) - io.format_tops_xml(scene, scene, polarization, isce_dem, orbits) + utils.format_tops_xml(scene, scene, polarization, isce_dem, orbits) reference_meta = loadParsedata(str(scene_path), orbit_dir=orbits, aux_dir=orbits, buffer=buffer) diff --git a/src/hyp3_autorift/utils.py b/src/hyp3_autorift/utils.py index 6bcb6244..19f3e1ed 100644 --- a/src/hyp3_autorift/utils.py +++ b/src/hyp3_autorift/utils.py @@ -1,16 +1,26 @@ +"""Helper utilities for autoRIFT""" + import logging import netrc import os +import sys +import textwrap from pathlib import Path from platform import system -from typing import Tuple +from typing import Tuple, Union import boto3 +from hyp3lib import DemError from hyp3lib.aws import get_content_type, get_tag_set +from osgeo import gdal, ogr, osr + +from hyp3_autorift.geometry import fix_point_for_antimeridian, flip_point_coordinates ESA_HOST = 'dataspace.copernicus.eu' +log = logging.getLogger(__name__) + def get_esa_credentials() -> Tuple[str, str]: netrc_name = '_netrc' if system().lower() == 'windows' else '.netrc' @@ -54,3 +64,202 @@ def upload_file_to_s3_with_publish_access_keys(path_to_file: Path, bucket: str, tag_set = get_tag_set(path_to_file.name) s3_client.put_object_tagging(Bucket=bucket, Key=key, Tagging=tag_set) + + +def find_jpl_parameter_info(polygon: ogr.Geometry, parameter_file: str) -> dict: + driver = ogr.GetDriverByName('ESRI Shapefile') + shapes = driver.Open(parameter_file, gdal.GA_ReadOnly) + + parameter_info = None + centroid = flip_point_coordinates(polygon.Centroid()) + 
centroid = fix_point_for_antimeridian(centroid)
+    for feature in shapes.GetLayer(0):
+        if feature.geometry().Contains(centroid):
+            parameter_info = {
+                'name': f'{feature["name"]}',
+                'epsg': feature['epsg'],
+                'geogrid': {
+                    'dem': f"/vsicurl/{feature['h']}",
+                    'ssm': f"/vsicurl/{feature['StableSurfa']}",
+                    'dhdx': f"/vsicurl/{feature['dhdx']}",
+                    'dhdy': f"/vsicurl/{feature['dhdy']}",
+                    'vx': f"/vsicurl/{feature['vx0']}",
+                    'vy': f"/vsicurl/{feature['vy0']}",
+                    'srx': f"/vsicurl/{feature['vxSearchRan']}",
+                    'sry': f"/vsicurl/{feature['vySearchRan']}",
+                    'csminx': f"/vsicurl/{feature['xMinChipSiz']}",
+                    'csminy': f"/vsicurl/{feature['yMinChipSiz']}",
+                    'csmaxx': f"/vsicurl/{feature['xMaxChipSiz']}",
+                    'csmaxy': f"/vsicurl/{feature['yMaxChipSiz']}",
+                    'sp': f"/vsicurl/{feature['sp']}",
+                    'dhdxs': f"/vsicurl/{feature['dhdxs']}",
+                    'dhdys': f"/vsicurl/{feature['dhdys']}",
+                },
+                'autorift': {
+                    'grid_location': 'window_location.tif',
+                    'init_offset': 'window_offset.tif',
+                    'search_range': 'window_search_range.tif',
+                    'chip_size_min': 'window_chip_size_min.tif',
+                    'chip_size_max': 'window_chip_size_max.tif',
+                    'offset2vx': 'window_rdr_off2vel_x_vec.tif',
+                    'offset2vy': 'window_rdr_off2vel_y_vec.tif',
+                    'stable_surface_mask': 'window_stable_surface_mask.tif',
+                    'scale_factor': 'window_scale_factor.tif',
+                    'mpflag': 0,
+                }
+            }
+            break
+
+    if parameter_info is None:
+        raise DemError('Could not determine appropriate DEM for:\n'
+                       f'    centroid: {centroid}'
+                       f'    using: {parameter_file}')
+
+    dem_geotransform = gdal.Info(parameter_info['geogrid']['dem'], format='json')['geoTransform']
+    parameter_info['xsize'] = abs(dem_geotransform[1])
+    parameter_info['ysize'] = abs(dem_geotransform[5])
+
+    return parameter_info
+
+
+def format_tops_xml(reference, secondary, polarization, dem, orbits, xml_file='topsApp.xml'):
+    xml_template = f"""
+
+
+
+    {orbits}
+    {orbits}
+    reference
+    ['{reference}.zip']
+    {polarization}
+
+
+    {orbits}
+    {orbits}
+    secondary
+    ['{secondary}.zip']
+    {polarization}
+
+    {dem}
+    False
+    True
+    False
+    False
+    False
+    32
+    32
+    51
+    51
+    32
+    32
+
+
+    """
+
+    with open(xml_file, 'w') as f:
+        f.write(textwrap.dedent(xml_template))
+
+
+class SysArgvManager:
+    """Context manager to clear and reset sys.argv
+
+    A bug in the ISCE2 Application class causes sys.argv to always be parsed when
+    no options are provided, even when setting `cmdline=[]`, preventing programmatic use.
+ """ + def __init__(self): + self.argv = sys.argv.copy() + + def __enter__(self): + sys.argv = sys.argv[:1] + + def __exit__(self, exc_type, exc_val, exc_tb): + sys.argv = self.argv + + +def get_topsinsar_config(): + from isce.applications.topsApp import TopsInSAR + with SysArgvManager(): + insar = TopsInSAR(name="topsApp") + insar.configure() + + config_data = {} + for name in ['reference', 'secondary']: + scene = insar.__getattribute__(name) + + sensing_times = [] + for swath in range(1, 4): + scene.configure() + scene.swathNumber = swath + scene.parse() + sensing_times.append( + (scene.product.sensingStart, scene.product.sensingStop) + ) + + sensing_start = min([sensing_time[0] for sensing_time in sensing_times]) + sensing_stop = max([sensing_time[1] for sensing_time in sensing_times]) + + sensing_dt = (sensing_stop - sensing_start) / 2 + sensing_start + + config_data[f'{name}_filename'] = Path(scene.safe[0]).name + config_data[f'{name}_dt'] = sensing_dt.strftime("%Y%m%dT%H:%M:%S.%f").rstrip('0') + + return config_data + + +def load_geospatial(infile: str, band: int = 1): + ds = gdal.Open(infile, gdal.GA_ReadOnly) + + data = ds.GetRasterBand(band).ReadAsArray() + nodata = ds.GetRasterBand(band).GetNoDataValue() + projection = ds.GetProjection() + transform = ds.GetGeoTransform() + del ds + return data, transform, projection, nodata + + +def write_geospatial(outfile: str, data, transform, projection, nodata, + driver: str = 'GTiff', dtype: int = gdal.GDT_Float64) -> str: + driver = gdal.GetDriverByName(driver) + + rows, cols = data.shape + ds = driver.Create(outfile, cols, rows, 1, dtype) + ds.SetGeoTransform(transform) + ds.SetProjection(projection) + + if nodata is not None: + ds.GetRasterBand(1).SetNoDataValue(nodata) + ds.GetRasterBand(1).WriteArray(data) + del ds + return outfile + + +def get_epsg_code(info: dict) -> int: + """Get the EPSG code from a GDAL Info dictionary + Args: + info: The dictionary returned by a gdal.Info call + Returns: + epsg_code: The integer EPSG code + """ + proj = osr.SpatialReference(info['coordinateSystem']['wkt']) + epsg_code = int(proj.GetAttrValue('AUTHORITY', 1)) + return epsg_code + + +def ensure_same_projection(reference_path: Union[str, Path], secondary_path: Union[str, Path]) -> Tuple[str, str]: + reprojection_dir = Path('reprojected') + reprojection_dir.mkdir(exist_ok=True) + + ref_info = gdal.Info(str(reference_path), format='json') + ref_epsg = get_epsg_code(ref_info) + + reprojected_reference = str(reprojection_dir / Path(reference_path).name) + reprojected_secondary = str(reprojection_dir / Path(secondary_path).name) + + gdal.Warp(reprojected_reference, str(reference_path), dstSRS=f'EPSG:{ref_epsg}', + xRes=ref_info['geoTransform'][1], yRes=ref_info['geoTransform'][5], + resampleAlg='lanczos', targetAlignedPixels=True) + gdal.Warp(reprojected_secondary, str(secondary_path), dstSRS=f'EPSG:{ref_epsg}', + xRes=ref_info['geoTransform'][1], yRes=ref_info['geoTransform'][5], + resampleAlg='lanczos', targetAlignedPixels=True) + + return reprojected_reference, reprojected_secondary diff --git a/src/hyp3_autorift/vend/CHANGES.diff b/src/hyp3_autorift/vend/CHANGES.diff index 342c545d..0424d0ae 100644 --- a/src/hyp3_autorift/vend/CHANGES.diff +++ b/src/hyp3_autorift/vend/CHANGES.diff @@ -134,7 +134,7 @@ - slave_filename = conts['slave_filename'][0] - master_dt = conts['master_dt'][0] - slave_dt = conts['slave_dt'][0] -+ from hyp3_autorift.io import get_topsinsar_config ++ from hyp3_autorift.utils import get_topsinsar_config + conts = 
get_topsinsar_config() + master_filename = conts['reference_filename'] + slave_filename = conts['secondary_filename'] @@ -331,7 +331,7 @@ - slave_filename = conts['slave_filename'][0] - master_dt = conts['master_dt'][0] - slave_dt = conts['slave_dt'][0] -+ from hyp3_autorift.io import get_topsinsar_config ++ from hyp3_autorift.utils import get_topsinsar_config + conts = get_topsinsar_config() + master_filename = conts['reference_filename'] + slave_filename = conts['secondary_filename'] diff --git a/src/hyp3_autorift/vend/README.md b/src/hyp3_autorift/vend/README.md index 81e6a180..6fd4f4a3 100644 --- a/src/hyp3_autorift/vend/README.md +++ b/src/hyp3_autorift/vend/README.md @@ -24,7 +24,7 @@ Changes, as listed in `CHANGES.diff`, were done to: * use the full Sentinel-2 COG id in the output netCDF product filename to ensure unique names **Note:** The `topsinsar_filename.py` included here is not used, but retained for reference. -We've replaced it with `hyp3_autorift.io.get_topsinsar_config`. +We've replaced it with `hyp3_autorift.utils.get_topsinsar_config`. ## Additional Patches diff --git a/src/hyp3_autorift/vend/testautoRIFT.py b/src/hyp3_autorift/vend/testautoRIFT.py index d0ab5dd6..a2641f99 100755 --- a/src/hyp3_autorift/vend/testautoRIFT.py +++ b/src/hyp3_autorift/vend/testautoRIFT.py @@ -865,7 +865,7 @@ def generateAutoriftProduct(indir_m, indir_s, grid_location, init_offset, search dt = geogrid_run_info['dt'] epsg = geogrid_run_info['epsg'] - from hyp3_autorift.io import get_topsinsar_config + from hyp3_autorift.utils import get_topsinsar_config conts = get_topsinsar_config() master_filename = conts['reference_filename'] slave_filename = conts['secondary_filename'] diff --git a/src/hyp3_autorift/vend/testautoRIFT_ISCE.py b/src/hyp3_autorift/vend/testautoRIFT_ISCE.py index 0e81debb..1632ee0d 100755 --- a/src/hyp3_autorift/vend/testautoRIFT_ISCE.py +++ b/src/hyp3_autorift/vend/testautoRIFT_ISCE.py @@ -864,7 +864,7 @@ def generateAutoriftProduct(indir_m, indir_s, grid_location, init_offset, search dt = geogrid_run_info['dt'] epsg = geogrid_run_info['epsg'] - from hyp3_autorift.io import get_topsinsar_config + from hyp3_autorift.utils import get_topsinsar_config conts = get_topsinsar_config() master_filename = conts['reference_filename'] slave_filename = conts['secondary_filename'] diff --git a/tests/test_io.py b/tests/test_io.py deleted file mode 100644 index ce873424..00000000 --- a/tests/test_io.py +++ /dev/null @@ -1,105 +0,0 @@ -import pytest -from hyp3lib import DemError - -from hyp3_autorift import geometry, io -from hyp3_autorift.process import DEFAULT_PARAMETER_FILE - - -def test_find_jpl_parameter_info(): - lat_limits = (55, 56) - lon_limits = (40, 41) - polygon = geometry.polygon_from_bbox(x_limits=lat_limits, y_limits=lon_limits) - parameter_info = io.find_jpl_parameter_info(polygon, DEFAULT_PARAMETER_FILE) - assert parameter_info['name'] == 'NPS' - - lat_limits = (54, 55) - lon_limits = (40, 41) - polygon = geometry.polygon_from_bbox(x_limits=lat_limits, y_limits=lon_limits) - parameter_info = io.find_jpl_parameter_info(polygon, DEFAULT_PARAMETER_FILE) - assert parameter_info['name'] == 'N37' - - lat_limits = (54, 55) - lon_limits = (-40, -41) - polygon = geometry.polygon_from_bbox(x_limits=lat_limits, y_limits=lon_limits) - parameter_info = io.find_jpl_parameter_info(polygon, DEFAULT_PARAMETER_FILE) - assert parameter_info['name'] == 'N24' - - lat_limits = (-54, -55) - lon_limits = (-40, -41) - polygon = geometry.polygon_from_bbox(x_limits=lat_limits, y_limits=lon_limits) - 
parameter_info = io.find_jpl_parameter_info(polygon, DEFAULT_PARAMETER_FILE) - assert parameter_info['name'] == 'S24' - - lat_limits = (-55, -56) - lon_limits = (40, 41) - polygon = geometry.polygon_from_bbox(x_limits=lat_limits, y_limits=lon_limits) - parameter_info = io.find_jpl_parameter_info(polygon, DEFAULT_PARAMETER_FILE) - assert parameter_info['name'] == 'S37' - - lat_limits = (-56, -57) - lon_limits = (40, 41) - polygon = geometry.polygon_from_bbox(x_limits=lat_limits, y_limits=lon_limits) - parameter_info = io.find_jpl_parameter_info(polygon, DEFAULT_PARAMETER_FILE) - assert parameter_info['name'] == 'SPS' - - lat_limits = (-90, -91) - lon_limits = (40, 41) - polygon = geometry.polygon_from_bbox(x_limits=lat_limits, y_limits=lon_limits) - with pytest.raises(DemError): - io.find_jpl_parameter_info(polygon, DEFAULT_PARAMETER_FILE) - - lat_limits = (90, 91) - lon_limits = (40, 41) - polygon = geometry.polygon_from_bbox(x_limits=lat_limits, y_limits=lon_limits) - with pytest.raises(DemError): - io.find_jpl_parameter_info(polygon, DEFAULT_PARAMETER_FILE) - - -def test_find_jpl_parameter_info_antimeridian(): - lat_limits = (54, 55) - lon_limits = (180, 181) - polygon = geometry.polygon_from_bbox(x_limits=lat_limits, y_limits=lon_limits) - parameter_info = io.find_jpl_parameter_info(polygon, DEFAULT_PARAMETER_FILE) - assert parameter_info['name'] == 'N01' - - lat_limits = (54, 55) - lon_limits = (-180, -181) - polygon = geometry.polygon_from_bbox(x_limits=lat_limits, y_limits=lon_limits) - parameter_info = io.find_jpl_parameter_info(polygon, DEFAULT_PARAMETER_FILE) - assert parameter_info['name'] == 'N60' - - lat_limits = (55, 56) - lon_limits = (180, 181) - polygon = geometry.polygon_from_bbox(x_limits=lat_limits, y_limits=lon_limits) - parameter_info = io.find_jpl_parameter_info(polygon, DEFAULT_PARAMETER_FILE) - assert parameter_info['name'] == 'NPS' - - lat_limits = (55, 56) - lon_limits = (-180, -181) - polygon = geometry.polygon_from_bbox(x_limits=lat_limits, y_limits=lon_limits) - parameter_info = io.find_jpl_parameter_info(polygon, DEFAULT_PARAMETER_FILE) - assert parameter_info['name'] == 'NPS' - - lat_limits = (-56, -55) - lon_limits = (180, 181) - polygon = geometry.polygon_from_bbox(x_limits=lat_limits, y_limits=lon_limits) - parameter_info = io.find_jpl_parameter_info(polygon, DEFAULT_PARAMETER_FILE) - assert parameter_info['name'] == 'S01' - - lat_limits = (-56, -55) - lon_limits = (-180, -181) - polygon = geometry.polygon_from_bbox(x_limits=lat_limits, y_limits=lon_limits) - parameter_info = io.find_jpl_parameter_info(polygon, DEFAULT_PARAMETER_FILE) - assert parameter_info['name'] == 'S60' - - lat_limits = (-57, -56) - lon_limits = (180, 181) - polygon = geometry.polygon_from_bbox(x_limits=lat_limits, y_limits=lon_limits) - parameter_info = io.find_jpl_parameter_info(polygon, DEFAULT_PARAMETER_FILE) - assert parameter_info['name'] == 'SPS' - - lat_limits = (-57, -56) - lon_limits = (-180, -181) - polygon = geometry.polygon_from_bbox(x_limits=lat_limits, y_limits=lon_limits) - parameter_info = io.find_jpl_parameter_info(polygon, DEFAULT_PARAMETER_FILE) - assert parameter_info['name'] == 'SPS' diff --git a/tests/test_utils.py b/tests/test_utils.py index e640d411..b5de92de 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1,6 +1,9 @@ import pytest +from hyp3lib import DemError + +from hyp3_autorift import geometry, utils +from hyp3_autorift.process import DEFAULT_PARAMETER_FILE -from hyp3_autorift.utils import ESA_HOST, get_esa_credentials, 
upload_file_to_s3_with_publish_access_keys def test_get_esa_credentials_env(tmp_path, monkeypatch): @@ -8,9 +11,9 @@ def test_get_esa_credentials_env(tmp_path, monkeypatch): m.setenv('ESA_USERNAME', 'foo') m.setenv('ESA_PASSWORD', 'bar') m.setenv('HOME', str(tmp_path)) - (tmp_path / '.netrc').write_text(f'machine {ESA_HOST} login netrc_username password netrc_password') + (tmp_path / '.netrc').write_text(f'machine {utils.ESA_HOST} login netrc_username password netrc_password') - username, password = get_esa_credentials() + username, password = utils.get_esa_credentials() assert username == 'foo' assert password == 'bar' @@ -20,9 +23,9 @@ def test_get_esa_credentials_netrc(tmp_path, monkeypatch): m.delenv('ESA_USERNAME', raising=False) m.delenv('ESA_PASSWORD', raising=False) m.setenv('HOME', str(tmp_path)) - (tmp_path / '.netrc').write_text(f'machine {ESA_HOST} login foo password bar') + (tmp_path / '.netrc').write_text(f'machine {utils.ESA_HOST} login foo password bar') - username, password = get_esa_credentials() + username, password = utils.get_esa_credentials() assert username == 'foo' assert password == 'bar' @@ -35,7 +38,7 @@ def test_get_esa_credentials_missing(tmp_path, monkeypatch): (tmp_path / '.netrc').write_text('') msg = 'Please provide.*' with pytest.raises(ValueError, match=msg): - get_esa_credentials() + utils.get_esa_credentials() with monkeypatch.context() as m: m.setenv('ESA_USERNAME', 'env_username') @@ -44,7 +47,7 @@ def test_get_esa_credentials_missing(tmp_path, monkeypatch): (tmp_path / '.netrc').write_text('') msg = 'Please provide.*' with pytest.raises(ValueError, match=msg): - get_esa_credentials() + utils.get_esa_credentials() def test_upload_file_to_s3_credentials_missing(tmp_path, monkeypatch): @@ -53,11 +56,111 @@ def test_upload_file_to_s3_credentials_missing(tmp_path, monkeypatch): m.setenv('PUBLISH_SECRET_ACCESS_KEY', 'publish_access_key_secret') msg = 'Please provide.*' with pytest.raises(ValueError, match=msg): - upload_file_to_s3_with_publish_access_keys('file.zip', 'myBucket') + utils.upload_file_to_s3_with_publish_access_keys('file.zip', 'myBucket') with monkeypatch.context() as m: m.setenv('PUBLISH_ACCESS_KEY_ID', 'publish_access_key_id') m.delenv('PUBLISH_SECRET_ACCESS_KEY', raising=False) msg = 'Please provide.*' with pytest.raises(ValueError, match=msg): - upload_file_to_s3_with_publish_access_keys('file.zip', 'myBucket') + utils.upload_file_to_s3_with_publish_access_keys('file.zip', 'myBucket') + + +def test_find_jpl_parameter_info(): + lat_limits = (55, 56) + lon_limits = (40, 41) + polygon = geometry.polygon_from_bbox(x_limits=lat_limits, y_limits=lon_limits) + parameter_info = utils.find_jpl_parameter_info(polygon, DEFAULT_PARAMETER_FILE) + assert parameter_info['name'] == 'NPS' + + lat_limits = (54, 55) + lon_limits = (40, 41) + polygon = geometry.polygon_from_bbox(x_limits=lat_limits, y_limits=lon_limits) + parameter_info = utils.find_jpl_parameter_info(polygon, DEFAULT_PARAMETER_FILE) + assert parameter_info['name'] == 'N37' + + lat_limits = (54, 55) + lon_limits = (-40, -41) + polygon = geometry.polygon_from_bbox(x_limits=lat_limits, y_limits=lon_limits) + parameter_info = utils.find_jpl_parameter_info(polygon, DEFAULT_PARAMETER_FILE) + assert parameter_info['name'] == 'N24' + + lat_limits = (-54, -55) + lon_limits = (-40, -41) + polygon = geometry.polygon_from_bbox(x_limits=lat_limits, y_limits=lon_limits) + parameter_info = utils.find_jpl_parameter_info(polygon, DEFAULT_PARAMETER_FILE) + assert parameter_info['name'] == 'S24' + + 
lat_limits = (-55, -56) + lon_limits = (40, 41) + polygon = geometry.polygon_from_bbox(x_limits=lat_limits, y_limits=lon_limits) + parameter_info = utils.find_jpl_parameter_info(polygon, DEFAULT_PARAMETER_FILE) + assert parameter_info['name'] == 'S37' + + lat_limits = (-56, -57) + lon_limits = (40, 41) + polygon = geometry.polygon_from_bbox(x_limits=lat_limits, y_limits=lon_limits) + parameter_info = utils.find_jpl_parameter_info(polygon, DEFAULT_PARAMETER_FILE) + assert parameter_info['name'] == 'SPS' + + lat_limits = (-90, -91) + lon_limits = (40, 41) + polygon = geometry.polygon_from_bbox(x_limits=lat_limits, y_limits=lon_limits) + with pytest.raises(DemError): + utils.find_jpl_parameter_info(polygon, DEFAULT_PARAMETER_FILE) + + lat_limits = (90, 91) + lon_limits = (40, 41) + polygon = geometry.polygon_from_bbox(x_limits=lat_limits, y_limits=lon_limits) + with pytest.raises(DemError): + utils.find_jpl_parameter_info(polygon, DEFAULT_PARAMETER_FILE) + + +def test_find_jpl_parameter_info_antimeridian(): + lat_limits = (54, 55) + lon_limits = (180, 181) + polygon = geometry.polygon_from_bbox(x_limits=lat_limits, y_limits=lon_limits) + parameter_info = utils.find_jpl_parameter_info(polygon, DEFAULT_PARAMETER_FILE) + assert parameter_info['name'] == 'N01' + + lat_limits = (54, 55) + lon_limits = (-180, -181) + polygon = geometry.polygon_from_bbox(x_limits=lat_limits, y_limits=lon_limits) + parameter_info = utils.find_jpl_parameter_info(polygon, DEFAULT_PARAMETER_FILE) + assert parameter_info['name'] == 'N60' + + lat_limits = (55, 56) + lon_limits = (180, 181) + polygon = geometry.polygon_from_bbox(x_limits=lat_limits, y_limits=lon_limits) + parameter_info = utils.find_jpl_parameter_info(polygon, DEFAULT_PARAMETER_FILE) + assert parameter_info['name'] == 'NPS' + + lat_limits = (55, 56) + lon_limits = (-180, -181) + polygon = geometry.polygon_from_bbox(x_limits=lat_limits, y_limits=lon_limits) + parameter_info = utils.find_jpl_parameter_info(polygon, DEFAULT_PARAMETER_FILE) + assert parameter_info['name'] == 'NPS' + + lat_limits = (-56, -55) + lon_limits = (180, 181) + polygon = geometry.polygon_from_bbox(x_limits=lat_limits, y_limits=lon_limits) + parameter_info = utils.find_jpl_parameter_info(polygon, DEFAULT_PARAMETER_FILE) + assert parameter_info['name'] == 'S01' + + lat_limits = (-56, -55) + lon_limits = (-180, -181) + polygon = geometry.polygon_from_bbox(x_limits=lat_limits, y_limits=lon_limits) + parameter_info = utils.find_jpl_parameter_info(polygon, DEFAULT_PARAMETER_FILE) + assert parameter_info['name'] == 'S60' + + lat_limits = (-57, -56) + lon_limits = (180, 181) + polygon = geometry.polygon_from_bbox(x_limits=lat_limits, y_limits=lon_limits) + parameter_info = utils.find_jpl_parameter_info(polygon, DEFAULT_PARAMETER_FILE) + assert parameter_info['name'] == 'SPS' + + lat_limits = (-57, -56) + lon_limits = (-180, -181) + polygon = geometry.polygon_from_bbox(x_limits=lat_limits, y_limits=lon_limits) + parameter_info = utils.find_jpl_parameter_info(polygon, DEFAULT_PARAMETER_FILE) + assert parameter_info['name'] == 'SPS' From 2186baf09bce9699719a60a0fa27c723825eea26 Mon Sep 17 00:00:00 2001 From: Joseph H Kennedy Date: Fri, 3 May 2024 17:07:40 -0800 Subject: [PATCH 07/13] update script runner to remove warnings --- tests/test_entrypoints.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/test_entrypoints.py b/tests/test_entrypoints.py index fa5a1834..97680138 100644 --- a/tests/test_entrypoints.py +++ b/tests/test_entrypoints.py @@ -1,8 +1,8 @@ def 
test_hyp3_autorift(script_runner):
-    ret = script_runner.run('hyp3_autorift', '-h')
+    ret = script_runner.run(['hyp3_autorift', '-h'])
     assert ret.success
 
 
 def test_autorift_proc_pair(script_runner):
-    ret = script_runner.run('s1_correction', '-h')
+    ret = script_runner.run(['s1_correction', '-h'])
     assert ret.success

From 02e444d48291be83952b167eba7049caac7c17d3 Mon Sep 17 00:00:00 2001
From: Joseph H Kennedy
Date: Fri, 3 May 2024 17:09:31 -0800
Subject: [PATCH 08/13] more changelog updates

---
 CHANGELOG.md | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 2e66c5f5..4f377886 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -7,13 +7,16 @@ and this project adheres to [PEP 440](https://www.python.org/dev/peps/pep-0440/)
 and uses [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 
-## [0.15.1]
+## [0.16.0]
 ### Fixed
 * `hyp3_autorift` will no longer attempt to crop files with no valid data
 
 ### Removed
 * The unused `ASF` naming scheme has been removed from the `hyp3_autorift` CLI and the `hyp3_autorift.process` function
 
+### Changed
+* Everything in `hyp3_autorift.io` has been moved into `hyp3_autorift.utils` to prevent shadowing the builtin `io` module
+
 ## [0.15.0]
 ### Added
 * `--publish-bucket` option has been added to the HyP3 entry point to additionally publish products an AWS bucket, such as the ITS_LIVE AWS Open Data bucket, `s3://its-live-data`.
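The rename recorded in the [0.16.0] changelog entry above exists because a module named `io.py` inside the package can shadow the standard-library `io` module whenever its directory is searched first. A minimal sketch of the hazard, assuming a checkout with the old `src/hyp3_autorift/io.py` layout (`importlib.machinery.PathFinder` performs the same path search a fresh `import io` would):

    import importlib.machinery
    import sys

    # Running a script from inside src/hyp3_autorift/ puts that directory at the
    # front of sys.path; a path search for the name 'io' then finds the package
    # module instead of the standard library.
    search_path = ['src/hyp3_autorift'] + sys.path
    spec = importlib.machinery.PathFinder().find_spec('io', search_path)
    print(spec.origin)  # .../src/hyp3_autorift/io.py, not the stdlib io module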
From 12f0730c7f7bd872228913d0cdc7a752f1c0bc9f Mon Sep 17 00:00:00 2001
From: Joseph H Kennedy
Date: Fri, 3 May 2024 17:10:43 -0800
Subject: [PATCH 09/13] fix flake8

---
 src/hyp3_autorift/process.py | 4 ++--
 tests/test_utils.py          | 1 -
 2 files changed, 2 insertions(+), 3 deletions(-)

diff --git a/src/hyp3_autorift/process.py b/src/hyp3_autorift/process.py
index 1fac5e31..21aed80a 100644
--- a/src/hyp3_autorift/process.py
+++ b/src/hyp3_autorift/process.py
@@ -261,13 +261,13 @@ def _apply_filter_function(image_path: str, filter_function: Callable) -> Tuple[
 
     image_new_path = create_filtered_filepath(image_path)
     _ = utils.write_geospatial(image_new_path, image_filtered, image_transform, image_projection,
-                            nodata=None, dtype=gdal.GDT_Float32)
+                               nodata=None, dtype=gdal.GDT_Float32)
 
     zero_path = None
     if zero_mask is not None:
         zero_path = create_filtered_filepath(f'{Path(image_new_path).stem}_zeroMask{Path(image_new_path).suffix}')
         _ = utils.write_geospatial(zero_path, zero_mask, image_transform, image_projection,
-                                nodata=np.iinfo(np.uint8).max, dtype=gdal.GDT_Byte)
+                                   nodata=np.iinfo(np.uint8).max, dtype=gdal.GDT_Byte)
 
     return image_new_path, zero_path
 
diff --git a/tests/test_utils.py b/tests/test_utils.py
index b5de92de..f34e5e58 100644
--- a/tests/test_utils.py
+++ b/tests/test_utils.py
@@ -5,7 +5,6 @@
 from hyp3_autorift.process import DEFAULT_PARAMETER_FILE
 
 
-
 def test_get_esa_credentials_env(tmp_path, monkeypatch):
     with monkeypatch.context() as m:
         m.setenv('ESA_USERNAME', 'foo')

From 58dbacc2ac556e390a6738b2a255b583ceb676ef Mon Sep 17 00:00:00 2001
From: Joseph H Kennedy
Date: Mon, 6 May 2024 22:18:07 -0800
Subject: [PATCH 10/13] add logging of product file name and cropping

---
 src/hyp3_autorift/process.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/src/hyp3_autorift/process.py b/src/hyp3_autorift/process.py
index 21aed80a..0e70de61 100644
--- a/src/hyp3_autorift/process.py
+++ b/src/hyp3_autorift/process.py
@@ -518,7 +518,10 @@ def process(
     else:
         product_file = netcdf_file
 
+    log.info(f'Successfully created autoRIFT product: {product_file}')
+
     if not netcdf_file.name.endswith('_P000.nc'):
+        log.info('Cropping product to the valid data extent')
         cropped_file = crop_netcdf_product(netcdf_file)
         netcdf_file.unlink()
         shutil.move(cropped_file, str(product_file))

From b7794ac36e0a9f10595a5927e0d7182d38bb743a Mon Sep 17 00:00:00 2001
From: Joseph H Kennedy
Date: Tue, 7 May 2024 12:19:49 -0800
Subject: [PATCH 11/13] move thumbnail creation inside process next to browse creation

---
 src/hyp3_autorift/process.py | 14 ++++++++++----
 1 file changed, 10 insertions(+), 4 deletions(-)

diff --git a/src/hyp3_autorift/process.py b/src/hyp3_autorift/process.py
index 0e70de61..11dde16c 100644
--- a/src/hyp3_autorift/process.py
+++ b/src/hyp3_autorift/process.py
@@ -351,7 +351,7 @@ def process(
     naming_scheme: Literal['ITS_LIVE_OD', 'ITS_LIVE_PROD'] = 'ITS_LIVE_OD',
     esa_username: Optional[str] = None,
     esa_password: Optional[str] = None,
-) -> Tuple[Path, Path]:
+) -> Tuple[Path, Path, Path]:
     """Process a Sentinel-1, Sentinel-2, or Landsat-8 image pair
 
     Args:
@@ -359,6 +359,9 @@
         secondary: Name of the secondary Sentinel-1, Sentinel-2, or Landsat-8 Collection 2 scene
         parameter_file: Shapefile for determining the correct search parameters by geographic location
         naming_scheme: Naming scheme to use for product files
+
+    Returns:
+        the autoRIFT product file, browse image, and thumbnail image
     """
     orbits = None
     polarization = None
@@ -535,7 +538,9 @@
     browse_file = product_file.with_suffix('.png')
     image.make_browse(browse_file, data)
 
-    return product_file, browse_file
+    thumbnail_file = create_thumbnail(browse_file)
+
+    return product_file, browse_file, thumbnail_file
 
 
 def main():
@@ -564,8 +569,9 @@ def main():
 
     g1, g2 = sorted(args.granules, key=get_datetime)
 
-    product_file, browse_file = process(g1, g2, parameter_file=args.parameter_file, naming_scheme=args.naming_scheme)
-    thumbnail_file = create_thumbnail(browse_file)
+    product_file, browse_file, thumbnail_file = process(
+        g1, g2, parameter_file=args.parameter_file, naming_scheme=args.naming_scheme
+    )
 
     if args.bucket:
         upload_file_to_s3(product_file, args.bucket, args.bucket_prefix)
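With PATCH 11 applied, callers of `process` unpack three values instead of two; an illustrative call (reusing Sentinel-2 granule names that appear in the deleted tests above):

    from hyp3_autorift.process import process

    # process() now also creates the thumbnail and returns it alongside
    # the product file and browse image
    product_file, browse_file, thumbnail_file = process(
        'S2B_MSIL2A_20200903T151809_N0214_R068_T22WEB_20200903T194353',
        'S2B_MSIL2A_20200913T151809_N0214_R068_T22WEB_20200913T180530',
    )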
From 28274396d00c805403868fe61124bdd0a7c3e978 Mon Sep 17 00:00:00 2001
From: Joseph H Kennedy
Date: Tue, 7 May 2024 12:22:01 -0800
Subject: [PATCH 12/13] update changelog

---
 CHANGELOG.md | 1 +
 1 file changed, 1 insertion(+)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 4f377886..bf38a8ea 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -16,6 +16,7 @@
 ### Changed
 * Everything in `hyp3_autorift.io` has been moved into `hyp3_autorift.utils` to prevent shadowing the builtin `io` module
+* `hyp3_autorift.process.process` now returns the product file, browse image, and (new) thumbnail image
 
 ## [0.15.0]
 ### Added
 * `--publish-bucket` option has been added to the HyP3 entry point to additionally publish products an AWS bucket, such as the ITS_LIVE AWS Open Data bucket, `s3://its-live-data`.

From d1d2e5ead166b3b8a7a716cf9609fef363e0ff3f Mon Sep 17 00:00:00 2001
From: Joseph H Kennedy
Date: Tue, 7 May 2024 22:10:26 -0800
Subject: [PATCH 13/13] bad hyp3 arg workaround

---
 src/hyp3_autorift/process.py | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/src/hyp3_autorift/process.py b/src/hyp3_autorift/process.py
index 11dde16c..ab41bad3 100644
--- a/src/hyp3_autorift/process.py
+++ b/src/hyp3_autorift/process.py
@@ -578,6 +578,10 @@ def main():
         upload_file_to_s3(browse_file, args.bucket, args.bucket_prefix)
         upload_file_to_s3(thumbnail_file, args.bucket, args.bucket_prefix)
 
+    # FIXME: HyP3 is passing the default value for this argument as '""' not "", so we're not getting an empty string
+    if args.publish_bucket == '""':
+        args.publish_bucket = ''
+
     if args.publish_bucket:
         prefix = get_opendata_prefix(product_file)
         utils.upload_file_to_s3_with_publish_access_keys(product_file, args.publish_bucket, prefix)
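The FIXME in PATCH 13 works around an upstream quoting bug: HyP3 passes the option's default as the literal two-character string '""' rather than an empty string, and a non-empty string is truthy, so the publish branch would run with a bogus bucket name. A sketch (an assumed reproduction, not the HyP3 code itself) of the failure mode and the normalization:

    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument('--publish-bucket', default='')

    # the platform effectively invokes: ... --publish-bucket '""'
    args = parser.parse_args(['--publish-bucket', '""'])
    assert args.publish_bucket == '""'        # two quote characters, not empty
    assert bool(args.publish_bucket) is True  # so `if args.publish_bucket:` would fire

    # the workaround from the patch normalizes it back to an empty string
    if args.publish_bucket == '""':
        args.publish_bucket = ''
    assert not args.publish_bucket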