diff --git a/.ci/docker/Dockerfile_dswx_ni b/.ci/docker/Dockerfile_dswx_ni
index e0ce69cb..e916f71c 100644
--- a/.ci/docker/Dockerfile_dswx_ni
+++ b/.ci/docker/Dockerfile_dswx_ni
@@ -1,4 +1,4 @@
-Dockerfile to produce the production DSWx-NI PGE Docker image for OPERA
+# Dockerfile to produce the production DSWx-NI PGE Docker image for OPERA
 # Authors: Scott Collins, Ray Bambery
 # Default SAS image path, must be provided by the docker build call via --build-arg
@@ -6,8 +6,8 @@ ARG SAS_IMAGE
 FROM $SAS_IMAGE
 ARG PGE_SOURCE_DIR
-ARG PGE_DEST_DIR=/home/mamba
-ARG CONDA_ROOT=/opt/conda
+ARG PGE_DEST_DIR=/home/dswx_user
+ARG CONDA_ROOT=/home/dswx_user/miniconda3
 ENV PGE_DEST_DIR=$PGE_DEST_DIR
 ENV CONDA_ROOT=$CONDA_ROOT
@@ -38,11 +38,13 @@ LABEL org.label-schema.build-date=${BUILD_DATE_TIME} \
 # Copy the OPERA PGE software into the container
 # the base container has a default user "mamba" with UID/GID 1000/1000
-COPY --chown=mamba:mamba ${PGE_SOURCE_DIR} ${PGE_DEST_DIR}
+COPY --chown=dswx_user:dswx_user ${PGE_SOURCE_DIR} ${PGE_DEST_DIR}
 # Switch to root for installing into Conda Env
 USER 0:0
+# Set the default shell to run commands within the Conda environment named "DSWX-SAR" using bash
+SHELL ["conda", "run", "-n", "DSWX-SAR", "/bin/bash", "-c"]
 # Install dependencies into existing Conda Env
 # TODO: remove last chmod once permissions are set correctly on /home/mamba for delivered containers
@@ -51,16 +53,15 @@ RUN set -ex \
     && mkdir -p ${CONDA_ROOT}/bin \
     && cp ${PGE_DEST_DIR}/opera/scripts/*_entrypoint.sh ${CONDA_ROOT}/bin \
     && chmod +x ${CONDA_ROOT}/bin/*_entrypoint.sh \
-    && source /usr/local/bin/_activate_current_env.sh \
-    && ${MAMBA_EXE} install --yes --channel conda-forge --file ${PGE_DEST_DIR}/opera/requirements.txt \
-    && chmod 775 ${PGE_DEST_DIR}
+    && python -m pip install -r ${PGE_DEST_DIR}/opera/requirements.txt \
+    && conda install --yes --channel conda-forge hdf5 \
+    && chmod 777 ${PGE_DEST_DIR}/scratch
 # Set the Docker entrypoint and clear the default command
-ENTRYPOINT ["sh", "-c", "exec ${CONDA_ROOT}/bin/pge_docker_entrypoint.sh \"${@}\"", "--"]
+ENTRYPOINT ["conda", "run", "--no-capture-output", "-n", "DSWX-SAR", "sh", "-c", "exec ${CONDA_ROOT}/bin/pge_docker_entrypoint.sh \"${@}\"", "--"]
 CMD []
 # Set the user/group back to the default
-USER mamba:mamba
-
+USER dswx_user:dswx_user
diff --git a/.ci/scripts/dswx_ni/build_dswx_ni.sh b/.ci/scripts/dswx_ni/build_dswx_ni.sh
index 2ab36edc..f5f9363f 100755
--- a/.ci/scripts/dswx_ni/build_dswx_ni.sh
+++ b/.ci/scripts/dswx_ni/build_dswx_ni.sh
@@ -24,7 +24,7 @@ BUILD_DATE_TIME=$(date -u +'%Y-%m-%dT%H:%M:%SZ')
 # defaults, SAS image should be updated as necessary for new image releases from ADT
 [ -z "${WORKSPACE}" ] && WORKSPACE=$(realpath $(dirname $(realpath $0))/../../..)
[ -z "${TAG}" ] && TAG="${USER}-dev" -[ -z "${SAS_IMAGE}" ] && SAS_IMAGE="artifactory-fn.jpl.nasa.gov:16001/gov/nasa/jpl/opera/adt/opera/dswx-ni:beta_0.2.1" +[ -z "${SAS_IMAGE}" ] && SAS_IMAGE="artifactory-fn.jpl.nasa.gov:16001/gov/nasa/jpl/opera/adt/opera/dswx-ni:interface_0.1" echo "WORKSPACE: $WORKSPACE" echo "IMAGE: $IMAGE" @@ -49,6 +49,9 @@ mkdir -p ${STAGING_DIR}/opera/pge copy_pge_files $WORKSPACE $STAGING_DIR $PGE_NAME +# DSWx-NI PGE inherits from DSWx-S1 code, so it needs to be included in the container as well +mkdir -p ${STAGING_DIR}/opera/pge/dswx_s1; cp -r ${WORKSPACE}/src/opera/pge/dswx_s1/dswx_s1_pge.py ${STAGING_DIR}/opera/pge/dswx_s1/ + # Create a VERSION file in the staging area to track version and build time printf "pge_version: ${TAG}\npge_build_datetime: ${BUILD_DATE_TIME}\n" \ > ${STAGING_DIR}/opera/VERSION \ diff --git a/.ci/scripts/dswx_ni/test_dswx_ni.sh b/.ci/scripts/dswx_ni/test_dswx_ni.sh new file mode 100755 index 00000000..85995836 --- /dev/null +++ b/.ci/scripts/dswx_ni/test_dswx_ni.sh @@ -0,0 +1,98 @@ +#!/bin/bash +# Script to execute unit tests on the OPERA DSWx-NI PGE Docker image + +set -e + +# Source the build script utility functions +SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )" + +. "${SCRIPT_DIR}"/../util/util.sh + +# Parse args +parse_build_args "$@" + +echo ' +===================================== + +Testing DSWx-NI PGE Docker image... + +===================================== +' + +PGE_NAME="dswx_ni" +IMAGE="opera_pge/${PGE_NAME}" +TEST_RESULTS_REL_DIR="test_results" +CONTAINER_HOME="/home/dswx_user" +CONDA_ROOT="/home/dswx_user/miniconda3" + +# defaults +[ -z "${WORKSPACE}" ] && WORKSPACE=$(realpath $(dirname $(realpath $0))/../../..) +[ -z "${TAG}" ] && TAG="${USER}-dev" + +TEST_RESULTS_DIR="${WORKSPACE}/${TEST_RESULTS_REL_DIR}/${PGE_NAME}" + +echo "Test results output directory: ${TEST_RESULTS_DIR}" +mkdir --parents ${TEST_RESULTS_DIR} +chmod -R 775 ${WORKSPACE}/${TEST_RESULTS_REL_DIR} + +# Use the environment of the docker image to run linting, tests, etc... +# Note the change of working directory (-w) to a directory without +# Python code so that import statements favor Python code found in the +# Docker image rather than code found on the host. 
+DOCKER_RUN="docker run --rm \ + -v ${WORKSPACE}:/workspace \ + -v ${WORKSPACE}/src/opera/test/data:${CONTAINER_HOME}/opera/test/data \ + -w /workspace/${TEST_RESULTS_REL_DIR} \ + -u ${UID}:$(id -g) \ + --entrypoint conda \ + ${IMAGE}:${TAG}" + +ENTRYPOINT="run --no-capture-output -n DSWX-SAR ${CONDA_ROOT}/bin/pge_tests_entrypoint.sh" + +# Configure a trap to set permissions on exit regardless of whether the testing succeeds +function set_perms { + # Open up permissions on all test results so we can be sure the CI system can + # delete them after results are archived within Jenkins + ${DOCKER_RUN} ${ENTRYPOINT} bash -c "find \ + /workspace/${TEST_RESULTS_REL_DIR} -type d -exec chmod 775 {} +" + + ${DOCKER_RUN} ${ENTRYPOINT} bash -c "find \ + /workspace/${TEST_RESULTS_REL_DIR} -type f -exec chmod 664 {} +" +} + +trap set_perms EXIT + +# linting and pep8 style check (configured by .flake8 and .pylintrc) +${DOCKER_RUN} ${ENTRYPOINT} flake8 \ + --config ${CONTAINER_HOME}/opera/.flake8 \ + --jobs auto \ + --exit-zero \ + --application-import-names opera \ + --output-file /workspace/${TEST_RESULTS_REL_DIR}/${PGE_NAME}/flake8.log \ + ${CONTAINER_HOME}/opera + +${DOCKER_RUN} ${ENTRYPOINT} pylint \ + --rcfile=${CONTAINER_HOME}/opera/.pylintrc \ + --jobs 0 \ + --exit-zero \ + --output=/workspace/${TEST_RESULTS_REL_DIR}/${PGE_NAME}/pylint.log \ + --enable-all-extensions \ + ${CONTAINER_HOME}/opera + +# pytest (including code coverage) +${DOCKER_RUN} ${ENTRYPOINT} pytest \ + --junit-xml=/workspace/${TEST_RESULTS_REL_DIR}/${PGE_NAME}/pytest-junit.xml \ + --cov=${CONTAINER_HOME}/opera/pge/base \ + --cov=${CONTAINER_HOME}/opera/pge/${PGE_NAME} \ + --cov=${CONTAINER_HOME}/opera/scripts \ + --cov=${CONTAINER_HOME}/opera/util \ + --cov-report=term \ + --cov-report=html:/workspace/${TEST_RESULTS_REL_DIR}/${PGE_NAME}/coverage_html \ + /workspace/src/opera/test/pge/base \ + /workspace/src/opera/test/pge/${PGE_NAME} \ + /workspace/src/opera/test/scripts \ + /workspace/src/opera/test/util > ${TEST_RESULTS_DIR}/pytest.log + +echo "DSWx-NI PGE Docker image test complete" + +exit 0 diff --git a/src/opera/pge/dswx_ni/dswx_ni_pge.py b/src/opera/pge/dswx_ni/dswx_ni_pge.py index 09f28444..6848dc22 100755 --- a/src/opera/pge/dswx_ni/dswx_ni_pge.py +++ b/src/opera/pge/dswx_ni/dswx_ni_pge.py @@ -10,28 +10,29 @@ from opera.pge.base.base_pge import PgeExecutor from opera.pge.base.base_pge import PostProcessorMixin -from opera.pge.base.base_pge import PreProcessorMixin +from opera.pge.dswx_s1.dswx_s1_pge import DSWxS1PreProcessorMixin - -class DSWxNIPreProcessorMixin(PreProcessorMixin): +class DSWxNIPreProcessorMixin(DSWxS1PreProcessorMixin): """ Mixin class responsible for handling all pre-processing steps for the DSWX-NI PGE. The pre-processing phase is defined as all steps necessary prior to SAS execution. - In addition to the base functionality inherited from PreProcessorMixin, this - mixin adds an input validation step to ensure that input(s) defined by the - RunConfig exist and are valid. - """ + This particular pre-processor inherits its functionality from the DSWx-S1 + pre-processor class, as both PGE's share a similar interface. + + """ _pre_mixin_name = "DSWxNIPreProcessorMixin" + _valid_input_extensions = (".h5",) def run_preprocessor(self, **kwargs): """ Executes the pre-processing steps for DSWx-NI PGE initialization. 
-        The DswxS1PreProcessorMixin version of this class performs all actions
-        of the base PreProcessorMixin class, and adds an input validation step for
-        the inputs defined within the RunConfig (TODO).
+        The DSWxNIPreProcessorMixin version of this class performs all actions
+        of the DSWxS1PreProcessorMixin class. Parameterization of the validation
+        functions is handled via specialized class attributes (e.g. _valid_input_extensions).
+
         Parameters
         ----------
         **kwargs: dict
diff --git a/src/opera/pge/dswx_ni/schema/algorithm_parameters_ni_schema.yaml b/src/opera/pge/dswx_ni/schema/algorithm_parameters_ni_schema.yaml
new file mode 100644
index 00000000..fe87a2df
--- /dev/null
+++ b/src/opera/pge/dswx_ni/schema/algorithm_parameters_ni_schema.yaml
@@ -0,0 +1,201 @@
+runconfig:
+    name: str()
+
+    processing:
+        # Algorithms for surface water extents
+        # ['opera_dswx_ni', 'twele']
+        dswx_workflow: str(required=False)
+
+        # Polarizations to be used for DSWx-SAR
+        # [polarizations] for list of specific frequency(s) e.g. [VV, VH] or [VV]
+        # 'dual-pol', 'co-pol', 'cross-pol' will search the polarizations the input GeoTIFF files have.
+        # For example, 'co-pol' uses ['HH'], ['VV'], or ['HH', 'VV'] by looking at the input data.
+        polarizations: any(list(str(min=2, max=10), min=1, max=4), str(min=4, max=4), null(), required=False)
+        # Additional polarimetric computations to be performed in specific polarization modes (co/cross and co + cross)
+        polarimetric_option: any(list(enum('ratio', 'span'), min=1, max=2), enum('ratio', 'span'), null(), required=False)
+
+        # Specify the max_value for permanent water and no_data_value for invalid pixels
+        reference_water:
+            max_value: num(required=False)
+            no_data_value: num(required=False)
+            permanent_water_value: num(required=False)
+            drought_erosion_pixel: int(required=False)
+            flood_dilation_pixel: int(required=False)
+
+        hand:
+            mask_value: num(required=False)
+
+        ocean_mask:
+            # Flag to apply ocean mask
+            mask_enabled: bool(required=False)
+            # Margin to apply ocean mask in km
+            mask_margin_km: int(required=False)
+            # Flag if the polygon is water
+            mask_polygon_water: bool(required=False)
+
+        mosaic:
+            mosaic_prefix: str(required=False)
+            mosaic_cog_enable: bool(required=False)
+            mosaic_mode: str(required=False)
+            read_row_blk_size: int(min=1, required=False)
+            read_col_blk_size: int(min=1, required=False)
+        # Flag to turn on/off the filtering for RTC image.
+        # The enhanced Lee filter is available.
+        filter:
+            enabled: bool(required=False)
+            window_size: num(min=1, max=999, required=False)
+            line_per_block: num(min=1, required=False)
+
+        initial_threshold:
+            # Maximum tile size for initial threshold.
+            maximum_tile_size:
+                x: num(required=False)
+                y: num(required=False)
+            # Minimum tile size for initial threshold.
+            minimum_tile_size:
+                x: num(required=False)
+                y: num(required=False)
+            # Tile selection strategy to identify the boundary between water and nonwater
+            # ['twele', 'chini', 'bimodality', 'combined']
+            # 'combined' option applies all selection strategies
+            selection_method: list(enum('twele', 'chini', 'bimodality', 'combined'), min=1, max=3, required=False)
+            # Thresholds to select tiles showing the boundary between water and nonwater
+            # Three values are required for the twele method:
+            # 1) std / mean of tiles
+            # 2) min value and 3) max value of mean of subtiles / mean of tiles
+            tile_selection_twele: list(required=False)
+            # Thresholds to select tiles showing the boundary between water and nonwater
+            # using the bimodality strategy.
+            # One value is required for the bimodality method
+            tile_selection_bimodality: num(required=False)
+            # Strategy to interpolate the tile-based thresholds.
+            # Currently, only 'smoothed' is available.
+            extending_method: str(required=False)
+            # Thresholding algorithm for initial thresholds.
+            # Currently, 1) Otsu and 2) Kittler-Illingworth algorithms are available.
+            # ['otsu', 'ki']
+            threshold_method: str(required=False)
+            # Thresholding boundary values in dB. The boundary values are computed internally
+            # using the statistics of the RTC image. If the computed values are out of the given range,
+            # these values are adopted instead of the computed values.
+            threshold_bounds:
+                co_pol: list(required=False)
+                cross_pol: list(required=False)
+            # Flag to assume the trimodal distribution.
+            # If the flag is False, a bimodal distribution is assumed and a single
+            # threshold is estimated per tile. If True, a trimodal distribution is
+            # assumed and the lowest threshold is estimated.
+            multi_threshold: bool(required=False)
+            # Flag to adjust the threshold where the two Gaussian distributions do not overlap.
+            # If 'adjust_if_nonoverlap' is enabled,
+            # the search for an alternative threshold starts when the two distributions
+            # do not overlap. 'low_dist_percentile' is the percentile of
+            # the low distribution and 'high_dist_percentile' is the percentile of
+            # the high distribution. Both values should be within the range of 0 to 1.
+            adjust_if_nonoverlap: bool(required=False)
+            low_dist_percentile: num(required=False, min=0, max=1)
+            high_dist_percentile: num(required=False, min=0, max=1)
+            # Flag to average the thresholds within the tile.
+            # The output thresholds are assigned to each tile.
+            tile_average: bool(required=False)
+            # Number of threads to run
+            # -1 uses all available threads
+            number_cpu: num(required=False)
+            line_per_block: num(min=1, required=False)
+
+        fuzzy_value:
+            line_per_block: num(min=1, required=False)
+            hand:
+                # Min and max values for HAND are automatically calculated
+                # from the input HAND; if they are not given,
+                # the values below are used.
+                member_min: num(required=False)
+                member_max: num(required=False)
+            # Membership bound for slope angle
+            slope:
+                member_min: num(required=False)
+                member_max: num(required=False)
+            # Membership bound for reference water
+            reference_water:
+                # Minimum reference water value for membership
+                member_min: num(required=False)
+                # Maximum reference water value for membership
+                member_max: num(required=False)
+            # Membership bound for area of initial water bodies.
+            # Area membership is only required for the 'twele' workflow.
+            area:
+                member_min: num(required=False)
+                member_max: num(required=False)
+            # Dark area is defined where cross-pol is lower than cross_land
+            # Water is defined where cross-pol is lower than cross_water
+            dark_area:
+                # Threshold [dB] for land in the dark area definition
+                cross_land: num(required=False)
+                # Threshold [dB] for water in the dark area definition
+                cross_water: num(required=False)
+            # High frequent water is defined based on two values
+            # water_min_value < high_frequent_water < water_max_value
+            high_frequent_water:
+                # Minimum value for high frequent water
+                water_min_value: num(required=False)
+                # Maximum value for high frequent water
+                water_max_value: num(required=False)
+
+        region_growing:
+            # Seed value to start region growing
+            initial_threshold: num(min=0, max=1, required=False)
+            # Value where region growing is stopped
+            relaxed_threshold: num(min=0, max=1, required=False)
+            line_per_block: num(min=1, required=False)
+
+        masking_ancillary:
+            # Land cover classes that behave like dark land in DSWx-SAR.
+            # The elements should be in the given landcover file.
+            # The elements will be masked out during this step.
+            land_cover_darkland_list: list(required=False)
+            # These elements are considered dark land candidates
+            # where they are spatially connected to the dark land.
+            land_cover_darkland_extension_list: list(required=False)
+            land_cover_water_label: list(required=False)
+            # VV and VH threshold values for dark land candidates
+            co_pol_threshold: num(min=-30, max=10, required=False)
+            cross_pol_threshold: num(min=-30, max=10, required=False)
+            # Reference water threshold value for dark land candidates
+            water_threshold: num(min=0, max=100, required=False)
+            minimum_pixel: num(min=0, required=False)
+            # Flag to enable the darkland extension.
+            extended_darkland: bool(required=False)
+            extended_darkland_minimum_pixel: int(required=False)
+            extended_darkland_water_buffer: int(required=False, min=0)
+            # Assuming the height of the water/land boundaries has low variation,
+            # the standard deviation is estimated along the boundaries
+            # and pixels are removed where the std is high.
+            hand_variation_mask: bool(required=False)
+            # Pixels above the HAND variation threshold are masked out.
+            hand_variation_threshold: num(min=0, max=100, required=False)
+            line_per_block: num(min=1, required=False)
+            number_cpu: num(required=False)
+
+        refine_with_bimodality:
+            number_cpu: num(required=False)
+            lines_per_block: num(min=1, required=False)
+            minimum_pixel: num(min=0, required=False)
+            thresholds:
+                ashman: num(min=0, required=False)
+                Bhattacharyya_coefficient: num(min=0, required=False)
+                bm_coefficient: num(min=0, required=False)
+                surface_ratio: num(min=0, required=False)
+
+        inundated_vegetation:
+            # 'auto' determines the inundated vegetation availability
+            # based on available cross-polarizations
+            enabled: enum(True, False, 'auto')
+            dual_pol_ratio_max: num(min=0, max=30, required=False)
+            dual_pol_ratio_min: num(min=0, max=30, required=False)
+            dual_pol_ratio_threshold: num(min=0, max=30, required=False)
+            cross_pol_min: num(min=-30, max=10, required=False)
+            line_per_block: num(min=1, required=False)
+            # Land cover classes where inundated vegetation is detected.
+            target_land_cover: list(required=False)
+        # If debug mode is true, intermediate products are generated.
+        debug_mode: bool(required=False)
diff --git a/src/opera/pge/dswx_ni/schema/dswx_ni_sas_schema.yaml b/src/opera/pge/dswx_ni/schema/dswx_ni_sas_schema.yaml
index ddb783b1..d6384449 100644
--- a/src/opera/pge/dswx_ni/schema/dswx_ni_sas_schema.yaml
+++ b/src/opera/pge/dswx_ni/schema/dswx_ni_sas_schema.yaml
@@ -12,6 +12,8 @@ runconfig:
         input_file_group:
             # REQUIRED - list of RTC products (directory or files)
             input_file_path: list(str(), min=1)
+            # Placeholder for historical GCOV data
+            input_file_path_historical: list(str(), min=1, required=False)
         dynamic_ancillary_file_group:
             # Digital elevation model
@@ -48,7 +50,12 @@ runconfig:
             # algorithm parameter
             algorithm_parameters: str(required=True)
+            # Placeholder for inundated vegetation
+            mean_backscattering: str(required=False)
+            standard_deviation_backscattering: str(required=False)
+
         static_ancillary_file_group:
+            static_ancillary_inputs_flag: bool(required=False)
             # MGRS database sqlite file
@@ -74,6 +81,9 @@ runconfig:
             product_version: num(required=False)
+            # Specify the MGRS tile collection ID
+            output_mgrs_collection_id: str(required=False)
+
             # DSWx-NI product format (default is 'COG')
             output_imagery_format: enum('GTiff', 'COG', required=False)
@@ -83,4 +93,43 @@ runconfig:
             # DSWx-NI Compression bits for COG
             output_imagery_nbits: int(min=1, required=False)
+            # DSWx-NI output spacing
+            output_spacing: num(min=1, required=False)
+
+        browse_image_group:
+            # Save a full-res Cloud-Optimized GeoTIFF DSWx-NI browse image and
+            # a modified-resolution PNG of the browse image for DSWx-NI
+            save_browse: bool(required=False)
+
+            # Setting `browse_image_height` and `browse_image_width` equal
+            # will maintain this original DSWx-NI aspect ratio
+            # of 3660 pixels x 3660 pixels for the PNG browse image.
+            # If these fields are left empty, 1024 x 1024 will be used.
+            # Height in pixels for the PNG browse image
+            browse_image_height: int(min=1, required=False)
+
+            # Width in pixels for the PNG browse image
+            browse_image_width: int(min=1, required=False)
+
+            # Flag to collapse water classes if set to True. Default is True.
+            flag_collapse_wtr_classes: bool(required=False)
+
+            # Flag to exclude inundated vegetation from processing if set to True.
+            exclude_inundated_vegetation: bool(required=False)
+
+            # Flag to set non-water pixels to NoData value if set to True.
+            set_not_water_to_nodata: bool(required=False)
+
+            # Flag to set HAND mask pixels to NoData value if set to True.
+            set_hand_mask_to_nodata: bool(required=False)
+
+            # Flag to set layover and shadow pixels to NoData value if set to True.
+            set_layover_shadow_to_nodata: bool(required=False)
+
+            # Flag to set ocean-masked pixels to NoData value if set to True.
+            set_ocean_masked_to_nodata: bool(required=False)
+
+            # Flag to save GeoTIFF to output directory if set to True.
+ save_tif_to_output: bool(required=False) + log_file: str(required=False) diff --git a/src/opera/pge/dswx_s1/dswx_s1_pge.py b/src/opera/pge/dswx_s1/dswx_s1_pge.py index b66eb0ce..56f02c04 100644 --- a/src/opera/pge/dswx_s1/dswx_s1_pge.py +++ b/src/opera/pge/dswx_s1/dswx_s1_pge.py @@ -41,6 +41,7 @@ class DSWxS1PreProcessorMixin(PreProcessorMixin): """ _pre_mixin_name = "DSWxS1PreProcessorMixin" + _valid_input_extensions = (".tif", ".h5") def _validate_dynamic_ancillary_inputs(self): """ @@ -119,7 +120,8 @@ def run_preprocessor(self, **kwargs): super().run_preprocessor(**kwargs) validate_dswx_inputs( - self.runconfig, self.logger, self.runconfig.pge_name, valid_extensions=(".tif", ".h5") + self.runconfig, self.logger, self.runconfig.pge_name, + valid_extensions=self._valid_input_extensions ) validate_algorithm_parameters_config(self.name, self.runconfig.algorithm_parameters_schema_path, diff --git a/src/opera/test/data/test_dswx_ni_algorithm_parameters.yaml b/src/opera/test/data/test_dswx_ni_algorithm_parameters.yaml index 37981622..3eb67144 100644 --- a/src/opera/test/data/test_dswx_ni_algorithm_parameters.yaml +++ b/src/opera/test/data/test_dswx_ni_algorithm_parameters.yaml @@ -2,12 +2,17 @@ runconfig: name: dswx_ni_workflow_algorithm processing: - # dswx_workflow 'opera_dswx_ni', 'twele', 'opera_dswx_ni_inundated_vegetation' + # dswx_workflow 'opera_dswx_ni', 'twele' dswx_workflow: 'opera_dswx_ni' - # valid values for polarizations - # empty for all polarizations found in RSLC - # [polarizations] for list of specific frequency(s) e.g. [HH, HV] or [HH] - polarizations: ['VV', 'VH'] + # Polarizations to be used for DSWx-SAR + # [polarizations] for list of specific frequency(s) e.g. [VV, VH] or [VV] + # 'dual-pol', 'co-pol', 'cross-pol' will search the polarizations Input GeoTiff files have. + # For example, 'co-pol' uses ['HH'], ['VV'], or ['HH', 'VV'] by looking at the input data. + # ['auto'] will detect available polarizations from given RTC data + polarizations: ['auto'] + # Additional for polarimetric computations to be performed in specific polarization modes (co/cross and co + cross) + # e.g. ['ratio', 'span'] + polarimetric_option: # Specify the max_value for permanent water and no_data_value for invalid pixels reference_water: @@ -23,9 +28,23 @@ runconfig: hand: mask_value: 200 + ocean_mask: + # Flag to apply ocean mask + mask_enabled: False + # Margin to apply ocean mask in km + mask_margin_km: 5 + # Flag if the polygon is water + mask_polygon_water: True + mosaic: mosaic_prefix: 'mosaic' mosaic_cog_enable: True + # Burst Mosaic options + # - average : overlapped areas are averaged. + # - first : choose one burst without average. + mosaic_mode: 'first' + read_row_blk_size: 1000 + read_col_blk_size: 1100 # Flag to turn on/off the filtering for RTC image. # The enhanced Lee filter is available. @@ -33,6 +52,7 @@ runconfig: enabled: True # Window size for filtering. window_size: 5 + line_per_block: 1000 initial_threshold: # Maximum tile size for initial threshold. @@ -46,7 +66,7 @@ runconfig: # tile selecting strategy to identify the boundary between water and nonwater # ['twele', 'chini', 'bimodality', 'combined'] # 'combined' option applies all selection strategy - selection_method: 'combined' + selection_method: ['chini', 'bimodality'] # Thresholds to select tiles showing the boundary between water and nonwater # using bimodality strategy. @@ -63,19 +83,34 @@ runconfig: # Currently, 1) Otsu and 2) Kittler-Illingworth algorithms are available. 
# ['otsu', 'ki'] threshold_method: 'ki' + # Thresholding boundary values in dB. The boundary values are computed internally + # using the statics of the rtc image. If the values are out of the given range, + # adopt these values instead of the computed values + threshold_bounds: + co_pol: [-28, -11] + cross_pol: [-28, -18] # Flag to assume the trimodal distribution. # If flag is false, the distribution is assumed to have bimodal distribution and # estimate single threshold per tile. If True, the trimodal distribution is assumed, # the lowest threshold is estimated. multi_threshold: True - + # Flag to adjust threshold where two gaussian distribution is not overlapped. + # If 'adjust_if_nonoverlap' is enabled, + # start to search the alternative threshold when two distributions are not + # overlapped. The 'low_dist_percentile' is the percentile of + # the low distribution and 'high_dist_percentile' is the percentile of + # the high distribution. Both values should be within range of 0 to 1. + adjust_if_nonoverlap: True + low_dist_percentile: 0.99 + high_dist_percentile: 0.01 # Number of threads to run # -1 represents the all available threads - number_cpu: 2 - number_iterations: 1 - tile_average: False + number_cpu: -1 + tile_average: True + line_per_block: 300 fuzzy_value: + line_per_block: 200 hand: # The units of the HAND is meters. member_min: 0 @@ -121,29 +156,51 @@ runconfig: line_per_block: 400 masking_ancillary: + # Land covers that behaves like dark lands in DSWx-SAR. + # The elements should be in given landcover file. + # The elements will be masked out during this step. + land_cover_darkland_list: ['Bare sparse vegetation', 'Urban', 'Moss and lichen'] + # The elements is considered as the dark land candidates + # where these elemtns are spatially connected to the dark land. + land_cover_darkland_extension_list: ['Grassland', 'Shrubs'] + land_cover_water_label: ['Permanent water bodies'] # VV and VH threshold values for dark land candidates co_pol_threshold: -14.6 cross_pol_threshold: -22.8 # reference water threshold value for dark land candidates - water_threshold: + water_threshold: 0.05 + # Flag to enable the darkland extension. + extended_darkland: True + extended_darkland_minimum_pixel: 3 + extended_darkland_water_buffer: 10 + # Flag to enable the HAND filter. + hand_variation_mask: True + # pixels with HAND threshold is masked out. + hand_variation_threshold: 2.5 + line_per_block: 400 + number_cpu: 1 refine_with_bimodality: - number_cpu: 1 minimum_pixel: 4 + lines_per_block: 500 + number_cpu: 1 thresholds: ashman: 1.5 Bhattacharyya_coefficient: 0.97 bm_coefficient: 0.7 surface_ratio: 0.1 - inundated_vegetation: - enabled: True + # 'auto' determine the inundated vegetation availability + # based on available cross-polarizations + enabled: auto dual_pol_ratio_max: 12 dual_pol_ratio_min: 7 dual_pol_ratio_threshold: 8 cross_pol_min: -26 line_per_block: 300 + target_land_cover: ['Herbaceous wetland'] # debug mode is true, intermediate product is generated. 
         debug_mode: False
+
diff --git a/src/opera/test/data/test_dswx_ni_config.yaml b/src/opera/test/data/test_dswx_ni_config.yaml
index e27197fa..6bec44df 100644
--- a/src/opera/test/data/test_dswx_ni_config.yaml
+++ b/src/opera/test/data/test_dswx_ni_config.yaml
@@ -20,10 +20,16 @@ RunConfig:
             PrimaryExecutable:
                 ProductIdentifier: DSWX_NI
                 ProductVersion: "1.0"
-                ProgramPath: /bin/echo
+                ProgramPath: mkdir
                 ProgramOptions:
-                    - hello world > dswx_ni_pge_test/output_dir/OPERA_L3_DSWx-NI_T18MVA_20200702T231843Z_20230317T190549Z_v0.1_B01_WTR.tif;
-                    - /bin/echo DSWx-NI invoked with RunConfig
+                    - '-p dswx_ni_pge_test/output_dir;'
+                    # Create dummy output files containing 1 MB of random data each, but with the expected file names
+                    - 'dd if=/dev/urandom of=dswx_ni_pge_test/output_dir/OPERA_L3_DSWx-NI_T11SLS_20110226T061749Z_20240329T181033Z_LSAR_30_v0.1_B01_WTR.tif bs=1M count=1;'
+                    - 'dd if=/dev/urandom of=dswx_ni_pge_test/output_dir/OPERA_L3_DSWx-NI_T11SLS_20110226T061749Z_20240329T181033Z_LSAR_30_v0.1_B02_BWTR.tif bs=1M count=1;'
+                    - 'dd if=/dev/urandom of=dswx_ni_pge_test/output_dir/OPERA_L3_DSWx-NI_T11SLS_20110226T061749Z_20240329T181033Z_LSAR_30_v0.1_B03_CONF.tif bs=1M count=1;'
+                    - 'dd if=/dev/urandom of=dswx_ni_pge_test/output_dir/OPERA_L3_DSWx-NI_T11SLS_20110226T061749Z_20240329T181033Z_LSAR_30_v0.1_B04_DIAG.tif bs=1M count=1;'
+                    - 'dd if=/dev/urandom of=dswx_ni_pge_test/output_dir/OPERA_L3_DSWx-NI_T11SLS_20110226T061749Z_20240329T181033Z_LSAR_30_v0.1_BROWSE.png bs=1M count=1;'
+                    - '/bin/echo DSWx-NI invoked with RunConfig'
                 ErrorCodeBase: 400000
                 SchemaPath: pge/dswx_ni/schema/dswx_ni_sas_schema.yaml
                 AlgorithmParametersSchemaPath: pge/dswx_ni/schema/algorithm_parameters_ni_schema.yaml
@@ -62,4 +68,6 @@ RunConfig:
                 product_path: dswx_ni_pge_test/output_dir
                 scratch_path: dswx_ni_pge_test/scratch_dir
                 sas_output_path: dswx_ni_pge_test/output_dir
+            browse_image_group:
+                save_browse: true
             log_file: dswx_ni_pge_test/output_dir/test_log.log
diff --git a/src/opera/test/pge/dswx_ni/test_dswx_ni_pge.py b/src/opera/test/pge/dswx_ni/test_dswx_ni_pge.py
index 4bf77476..b03a1faa 100644
--- a/src/opera/test/pge/dswx_ni/test_dswx_ni_pge.py
+++ b/src/opera/test/pge/dswx_ni/test_dswx_ni_pge.py
@@ -9,6 +9,7 @@
 import glob
 import os
+import shutil
 import tempfile
 import unittest

 from io import StringIO
@@ -54,10 +55,25 @@ def setUp(self) -> None:
         input_dir = join(self.working_dir.name, "dswx_ni_pge_test/input_dir")
         os.makedirs(input_dir, exist_ok=True)
+        # Copy the algorithm_parameters config file into the test input directory.
+ shutil.copy(join(self.data_dir, 'test_dswx_ni_algorithm_parameters.yaml'), input_dir) + + # Create the input dir expected by the test RunConfig and add a + # dummy input file self.input_file = tempfile.NamedTemporaryFile( - dir=input_dir, prefix="test_input_", suffix=".tiff" + dir=input_dir, prefix="test_input_", suffix=".h5" ) + # Create dummy versions of the expected ancillary inputs + for ancillary_file in ('dem.tif', 'worldcover.tif', + 'reference_water.tif', 'shoreline.shp', + 'shoreline.dbf', 'shoreline.prj', + 'shoreline.shx', 'hand.tif', + 'MGRS_tile.sqlite', 'MGRS_tile_collection.sqlite'): + os.system( + f"touch {join(input_dir, ancillary_file)}" + ) + os.chdir(self.working_dir.name) def tearDown(self) -> None: @@ -110,7 +126,10 @@ def test_dswx_ni_pge_execution(self): # Lastly, check that the dummy output products were created slc_files = glob.glob(join(pge.runconfig.output_product_path, "*.tif")) - self.assertEqual(len(slc_files), 1) + self.assertEqual(len(slc_files), 4) + + output_browse_files = glob.glob(join(pge.runconfig.output_product_path, "*.png")) + self.assertEqual(len(output_browse_files), 1) # Open and read the log with open(expected_log_file, 'r', encoding='utf-8') as infile: diff --git a/src/opera/test/pge/dswx_s1/test_dswx_s1_pge.py b/src/opera/test/pge/dswx_s1/test_dswx_s1_pge.py index cd223ec7..4f339be6 100644 --- a/src/opera/test/pge/dswx_s1/test_dswx_s1_pge.py +++ b/src/opera/test/pge/dswx_s1/test_dswx_s1_pge.py @@ -63,13 +63,11 @@ def setUp(self) -> None: test_input_dir = join(self.working_dir.name, "dswx_s1_pge_test/input_dir") os.makedirs(test_input_dir, exist_ok=True) - self.input_file = tempfile.NamedTemporaryFile( - dir=test_input_dir, prefix="test_h5_", suffix=".h5" - ) - # Copy the algorithm_parameters config file into the test input directory. shutil.copy(join(self.data_dir, 'test_dswx_s1_algorithm_parameters.yaml'), test_input_dir) + # Create the input dir expected by the test RunConfig and add a + # dummy input file self.input_file = tempfile.NamedTemporaryFile( dir=test_input_dir, prefix="test_h5_", suffix=".h5" )
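For local verification, the updated build script and the new test script can be exercised directly. A minimal usage sketch, assuming Docker is available, the repository root is the current directory, and the defaults baked into the scripts (TAG=${USER}-dev and the Artifactory SAS_IMAGE shown above) are acceptable:

    # Build the DSWx-NI PGE image on top of the ADT-provided SAS image
    .ci/scripts/dswx_ni/build_dswx_ni.sh

    # Run linting, unit tests, and coverage inside the image;
    # results are written to test_results/dswx_ni under the workspace
    .ci/scripts/dswx_ni/test_dswx_ni.sh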