
Add time series inversion/velocity estimation to `DisplacementWorkflow` (#246)

* remove commented code in ts

* allow user to specify more than one type of ifg network

* fix ifg tests, add empty `ValueError` tests

* start reference point selection function

* create multilooked amp dispersion

* add timeseries options to `DisplacementWorkflow`

* add cor to variance

* add network disp pytest

* move `date_to_float` back to public

* fix small test errors

* reference the data before running velocity estimation, even without inversion

* add a `read_masked` to `VRTStack`

* Directly use callback when `ntiles=(1,1)`

closes #244

* make the reference point selection use the largest conncomp

* Revert "Directly use callback when `ntiles=(1,1)`"

This reverts commit 2d2dea4.

* ensure `amp_disp_looked` is the same size as the outputs

* make a dummy 0 raster in the case of single-reference

otherwise, we would try to estimate velocity with N x-values (SAR dates) but only N-1 data points (unwrapped interferograms)

* raise an error if `create_velocity` receives mismatched file lengths

* only pass correlation without an inversion
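
To unpack the single-reference bullets above: with N SAR dates, a single-reference network yields only N-1 unwrapped interferograms, so a zero-displacement value for the reference date has to be supplied before a velocity can be fit against all N dates. A minimal sketch of that idea (plain NumPy with made-up numbers, not dolphin's implementation):

```python
import numpy as np

# Hypothetical example: 4 SAR acquisition dates in a single-reference network.
# The reference date has no interferogram with itself, so only N-1 = 3
# unwrapped values exist; prepend a zero (the "dummy 0 raster" above) so the
# fit sees N x-values and N y-values.
sar_dates_years = np.array([0.0, 0.1, 0.2, 0.3])  # decimal years since reference
unw_displacement = np.array([0.5, 1.1, 1.4])      # cm, reference date missing

y = np.concatenate([[0.0], unw_displacement])     # zero at the reference date
velocity, intercept = np.polyfit(sar_dates_years, y, deg=1)
print(f"estimated velocity: {velocity:.2f} cm/yr")
```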
scottstanie authored Mar 4, 2024
1 parent 39ea994 commit d8fd3c8
Showing 17 changed files with 732 additions and 276 deletions.
3 changes: 3 additions & 0 deletions CHANGELOG.md
@@ -4,13 +4,16 @@
 - Added `dolphin.timeseries` module with basic functionality:
   - Invert a stack of unwrapped interferograms to a timeseries (using correlation weighting optionally)
   - Estimate a (weighted) linear velocity from a timeseries
+- Added inversion and velocity estimation as options to `DisplacementWorkflow`
 - Create `DatasetStackWriter` protocol, with `BackgroundStackWriter` implementation
 
 **Changed**
 - Rename `GdalWriter` to `BackgroundBlockWriter`
+- Displacement workflow now also creates/returns a stitched, multi-looked version of the amplitude dispersion
 
 **Fixed**
 - `BackgroundRasterWriter` was not creating the files necessary before writing
+- Allow user to specify more than one type of interferogram in `Network` configuration
 
 # [0.15.3](https://github.com/isce-framework/dolphin/compare/v0.15.2...0.15.3) - 2024-02-27
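
Per pixel, the correlation-weighted inversion and velocity estimate noted above boil down to a weighted least-squares line fit. A minimal sketch of that computation (`weighted_velocity` is a hypothetical name, not the `dolphin.timeseries` API):

```python
import numpy as np

def weighted_velocity(x: np.ndarray, y: np.ndarray, w: np.ndarray) -> float:
    """Fit y = v * x + b by weighted least squares and return v.

    x: acquisition times (e.g. decimal years); y: displacements;
    w: per-epoch weights, e.g. derived from correlation.
    """
    A = np.column_stack([x, np.ones_like(x)])
    sw = np.sqrt(w)
    # Scaling rows by sqrt(w) makes lstsq minimize sum(w * residual**2)
    coeffs, *_ = np.linalg.lstsq(A * sw[:, None], y * sw, rcond=None)
    return coeffs[0]

x = np.array([0.0, 0.5, 1.0, 1.5])
y = np.array([0.0, 1.1, 1.9, 3.2])
w = np.array([1.0, 0.8, 0.9, 0.5])
print(weighted_velocity(x, y, w))
```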
5 changes: 5 additions & 0 deletions src/dolphin/_types.py
@@ -75,6 +75,11 @@ class HalfWindow(NamedTuple):
 DateOrDatetime = Union[datetime.datetime, datetime.date]
 
 
+class ReferencePoint(NamedTuple):
+    row: int
+    col: int
+
+
 class TropoModel(str, Enum):
     """Enumeration representing different tropospheric models."""
 
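
Because `ReferencePoint` is a NamedTuple, it unpacks and indexes like a plain `(row, col)` tuple while keeping named fields. A short usage sketch (the raster is random stand-in data):

```python
import numpy as np
from typing import NamedTuple

class ReferencePoint(NamedTuple):
    row: int
    col: int

ref = ReferencePoint(row=120, col=340)
unw = np.random.rand(512, 512)            # stand-in unwrapped interferogram
referenced = unw - unw[ref.row, ref.col]  # rebase to the reference pixel
row, col = ref                            # tuple unpacking also works
```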
53 changes: 32 additions & 21 deletions src/dolphin/interferogram.py
@@ -375,33 +375,44 @@ def __post_init__(
     def _make_ifg_pairs(self) -> list[IfgPairT]:
         """Form interferogram pairs from a list of SLC files sorted by date."""
         assert self.dates is not None
+        ifgs: list[IfgPairT] = []
+        # For each type of possible network, add the ifgs to the list
         if self.indexes is not None:
             # Give the option to select exactly which interferograms to create
-            ifgs = [
-                (self.slc_list[ref_idx], self.slc_list[sec_idx])
-                for ref_idx, sec_idx in self.indexes
-            ]
-        elif self.max_bandwidth is not None:
-            ifgs = Network._limit_by_bandwidth(self.slc_list, self.max_bandwidth)
-        elif self.max_temporal_baseline is not None:
-            ifgs = Network._limit_by_temporal_baseline(
-                self.slc_list,
-                dates=self.dates,
-                max_temporal_baseline=self.max_temporal_baseline,
-            )
-        elif self.reference_idx is not None:
-            ifgs = Network._single_reference_network(self.slc_list, self.reference_idx)
-        else:
-            msg = "No valid ifg list generation method specified"
-            raise ValueError(msg)
-
-        if not self.include_annual:
-            return ifgs
-        # Add in the annual pairs, then re-sort
-        annual_ifgs = Network._find_annuals(
-            self.slc_list, self.dates, buffer_days=self.annual_buffer_days
-        )
-        return sorted(ifgs + annual_ifgs)
+            ifgs.extend(
+                [
+                    (self.slc_list[ref_idx], self.slc_list[sec_idx])
+                    for ref_idx, sec_idx in self.indexes
+                ]
+            )
+        if self.max_bandwidth is not None:
+            ifgs.extend(Network._limit_by_bandwidth(self.slc_list, self.max_bandwidth))
+        if self.max_temporal_baseline is not None:
+            ifgs.extend(
+                Network._limit_by_temporal_baseline(
+                    self.slc_list,
+                    dates=self.dates,
+                    max_temporal_baseline=self.max_temporal_baseline,
+                )
+            )
+        if self.reference_idx is not None:
+            ifgs.extend(
+                Network._single_reference_network(self.slc_list, self.reference_idx)
+            )
+
+        if self.include_annual:
+            # Add in the annual pairs, then re-sort
+            annual_ifgs = Network._find_annuals(
+                self.slc_list, self.dates, buffer_days=self.annual_buffer_days
+            )
+            ifgs.extend(annual_ifgs)
+
+        if not ifgs:
+            msg = "No valid ifg list generation method specified"
+            raise ValueError(msg)
+
+        # Sort and dedupe them
+        return sorted(set(ifgs))
 
     def _create_vrt_ifgs(self) -> list[VRTInterferogram]:
         """Write out a VRTInterferogram for each ifg."""
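
The change above makes the network criteria additive rather than mutually exclusive: every configured criterion contributes pairs, and `sorted(set(ifgs))` drops any duplicates at the end. A self-contained sketch of the combine-then-dedupe pattern (the helpers are simplified stand-ins for the `Network._limit_by_*` methods):

```python
import datetime

dates = [datetime.date(2023, 1, 1) + datetime.timedelta(days=12 * i) for i in range(5)]

def limit_by_bandwidth(dates, max_bw):
    # Pair each date with up to `max_bw` of its nearest later neighbors
    return [
        (dates[i], dates[j])
        for i in range(len(dates))
        for j in range(i + 1, min(i + 1 + max_bw, len(dates)))
    ]

def single_reference(dates, ref_idx):
    return [(dates[ref_idx], d) for d in dates if d != dates[ref_idx]]

ifgs = []
ifgs.extend(limit_by_bandwidth(dates, max_bw=1))  # nearest-neighbor pairs
ifgs.extend(single_reference(dates, ref_idx=0))   # single-reference pairs
# The first nearest-neighbor pair duplicates one single-reference pair;
# sorted(set(...)) removes it and restores chronological order.
unique_ifgs = sorted(set(ifgs))
print(len(ifgs), "->", len(unique_ifgs))  # 8 -> 7
```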
25 changes: 19 additions & 6 deletions src/dolphin/io/_readers.py
@@ -682,9 +682,6 @@ class VRTStack(StackReader):
     sort_files : bool, optional (default = True)
         Sort the files in `file_list`. Assumes that the naming convention
         will sort the files in increasing time order.
-    nodata_mask_file : pathlib.Path, optional
-        Path to file containing a mask of pixels containing with nodata
-        in every images. Used for skipping the loading of these pixels.
     file_date_fmt : str, optional (default = "%Y%m%d")
         Format string for parsing the dates from the filenames.
         Passed to [opera_utils.get_dates][].
@@ -703,6 +700,7 @@ def __init__(
         fail_on_overwrite: bool = False,
         skip_size_check: bool = False,
         num_threads: int = 1,
+        read_masked: bool = False,
     ):
         if Path(outfile).exists() and write_file:
             if fail_on_overwrite:
@@ -730,6 +728,7 @@
         self.file_list = files
         self.dates = dates
         self.num_threads = num_threads
+        self._read_masked = read_masked
 
         self.outfile = Path(outfile).resolve()
         # Assumes that all files use the same subdataset (if NetCDF)
@@ -740,15 +739,21 @@
 
         # Use the first file in the stack to get size, transform info
         ds = gdal.Open(fspath(self._gdal_file_strings[0]))
+        bnd1 = ds.GetRasterBand(1)
         self.xsize = ds.RasterXSize
         self.ysize = ds.RasterYSize
+        self.nodatavals = []
+        for i in range(1, ds.RasterCount + 1):
+            bnd = ds.GetRasterBand(i)
+            self.nodatavals.append(bnd.GetNoDataValue())
+        self.nodata = self.nodatavals[0]
         # Should be CFloat32
-        self.gdal_dtype = gdal.GetDataTypeName(ds.GetRasterBand(1).DataType)
+        self.gdal_dtype = gdal.GetDataTypeName(bnd1.DataType)
         # Save these for setting at the end
         self.gt = ds.GetGeoTransform()
         self.proj = ds.GetProjection()
         self.srs = ds.GetSpatialRef()
-        ds = None
+        ds = bnd1 = None
         # Save the subset info
 
         self.xoff, self.yoff = 0, 0
@@ -794,6 +799,12 @@ def _write(self):
         ds.SetGeoTransform(self.gt)
         ds.SetProjection(self.proj)
         ds.SetSpatialRef(self.srs)
+        if self.nodata is not None:
+            for i in range(ds.RasterCount):
+                # ds.GetRasterBand(i + 1).SetNoDataValue(self.nodatavals[i])
+                # Force to be the same nodataval for all bands
+                ds.GetRasterBand(i + 1).SetNoDataValue(self.nodata)
+
         ds = None
 
     @property
@@ -899,9 +910,11 @@ def read_stack(
         subsample_factor: int = 1,
         rows: Optional[slice] = None,
         cols: Optional[slice] = None,
-        masked: bool = False,
+        masked: bool | None = None,
     ):
         """Read in the SLC stack."""
+        if masked is None:
+            masked = self._read_masked
         return io.load_gdal(
             self.outfile,
             band=band,
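
The new `read_masked` option makes `read_stack` return a `numpy.ma.MaskedArray` with nodata pixels masked by default. A minimal sketch of what such a read looks like with plain GDAL and NumPy (the underlying idea, not dolphin's `io.load_gdal` implementation):

```python
import numpy as np
from osgeo import gdal

def read_band_masked(path: str, band: int = 1) -> np.ma.MaskedArray:
    """Read one band, masking pixels equal to the band's nodata value."""
    ds = gdal.Open(path)
    bnd = ds.GetRasterBand(band)
    arr = bnd.ReadAsArray()
    nodata = bnd.GetNoDataValue()
    ds = None  # close the dataset
    if nodata is None:
        return np.ma.MaskedArray(arr, mask=False)
    return np.ma.masked_equal(arr, nodata)
```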
96 changes: 62 additions & 34 deletions src/dolphin/ps.py
@@ -261,56 +261,84 @@ def _use_existing_files(
         shutil.copy(existing_amp_mean_file, output_amp_mean_file)
 
 
-def multilook_ps_mask(
+def multilook_ps_files(
     strides: dict[str, int],
     ps_mask_file: Filename,
-    output_file: Optional[Filename] = None,
-) -> Path:
-    """Create a multilooked version of the full-res PS mask.
+    amp_dispersion_file: Filename,
+) -> tuple[Path, Path]:
+    """Create a multilooked version of the full-res PS mask/amplitude dispersion.
 
     Parameters
     ----------
     strides : dict[str, int]
         Decimation factor for 'x', 'y'
     ps_mask_file : Filename
         Name of input full-res uint8 PS mask file
-    output_file : Optional[Filename], optional
-        Name of file to save result to.
-        Defaults to same as `ps_mask_file`, but with "_looked" added before suffix.
+    amp_dispersion_file : Filename
+        Name of input full-res float32 amplitude dispersion file
 
     Returns
     -------
-    output_file : Path
+    output_ps_file : Path
+        Multilooked PS mask file
+        Will be same as `ps_mask_file`, but with "_looked" added before suffix.
+    output_amp_disp_file : Path
+        Multilooked amplitude dispersion file
+        Similar naming scheme to `output_ps_file`
     """
     if strides == {"x": 1, "y": 1}:
         logger.info("No striding request, skipping multilook.")
-        return Path(ps_mask_file)
-    if output_file is None:
-        ps_suffix = Path(ps_mask_file).suffix
-        out_path = Path(str(ps_mask_file).replace(ps_suffix, f"_looked{ps_suffix}"))
-        logger.info(f"Saving a looked PS mask to {out_path}")
-    else:
-        out_path = Path(output_file)
+        return Path(ps_mask_file), Path(amp_dispersion_file)
+    full_cols, full_rows = io.get_raster_xysize(ps_mask_file)
+    out_rows, out_cols = full_rows // strides["y"], full_cols // strides["x"]
 
-    if Path(out_path).exists():
-        logger.info(f"{out_path} exists, skipping.")
-        return out_path
+    ps_suffix = Path(ps_mask_file).suffix
+    ps_out_path = Path(str(ps_mask_file).replace(ps_suffix, f"_looked{ps_suffix}"))
+    logger.info(f"Saving a looked PS mask to {ps_out_path}")
 
-    ps_mask = io.load_gdal(ps_mask_file, masked=True).astype(bool)
-    full_rows, full_cols = ps_mask.shape
-    ps_mask_looked = utils.take_looks(
-        ps_mask, strides["y"], strides["x"], func_type="any", edge_strategy="pad"
-    )
-    # make sure it's the same size as the MLE result/temp_coh after padding
-    out_rows, out_cols = full_rows // strides["y"], full_cols // strides["x"]
-    ps_mask_looked = ps_mask_looked[:out_rows, :out_cols]
-    ps_mask_looked = ps_mask_looked.astype("uint8").filled(NODATA_VALUES["ps"])
-    io.write_arr(
-        arr=ps_mask_looked,
-        like_filename=ps_mask_file,
-        output_name=out_path,
-        strides=strides,
-        nodata=NODATA_VALUES["ps"],
+    if Path(ps_out_path).exists():
+        logger.info(f"{ps_out_path} exists, skipping.")
+    else:
+        ps_mask = io.load_gdal(ps_mask_file, masked=True).astype(bool)
+        ps_mask_looked = utils.take_looks(
+            ps_mask, strides["y"], strides["x"], func_type="any", edge_strategy="pad"
+        )
+        # make sure it's the same size as the MLE result/temp_coh after padding
+        ps_mask_looked = ps_mask_looked[:out_rows, :out_cols]
+        ps_mask_looked = ps_mask_looked.astype("uint8").filled(NODATA_VALUES["ps"])
+        io.write_arr(
+            arr=ps_mask_looked,
+            like_filename=ps_mask_file,
+            output_name=ps_out_path,
+            strides=strides,
+            nodata=NODATA_VALUES["ps"],
+        )
+
+    amp_disp_suffix = Path(amp_dispersion_file).suffix
+    amp_disp_out_path = Path(
+        str(amp_dispersion_file).replace(amp_disp_suffix, f"_looked{amp_disp_suffix}")
     )
-    return out_path
+    if amp_disp_out_path.exists():
+        logger.info(f"{amp_disp_out_path} exists, skipping.")
+    else:
+        amp_disp = io.load_gdal(amp_dispersion_file, masked=True)
+        # We use `nanmin` assuming that the multilooked PS is using
+        # the strongest PS (the one with the lowest amplitude dispersion)
+        amp_disp_looked = utils.take_looks(
+            amp_disp,
+            strides["y"],
+            strides["x"],
+            func_type="nanmin",
+            edge_strategy="pad",
+        )
+        amp_disp_looked = amp_disp_looked[:out_rows, :out_cols]
+        amp_disp_looked = amp_disp_looked.filled(NODATA_VALUES["amp_dispersion"])
+        io.write_arr(
+            arr=amp_disp_looked,
+            like_filename=amp_dispersion_file,
+            output_name=amp_disp_out_path,
+            strides=strides,
+            nodata=NODATA_VALUES["amp_dispersion"],
+        )
+    return ps_out_path, amp_disp_out_path
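
The two `func_type` choices above encode different reductions: a looked PS-mask pixel is set if any full-res pixel in its window is a PS, while the looked amplitude dispersion keeps the window minimum, i.e. the strongest PS. A toy NumPy sketch of block-wise looks (it simply crops any remainder, whereas dolphin's `utils.take_looks` pads edges):

```python
import numpy as np

def take_looks_blocks(arr: np.ndarray, ry: int, rx: int, func) -> np.ndarray:
    """Downsample by (ry, rx) windows, applying `func` over each block."""
    r, c = arr.shape
    # Crop to exact multiples of the look sizes, then reshape into blocks
    blocks = arr[: r // ry * ry, : c // rx * rx].reshape(r // ry, ry, c // rx, rx)
    return func(blocks, axis=(1, 3))

ps_mask = np.random.rand(6, 6) > 0.9          # full-res boolean PS mask
amp_disp = np.random.rand(6, 6).astype("float32")

ps_looked = take_looks_blocks(ps_mask, 3, 3, np.any)       # any PS in window
amp_looked = take_looks_blocks(amp_disp, 3, 3, np.nanmin)  # strongest PS wins
```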
