From 25c9bf64029f3290d298cb12f46f106151f52282 Mon Sep 17 00:00:00 2001
From: John Franklin Crenshaw
Date: Wed, 4 Dec 2024 10:26:29 -0800
Subject: [PATCH 1/6] Upgrades to forward modeling util.

---
 doc/versionHistory.rst | 1 +
 1 file changed, 1 insertion(+)

diff --git a/doc/versionHistory.rst b/doc/versionHistory.rst
index d5c1f6fb..0e989479 100644
--- a/doc/versionHistory.rst
+++ b/doc/versionHistory.rst
@@ -13,6 +13,7 @@ Version History
 -------------
 
 * Set saveHistory=True and loosen convergence criteria in the Danish production pipeline
+* Upgrades to the forward modeling util, including specifying flux ratios for blends, miscentering donuts, and simulating "flat" donuts without intensity patterns
 * Fixed bug in forward modeling util when adding noise to large flux values
 

From dd631be913a9a540af04b86a65341e1f49a8b3a1 Mon Sep 17 00:00:00 2001
From: John Franklin Crenshaw
Date: Wed, 4 Dec 2024 21:04:31 -0800
Subject: [PATCH 2/6] Danish joint fit of pairs.

---
 doc/versionHistory.rst                  |   8 +
 python/lsst/ts/wep/estimation/danish.py | 337 ++++++++++++++++++++----
 2 files changed, 291 insertions(+), 54 deletions(-)

diff --git a/doc/versionHistory.rst b/doc/versionHistory.rst
index 0e989479..a5ec07e8 100644
--- a/doc/versionHistory.rst
+++ b/doc/versionHistory.rst
@@ -6,6 +6,14 @@ Version History
 ##################
 
+.. _lsst.ts.wep-13.2.0:
+
+-------------
+13.2.0
+-------------
+
+* Implemented joint-fitting of donut pairs with Danish.
+
 .. _lsst.ts.wep-13.1.0:
 
 -------------
diff --git a/python/lsst/ts/wep/estimation/danish.py b/python/lsst/ts/wep/estimation/danish.py
index 988a8514..c7772b44 100644
--- a/python/lsst/ts/wep/estimation/danish.py
+++ b/python/lsst/ts/wep/estimation/danish.py
@@ -50,11 +50,21 @@ class DanishAlgorithm(WfAlgorithm):
     binning : int, optional
         Binning factor to apply to the donut stamps before estimating
         Zernike coefficients. The default value of 1 means no binning.
+    jointFitPair : bool, optional
+        Whether to jointly fit intra/extra pairs, when a pair is provided.
+        If False, Zernikes are estimated for each individually, then
+        averaged. (the default is True)
     """
 
-    def __init__(self, lstsqKwargs: Optional[dict] = None, binning: int = 1) -> None:
+    def __init__(
+        self,
+        lstsqKwargs: Optional[dict] = None,
+        binning: int = 1,
+        jointFitPair: bool = True,
+    ) -> None:
         self.binning = binning
         self.lstsqKwargs = lstsqKwargs
+        self.jointFitPair = jointFitPair
 
     @property
     def requiresPairs(self) -> bool:
@@ -112,6 +122,26 @@ def lstsqKwargs(self, value: Union[dict, None]) -> None:
 
         self._lstsqKwargs = value
 
+    @property
+    def jointFitPair(self) -> bool:
+        """Whether to jointly fit intra/extra pairs."""
+        return self._jointFitPair
+
+    @jointFitPair.setter
+    def jointFitPair(self, value: bool) -> None:
+        """Whether to jointly fit intra/extra pairs, when a pair is provided.
+
+        Parameters
+        ----------
+        value : bool
+            Whether to jointly fit intra/extra pairs, when a pair is provided.
+            If False, Zernikes are estimated for each individually, then
+            averaged.
+        """
+        if not isinstance(value, bool):
+            raise TypeError("jointFitPair must be a bool.")
+        self._jointFitPair = value
+
     @property
     def history(self) -> dict:
         """The algorithm history.
@@ -137,39 +167,13 @@ def history(self) -> dict:
         """
         return super().history
 
-    def _estimateSingleZk(
+    def _prepDanish(
         self,
         image: Image,
         zkStart: np.ndarray,
         nollIndices: np.ndarray,
         instrument: Instrument,
-        factory: danish.DonutFactory,
-        saveHistory: bool,
-    ) -> Tuple[np.ndarray, dict]:
-        """Estimate Zernikes (in meters) for a single donut stamp.
-
-        Parameters
-        ----------
-        image : Image
-            The ts_wep image of the donut stamp
-        zkStart : np.ndarray
-            The starting point for the Zernikes
-        nollIndices : np.ndarray
-            Noll indices for which to estimate Zernikes
-        instrument : Instrument
-            The ts_wep Instrument
-        factory : danish.DonutFactory
-            The Danish donut factory
-        saveHistory : bool
-            Whether to create a history to be saved
-
-        Returns
-        -------
-        np.ndarray
-            The Zernike coefficients (in meters) for Noll indices >= 4
-        dict
-            The single-stamp history. This is empty if saveHistory is False.
-        """
+    ):
         # Warn about using Danish for blended donuts
         if np.isfinite(image.blendOffsets).sum() > 0:
             warnings.warn("Danish is currently only setup for non-blended donuts.")
@@ -207,13 +211,58 @@ def _estimateSingleZk(
         if img.shape[0] % 2 == 0:
             img = img[:-1, :-1]
 
+        # Convert field angle from degrees to radians
+        angle = np.deg2rad(image.fieldAngle)
+
+        return img, angle, zkRef, backgroundStd
+
+    def _estimateSingleZk(
+        self,
+        image: Image,
+        zkStart: np.ndarray,
+        nollIndices: np.ndarray,
+        instrument: Instrument,
+        factory: danish.DonutFactory,
+        saveHistory: bool,
+    ) -> Tuple[np.ndarray, dict]:
+        """Estimate Zernikes (in meters) for a single donut stamp.
+
+        Parameters
+        ----------
+        image : Image
+            The ts_wep image of the donut stamp
+        zkStart : np.ndarray
+            The starting point for the Zernikes
+        nollIndices : np.ndarray
+            Noll indices for which to estimate Zernikes
+        instrument : Instrument
+            The ts_wep Instrument
+        factory : danish.DonutFactory
+            The Danish donut factory
+        saveHistory : bool
+            Whether to create a history to be saved
+
+        Returns
+        -------
+        np.ndarray
+            The Zernike coefficients (in meters) for Noll indices >= 4
+        dict
+            The single-stamp history. This is empty if saveHistory is False.
+        """
+        img, angle, zkRef, backgroundStd = self._prepDanish(
+            image=image,
+            zkStart=zkStart,
+            nollIndices=nollIndices,
+            instrument=instrument,
+        )
+
         # Create the Danish donut model
         model = danish.SingleDonutModel(
             factory,
             z_ref=zkRef,
             z_terms=nollIndices,
-            thx=np.deg2rad(image.fieldAngle[0]),
-            thy=np.deg2rad(image.fieldAngle[1]),
+            thx=angle[0],
+            thy=angle[1],
             npix=img.shape[0],
         )
 
@@ -232,7 +281,7 @@ def _estimateSingleZk(
             result = dict(result)
 
             # Unpack the parameters
-            dx, dy, fwhm, *zkFit = result["x"]
+            dx, dy, fwhm, *zkFit = result.x
             zkFit = np.array(zkFit)
 
             # Add the starting zernikes back into the result
@@ -248,7 +297,7 @@ def _estimateSingleZk(
                     dy,
                     fwhm,
                     zkFit,
-                    sky_level=backgroundStd,
+                    sky_level=backgroundStd**2,
                     flux=img.sum(),
                 )
 
@@ -283,6 +332,172 @@ def _estimateSingleZk(
 
         return zkSum, hist
 
+    def _estimatePairZk(
+        self,
+        I1: Image,
+        I2: Optional[Image],
+        zkStartI1: np.ndarray,
+        zkStartI2: Optional[np.ndarray],
+        nollIndices: np.ndarray,
+        instrument: Instrument,
+        factory: danish.DonutFactory,
+        saveHistory: bool,
+    ) -> Tuple[np.ndarray, dict]:
+        """Estimate Zernikes (in meters) for pairs of donut stamps.
+
+        Parameters
+        ----------
+        I1 : Image
+            An Image object containing an intra- or extra-focal donut image.
+        I2 : Image or None
+            A second image, on the opposite side of focus from I1. Can be None.
+        zkStartI1 : np.ndarray
+            The starting Zernikes for I1 (in meters, for Noll indices >= 4)
+        zkStartI2 : np.ndarray or None
+            The starting Zernikes for I2 (in meters, for Noll indices >= 4)
+        nollIndices : np.ndarray
+            Noll indices for which you wish to estimate Zernike coefficients.
+        instrument : Instrument
+            The Instrument object associated with the DonutStamps.
+        factory : danish.DonutFactory
+            The Danish donut factory
+        saveHistory : bool
+            Whether to save the algorithm history in the self.history
+            attribute. If True, then self.history contains information
+            about the most recent time the algorithm was run.
+
+        Returns
+        -------
+        np.ndarray
+            The Zernike coefficients (in meters) for Noll indices >= 4
+        dict
+            The pair history. This is empty if saveHistory is False.
+        """
+        # Prep quantities for both images
+        img1, angle1, zkRef1, backgroundStd1 = self._prepDanish(
+            image=I1,
+            zkStart=zkStartI1,
+            nollIndices=nollIndices,
+            instrument=instrument,
+        )
+        img2, angle2, zkRef2, backgroundStd2 = self._prepDanish(
+            image=I2,
+            zkStart=zkStartI2,
+            nollIndices=nollIndices,
+            instrument=instrument,
+        )
+
+        # Package these into lists for Danish
+        imgs = [img1, img2]
+        thxs = [angle1[0], angle2[0]]
+        thys = [angle1[1], angle2[1]]
+        zkRefs = [zkRef1, zkRef2]
+        skyLevels = [backgroundStd1**2, backgroundStd2**2]
+
+        # Create Double Zernike tuples
+        dzTerms = [(1, j) for j in nollIndices]
+
+        # Set field radius to max value from mask params
+        fieldRadius = np.deg2rad(
+            np.max(
+                [
+                    edge["thetaMax"]
+                    for item in instrument.maskParams.values()
+                    for edge in item.values()
+                ]
+            )
+        )
+
+        # Create model
+        model = danish.MultiDonutModel(
+            factory,
+            z_refs=zkRefs,
+            dz_terms=dzTerms,
+            field_radius=fieldRadius,
+            thxs=thxs,
+            thys=thys,
+            npix=imgs[0].shape[0],
+        )
+
+        # Initial guess
+        x0 = [0.0] * 2 + [0.0] * 2 + [0.7] + [0.0] * len(dzTerms)
+
+        # Use scipy to optimize the parameters
+        try:
+            result = least_squares(
+                model.chi,
+                jac=model.jac,
+                x0=x0,
+                args=(imgs, skyLevels),
+                **self.lstsqKwargs,
+            )
+            result = dict(result)
+
+            # Unpack the parameters
+            dxs, dys, fwhm, zkFit = model.unpack_params(result.x)
+
+            # Add the starting zernikes back into the result
+            zkSum = zkFit + np.nanmean([zkStartI1, zkStartI2], axis=0)
+
+            # Flag that we didn't hit GalSimFFTSizeError
+            galSimFFTSizeError = False
+
+            # If we're saving the history, compute the model image
+            if saveHistory:
+                modelImages = model.model(
+                    dxs,
+                    dys,
+                    fwhm,
+                    zkFit,
+                    sky_levels=skyLevels,
+                    fluxes=np.sum(imgs, axis=(1, 2)),
+                )
+
+        # Sometimes this happens with Danish :(
+        except GalSimFFTSizeError:
+            # Fill dummy objects
+            result = None
+            zkFit = np.full_like(zkStartI1, np.nan)
+            zkSum = np.full_like(zkStartI1, np.nan)
+            if saveHistory:
+                modelImages = np.full_like(imgs, np.nan)
+
+            # Flag the error
+            galSimFFTSizeError = True
+
+        if saveHistory:
+            # Save the history
+            hist = {}
+            hist[I1.defocalType.value] = {
+                "image": imgs[0].copy(),
+                "variance": skyLevels[0],
+                "nollIndices": nollIndices.copy(),
+                "zkStart": zkStartI1.copy(),
+                "lstsqResult": result,
+                "zkFit": zkFit.copy(),
+                "zkSum": zkSum.copy(),
+                "model": modelImages[0],
+                "GalSimFFTSizeError": galSimFFTSizeError,
+            }
+            hist[I2.defocalType.value] = {
+                "image": imgs[1].copy(),
+                "variance": skyLevels[1],
+                "nollIndices": nollIndices.copy(),
+                "zkStart": zkStartI1.copy(),
+                "lstsqResult": result,
+                "zkFit": zkFit.copy(),
+                "zkSum": zkSum.copy(),
+                "model": modelImages[1],
+                "GalSimFFTSizeError": galSimFFTSizeError,
+            }
+
+        else:
+            hist = {}
+
+        return zkSum, hist
+
     def _estimateZk(
         self,
         I1: Image,
@@ -317,7 +532,7 @@ def _estimateZk(
         Returns
         -------
         np.ndarray
-            Zernike coefficients (for Noll indices >= 4) estimated from
+            Zernike coefficients for the provided Noll indices, estimated from
             the images, in meters.
         """
         # Create the Danish donut factory
@@ -329,23 +544,43 @@ def _estimateZk(
             pixel_scale=instrument.pixelSize * self.binning,
         )
 
-        # Create an empty history
-        hist = {}
+        if I2 is None or not self.jointFitPair:
+            # Create an empty history
+            hist = {}
 
-        # Estimate for I1
-        zk1, hist[I1.defocalType.value] = self._estimateSingleZk(
-            I1,
-            zkStartI1,
-            nollIndices,
-            instrument,
-            factory,
-            saveHistory,
-        )
+            # Estimate for I1
+            zk1, hist[I1.defocalType.value] = self._estimateSingleZk(
+                I1,
+                zkStartI1,
+                nollIndices,
+                instrument,
+                factory,
+                saveHistory,
+            )
+
+            if I2 is not None:
+                # If I2 provided, estimate for that donut as well
+                zk2, hist[I2.defocalType.value] = self._estimateSingleZk(
+                    I2,
+                    zkStartI2,
+                    nollIndices,
+                    instrument,
+                    factory,
+                    saveHistory,
+                )
+
+                # Average the Zernikes
+                zk = np.mean([zk1, zk2], axis=0)
+            else:
+                zk = zk1
 
-        if I2 is not None:
-            # If I2 provided, estimate for that donut as well
-            zk2, hist[I2.defocalType.value] = self._estimateSingleZk(
+            hist["zk"] = zk
+
+        else:
+            zk, hist = self._estimatePairZk(
+                I1,
                 I2,
+                zkStartI1,
                 zkStartI2,
                 nollIndices,
                 instrument,
@@ -353,13 +588,7 @@ def _estimateZk(
                 saveHistory,
             )
 
-            # Average the Zernikes
-            zk = np.mean([zk1, zk2], axis=0)
-        else:
-            zk = zk1
-
         if saveHistory:
-            hist["zk"] = zk
             self._history = hist
 
         return zk

From f2648ae73763b78e40666b4cac3ec83f277c23a8 Mon Sep 17 00:00:00 2001
From: John Franklin Crenshaw
Date: Wed, 4 Dec 2024 21:09:03 -0800
Subject: [PATCH 3/6] dict fix

---
 python/lsst/ts/wep/estimation/danish.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/python/lsst/ts/wep/estimation/danish.py b/python/lsst/ts/wep/estimation/danish.py
index c7772b44..25bbe0d7 100644
--- a/python/lsst/ts/wep/estimation/danish.py
+++ b/python/lsst/ts/wep/estimation/danish.py
@@ -281,7 +281,7 @@ def _estimateSingleZk(
             result = dict(result)
 
             # Unpack the parameters
-            dx, dy, fwhm, *zkFit = result.x
+            dx, dy, fwhm, *zkFit = result["x"]
             zkFit = np.array(zkFit)
 
             # Add the starting zernikes back into the result
@@ -436,7 +436,7 @@ def _estimatePairZk(
             result = dict(result)
 
             # Unpack the parameters
-            dxs, dys, fwhm, zkFit = model.unpack_params(result.x)
+            dxs, dys, fwhm, zkFit = model.unpack_params(result["x"])
 
             # Add the starting zernikes back into the result

From 6f70a2e4dd1b15d882de81e6bfd396d4bed90f6a Mon Sep 17 00:00:00 2001
From: John Franklin Crenshaw
Date: Wed, 4 Dec 2024 21:31:45 -0800
Subject: [PATCH 4/6] Wrote test.

---
 tests/estimation/test_danish.py | 85 +++++++++++++++++----------------
 1 file changed, 43 insertions(+), 42 deletions(-)

diff --git a/tests/estimation/test_danish.py b/tests/estimation/test_danish.py
index 6d264393..344474af 100644
--- a/tests/estimation/test_danish.py
+++ b/tests/estimation/test_danish.py
@@ -50,45 +50,46 @@ def testGoodLstsqKwargs(self):
         self.assertEqual(hist["lstsqResult"]["nfev"], 1)
 
     def testAccuracy(self):
-        # Create estimator
-        dan = DanishAlgorithm()
-        danBin = DanishAlgorithm(binning=2)
-
-        # Try several different random seeds
-        for seed in [12345, 23451, 34512, 45123, 51234]:
-            # Get the test data
-            zkTrue, intra, extra = forwardModelPair(seed=seed)
-
-            # Compute shape of binned images
-            shapex, shapey = intra.image.shape
-            binned_shapex = shapex // 2
-            binned_shapey = shapey // 2
-
-            # Ensure odd
-            if binned_shapex % 2 == 0:
-                binned_shapex -= 1
-            if binned_shapey % 2 == 0:
-                binned_shapey -= 1
-            binned_shape = (binned_shapex, binned_shapey)
-
-            # Test estimation with pairs and single donuts:
-            for images in [[intra, extra], [intra], [extra]]:
-                # Estimate Zernikes (in meters)
-                zkEst = dan.estimateZk(*images)
-
-                # Check that results are fairly accurate
-                self.assertLess(np.sqrt(np.sum((zkEst - zkTrue) ** 2)), 0.35e-6)
-
-                # Test with binning
-                zkEst = danBin.estimateZk(*images, saveHistory=True)
-                self.assertLess(np.sqrt(np.sum((zkEst - zkTrue) ** 2)), 0.35e-6)
-
-                # Test that we binned the images.
-                if "intra" in danBin.history:
-                    self.assertEqual(
-                        danBin.history["intra"]["image"].shape, binned_shape
-                    )
-                if "extra" in danBin.history:
-                    self.assertEqual(
-                        danBin.history["extra"]["image"].shape, binned_shape
-                    )
+        for jointFitPair in [True, False]:
+            # Create estimator
+            dan = DanishAlgorithm(jointFitPair=jointFitPair)
+            danBin = DanishAlgorithm(binning=2, jointFitPair=jointFitPair)
+
+            # Try several different random seeds
+            for seed in [12345, 23451, 34512, 45123]:
+                # Get the test data
+                zkTrue, intra, extra = forwardModelPair(seed=seed)
+
+                # Compute shape of binned images
+                shapex, shapey = intra.image.shape
+                binned_shapex = shapex // 2
+                binned_shapey = shapey // 2
+
+                # Ensure odd
+                if binned_shapex % 2 == 0:
+                    binned_shapex -= 1
+                if binned_shapey % 2 == 0:
+                    binned_shapey -= 1
+                binned_shape = (binned_shapex, binned_shapey)
+
+                # Test estimation with pairs and single donuts:
+                for images in [[intra, extra], [intra], [extra]]:
+                    # Estimate Zernikes (in meters)
+                    zkEst = dan.estimateZk(*images)
+
+                    # Check that results are fairly accurate
+                    self.assertLess(np.sqrt(np.sum((zkEst - zkTrue) ** 2)), 0.35e-6)
+
+                    # Test with binning
+                    zkEst = danBin.estimateZk(*images, saveHistory=True)
+                    self.assertLess(np.sqrt(np.sum((zkEst - zkTrue) ** 2)), 0.35e-6)
+
+                    # Test that we binned the images.
+                    if "intra" in danBin.history:
+                        self.assertEqual(
+                            danBin.history["intra"]["image"].shape, binned_shape
+                        )
+                    if "extra" in danBin.history:
+                        self.assertEqual(
+                            danBin.history["extra"]["image"].shape, binned_shape
+                        )

From 05dff7c3ba450e1b13ada4399c05588ed392847f Mon Sep 17 00:00:00 2001
From: John Franklin Crenshaw
Date: Thu, 5 Dec 2024 07:46:30 -0800
Subject: [PATCH 5/6] Added jointFitPair config to task.

---
 python/lsst/ts/wep/task/estimateZernikesDanishTask.py | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/python/lsst/ts/wep/task/estimateZernikesDanishTask.py b/python/lsst/ts/wep/task/estimateZernikesDanishTask.py
index e68ff073..de2df13c 100644
--- a/python/lsst/ts/wep/task/estimateZernikesDanishTask.py
+++ b/python/lsst/ts/wep/task/estimateZernikesDanishTask.py
@@ -44,6 +44,12 @@ class EstimateZernikesDanishConfig(EstimateZernikesBaseConfig):
         doc="Binning factor to apply to the donut stamps before estimating "
         + "Zernike coefficients. A value of 1 means no binning.",
     )
+    jointFitPair = pexConfig.Field(
+        dtype=bool,
+        default=True,
+        doc="Whether to jointly fit intra/extra pairs, when a pair is provided. "
+        + "If False, Zernikes are estimated for each individually, then averaged.",
+    )
 
 
 class EstimateZernikesDanishTask(EstimateZernikesBaseTask):

From 51b0f9fa36b0390b8c2378bd5c92eaceda6957a9 Mon Sep 17 00:00:00 2001
From: John Franklin Crenshaw
Date: Fri, 6 Dec 2024 14:46:58 -0800
Subject: [PATCH 6/6] small fixes

---
 doc/versionHistory.rst                  | 1 -
 python/lsst/ts/wep/estimation/danish.py | 2 +-
 2 files changed, 1 insertion(+), 2 deletions(-)

diff --git a/doc/versionHistory.rst b/doc/versionHistory.rst
index a5ec07e8..b6604154 100644
--- a/doc/versionHistory.rst
+++ b/doc/versionHistory.rst
@@ -21,7 +21,6 @@ Version History
 -------------
 
 * Set saveHistory=True and loosen convergence criteria in the Danish production pipeline
-* Upgrades to the forward modeling util, including specifying flux ratios for blends, miscentering donuts, and simulating "flat" donuts without intensity patterns
 * Fixed bug in forward modeling util when adding noise to large flux values
 
diff --git a/python/lsst/ts/wep/estimation/danish.py b/python/lsst/ts/wep/estimation/danish.py
index 25bbe0d7..ff651da7 100644
--- a/python/lsst/ts/wep/estimation/danish.py
+++ b/python/lsst/ts/wep/estimation/danish.py
@@ -485,7 +485,7 @@ def _estimatePairZk(
                 "image": imgs[1].copy(),
                 "variance": skyLevels[1],
                 "nollIndices": nollIndices.copy(),
-                "zkStart": zkStartI1.copy(),
+                "zkStart": zkStartI2.copy(),
                 "lstsqResult": result,
                 "zkFit": zkFit.copy(),
                 "zkSum": zkSum.copy(),
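
Usage sketch (not part of the patch series): the snippet below mirrors the calls exercised in tests/estimation/test_danish.py to show the new jointFitPair switch. The import locations for DanishAlgorithm and forwardModelPair are assumptions based on the ts_wep package layout rather than something shown in these diffs.

```python
import numpy as np

# Assumed import paths (not shown in the patch):
from lsst.ts.wep.estimation import DanishAlgorithm
from lsst.ts.wep.utils.modelUtils import forwardModelPair

# Simulate an intra/extra donut pair with known Zernikes (meters),
# as done in the test added by PATCH 4/6
zkTrue, intra, extra = forwardModelPair(seed=12345)

# New default: fit the pair jointly with danish.MultiDonutModel
danJoint = DanishAlgorithm(jointFitPair=True)
zkJoint = danJoint.estimateZk(intra, extra, saveHistory=True)

# Previous behavior: fit each donut separately, then average
danAvg = DanishAlgorithm(jointFitPair=False)
zkAvg = danAvg.estimateZk(intra, extra)

# Both return Zernike coefficients in meters for the requested Noll indices
print(np.abs(zkJoint - zkTrue).max(), np.abs(zkAvg - zkTrue).max())
```

In pipeline configurations, the same switch is exposed through EstimateZernikesDanishConfig.jointFitPair (PATCH 5/6), e.g. an override like config.estimateZernikes.jointFitPair = False, where the estimateZernikes subtask label is hypothetical and depends on the pipeline in use.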