Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

tickets/DM-47163: Cut on fraction of bad pixels; run stamp selection during CalcZernikeTask by default #277

Merged
merged 16 commits into from
Oct 28, 2024
Merged
11 changes: 11 additions & 0 deletions doc/versionHistory.rst
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,17 @@
Version History
##################

.. _lsst.ts.wep-12.4.0:

-------------
12.4.0
-------------

* Added a threshold on fraction-of-bad-pixels to DonutStampSelectorTask
* Modified DonutStampSelectorTaskConfig so that, by default, selections are run on fraction-of-bad-pixels and signal-to-noise ratio
* Modified CalcZernikesTask so that DonutStampSelectorTask is run by default
* Fixed bug where DM mask bits weren't persisting in DonutStamp

.. _lsst.ts.wep-12.3.0:

-------------
Expand Down
9 changes: 6 additions & 3 deletions pipelines/production/comCamRapidAnalysisPipeline.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -19,17 +19,20 @@ tasks:
config:
estimateZernikes.maxNollIndex: 28
estimateZernikes.saveHistory: False
estimateZernikes.maskKwargs: {'doMaskBlends': False}
estimateZernikes.maskKwargs: { "doMaskBlends": False }
isr:
class: lsst.ip.isr.IsrTask
class: lsst.ip.isr.IsrTaskLSST
config:
# Although we don't have to apply the amp offset corrections, we do want
# to compute them for analyzeAmpOffsetMetadata to report on as metrics.
doAmpOffset: true
ampOffset.doApplyAmpOffset: false
# Turn off slow steps in ISR
doBrighterFatter: false
doCrosstalk: false
# Mask saturated pixels,
# but turn off quadratic crosstalk because it's currently broken
doSaturation: True
crosstalk.doQuadraticCrosstalkCorrection: False
aggregateZernikeTablesTask:
class: lsst.donut.viz.AggregateZernikeTablesTask
aggregateDonutTablesTask:
Expand Down
11 changes: 8 additions & 3 deletions python/lsst/ts/wep/task/calcZernikesTask.py
Original file line number Diff line number Diff line change
Expand Up @@ -105,10 +105,13 @@ class CalcZernikesTaskConfig(
),
)
donutStampSelector = pexConfig.ConfigurableField(
target=DonutStampSelectorTask, doc="How to select donut stamps."
target=DonutStampSelectorTask,
doc="How to select donut stamps.",
)
doDonutStampSelector = pexConfig.Field(
doc="Whether or not to run donut stamp selector.", dtype=bool, default=False
doc="Whether or not to run donut stamp selector.",
dtype=bool,
default=True,
)


Expand Down Expand Up @@ -159,6 +162,8 @@ def initZkTable(self) -> QTable:
("extra_sn", "<f4"),
("intra_entropy", "<f4"),
("extra_entropy", "<f4"),
("intra_frac_bad_pix", "<f4"),
("extra_frac_bad_pix", "<f4"),
]
for j in range(4, self.maxNollIndex + 1):
dtype.append((f"Z{j}", "<f4"))
Expand Down Expand Up @@ -283,7 +288,7 @@ def createZkTable(
* u.pixel
)
)
for key in ["MAG", "SN", "ENTROPY"]:
for key in ["MAG", "SN", "ENTROPY", "FRAC_BAD_PIX"]:
for stamps, foc in [
(intraStamps, "intra"),
(extraStamps, "extra"),
Expand Down
22 changes: 21 additions & 1 deletion python/lsst/ts/wep/task/cutOutDonutsBase.py
Original file line number Diff line number Diff line change
Expand Up @@ -147,6 +147,11 @@ class CutOutDonutsBaseTaskConfig(
dtype=int,
default=3,
)
badPixelMaskDefinitions = pexConfig.ListField(
doc="List of mask values flagged as 'bad' for Zernike estimation.",
dtype=str,
default=["SAT", "BAD", "NO_DATA", "INTRP"],
)


class CutOutDonutsBaseTask(pipeBase.PipelineTask):
Expand Down Expand Up @@ -338,7 +343,6 @@ def calculateSN(self, stamp):
A dictionary of calculated quantities
"""

stamp.makeMask(self.instConfigFile, self.opticalModel)
imageArray = stamp.stamp_im.image.array
mask = stamp.stamp_im.mask
varianceArray = stamp.stamp_im.variance.array
Expand Down Expand Up @@ -432,6 +436,7 @@ def calculateSN(self, stamp):
"ttl_noise_bkgnd_variance": ttlNoiseBkgndVariance,
"ttl_noise_donut_variance": ttlNoiseDonutVariance,
}

return snDict

def filterBadRecentering(self, xShifts, yShifts):
Expand Down Expand Up @@ -572,6 +577,9 @@ def cutOutStamps(self, exposure, donutCatalog, defocalType, cameraName):
# Value of entropy
stampsEntropy = list()

# Fraction of bad pixels
fracBadPixels = list()

for idx, donutRow in enumerate(donutCatalog):
# Make an initial cutout larger than the actual final stamp
# so that we can centroid to get the stamp centered exactly
Expand Down Expand Up @@ -664,6 +672,9 @@ def cutOutStamps(self, exposure, donutCatalog, defocalType, cameraName):
archive_element=linear_wcs,
)

# Create image mask
donutStamp.makeMask(self.instConfigFile, self.opticalModel)

# Calculate the S/N per stamp
snQuant.append(self.calculateSN(donutStamp))

Expand All @@ -672,6 +683,11 @@ def cutOutStamps(self, exposure, donutCatalog, defocalType, cameraName):
isEffective.append(eff)
stampsEntropy.append(entro)

# Calculate fraction of bad pixels
bits = finalStamp.mask.getPlaneBitMask(self.config.badPixelMaskDefinitions)
badPixels = np.bitwise_and(finalStamp.mask.array, bits) > 0
fracBadPixels.append(np.mean(badPixels))

finalStamps.append(donutStamp)

# Calculate the difference between original centroid and final centroid
Expand Down Expand Up @@ -781,4 +797,8 @@ def cutOutStamps(self, exposure, donutCatalog, defocalType, cameraName):

# Save the peak of the correlated image
stampsMetadata["PEAK_HEIGHT"] = peakHeight

# Save the fraction of bad pixels
stampsMetadata["FRAC_BAD_PIX"] = np.array(fracBadPixels).astype(float)

return DonutStamps(finalStamps, metadata=stampsMetadata, use_archive=True)
19 changes: 13 additions & 6 deletions python/lsst/ts/wep/task/donutStamp.py
Original file line number Diff line number Diff line change
Expand Up @@ -301,12 +301,7 @@ def makeMask(
):
"""Create the mask for the image.

Note the mask is returned in the original coordinate system of the info
that came from the butler (i.e. the DVCS, and the CWFSs are rotated
with respect to the science sensors). See sitcomtn-003.lsst.io for more
information.

Also note that technically the image masks depend on the optical
Note that technically the image masks depend on the optical
aberrations, but this function assumes the aberrations are zero.

Parameters
Expand Down Expand Up @@ -363,6 +358,18 @@ def makeMask(
nRot = int(eulerZ // 90)
stampMask = np.rot90(stampMask, -nRot)

# First make sure the mask doesn't already have donut/blend bits
# This is so if this function gets called multiple times, the donut
# and blend bits don't get re-added.
mask0 = self.stamp_im.mask.array.copy()
bit = self.stamp_im.mask.getMaskPlaneDict()["DONUT"]
mask0 &= ~(1 << bit)
bit = self.stamp_im.mask.getMaskPlaneDict()["BLEND"]
mask0 &= ~(1 << bit)

# Add original mask to the new mask
stampMask += mask0

# Save mask
self.stamp_im.setMask(afwImage.Mask(stampMask.copy()))

Expand Down
69 changes: 49 additions & 20 deletions python/lsst/ts/wep/task/donutStampSelectorTask.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,10 +38,16 @@ class DonutStampSelectorTaskConfig(pexConfig.Config):
)
selectWithSignalToNoise = pexConfig.Field(
dtype=bool,
default=False,
doc="Whether to use signal to noise ratio in deciding to use the donut."
default=True,
doc="Whether to use signal to noise ratio in deciding to use the donut. "
+ "By default the values from snLimitStar.yaml config file are used.",
)
selectWithFracBadPixels = pexConfig.Field(
dtype=bool,
default=True,
doc="Whether to use fraction of bad pixels in deciding to use the donut. "
+ "Bad pixels correspond to mask values of 'SAT', 'BAD', 'NO_DATA'.",
)
useCustomSnLimit = pexConfig.Field(
dtype=bool,
default=False,
Expand All @@ -62,13 +68,17 @@ class DonutStampSelectorTaskConfig(pexConfig.Config):
default=3.5,
doc=str("The entropy threshold to use (keep donuts only below the threshold)."),
)
maxFracBadPixels = pexConfig.Field(
dtype=float,
default=0.0,
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

It would be really good to run that against even a simulation / early real data. I'm worried that with such a strict requirement we may get nothing passing at the moment.

Copy link
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Working on this

Copy link
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Checked this works

doc=str("Maximum fraction of bad pixels in selected donuts."),
)


class DonutStampSelectorTask(pipeBase.Task):
"""
Donut Stamp Selector uses information about donut stamp calculated at
the stamp cutting out stage to select those that fulfill entropy
and/or signal-to-noise criteria.
the stamp cutting out stage to select those that satisfy the specified criteria.
"""

ConfigClass = DonutStampSelectorTaskConfig
Expand Down Expand Up @@ -97,9 +107,9 @@ def run(self, donutStamps):
Boolean array of stamps that were selected, same length as
donutStamps.
- donutsQuality : `astropy.table.QTable`
A table with calculated signal to noise measure and entropy
value per donut, together with selection outcome for all
input donuts.
A table with calculated signal to noise measure, entropy
value per donut, and fraction of bad pixels, together with
selection outcome for all input donuts.

"""
result = self.selectStamps(donutStamps)
Expand All @@ -109,7 +119,7 @@ def run(self, donutStamps):
)
selectedStamps._refresh_metadata()
# Need to copy a few other fields by hand
for k in ["SN", "ENTROPY", "VISIT"]:
for k in ["SN", "ENTROPY", "FRAC_BAD_PIX", "VISIT"]:
if k in donutStamps.metadata:
selectedStamps.metadata[k] = np.array(
[
Expand Down Expand Up @@ -157,7 +167,6 @@ def selectStamps(self, donutStamps):
value per donut, together with selection outcome for all
input donuts.
"""

# Which donuts to use for Zernike estimation
# initiate these by selecting all donuts
entropySelect = np.ones(len(donutStamps), dtype="bool")
Expand All @@ -168,10 +177,8 @@ def selectStamps(self, donutStamps):
if self.config.selectWithEntropy:
entropySelect = entropyValue < self.config.maxEntropy
else:
self.log.warning(
"No entropy cut. Checking if signal-to-noise \
should be applied."
)
self.log.warning("No entropy cut. Checking other conditions.")

# By default select all donuts, only overwritten
# if selectWithSignalToNoise is True
snSelect = np.ones(len(donutStamps), dtype="bool")
Expand All @@ -194,12 +201,24 @@ def selectStamps(self, donutStamps):
# Select using the given threshold
snSelect = snThreshold < snValue
else:
self.log.warning("No signal-to-noise selection applied.")
# AND condition : if both selectWithEntropy
# and selectWithSignalToNoise, then
# only donuts that pass with SN criterion as well
# as entropy criterion are selected
selected = entropySelect * snSelect
self.log.warning(
"No signal-to-noise selection applied. Checking other conditions"
)

# By default select all donuts, only overwritten
# if selectWithFracBadPixels is True
fracBadPixSelect = np.ones(len(donutStamps), dtype="bool")

# collect fraction-of-bad-pixels information if available
if "FRAC_BAD_PIX" in list(donutStamps.metadata):
fracBadPix = np.asarray(donutStamps.metadata.getArray("FRAC_BAD_PIX"))
if self.config.selectWithFracBadPixels:
fracBadPixSelect = fracBadPix <= self.config.maxFracBadPixels
else:
self.log.warning("No fraction-of-bad-pixels cut.")

# choose only donuts that satisfy all selected conditions
selected = entropySelect * snSelect * fracBadPixSelect

# store information about which donuts were selected
# use QTable even though no units at the moment in
Expand All @@ -209,11 +228,21 @@ def selectStamps(self, donutStamps):
data=[
snValue,
entropyValue,
fracBadPix,
snSelect,
entropySelect,
fracBadPixSelect,
selected,
],
names=["SN", "ENTROPY", "SN_SELECT", "ENTROPY_SELECT", "FINAL_SELECT"],
names=[
"SN",
"ENTROPY",
"FRAC_BAD_PIX",
"SN_SELECT",
"ENTROPY_SELECT",
"FRAC_BAD_PIX_SELECT",
"FINAL_SELECT",
],
)

self.log.info("Selected %d/%d donut stamps", selected.sum(), len(donutStamps))
Expand Down
6 changes: 5 additions & 1 deletion tests/task/test_calcZernikesTieTaskScienceSensor.py
Original file line number Diff line number Diff line change
Expand Up @@ -110,7 +110,7 @@ def testValidateConfigs(self):

self.assertEqual(type(self.task.combineZernikes), CombineZernikesMeanTask)
self.assertEqual(type(self.task.donutStampSelector), DonutStampSelectorTask)
self.assertEqual(self.task.doDonutStampSelector, False)
self.assertEqual(self.task.doDonutStampSelector, True)

def testEstimateZernikes(self):
donutStampsExtra = self.butler.get(
Expand Down Expand Up @@ -171,6 +171,8 @@ def testCalcZernikes(self):
"extra_sn",
"intra_entropy",
"extra_entropy",
"intra_frac_bad_pix",
"extra_frac_bad_pix",
]
self.assertLessEqual(set(desired_colnames), set(structNormal.zernikes.colnames))

Expand Down Expand Up @@ -204,6 +206,8 @@ def testCalcZernikes(self):
"ENTROPY",
"ENTROPY_SELECT",
"SN_SELECT",
"FRAC_BAD_PIX",
"FRAC_BAD_PIX_SELECT",
"FINAL_SELECT",
"DEFOCAL_TYPE",
]
Expand Down
4 changes: 3 additions & 1 deletion tests/task/test_calcZernikesUnpairedTask.py
Original file line number Diff line number Diff line change
Expand Up @@ -130,7 +130,7 @@ def testWithAndWithoutPairs(self):

# Check that results are similar
diff = np.sqrt(np.sum((meanZk - pairedZk) ** 2))
self.assertLess(diff, 0.16)
self.assertLess(diff, 0.17)

def testTable(self):
# Load data from butler
Expand Down Expand Up @@ -219,6 +219,8 @@ def testTable(self):
"ENTROPY",
"ENTROPY_SELECT",
"SN_SELECT",
"FRAC_BAD_PIX",
"FRAC_BAD_PIX_SELECT",
"FINAL_SELECT",
"DEFOCAL_TYPE",
]
Expand Down
Loading
Loading