Merge pull request #288 from lsst-ts/tickets/DM-47442
Tickets/DM-47442: Require convergence for TIE Zernike estimates
jfcrenshaw authored Nov 12, 2024
2 parents 10fbca0 + a47198e commit bcedbe6
Showing 8 changed files with 88 additions and 10 deletions.
9 changes: 9 additions & 0 deletions doc/versionHistory.rst
@@ -6,6 +6,15 @@
Version History
##################

.. _lsst.ts.wep-12.7.0:

-------------
12.7.0
-------------

* Added requireConverge to TIE and defaulted to True in task
* Fixed bug with None types in EstimateZernikeTask metadata histories

.. _lsst.ts.wep-12.6.1:

-------------
4 changes: 3 additions & 1 deletion pipelines/production/comCamRapidAnalysisPipeline.yaml
@@ -17,7 +17,9 @@ tasks:
calcZernikesTask:
class: lsst.ts.wep.task.calcZernikesTask.CalcZernikesTask
config:
estimateZernikes.maxNollIndex: 28
estimateZernikes.maxNollIndex: 22
estimateZernikes.convergeTol: 10.0e-9
estimateZernikes.requireConverge: True
estimateZernikes.saveHistory: False
estimateZernikes.maskKwargs: { "doMaskBlends": False }
donutStampSelector.maxSelect: 5
29 changes: 29 additions & 0 deletions python/lsst/ts/wep/estimation/tie.py
@@ -95,6 +95,11 @@ class TieAlgorithm(WfAlgorithm):
binning : int, optional
Binning factor to apply to the donut stamps before estimating
Zernike coefficients. The default value of 1 means no binning.
(the default is 1)
requireConverge : bool, optional
Whether to require that the TIE converges. If True, and the TIE
did not converge, the TIE returns NaNs.
(the default is False)
"""

def __init__(
@@ -109,6 +114,7 @@ def __init__(
maskKwargs: Optional[dict] = None,
modelPupilKernelSize: float = 2,
binning: int = 1,
requireConverge: bool = False,
) -> None:
self.opticalModel = opticalModel
self.maxIter = maxIter
Expand All @@ -120,6 +126,7 @@ def __init__(
self.maskKwargs = maskKwargs
self.modelPupilKernelSize = modelPupilKernelSize
self.binning = binning
self.requireConverge = requireConverge

@property
def requiresPairs(self) -> bool:
@@ -429,6 +436,24 @@ def binning(self, value: int) -> None:
raise ValueError("binning must be greater than or equal to 1.")
self._binning = value

@property
def requireConverge(self) -> bool:
"""Whether to require that the TIE converges."""
return self._requireConverge

@requireConverge.setter
def requireConverge(self, value: bool) -> None:
"""Whether to require that the TIE converges.
Parameters
----------
value : bool
Whether to require that the TIE converges.
"""
if not isinstance(value, bool):
raise TypeError("requireConverge must be a bool.")
self._requireConverge = value

@property
def history(self) -> dict:
"""The algorithm history.
@@ -867,4 +892,8 @@ def _estimateZk(
if caustic or converged:
break

# If we never converged, return NaNs
if self.requireConverge and not converged:
zkSum *= np.nan

return zkSum
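
The new requireConverge branch flags non-convergence by multiplying the accumulated Zernike sum by NaN, so every coefficient is invalidated at once and downstream code can detect the failure with a single isnan check. A minimal NumPy sketch of that behavior (the array below is a hypothetical stand-in for zkSum):

import numpy as np

# Hypothetical accumulated Zernike coefficients, standing in for zkSum.
zkSum = np.array([50.0, -12.3, 4.1])

requireConverge, converged = True, False
if requireConverge and not converged:
    # Multiplying by NaN marks every coefficient as invalid in one step.
    zkSum *= np.nan

print(np.isnan(zkSum).all())  # True -> this estimate can be rejected downstream
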
6 changes: 0 additions & 6 deletions python/lsst/ts/wep/task/estimateZernikesBase.py
@@ -69,12 +69,6 @@ class EstimateZernikesBaseConfig(pexConfig.Config):
+ "to the Noll index. In this case, indices 0-3 are always set to zero, "
+ "because they are not estimated by our pipeline.",
)
binning = pexConfig.Field(
dtype=int,
default=1,
doc="Binning factor to apply to the donut stamps before estimating "
+ "Zernike coefficients. A value of 1 means no binning.",
)
saveHistory = pexConfig.Field(
dtype=bool,
default=False,
6 changes: 6 additions & 0 deletions python/lsst/ts/wep/task/estimateZernikesDanishTask.py
@@ -38,6 +38,12 @@ class EstimateZernikesDanishConfig(EstimateZernikesBaseConfig):
doc="A dictionary containing any of the keyword arguments for "
+ "scipy.optimize.least_squares, except `fun`, `x0`, `jac`, or `args`.",
)
binning = pexConfig.Field(
dtype=int,
default=1,
doc="Binning factor to apply to the donut stamps before estimating "
+ "Zernike coefficients. A value of 1 means no binning.",
)


class EstimateZernikesDanishTask(EstimateZernikesBaseTask):
12 changes: 12 additions & 0 deletions python/lsst/ts/wep/task/estimateZernikesTieTask.py
@@ -104,6 +104,18 @@ class EstimateZernikesTieConfig(EstimateZernikesBaseConfig):
+ "when estimating Zernikes with a single donut. "
+ "(the default is 2)",
)
binning = pexConfig.Field(
dtype=int,
default=1,
doc="Binning factor to apply to the donut stamps before estimating "
+ "Zernike coefficients. A value of 1 means no binning.",
)
requireConverge = pexConfig.Field(
dtype=bool,
default=False,
doc="Whether to require that the TIE converges. "
+ "If True, and the TIE did not converge, the TIE returns NaNs. ",
)


class EstimateZernikesTieTask(EstimateZernikesBaseTask):
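
The same two options can also be set from Python on the task config, mirroring the production pipeline YAML above. A hedged sketch, assuming CalcZernikesTask and CalcZernikesTaskConfig both live in lsst.ts.wep.task.calcZernikesTask (the module named for the task class in the pipeline YAML); the values are illustrative:

from lsst.ts.wep.task.calcZernikesTask import CalcZernikesTask, CalcZernikesTaskConfig

config = CalcZernikesTaskConfig()
config.estimateZernikes.requireConverge = True  # return NaNs if the TIE does not converge
config.estimateZernikes.convergeTol = 10.0e-9   # tolerance used by the production pipeline
task = CalcZernikesTask(config=config, name="calcZernikes")
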
9 changes: 6 additions & 3 deletions python/lsst/ts/wep/utils/taskUtils.py
@@ -446,7 +446,8 @@ def convertHistoryToMetadata(history: dict) -> pipeBase.TaskMetadata:
"""
if isinstance(history, dict):
history = {
str(key): convertHistoryToMetadata(val) for key, val in history.items()
str(key): ("None" if val is None else convertHistoryToMetadata(val))
for key, val in history.items()
}
history = pipeBase.TaskMetadata.from_dict(history)
elif isinstance(history, np.ndarray):
@@ -493,9 +494,11 @@ def convertMetadataToHistory(metadata: pipeBase.TaskMetadata) -> dict:
for key, val in metadata.items()
}

# Convert numeric strings back to floats and ints
# Convert "None" strings back to None and numeric strings to floats/ints
elif isinstance(metadata, str):
if "." in metadata:
if metadata == "None":
metadata = None
elif "." in metadata:
try:
metadata = float(metadata)
except: # noqa: E722
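
The metadata fix above lets None values survive a round trip through TaskMetadata by storing them as the string "None" and restoring them on the way back. A short sketch of that round trip, assuming an LSST Science Pipelines environment where lsst.pipe.base is importable and the two converters are imported from their module path:

from lsst.ts.wep.utils.taskUtils import (
    convertHistoryToMetadata,
    convertMetadataToHistory,
)

# Hypothetical algorithm history containing a None entry.
history = {"iter1": {"caustic": None}}

# None is written into TaskMetadata as the string "None"...
metadata = convertHistoryToMetadata(history)

# ...and converted back to a real None when the history is rebuilt.
restored = convertMetadataToHistory(metadata)
assert restored["iter1"]["caustic"] is None
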
23 changes: 23 additions & 0 deletions tests/task/test_calcZernikesTieTaskCwfs.py
@@ -112,6 +112,9 @@ def testValidateConfigs(self):

self.assertEqual(type(self.task.combineZernikes), CombineZernikesMeanTask)

self.config.estimateZernikes.binning = 2
self.assertEqual(self.task.estimateZernikes.wfAlgoConfig.binning, 2)

def testEstimateZernikes(self):
zernCoeff = self.task.estimateZernikes.run(
self.donutStampsExtra, self.donutStampsIntra
@@ -281,3 +284,23 @@ def testUnevenPairs(self):

# Now estimate Zernikes
self.task.run(stampsExtra, stampsIntra)

def testRequireConverge(self):
config = CalcZernikesTaskConfig()
config.estimateZernikes.requireConverge = True # Require to converge
config.estimateZernikes.convergeTol = 0 # But don't allow convergence
task = CalcZernikesTask(config=config, name="Test requireConverge")

# Estimate zernikes
donutStampDir = os.path.join(self.testDataDir, "donutImg", "donutStamps")
donutStampsExtra = DonutStamps.readFits(
os.path.join(donutStampDir, "R04_SW0_donutStamps.fits")
)
donutStampsIntra = DonutStamps.readFits(
os.path.join(donutStampDir, "R04_SW1_donutStamps.fits")
)
output = task.estimateZernikes.run(donutStampsExtra, donutStampsIntra)
zernikes = output.zernikes

# Everything should be NaN because we did not converge
self.assertTrue(np.isnan(zernikes).all())
