From d73b2c7a78661ce75aef01edf10bd68821441997 Mon Sep 17 00:00:00 2001 From: Lauren MacArthur Date: Sun, 16 Feb 2025 21:18:27 -0800 Subject: [PATCH 01/17] Add an AdaptiveThresholdDetectionTask for SFM This aims at adaptively adjusting the detection threshold for PSF modelling such that a single pipeline run is able to accommodate a large variety of "scenes" (from sparsely populated, through very crowded fields, and possible extended nebulosity). It can be turned on[off] by setting the do_adaptive_threshold_detection config in pipe_tasks's CalibrateImageTask to True[False]. --- .../lsst/meas/algorithms/dynamicDetection.py | 344 +++++++++++++++++- 1 file changed, 338 insertions(+), 6 deletions(-) diff --git a/python/lsst/meas/algorithms/dynamicDetection.py b/python/lsst/meas/algorithms/dynamicDetection.py index cdb51e12b..815bee794 100644 --- a/python/lsst/meas/algorithms/dynamicDetection.py +++ b/python/lsst/meas/algorithms/dynamicDetection.py @@ -1,10 +1,16 @@ -__all__ = ["DynamicDetectionConfig", "DynamicDetectionTask", "InsufficientSourcesError"] +__all__ = [ + "DynamicDetectionConfig", + "DynamicDetectionTask", + "InsufficientSourcesError", + "AdaptiveThresholdDetectionConfig", + "AdaptiveThresholdDetectionTask", +] import numpy as np -from lsst.pex.config import Field, ConfigurableField -from lsst.pipe.base import Struct +from lsst.pex.config import Field, ConfigurableField, Config, DictField, FieldValidationError +import lsst.pipe.base as pipeBase from .detection import SourceDetectionConfig, SourceDetectionTask from .skyObjects import SkyObjectsTask @@ -309,7 +315,7 @@ def calculateThreshold(self, exposure, seed, sigma=None, minFractionSourcesFacto medianError = np.median(catalog["base_PsfFlux_instFluxErr"][good]) if wcsIsNone: exposure.setWcs(None) - return Struct(multiplicative=medianError/stdevMeas, additive=bgMedian) + return pipeBase.Struct(multiplicative=medianError/stdevMeas, additive=bgMedian) def detectFootprints(self, exposure, doSmooth=True, 
sigma=None, clearMask=True, expId=None, background=None, backgroundToPhotometricRatio=None): @@ -422,7 +428,7 @@ def detectFootprints(self, exposure, doSmooth=True, sigma=None, clearMask=True, seed = (expId if expId is not None else int(maskedImage.image.array.sum())) % (2**31 - 1) threshResults = self.calculateThreshold(exposure, seed, sigma=sigma) factor = threshResults.multiplicative - self.log.info("Modifying configured detection threshold by factor %f to %f", + self.log.info("Modifying configured detection threshold by factor %.2f to %.2f", factor, factor*self.config.thresholdValue) # Blow away preliminary (low threshold) detection mask @@ -564,7 +570,7 @@ def _computeBrightDetectionMask(self, maskedImage, convolveResults): format(nPixDetNeg, 100*nPixDetNeg/nPix)) if nPixDetNeg/nPix > brightMaskFractionMax or nPixDet/nPix > brightMaskFractionMax: self.log.warn("Too high a fraction (%.1f > %.1f) of pixels were masked with current " - "\"bright\" detection round thresholds. Increasing by a factor of %f " + "\"bright\" detection round thresholds. Increasing by a factor of %.2f " "and trying again.", max(nPixDetNeg, nPixDet)/nPix, brightMaskFractionMax, self.config.bisectFactor) @@ -576,6 +582,332 @@ def _computeBrightDetectionMask(self, maskedImage, convolveResults): return brightDetectedMask +class AdaptiveThresholdDetectionConfig(Config): + """Configuration for AdaptiveThresholdDetectionTask + """ + maxAdaptiveDetIter = Field(dtype=int, default=20, + doc="Maximum number of adaptive threshold detection iterations.") + maxNumPeakPerBand = DictField( + keytype=str, + itemtype=int, + default={ + "u": 3000, + "g": 3000, + "r": 3000, + "i": 5000, + "z": 5000, + "y": 3000, + "fallback": 5000, + }, + doc="Maximum number of peaks per band. 
If the current band for the exposure " + "is not included as a key in this dict, the value associated with the " + "\"fallback\" key will be used.", + ) + minFootprint = Field(dtype=int, default=15, + doc="Minimum number of footprints considered sufficient to exit the " + "iteration loop. This should be larger than or equal to ``minIsolated`` as " + "it does not take into account whether any given footprint is multi-peaked " + "(i.e. blended thus not providing any isolated sources for PSF modeling.)") + maxPeakToFootRatio = Field(dtype=float, default=800.0, + doc="Maximum ratio of the number of peaks per footprint considered " + "sufficient to exit the iteration loop. This helps guard against " + "large contiguous footprints leaving no isolated sources for " + "inclusion in the PSF modeling.") + minIsolated = Field(dtype=int, default=6, + doc="Minimum number of single-peaked (i.e. isolated) footprints " + "considered sufficient to exit the iteration loop. This should be " + "larger than the minimum number of sources desired for PSF modeling " + "since some may be rejected by the source selector.") + sufficientIsolated = Field(dtype=int, default=100, + doc="Number of single-peaked (isolated) footprints considered " + "sufficient to exit the iteration loop. Must be larger than " + "``minIsolated``.") + sufficientFractionIsolated = Field(dtype=float, default=0.45, + doc="Fraction of single-peaked (isolated) footprints considered " + "sufficient to exit the iteration loop.") + + def validate(self): + super().validate() + if "fallback" not in self.maxNumPeakPerBand: + msg = ("Must include a \"fallback\" key in the config.maxNumPeakPerBand config dict. 
" + f"It is currenly: {self.maxNumPeakPerBand}.") + raise FieldValidationError(self.__class__.maxNumPeakPerBand, self, msg) + if self.minFootprint < self.minIsolated: + msg = (f"The config.minFootprint (= {self.minFootprint}) must be >= that of " + f"config.minIsolated (= {self.minIsolated}).") + raise FieldValidationError(self.__class__.minFootprint, self, msg) + if self.sufficientIsolated < self.minIsolated: + msg = (f"The config.sufficientIsolated (= {self.sufficientIsolated}) must be >= that of " + f"config.minIsolated (= {self.minIsolated}).") + raise FieldValidationError(self.__class__.sufficientIsolated, self, msg) + + +class AdaptiveThresholdDetectionTask(pipeBase.Task): + """Detection of sources on an image using an adaptive scheme for + the detection threshold. + """ + ConfigClass = AdaptiveThresholdDetectionConfig + _DefaultName = "adaptiveThresholdDetection" + + def __init__(self, *args, **kwargs): + pipeBase.Task.__init__(self, *args, **kwargs) + + def run(self, table, exposure, initialThreshold=None, initialThresholdMultiplier=2.0, + doReEstimageBackgroud=True, backgroundToPhotometricRatio=None): + """Perform detection with an adaptive threshold detection scheme + conditioned to maximize the likelihood of a succuessful PSF model fit + for any given "scene". + + In particular, we'd like to be able to handle different scenes, from + sparsely populated ones through very crowded ones, and possibily high + fill-factor nebulosity along the way, with a single pipeline (and set + of configs). This requires some flexibility in setting the detection + thresholds in order to detect enough sources suitable for PSF modeling + (e.g. crowded fields require higher thresholds to ensure the detections + don't end up overlapping into a single or very small number of blended + footprints. + + We first detect sources using the default threshold and multiplier. 
+ Then, cycling through a series of criteria based on the DETECTED mask + planes (number of footprints, number of peaks, number of isolated + footprints, number of peaks-per-footrpint, etc.) conditioned to identify + a "Goldilocks Zone" where we have enough isolated peaks from which to + measure the PSF, we iterate while adjusting the detection thresholds + in the appropriate direction until all criteria are met (or the maximum + number of iterations is reached). + + Parameters + ---------- + table : `lsst.afw.table.SourceTable` + Table object that will be used to create the SourceCatalog. + exposure : `lsst.afw.image.Exposure` + Exposure to process; DETECTED mask plane will be set in-place. + initialThreshold : `float`, optional + Initial threshold for detection of PSF sources. + initialThresholdMultiplier : `float`, optional + Initial threshold for detection of PSF sources. + doReEstimageBackgroud: `bool`, optional + Re-estimate the background during the final detection stage? + backgroundToPhotometricRatio : `lsst.afw.image.Image`, optional + Image to convert photometric-flattened image to + background-flattened image if ``reEstimateBackground`` is `True` + and exposure has been photometric-flattened, and + ``config.doApplyFlatBackgroundRatio`` is `True`. + + Returns + ------- + results : `lsst.pipe.base.Struct` + The adaptive threshold detection results as a struct with + attributes: + + ``detections`` + Results of the final round of detection as a struch with + attributes: + + ``sources`` + Detected sources on the exposure + (`lsst.afw.table.SourceCatalog`). + ``positive`` + Positive polarity footprints + (`lsst.afw.detection.FootprintSet` or `None`). + ``negative`` + Negative polarity footprints + (`lsst.afw.detection.FootprintSet` or `None`). + ``numPos`` + Number of footprints in positive or 0 if detection polarity was + negative (`int`). + ``numNeg`` + Number of footprints in negative or 0 if detection polarity was + positive (`int`). 
+ ``background`` + Re-estimated background. `None` if + ``reEstimateBackground==False`` + (`lsst.afw.math.BackgroundList`). + ``factor`` + Multiplication factor applied to the configured detection + threshold. (`float`). + ``thresholdValue`` + The final threshold value used to the configure the final round + of detection (`float`). + ``includeThresholdMultiplier`` + The final multiplication factor applied to the configured detection + threshold. (`float`). + """ + band = "fallback" + if exposure.filter is not None: + if exposure.filter.hasBandLabel(): + band = exposure.filter.bandLabel + if band in self.config.maxNumPeakPerBand: + maxNumPeak = self.config.maxNumPeakPerBand[band] + else: + maxNumPeak = self.config.maxNumPeakPerBand["fallback"] + + # Set up and configure the adaptive detection task on first iteration. + inAdaptiveDetection = True + nAdaptiveDetIter = 0 + thresholdFactor = 1.0 + if nAdaptiveDetIter == 0: + if initialThreshold is None: + maxSn = float(np.nanmax(exposure.image.array/np.sqrt(exposure.variance.array))) + adaptiveDetThreshold = min(maxSn, 5.0) + else: + adaptiveDetThreshold = initialThreshold + adaptiveDetectionConfig = SourceDetectionConfig() + adaptiveDetectionConfig.thresholdValue = adaptiveDetThreshold + adaptiveDetectionConfig.includeThresholdMultiplier = initialThresholdMultiplier + adaptiveDetectionConfig.reEstimateBackground = False + adaptiveDetectionConfig.doTempWideBackground = True + adaptiveDetectionConfig.tempWideBackground.binSize = 512 + adaptiveDetectionConfig.thresholdPolarity = "both" + self.log.info("Using adaptive detection with thresholdValue = %.2f and multiplier = %.1f", + adaptiveDetectionConfig.thresholdValue, + adaptiveDetectionConfig.includeThresholdMultiplier) + adaptiveDetectionTask = SourceDetectionTask(config=adaptiveDetectionConfig) + + maxNumNegFactor = 1.0 + while inAdaptiveDetection: + inAdaptiveDetection = False + nAdaptiveDetIter += 1 + detRes = adaptiveDetectionTask.run(table=table, exposure=exposure, 
doSmooth=True) + sourceCat = detRes.sources + nFootprint = len(sourceCat) + nPeak = 0 + nPosPeak = detRes.numPosPeaks + nNegPeak = detRes.numNegPeaks # Often detected in high nebulosity scenes. + maxNumNegPeak = max(15, int(0.025*nPosPeak))*maxNumNegFactor + nIsolated = 0 + nPeakPerSrcMax = 0 + maxNumPeakPerSrcMax = 0.2*maxNumPeak + for src in sourceCat: + nPeakSrc = len(src.getFootprint().getPeaks()) + if nPeakSrc == 1: + nIsolated += 1 + nPeak += nPeakSrc + nPeakPerSrcMax = max(nPeakPerSrcMax, nPeakSrc) + if nFootprint > 0.0: + fractionIsolated = nIsolated/nFootprint + avgPeakPerFoot = nPeak/nFootprint + else: + fractionIsolated = float("nan") + avgPeakPerFoot = float("nan") + self.log.info("In adaptive detection iter %d: nFootprints = %d, nPosPeak = %d (max is %d), " + "nNegPeak = %d (max is %d), nPeak/nFoot = %.1f (max is %.1f), " + "nPeakPerkSrcMax = %d (max is %d), nIsolated = %d, fractionIsolated = %.2f", + nAdaptiveDetIter, nFootprint, nPeak, maxNumPeak, nNegPeak, maxNumNegPeak, + avgPeakPerFoot, self.config.maxPeakToFootRatio, nPeakPerSrcMax, + maxNumPeakPerSrcMax, nIsolated, fractionIsolated) + if (nIsolated > self.config.sufficientIsolated + and fractionIsolated > self.config.sufficientFractionIsolated + and (nAdaptiveDetIter > 1 or self.config.maxAdaptiveDetIter == 1)): + if ((nIsolated > 5.0*self.config.sufficientIsolated and nPeak < 2.5*maxNumPeak + and nNegPeak < 100.0*maxNumNegPeak) + or (nNegPeak < 5.0*maxNumNegPeak and nPeak < 1.2*maxNumPeak)): + self.log.info("Sufficient isolated footprints (%d > %d) and fraction of isolated " + "footprints (%.2f > %.2f) for PSF modeling. Exiting adaptive detection " + "at iter: %d.", + nIsolated, self.config.sufficientIsolated, fractionIsolated, + self.config.sufficientFractionIsolated, nAdaptiveDetIter) + inAdaptiveDetection = False + continue + + if nFootprint == 0 or nPosPeak == 0: + thresholdFactor = 0.25 + maxNumNegFactor *= 10 + self.log.warning("Adaptive threshold increase went too far (nFootprint = 0). 
" + "Decreasing threshold to %.2f and rerunning.", + thresholdFactor*adaptiveDetectionConfig.thresholdValue) + adaptiveDetectionConfig.thresholdValue = ( + thresholdFactor*adaptiveDetectionConfig.thresholdValue) + adaptiveDetectionTask = SourceDetectionTask(config=adaptiveDetectionConfig) + inAdaptiveDetection = False if nAdaptiveDetIter >= self.config.maxAdaptiveDetIter else True + continue + + if ((nPeak/nFootprint > self.config.maxPeakToFootRatio and nIsolated < self.config.minIsolated) + or nNegPeak > maxNumNegPeak): + if nNegPeak > 2*maxNumNegPeak: + thresholdFactor = 1.5 + else: + thresholdFactor = 1.25 + thresholdFactor *= adaptiveDetectionConfig.includeThresholdMultiplier + adaptiveDetectionConfig.includeThresholdMultiplier = 1.0 + self.log.warning("Adaptive detection iter %d: catalog had nPeak/nFootprint = " + "%.1f (max is %.1f) and %d negative peaks (max is %d). " + "Increasing threshold to %.2f and setting multiplier " + "to %.1f and rerunning.", + nAdaptiveDetIter, nPeak/nFootprint, self.config.maxPeakToFootRatio, + nNegPeak, maxNumNegPeak, + thresholdFactor*adaptiveDetectionConfig.thresholdValue, + adaptiveDetectionConfig.includeThresholdMultiplier) + adaptiveDetectionConfig.thresholdValue = ( + thresholdFactor*adaptiveDetectionConfig.thresholdValue) + adaptiveDetectionTask = SourceDetectionTask(config=adaptiveDetectionConfig) + inAdaptiveDetection = False if nAdaptiveDetIter >= self.config.maxAdaptiveDetIter else True + continue + + if (nPeak > maxNumPeak or nPeakPerSrcMax > maxNumPeakPerSrcMax + or nFootprint <= self.config.minFootprint): + if nPeak > maxNumPeak or nPeakPerSrcMax > 0.25*maxNumPeak: + if nAdaptiveDetIter < 0.5*self.config.maxAdaptiveDetIter: + if nPeak > 3*maxNumPeak or nPeakPerSrcMax > maxNumPeak: + thresholdFactor = 1.7 + elif nPeak > 2*maxNumPeak or nPeakPerSrcMax > 0.5*maxNumPeak: + thresholdFactor = 1.4 + else: + thresholdFactor = 1.2 + else: + thresholdFactor = 1.2 + thresholdFactor *= 
adaptiveDetectionConfig.includeThresholdMultiplier + newThresholdMultiplier = max(1.0, 0.5*adaptiveDetectionConfig.includeThresholdMultiplier) + adaptiveDetectionConfig.includeThresholdMultiplier = newThresholdMultiplier + adaptiveDetectionConfig.thresholdValue = ( + thresholdFactor*adaptiveDetectionConfig.thresholdValue) + self.log.warning("Adaptive detection iter %d catalog had nPeak = %d (max = %d) " + "and nPeakPerSrcMax = %d (max = %d). Increasing threshold to %.2f " + "and setting multiplier to %.1f and rerunning.", + nAdaptiveDetIter, nPeak, maxNumPeak, nPeakPerSrcMax, maxNumPeakPerSrcMax, + adaptiveDetectionConfig.thresholdValue, + adaptiveDetectionConfig.includeThresholdMultiplier) + adaptiveDetectionTask = SourceDetectionTask(config=adaptiveDetectionConfig) + inAdaptiveDetection = ( + False if nAdaptiveDetIter >= self.config.maxAdaptiveDetIter else True + ) + continue + + if nFootprint <= self.config.minFootprint: + maxNumNegFactor *= 10 # Allow more -ve detections at this point. + thresholdFactor = min(0.85, 0.4*np.log10(10*(nFootprint + 1))) + self.log.warning("Adaptive detection iter %d catalog had only %d footprints. 
" + "Lowering threshold to %.2f and increasing the allowance for " + "negative detections and rerunning.", + nAdaptiveDetIter, nFootprint, + thresholdFactor*adaptiveDetectionConfig.thresholdValue) + adaptiveDetectionTask = SourceDetectionTask(config=adaptiveDetectionConfig) + adaptiveDetectionConfig.thresholdValue = ( + thresholdFactor*adaptiveDetectionConfig.thresholdValue + ) + adaptiveDetectionTask = SourceDetectionTask(config=adaptiveDetectionConfig) + inAdaptiveDetection = True + else: + inAdaptiveDetection = False + if nAdaptiveDetIter >= self.config.maxAdaptiveDetIter: + inAdaptiveDetection = False + # Final round of detection with positive polarity + adaptiveDetectionConfig.thresholdPolarity = "positive" + if doReEstimageBackgroud: + adaptiveDetectionConfig.reEstimateBackground = True + adaptiveDetectionTask = SourceDetectionTask(config=adaptiveDetectionConfig) + self.log.info("Perfomring final round of detection with threshold %.2f and multiplier %.1f", + adaptiveDetectionConfig.thresholdValue, + adaptiveDetectionConfig.includeThresholdMultiplier) + detRes = adaptiveDetectionTask.run(table=table, exposure=exposure, doSmooth=True, + backgroundToPhotometricRatio=backgroundToPhotometricRatio) + return pipeBase.Struct( + detections=detRes, + thresholdValue=adaptiveDetectionConfig.thresholdValue, + includeThresholdMultiplier=adaptiveDetectionConfig.includeThresholdMultiplier, + ) + + def countMaskedPixels(maskedIm, maskPlane): """Count the number of pixels in a given mask plane. From c6ed5d5b66ea600a7568180528736f69e386c825 Mon Sep 17 00:00:00 2001 From: Lauren MacArthur Date: Sun, 7 Sep 2025 11:49:22 -0700 Subject: [PATCH 02/17] Add a CentroidErrorLimit to the science selector It may be desirable to limit selected sources to only those with a certain error budget on the centroids (e.g. for astrometric calibration sources). 
--- python/lsst/meas/algorithms/sourceSelector.py | 41 +++++++++++++++++++ 1 file changed, 41 insertions(+) diff --git a/python/lsst/meas/algorithms/sourceSelector.py b/python/lsst/meas/algorithms/sourceSelector.py index 8888b1c17..e40f920d4 100644 --- a/python/lsst/meas/algorithms/sourceSelector.py +++ b/python/lsst/meas/algorithms/sourceSelector.py @@ -22,6 +22,7 @@ __all__ = ["BaseSourceSelectorConfig", "BaseSourceSelectorTask", "sourceSelectorRegistry", "ColorLimit", "MagnitudeLimit", "SignalToNoiseLimit", "MagnitudeErrorLimit", "RequireFlags", "RequireUnresolved", "RequireFiniteRaDec", "RequirePrimary", + "CullFromMaskedRegion", "CentroidErrorLimit", "ScienceSourceSelectorConfig", "ScienceSourceSelectorTask", "ReferenceSourceSelectorConfig", "ReferenceSourceSelectorTask", "NullSourceSelectorTask" @@ -639,6 +640,41 @@ def apply(self, catalog, exposure): return selected +class CentroidErrorLimit(BaseLimit): + """Select sources using a limit on the centroid errors. + + This object can be used as a `lsst.pex.config.Config` for configuring + the limit, and then the `apply` method can be used to identify sources + in the catalog that match the configured limit. + """ + centroidField = pexConfig.Field(dtype=str, default="slot_Centroid", + doc="Name of the source centroid field to use.") + + def setDefaults(self): + self.maximum = 3.0 + + def apply(self, catalog): + """Apply the limit on source centroid errors to a catalog. + + Parameters + ---------- + catalog : `lsst.afw.table.SourceCatalog` + Catalog of sources to which the limit will be applied. + + Returns + ------- + selected : `numpy.ndarray` + Boolean array indicating for each source whether it is selected + (True means selected). 
+ """ + xErrField = self.centroidField + "_xErr" + yErrField = self.centroidField + "_yErr" + selected = ((np.isfinite(catalog[xErrField])) & (np.isfinite(catalog[yErrField]))) + selected &= BaseLimit.apply(self, catalog[xErrField]) + selected &= BaseLimit.apply(self, catalog[yErrField]) + return selected + + class ScienceSourceSelectorConfig(pexConfig.Config): """Configuration for selecting science sources""" doFluxLimit = pexConfig.Field(dtype=bool, default=False, doc="Apply flux limit?") @@ -652,6 +688,7 @@ class ScienceSourceSelectorConfig(pexConfig.Config): doc="Apply source is primary check?") doSkySources = pexConfig.Field(dtype=bool, default=False, doc="Include sky sources, unioned with all other criteria?") + doCentroidErrorLimit = pexConfig.Field(dtype=bool, default=False, doc="Apply limit on centroid errors?") fluxLimit = pexConfig.ConfigField(dtype=FluxLimit, doc="Flux limit to apply") flags = pexConfig.ConfigField(dtype=RequireFlags, doc="Flags to require") unresolved = pexConfig.ConfigField(dtype=RequireUnresolved, doc="Star/galaxy separation to apply") @@ -662,6 +699,8 @@ class ScienceSourceSelectorConfig(pexConfig.Config): requirePrimary = pexConfig.ConfigField(dtype=RequirePrimary, doc="Primary source criteria to apply") skyFlag = pexConfig.ConfigField(dtype=RequireFlags, doc="Sky source flag to include") + centroidErrorLimit = pexConfig.ConfigField(dtype=CentroidErrorLimit, + doc="Limit to place on centroid errors.") def setDefaults(self): pexConfig.Config.setDefaults(self) @@ -728,6 +767,8 @@ def selectSources(self, sourceCat, matches=None, exposure=None): selected &= self.config.requirePrimary.apply(sourceCat) if self.config.doSkySources: selected |= self.config.skyFlag.apply(sourceCat) + if self.config.doCentroidErrorLimit: + selected &= self.config.centroidErrorLimit.apply(sourceCat) self.log.info("Selected %d/%d sources", selected.sum(), len(sourceCat)) From edd6e45db72332c0cb2bb0d09803f61e901a71c2 Mon Sep 17 00:00:00 2001 From: Lauren 
MacArthur Date: Mon, 22 Sep 2025 16:31:00 -0700 Subject: [PATCH 03/17] Add constraints on reference catalog loading Namely, allow for a minimum magnitude (maximum brightness) as well as a maximum total number of loaded reference objects. This can be useful for memory considerations, particularly for very crowded fields. --- .../meas/algorithms/loadReferenceObjects.py | 123 +++++++++++++++++- 1 file changed, 121 insertions(+), 2 deletions(-) diff --git a/python/lsst/meas/algorithms/loadReferenceObjects.py b/python/lsst/meas/algorithms/loadReferenceObjects.py index 4db94fdc2..6e418ee48 100644 --- a/python/lsst/meas/algorithms/loadReferenceObjects.py +++ b/python/lsst/meas/algorithms/loadReferenceObjects.py @@ -147,6 +147,22 @@ class LoadReferenceObjectsConfig(pexConfig.Config): dtype=bool, default=False, ) + maxRefObjects = pexConfig.Field( + doc="Maximum number of reference objects to send to the matcher. Setting " + "this to a reasonable value may be desirable for memory reasons " + "(particularly in very crowded field).", + dtype=int, + default=None, + optional=True, + ) + minRefMag = pexConfig.Field( + doc="Minimum (i.e. brightest) magnitude for reference catalog (the brightest " + "sources are typically saturated in the images, so may as well remove " + "them from the reference catalog).", + dtype=float, + default=None, + optional=True, + ) def validate(self): super().validate() @@ -678,10 +694,27 @@ class which intersect or are contained within the specified region. 
The if intersects: overlapList.append((dataId, refCat)) - if len(overlapList) == 0: + nOverlap = len(overlapList) + if nOverlap == 0: raise RuntimeError("No reference tables could be found for input region") + if self.config.maxRefObjects is not None: + maxRefObjectsPerInput = int(self.config.maxRefObjects/nOverlap) + else: + maxRefObjectsPerInput = None + + if self.config.anyFilterMapsToThis is not None: + refFluxField = self.config.anyFilterMapsToThis + "_flux" + else: + refFluxField = None + firstCat = overlapList[0][1].get() + # Filter catalog if limits were imposed by the maxRefObjects and/or + # minRefMag config settings. + if (refFluxField is not None + and (maxRefObjectsPerInput is not None or self.config.minRefMag is not None)): + firstCat = filterRefCat(firstCat, refFluxField, maxRefObjectsPerInput, + minRefMag=self.config.minRefMag, log=self.log) refCat = filtFunc(firstCat, overlapList[0][0].region) trimmedAmount = len(firstCat) - len(refCat) @@ -692,7 +725,13 @@ class which intersect or are contained within the specified region. The if tmpCat.schema != firstCat.schema: raise TypeError("Reference catalogs have mismatching schemas") - filteredCat = filtFunc(tmpCat, dataId.region) + if maxRefObjectsPerInput is not None or self.config.minRefMag is not None: + filteredCat = filterRefCat(tmpCat, refFluxField, maxRefObjectsPerInput, + minRefMag=self.config.minRefMag, log=self.log) + else: + filteredCat = tmpCat + filteredCat = filtFunc(filteredCat, dataId.region) + refCat.extend(filteredCat) trimmedAmount += len(tmpCat) - len(filteredCat) @@ -804,6 +843,86 @@ def loadSchema(self, filterName): return pipeBase.Struct(schema=expandedEmptyCat.schema, fluxField=fluxField) +def filterRefCat(refCat, refFluxField, maxRefObjects=None, minRefMag=None, log=None): + """Sub-select a number of reference objects starting from the brightest + and maxing out at the number specified by maxRefObjects. 
+ + No further trimming is done if len(refCat) > maxRefObjects after trimming + to minRefMag. + + Parameters + ---------- + refCat : `lsst.afw.table.SimpleCatalog` + Catalog of reference objects to trim. + refFluxField : `str` + Field of refCat to use for flux. + maxRefObjects : `int` or `None`, optional + Maximum number of reference objects (i.e. trim refCat down to + this number of objects). + minRefMag : `int` or `None`, optional + Minimum (i.e. brightest) magnitude to include in the reference + catalog. + log : `lsst.log.Log` or `logging.Logger` or `None`, optional + Logger object used to write out messages. If `None`, no messages + will be logged. + + Returns + ------- + filteredCat : `lsst.afw.table.SimpleCatalog` + Catalog trimmed to the maximum brightness and/or maximum number set + in the task config from the brightest flux down. + """ + if maxRefObjects is None and minRefMag is None: + if log is not None: + log.debug("No filtering of the reference catalog has been actually requested " + "(i.e maxRefObjects and minRefMag are both `None`) Returning " + "original catalog.") + return refCat + + filteredCat = refCat.copy(deep=True) + + if minRefMag is not None: + refFlux = filteredCat.get(refFluxField) + refMag = (refFlux*astropy.units.nJy).to_value(astropy.units.ABmag) + if numpy.nanmin(refMag) <= minRefMag: + filteredCat = filteredCat[(refMag > minRefMag)].copy(deep=True) + if log is not None: + log.info("Trimming the loaded reference catalog to %s > %.2f", refFluxField, minRefMag) + + if maxRefObjects is not None: + if len(filteredCat) <= maxRefObjects: + if log is not None: + log.debug("Number of reference objects in reference catalog = %d (max is %d). " + "Returning catalog without further filtering.", + len(filteredCat), maxRefObjects) + return filteredCat + if log is not None: + log.info("Trimming number of reference objects in refCat from %d down to %d. 
", + len(refCat), maxRefObjects) + if not filteredCat.isContiguous(): + filteredCat = filteredCat.copy(deep=True) + + refFlux = filteredCat.get(refFluxField) + sortedRefFlux = refFlux[refFlux.argsort()] + minRefFlux = sortedRefFlux[-(maxRefObjects + 1)] + + selected = (filteredCat.get(refFluxField) > minRefFlux) + filteredCat = filteredCat[selected] + + if not filteredCat.isContiguous(): + filteredCat = filteredCat.copy(deep=True) + + if log is not None: + if len(filteredCat) > 0: + refFlux = filteredCat[refFluxField] + refMag = (refFlux*astropy.units.nJy).to_value(astropy.units.ABmag) + log.info("Reference catalog magnitude range for filter %s after trimming: refMagMin = %.2f; " + "refMagMax = %.2f", refFluxField, numpy.nanmin(refMag), numpy.nanmax(refMag)) + else: + log.warning("Length of reference catalog after filtering is 0.") + return filteredCat + + def getRefFluxField(schema, filterName): """Get the name of a flux field from a schema. From 5969377dae18a0b995c547ab1d5e599338e23a60 Mon Sep 17 00:00:00 2001 From: Lauren MacArthur Date: Thu, 9 Oct 2025 14:35:10 -0700 Subject: [PATCH 04/17] Remove local background subtraction from sky measurements It doesn't make sense to subtract a local background from sources that were explicitly placed to represent areas of background. --- .../lsst/meas/algorithms/dynamicDetection.py | 11 +++---- tests/test_dynamicDetection.py | 30 +------------------ 2 files changed, 5 insertions(+), 36 deletions(-) diff --git a/python/lsst/meas/algorithms/dynamicDetection.py b/python/lsst/meas/algorithms/dynamicDetection.py index 815bee794..f361993d8 100644 --- a/python/lsst/meas/algorithms/dynamicDetection.py +++ b/python/lsst/meas/algorithms/dynamicDetection.py @@ -145,7 +145,7 @@ def __init__(self, *args, **kwargs): # Set up forced measurement. 
config = ForcedMeasurementTask.ConfigClass() - config.plugins.names = ['base_TransformedCentroid', 'base_PsfFlux', 'base_LocalBackground'] + config.plugins.names = ["base_TransformedCentroid", "base_PsfFlux"] # We'll need the "centroid" and "psfFlux" slots for slot in ("shape", "psfShape", "apFlux", "modelFlux", "gaussianFlux", "calibFlux"): setattr(config.slots, slot, None) @@ -276,10 +276,7 @@ def calculateThreshold(self, exposure, seed, sigma=None, minFractionSourcesFacto # Calculate new threshold fluxes = catalog["base_PsfFlux_instFlux"] area = catalog["base_PsfFlux_area"] - bg = catalog["base_LocalBackground_instFlux"] - - good = (~catalog["base_PsfFlux_flag"] & ~catalog["base_LocalBackground_flag"] - & np.isfinite(fluxes) & np.isfinite(area) & np.isfinite(bg)) + good = (~catalog["base_PsfFlux_flag"] & np.isfinite(fluxes)) if good.sum() < minNumSources: if not isBgTweak: @@ -308,9 +305,9 @@ def calculateThreshold(self, exposure, seed, sigma=None, minFractionSourcesFacto else: self.log.info("Number of good sky sources used for dynamic detection background tweak:" " %d (of %d requested).", good.sum(), self.skyObjects.config.nSources) - bgMedian = np.median((fluxes/area)[good]) - lq, uq = np.percentile((fluxes - bg*area)[good], [25.0, 75.0]) + bgMedian = np.median((fluxes/area)[good]) + lq, uq = np.percentile(fluxes[good], [25.0, 75.0]) stdevMeas = 0.741*(uq - lq) medianError = np.median(catalog["base_PsfFlux_instFluxErr"][good]) if wcsIsNone: diff --git a/tests/test_dynamicDetection.py b/tests/test_dynamicDetection.py index 3ca6e5529..a5145da10 100644 --- a/tests/test_dynamicDetection.py +++ b/tests/test_dynamicDetection.py @@ -3,7 +3,6 @@ import lsst.utils.tests import numpy as np from lsst.afw.geom import makeCdMatrix, makeSkyWcs -from lsst.afw.image import PARENT from lsst.afw.table import SourceTable from lsst.geom import Box2I, Extent2I, Point2D, Point2I, SpherePoint, degrees from lsst.meas.algorithms import DynamicDetectionTask @@ -37,35 +36,8 @@ def 
setUp(self): crval=SpherePoint(0, 0, degrees), cdMatrix=makeCdMatrix(scale=scale))) - # Make a large area of extra background; we should be robust against - # it. Unfortunately, some tuning is required here to get something - # challenging but not impossible: - # * A very large box will cause failures because the "extra" and the - # "normal" are reversed. - # * A small box will not be challenging because it's simple to clip - # out. - # * A large value will cause failures because it produces large edges - # in background-subtrction that broaden flux distributions. - # * A small value will not be challenging because it has little effect. - extraBox = Box2I(xy0 + Extent2I(345, 456), Extent2I(1234, 1234)) # Box for extra background - extraValue = 0.5*noise # Extra background value to add in - self.exposure.image[extraBox, PARENT] += extraValue - self.config = DynamicDetectionTask.ConfigClass() - self.config.skyObjects.nSources = 300 - self.config.reEstimateBackground = False - self.config.doTempWideBackground = True - self.config.thresholdType = "pixel_stdev" - - # Relative tolerance for tweak factor. - # Not sure why this isn't smaller; maybe due to use of Poisson instead - # of Gaussian noise? - # It seems as if some sky objects are being placed in the extra - # background region, which is causing the offset between the expected - # factor and the measured factor to be larger than otherwise expected. - # This relative tolerance was increased from 0.1 to 0.15 on DM-23781 to - # account for this. - self.rtol = 0.15 + self.rtol = 0.1 def tearDown(self): del self.exposure From 27ca5a50a88289857ee13df35d06a7399864dfa6 Mon Sep 17 00:00:00 2001 From: Lauren MacArthur Date: Thu, 23 Oct 2025 16:22:53 -0700 Subject: [PATCH 05/17] Add option to override nSigmaToGrow in detection This is desired for the dynamic detection (particulalry when dealing with crowded fields). 
--- python/lsst/meas/algorithms/detection.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/python/lsst/meas/algorithms/detection.py b/python/lsst/meas/algorithms/detection.py index ddb737794..9ce8c123a 100644 --- a/python/lsst/meas/algorithms/detection.py +++ b/python/lsst/meas/algorithms/detection.py @@ -600,7 +600,7 @@ def applyThreshold(self, middle, bbox, factor=1.0, factorNeg=None): return results - def finalizeFootprints(self, mask, results, sigma, factor=1.0, factorNeg=None): + def finalizeFootprints(self, mask, results, sigma, factor=1.0, factorNeg=None, growOverride=None): """Finalize the detected footprints. Grow the footprints, set the ``DETECTED`` and ``DETECTED_NEGATIVE`` @@ -629,6 +629,10 @@ def finalizeFootprints(self, mask, results, sigma, factor=1.0, factorNeg=None): for positive detection polarity) is assumed. Note that this is only used here for logging purposes. """ + if growOverride is not None: + self.log.warning("config.nSigmaToGrow is set to %.2f, but the caller has set " + "growOverride to %.2f, so the footprints will be grown by " + "%.2f sigma.", self.config.nSigmaToGrow, growOverride, growOverride) factorNeg = factor if factorNeg is None else factorNeg for polarity, maskName in (("positive", "DETECTED"), ("negative", "DETECTED_NEGATIVE")): fpSet = getattr(results, polarity) From 46f3599e3814c61c4ba7c6011f9c5aa8d0db0813 Mon Sep 17 00:00:00 2001 From: Lauren MacArthur Date: Thu, 9 Oct 2025 14:38:38 -0700 Subject: [PATCH 06/17] Add condition on number of peaks per footprint If the detection mask ends up as a single, or few many-peaked footprints, then all subsequent processing will fall over. Impose a limit to the maximum number of peaks in a given footprint for the detection mask. If this condition is not met, iteratively increase the detection threshold until it does. This is meant to be a no-op for most "typical" scenes and observing conditions, but can help avoid a total downstream meltdown in certain cases (e.g. 
very crowded regions). --- .../lsst/meas/algorithms/dynamicDetection.py | 72 +++++++++++++++---- 1 file changed, 59 insertions(+), 13 deletions(-) diff --git a/python/lsst/meas/algorithms/dynamicDetection.py b/python/lsst/meas/algorithms/dynamicDetection.py index f361993d8..bb234d505 100644 --- a/python/lsst/meas/algorithms/dynamicDetection.py +++ b/python/lsst/meas/algorithms/dynamicDetection.py @@ -105,6 +105,15 @@ class DynamicDetectionConfig(SourceDetectionConfig): "suitable locations to lay down sky objects. To allow for best effort " "sky source placement, if True, this allows for a slight erosion of " "the detection masks.") + maxPeakToFootRatio = Field(dtype=float, default=150.0, + doc="Maximum ratio of peak per footprint in the detection mask. " + "This is to help prevent single contiguous footprints that nothing " + "can be done with (i.e. deblending will be skipped). If the current " + "detection plane does not satisfy this constraint, the detection " + "threshold is increased iteratively until it is. This behaviour is " + "intended to be an effective no-op for most \"typical\" scenes/standard " + "quality observations, but can avoid total meltdown in, e.g. very " + "crowded regions.") def setDefaults(self): SourceDetectionConfig.setDefaults(self) @@ -424,22 +433,59 @@ def detectFootprints(self, exposure, doSmooth=True, sigma=None, clearMask=True, # seed needs to fit in a C++ 'int' so pybind doesn't choke on it seed = (expId if expId is not None else int(maskedImage.image.array.sum())) % (2**31 - 1) threshResults = self.calculateThreshold(exposure, seed, sigma=sigma) - factor = threshResults.multiplicative + minMultiplicative = 0.5 + if threshResults.multiplicative < minMultiplicative: + self.log.warning("threshResults.multiplicative = %.2f is less than minimum value (%.2f). 
" + "Setting to %.2f.", threshResults.multiplicative, minMultiplicative, + minMultiplicative) + factor = max(minMultiplicative, threshResults.multiplicative) self.log.info("Modifying configured detection threshold by factor %.2f to %.2f", factor, factor*self.config.thresholdValue) - # Blow away preliminary (low threshold) detection mask - self.clearMask(maskedImage.mask) - if not clearMask: - maskedImage.mask.array |= oldDetected - - # Rinse and repeat thresholding with new calculated threshold - results = self.applyThreshold(middle, maskedImage.getBBox(), factor) - results.prelim = prelim - results.background = background if background is not None else lsst.afw.math.BackgroundList() - if self.config.doTempLocalBackground: - self.applyTempLocalBackground(exposure, middle, results) - self.finalizeFootprints(maskedImage.mask, results, sigma, factor=factor) + growOverride = None + inFinalize = True + while inFinalize: + inFinalize = False + # Blow away preliminary (low threshold) detection mask + self.clearMask(maskedImage.mask) + if not clearMask: + maskedImage.mask.array |= oldDetected + + # Rinse and repeat thresholding with new calculated threshold + results = self.applyThreshold(middle, maskedImage.getBBox(), factor) + results.prelim = prelim + results.background = background if background is not None else lsst.afw.math.BackgroundList() + if self.config.doTempLocalBackground: + self.applyTempLocalBackground(exposure, middle, results) + self.finalizeFootprints(maskedImage.mask, results, sigma, factor=factor, + growOverride=growOverride) + self.log.warning("nPeaks/nFootprint = %.2f (max is %.1f)", + results.numPosPeaks/results.numPos, + self.config.maxPeakToFootRatio) + if results.numPosPeaks/results.numPos > self.config.maxPeakToFootRatio: + if results.numPosPeaks/results.numPos > 3*self.config.maxPeakToFootRatio: + factor *= 1.4 + else: + factor *= 1.2 + if factor > 2.0: + if growOverride is None: + growOverride = 0.75*self.config.nSigmaToGrow + else: + 
growOverride *= 0.75 + self.log.warning("Decreasing nSigmaToGrow to %.2f", growOverride) + if factor >= 5: + self.log.warning("New theshold value would be > 5 times the initially requested " + "one (%.2f > %.2f). Leaving inFinalize iteration without " + "getting the number of peaks per footprint below %.1f", + factor*self.config.thresholdValue, self.config.thresholdValue, + self.config.maxPeakToFootRatio) + inFinalize = False + else: + inFinalize = True + self.log.warning("numPosPeaks/numPos (%d) > maxPeakPerFootprint (%.1f). " + "Increasing threshold factor to %.2f and re-running,", + results.numPosPeaks/results.numPos, self.config.maxPeakToFootRatio, + factor) self.clearUnwantedResults(maskedImage.mask, results) From 91a19689c84ec926427143201fe02d0bf29ac8de Mon Sep 17 00:00:00 2001 From: Lauren MacArthur Date: Fri, 10 Oct 2025 14:07:31 -0700 Subject: [PATCH 07/17] Flake8 fixes in loadReferenceObjects.py --- .../meas/algorithms/loadReferenceObjects.py | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/python/lsst/meas/algorithms/loadReferenceObjects.py b/python/lsst/meas/algorithms/loadReferenceObjects.py index 6e418ee48..b65c0f57e 100644 --- a/python/lsst/meas/algorithms/loadReferenceObjects.py +++ b/python/lsst/meas/algorithms/loadReferenceObjects.py @@ -248,7 +248,8 @@ def applyProperMotions(self, catalog, epoch): self.log.debug("No epoch provided: not applying proper motion corrections to refcat.") return - # Warn/raise for a catalog in an incorrect format, if epoch was specified. + # Warn/raise for a catalog in an incorrect format, if epoch was + # specified. 
if "pm_ra" in catalog.schema: pm_ra_radians = False field = catalog.schema["pm_ra"].asField() @@ -306,10 +307,10 @@ def _remapReferenceCatalogSchema(refCat, *, anyFilterMapsToThis=None, mapper.editOutputSchema().disconnectAliases() if centroids: - # Add and initialize centroid and hasCentroid fields (these are - # added after loading to avoid wasting space in the saved catalogs). - # The new fields are automatically initialized to (nan, nan) and - # False so no need to set them explicitly. + # Add and initialize centroid and hasCentroid fields (these + # are added after loading to avoid wasting space in the saved + # catalogs). The new fields are automatically initialized to + # (nan, nan) and False so no need to set them explicitly. mapper.editOutputSchema().addField("centroid_x", type=float, doReplace=True) mapper.editOutputSchema().addField("centroid_y", type=float, doReplace=True) mapper.editOutputSchema().addField("hasCentroid", type="Flag", doReplace=True) @@ -681,11 +682,12 @@ class which intersect or are contained within the specified region. The regionLat.getA().asDegrees(), regionLat.getB().asDegrees()) if filtFunc is None: filtFunc = _FilterCatalog(region) - # filter out all the regions supplied by the constructor that do not overlap + # Filter out all the regions supplied by the constructor that do not + # overlap. overlapList = [] for dataId, refCat in zip(self.dataIds, self.refCats): - # SphGeom supports some objects intersecting others, but is not symmetric, - # try the intersect operation in both directions + # SphGeom supports some objects intersecting others, but is not + # symmetric, try the intersect operation in both directions. 
try: intersects = dataId.region.intersects(region) except TypeError: From 3037bb83c47ab6a25380dc654f38b9fa19f7d591 Mon Sep 17 00:00:00 2001 From: Lauren MacArthur Date: Fri, 10 Oct 2025 15:41:29 -0700 Subject: [PATCH 08/17] Add unittest for AdaptiveThresholdDetectionTask --- tests/test_adaptiveThresholdDetection.py | 84 ++++++++++++++++++++++++ 1 file changed, 84 insertions(+) create mode 100644 tests/test_adaptiveThresholdDetection.py diff --git a/tests/test_adaptiveThresholdDetection.py b/tests/test_adaptiveThresholdDetection.py new file mode 100644 index 000000000..bdae32bb1 --- /dev/null +++ b/tests/test_adaptiveThresholdDetection.py @@ -0,0 +1,84 @@ +import unittest + +import lsst.utils.tests +import numpy as np +from lsst.afw.geom import makeCdMatrix, makeSkyWcs +from lsst.afw.table import SourceTable +from lsst.geom import Box2I, Extent2I, Point2D, Point2I, SpherePoint, degrees +from lsst.meas.algorithms import AdaptiveThresholdDetectionTask +from lsst.meas.algorithms.testUtils import plantSources + + +class AdaptiveThresholdDetectionTest(lsst.utils.tests.TestCase): + def setUp(self): + xy0 = Point2I(12345, 67890) # xy0 for image + dims = Extent2I(2345, 2345) # Dimensions of image + self.box = Box2I(xy0, dims) # Bounding box of image + self.sigma = 3.21 # PSF sigma + self.nSigmaForKernel = 5.0 # Number of PSF sigmas for kernel + self.sky = 12345.6 # Sky level + noise = np.sqrt(self.sky)*np.pi*self.sigma**2 # Poisson noise per PSF + self.faint = 1.0*noise # Faintest level for star fluxes + self.bright = 100.0*noise # Brightest level for star fluxes + self.starBox = Box2I(self.box) # Area on image in which we can put star centers + starBoxBuffer = 4.0 # Buffer for star centers around edge + self.starBox.grow(-int(starBoxBuffer*self.sigma)) + self.scale = 1.0e-5*degrees # Pixel scale + self.config = AdaptiveThresholdDetectionTask.ConfigClass() + self.rtol = 1e-4 + + def makeMockExposure(self, numStars=300): + rng = np.random.Generator(np.random.MT19937(12345)) 
+ stars = [(xx, yy, ff, self.sigma) for xx, yy, ff in + zip(rng.uniform(self.starBox.getMinX(), self.starBox.getMaxX(), numStars), + rng.uniform(self.starBox.getMinY(), self.starBox.getMaxY(), numStars), + np.linspace(self.faint, self.bright, numStars))] + exposure = plantSources(self.box, 2*int(self.nSigmaForKernel*self.sigma) + 1, self.sky, stars, True) + exposure.setWcs(makeSkyWcs(crpix=Point2D(0, 0), + crval=SpherePoint(0, 0, degrees), + cdMatrix=makeCdMatrix(scale=self.scale))) + return exposure + + def tearDown(self): + # del self.exposure + del self.config + + def check(self, expectFactor, initialThreshold=None): + schema = SourceTable.makeMinimalSchema() + table = SourceTable.make(schema) + task = AdaptiveThresholdDetectionTask(config=self.config) + results = task.run(table, self.exposure, initialThreshold=initialThreshold) + self.assertFloatsAlmostEqual(results.thresholdValue, expectFactor, rtol=self.rtol) + + def testUncrowded(self): + """Add sparse-ish numbers of stars. + """ + self.exposure = self.makeMockExposure() + self.check(5.0) + self.exposure = self.makeMockExposure(numStars=100) + self.check(5.0) + + def testCrowded(self): + """Add enough stars to be considered crowded and test adjusting + initial detection threshold. 
+ """ + self.exposure = self.makeMockExposure(numStars=8000) + self.check(23.8) + self.exposure = self.makeMockExposure(numStars=8000) + self.check(50.0, initialThreshold=50.0) + self.exposure = self.makeMockExposure(numStars=25000) + self.check(173.4, initialThreshold=20.0) + + +class TestMemory(lsst.utils.tests.MemoryTestCase): + pass + + +def setup_module(module): + lsst.utils.tests.init() + + +if __name__ == "__main__": + import sys + setup_module(sys.modules['__main__']) + unittest.main() From beacbdf5f37739409bcce5ab0299ecfbe2216f86 Mon Sep 17 00:00:00 2001 From: Lauren MacArthur Date: Mon, 20 Oct 2025 14:58:35 -0700 Subject: [PATCH 09/17] Slight hack for unpersistable empty BackgroundList --- .../lsst/meas/algorithms/dynamicDetection.py | 30 ++++++++++++------- 1 file changed, 20 insertions(+), 10 deletions(-) diff --git a/python/lsst/meas/algorithms/dynamicDetection.py b/python/lsst/meas/algorithms/dynamicDetection.py index bb234d505..47ed76f7d 100644 --- a/python/lsst/meas/algorithms/dynamicDetection.py +++ b/python/lsst/meas/algorithms/dynamicDetection.py @@ -494,15 +494,22 @@ def detectFootprints(self, exposure, doSmooth=True, sigma=None, clearMask=True, self.display(exposure, results, middle) + # Re-do the background tweak after any temporary backgrounds have + # been restored. + # + # But we want to keep any large-scale background (e.g., scattered + # light from bright stars) from being selected for sky objects in + # the calculation, so do another detection pass without either the + # local or wide temporary background subtraction; the DETECTED + # pixels will mark the area to ignore. + + # The following if/else is to workaround the fact that it is + # currently not possible to persist an empty BackgroundList, so + # we instead set the value of the backround tweak to 0.0 if + # doBackgroundTweak is False and call the tweakBackground function + # regardless to get at least one background into the list (do we + # need a TODO here?). 
if self.config.doBackgroundTweak: - # Re-do the background tweak after any temporary backgrounds have - # been restored. - # - # But we want to keep any large-scale background (e.g., scattered - # light from bright stars) from being selected for sky objects in - # the calculation, so do another detection pass without either the - # local or wide temporary background subtraction; the DETECTED - # pixels will mark the area to ignore. originalMask = maskedImage.mask.array.copy() try: self.clearMask(exposure.mask) @@ -513,7 +520,9 @@ def detectFootprints(self, exposure, doSmooth=True, sigma=None, clearMask=True, isBgTweak=True).additive finally: maskedImage.mask.array[:] = originalMask - self.tweakBackground(exposure, bgLevel, results.background) + else: + bgLevel = 0.0 + self.tweakBackground(exposure, bgLevel, results.background) return results @@ -534,7 +543,8 @@ def tweakBackground(self, exposure, bgLevel, bgList=None): bg : `lsst.afw.math.BackgroundMI` Constant background model. """ - self.log.info("Tweaking background by %f to match sky photometry", bgLevel) + if bgLevel != 0.0: + self.log.info("Tweaking background by %f to match sky photometry", bgLevel) exposure.image -= bgLevel bgStats = lsst.afw.image.MaskedImageF(1, 1) bgStats.set(bgLevel, 0, bgLevel) From 0146b36d2bf4a60b66a836d7b17dd9af46a8deb6 Mon Sep 17 00:00:00 2001 From: Jim Bosch Date: Fri, 31 Oct 2025 10:45:14 -0400 Subject: [PATCH 10/17] Move adaptive threshold detection to its own module. 
--- python/lsst/meas/algorithms/__init__.py | 4 + .../meas/algorithms/adaptive_thresholds.py | 360 ++++++++++++++++++ .../lsst/meas/algorithms/dynamicDetection.py | 334 +--------------- tests/test_adaptiveThresholdDetection.py | 2 +- 4 files changed, 368 insertions(+), 332 deletions(-) create mode 100644 python/lsst/meas/algorithms/adaptive_thresholds.py diff --git a/python/lsst/meas/algorithms/__init__.py b/python/lsst/meas/algorithms/__init__.py index 245b34a23..eff9e02ed 100644 --- a/python/lsst/meas/algorithms/__init__.py +++ b/python/lsst/meas/algorithms/__init__.py @@ -72,3 +72,7 @@ from .version import * import lsst.utils + +# adaptive_thresholds.py is intentionally not imported and lifted, to +# (belatedly) try to limit how much code is run when just importing +# lsst.meas.algorithm. diff --git a/python/lsst/meas/algorithms/adaptive_thresholds.py b/python/lsst/meas/algorithms/adaptive_thresholds.py new file mode 100644 index 000000000..da3505cb5 --- /dev/null +++ b/python/lsst/meas/algorithms/adaptive_thresholds.py @@ -0,0 +1,360 @@ +# This file is part of meas_algorithms. +# +# Developed for the LSST Data Management System. +# This product includes software developed by the LSST Project +# (https://www.lsst.org). +# See the COPYRIGHT file at the top-level directory of this distribution +# for details of code ownership. +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . 
+ +__all__ = [ + "AdaptiveThresholdDetectionConfig", + "AdaptiveThresholdDetectionTask", +] + +import numpy as np + +from lsst.pex.config import Field, Config, DictField, FieldValidationError +from lsst.pipe.base import Struct, Task + +from .detection import SourceDetectionConfig, SourceDetectionTask + + +class AdaptiveThresholdDetectionConfig(Config): + """Configuration for AdaptiveThresholdDetectionTask + """ + maxAdaptiveDetIter = Field(dtype=int, default=20, + doc="Maximum number of adaptive threshold detection iterations.") + maxDynDetIter = Field(dtype=int, default=20, + doc="DEPRECATED...just including for config reading of prev runs...") + maxNumPeakPerBand = DictField( + keytype=str, + itemtype=int, + default={ + "u": 3000, + "g": 3000, + "r": 3000, + "i": 5000, + "z": 5000, + "y": 3000, + "fallback": 5000, + }, + doc="Maximum number of peaks per band. If the current band for the exposure " + "is not included as a key in this dict, the value associated with the " + "\"fallback\" key will be used.", + ) + minFootprint = Field(dtype=int, default=15, + doc="Minimum number of footprints considered sufficient to exit the " + "iteration loop. This should be larger than or equal to ``minIsolated`` as " + "it does not take into account whether any given footprint is multi-peaked " + "(i.e. blended thus not providing any isolated sources for PSF modeling.)") + maxPeakToFootRatio = Field(dtype=float, default=800.0, + doc="Maximum ratio of the number of peaks per footprint considered " + "sufficient to exit the iteration loop. This helps guard against " + "large contiguous footprints leaving no isolated sources for " + "inclusion in the PSF modeling.") + minIsolated = Field(dtype=int, default=6, + doc="Minimum number of single-peaked (i.e. isolated) footprints " + "considered sufficient to exit the iteration loop. 
This should be " + "larger than the minimum number of sources desired for PSF modeling " + "since some may be rejected by the source selector.") + sufficientIsolated = Field(dtype=int, default=100, + doc="Number of single-peaked (isolated) footprints considered " + "sufficient to exit the iteration loop. Must be larger than " + "``minIsolated``.") + sufficientFractionIsolated = Field(dtype=float, default=0.45, + doc="Fraction of single-peaked (isolated) footprints considered " + "sufficient to exit the iteration loop.") + + def validate(self): + super().validate() + if "fallback" not in self.maxNumPeakPerBand: + msg = ("Must include a \"fallback\" key in the config.maxNumPeakPerBand config dict. " + f"It is currenly: {self.maxNumPeakPerBand}.") + raise FieldValidationError(self.__class__.maxNumPeakPerBand, self, msg) + if self.minFootprint < self.minIsolated: + msg = (f"The config.minFootprint (= {self.minFootprint}) must be >= that of " + f"config.minIsolated (= {self.minIsolated}).") + raise FieldValidationError(self.__class__.minFootprint, self, msg) + if self.sufficientIsolated < self.minIsolated: + msg = (f"The config.sufficientIsolated (= {self.sufficientIsolated}) must be >= that of " + f"config.minIsolated (= {self.minIsolated}).") + raise FieldValidationError(self.__class__.sufficientIsolated, self, msg) + + +class AdaptiveThresholdDetectionTask(Task): + """Detection of sources on an image using an adaptive scheme for + the detection threshold. + """ + ConfigClass = AdaptiveThresholdDetectionConfig + _DefaultName = "adaptiveThresholdDetection" + + def __init__(self, *args, **kwargs): + Task.__init__(self, *args, **kwargs) + + def run(self, table, exposure, initialThreshold=None, initialThresholdMultiplier=2.0, + doReEstimageBackgroud=True, backgroundToPhotometricRatio=None): + """Perform detection with an adaptive threshold detection scheme + conditioned to maximize the likelihood of a succuessful PSF model fit + for any given "scene". 
+ + In particular, we'd like to be able to handle different scenes, from + sparsely populated ones through very crowded ones, and possibily high + fill-factor nebulosity along the way, with a single pipeline (and set + of configs). This requires some flexibility in setting the detection + thresholds in order to detect enough sources suitable for PSF modeling + (e.g. crowded fields require higher thresholds to ensure the detections + don't end up overlapping into a single or very small number of blended + footprints. + + We first detect sources using the default threshold and multiplier. + Then, cycling through a series of criteria based on the DETECTED mask + planes (number of footprints, number of peaks, number of isolated + footprints, number of peaks-per-footrpint, etc.) conditioned to identify + a "Goldilocks Zone" where we have enough isolated peaks from which to + measure the PSF, we iterate while adjusting the detection thresholds + in the appropriate direction until all criteria are met (or the maximum + number of iterations is reached). + + Parameters + ---------- + table : `lsst.afw.table.SourceTable` + Table object that will be used to create the SourceCatalog. + exposure : `lsst.afw.image.Exposure` + Exposure to process; DETECTED mask plane will be set in-place. + initialThreshold : `float`, optional + Initial threshold for detection of PSF sources. + initialThresholdMultiplier : `float`, optional + Initial threshold for detection of PSF sources. + doReEstimageBackgroud: `bool`, optional + Re-estimate the background during the final detection stage? + backgroundToPhotometricRatio : `lsst.afw.image.Image`, optional + Image to convert photometric-flattened image to + background-flattened image if ``reEstimateBackground`` is `True` + and exposure has been photometric-flattened, and + ``config.doApplyFlatBackgroundRatio`` is `True`. 
+ + Returns + ------- + results : `lsst.pipe.base.Struct` + The adaptive threshold detection results as a struct with + attributes: + + ``detections`` + Results of the final round of detection as a struch with + attributes: + + ``sources`` + Detected sources on the exposure + (`lsst.afw.table.SourceCatalog`). + ``positive`` + Positive polarity footprints + (`lsst.afw.detection.FootprintSet` or `None`). + ``negative`` + Negative polarity footprints + (`lsst.afw.detection.FootprintSet` or `None`). + ``numPos`` + Number of footprints in positive or 0 if detection polarity was + negative (`int`). + ``numNeg`` + Number of footprints in negative or 0 if detection polarity was + positive (`int`). + ``background`` + Re-estimated background. `None` if + ``reEstimateBackground==False`` + (`lsst.afw.math.BackgroundList`). + ``factor`` + Multiplication factor applied to the configured detection + threshold. (`float`). + ``thresholdValue`` + The final threshold value used to the configure the final round + of detection (`float`). + ``includeThresholdMultiplier`` + The final multiplication factor applied to the configured detection + threshold. (`float`). + """ + band = "fallback" + if exposure.filter is not None: + if exposure.filter.hasBandLabel(): + band = exposure.filter.bandLabel + if band in self.config.maxNumPeakPerBand: + maxNumPeak = self.config.maxNumPeakPerBand[band] + else: + maxNumPeak = self.config.maxNumPeakPerBand["fallback"] + + # Set up and configure the adaptive detection task on first iteration. 
+ inAdaptiveDetection = True + nAdaptiveDetIter = 0 + thresholdFactor = 1.0 + if nAdaptiveDetIter == 0: + if initialThreshold is None: + maxSn = float(np.nanmax(exposure.image.array/np.sqrt(exposure.variance.array))) + adaptiveDetThreshold = min(maxSn, 5.0) + else: + adaptiveDetThreshold = initialThreshold + adaptiveDetectionConfig = SourceDetectionConfig() + adaptiveDetectionConfig.thresholdValue = adaptiveDetThreshold + adaptiveDetectionConfig.includeThresholdMultiplier = initialThresholdMultiplier + adaptiveDetectionConfig.reEstimateBackground = False + adaptiveDetectionConfig.doTempWideBackground = True + adaptiveDetectionConfig.tempWideBackground.binSize = 512 + adaptiveDetectionConfig.thresholdPolarity = "both" + self.log.info("Using adaptive detection with thresholdValue = %.2f and multiplier = %.1f", + adaptiveDetectionConfig.thresholdValue, + adaptiveDetectionConfig.includeThresholdMultiplier) + adaptiveDetectionTask = SourceDetectionTask(config=adaptiveDetectionConfig) + + maxNumNegFactor = 1.0 + while inAdaptiveDetection: + inAdaptiveDetection = False + nAdaptiveDetIter += 1 + detRes = adaptiveDetectionTask.run(table=table, exposure=exposure, doSmooth=True) + sourceCat = detRes.sources + nFootprint = len(sourceCat) + nPeak = 0 + nPosPeak = detRes.numPosPeaks + nNegPeak = detRes.numNegPeaks # Often detected in high nebulosity scenes. 
+ maxNumNegPeak = max(15, int(0.025*nPosPeak))*maxNumNegFactor + nIsolated = 0 + nPeakPerSrcMax = 0 + maxNumPeakPerSrcMax = 0.2*maxNumPeak + for src in sourceCat: + nPeakSrc = len(src.getFootprint().getPeaks()) + if nPeakSrc == 1: + nIsolated += 1 + nPeak += nPeakSrc + nPeakPerSrcMax = max(nPeakPerSrcMax, nPeakSrc) + if nFootprint > 0.0: + fractionIsolated = nIsolated/nFootprint + avgPeakPerFoot = nPeak/nFootprint + else: + fractionIsolated = float("nan") + avgPeakPerFoot = float("nan") + self.log.info("In adaptive detection iter %d: nFootprints = %d, nPosPeak = %d (max is %d), " + "nNegPeak = %d (max is %d), nPeak/nFoot = %.1f (max is %.1f), " + "nPeakPerkSrcMax = %d (max is %d), nIsolated = %d, fractionIsolated = %.2f", + nAdaptiveDetIter, nFootprint, nPeak, maxNumPeak, nNegPeak, maxNumNegPeak, + avgPeakPerFoot, self.config.maxPeakToFootRatio, nPeakPerSrcMax, + maxNumPeakPerSrcMax, nIsolated, fractionIsolated) + if (nIsolated > self.config.sufficientIsolated + and fractionIsolated > self.config.sufficientFractionIsolated + and (nAdaptiveDetIter > 1 or self.config.maxAdaptiveDetIter == 1)): + if ((nIsolated > 5.0*self.config.sufficientIsolated and nPeak < 2.5*maxNumPeak + and nNegPeak < 100.0*maxNumNegPeak) + or (nNegPeak < 5.0*maxNumNegPeak and nPeak < 1.2*maxNumPeak)): + self.log.info("Sufficient isolated footprints (%d > %d) and fraction of isolated " + "footprints (%.2f > %.2f) for PSF modeling. Exiting adaptive detection " + "at iter: %d.", + nIsolated, self.config.sufficientIsolated, fractionIsolated, + self.config.sufficientFractionIsolated, nAdaptiveDetIter) + inAdaptiveDetection = False + continue + + if nFootprint == 0 or nPosPeak == 0: + thresholdFactor = 0.25 + maxNumNegFactor *= 10 + self.log.warning("Adaptive threshold increase went too far (nFootprint = 0). 
" + "Decreasing threshold to %.2f and rerunning.", + thresholdFactor*adaptiveDetectionConfig.thresholdValue) + adaptiveDetectionConfig.thresholdValue = ( + thresholdFactor*adaptiveDetectionConfig.thresholdValue) + adaptiveDetectionTask = SourceDetectionTask(config=adaptiveDetectionConfig) + inAdaptiveDetection = False if nAdaptiveDetIter >= self.config.maxAdaptiveDetIter else True + continue + + if ((nPeak/nFootprint > self.config.maxPeakToFootRatio and nIsolated < self.config.minIsolated) + or nNegPeak > maxNumNegPeak): + if nNegPeak > 2*maxNumNegPeak: + thresholdFactor = 1.5 + else: + thresholdFactor = 1.25 + thresholdFactor *= adaptiveDetectionConfig.includeThresholdMultiplier + adaptiveDetectionConfig.includeThresholdMultiplier = 1.0 + self.log.warning("Adaptive detection iter %d: catalog had nPeak/nFootprint = " + "%.1f (max is %.1f) and %d negative peaks (max is %d). " + "Increasing threshold to %.2f and setting multiplier " + "to %.1f and rerunning.", + nAdaptiveDetIter, nPeak/nFootprint, self.config.maxPeakToFootRatio, + nNegPeak, maxNumNegPeak, + thresholdFactor*adaptiveDetectionConfig.thresholdValue, + adaptiveDetectionConfig.includeThresholdMultiplier) + adaptiveDetectionConfig.thresholdValue = ( + thresholdFactor*adaptiveDetectionConfig.thresholdValue) + adaptiveDetectionTask = SourceDetectionTask(config=adaptiveDetectionConfig) + inAdaptiveDetection = False if nAdaptiveDetIter >= self.config.maxAdaptiveDetIter else True + continue + + if (nPeak > maxNumPeak or nPeakPerSrcMax > maxNumPeakPerSrcMax + or nFootprint <= self.config.minFootprint): + if nPeak > maxNumPeak or nPeakPerSrcMax > 0.25*maxNumPeak: + if nAdaptiveDetIter < 0.5*self.config.maxAdaptiveDetIter: + if nPeak > 3*maxNumPeak or nPeakPerSrcMax > maxNumPeak: + thresholdFactor = 1.7 + elif nPeak > 2*maxNumPeak or nPeakPerSrcMax > 0.5*maxNumPeak: + thresholdFactor = 1.4 + else: + thresholdFactor = 1.2 + else: + thresholdFactor = 1.2 + thresholdFactor *= 
adaptiveDetectionConfig.includeThresholdMultiplier + newThresholdMultiplier = max(1.0, 0.5*adaptiveDetectionConfig.includeThresholdMultiplier) + adaptiveDetectionConfig.includeThresholdMultiplier = newThresholdMultiplier + adaptiveDetectionConfig.thresholdValue = ( + thresholdFactor*adaptiveDetectionConfig.thresholdValue) + self.log.warning("Adaptive detection iter %d catalog had nPeak = %d (max = %d) " + "and nPeakPerSrcMax = %d (max = %d). Increasing threshold to %.2f " + "and setting multiplier to %.1f and rerunning.", + nAdaptiveDetIter, nPeak, maxNumPeak, nPeakPerSrcMax, maxNumPeakPerSrcMax, + adaptiveDetectionConfig.thresholdValue, + adaptiveDetectionConfig.includeThresholdMultiplier) + adaptiveDetectionTask = SourceDetectionTask(config=adaptiveDetectionConfig) + inAdaptiveDetection = ( + False if nAdaptiveDetIter >= self.config.maxAdaptiveDetIter else True + ) + continue + + if nFootprint <= self.config.minFootprint: + maxNumNegFactor *= 10 # Allow more -ve detections at this point. + thresholdFactor = min(0.85, 0.4*np.log10(10*(nFootprint + 1))) + self.log.warning("Adaptive detection iter %d catalog had only %d footprints. 
" + "Lowering threshold to %.2f and increasing the allowance for " + "negative detections and rerunning.", + nAdaptiveDetIter, nFootprint, + thresholdFactor*adaptiveDetectionConfig.thresholdValue) + adaptiveDetectionTask = SourceDetectionTask(config=adaptiveDetectionConfig) + adaptiveDetectionConfig.thresholdValue = ( + thresholdFactor*adaptiveDetectionConfig.thresholdValue + ) + adaptiveDetectionTask = SourceDetectionTask(config=adaptiveDetectionConfig) + inAdaptiveDetection = True + else: + inAdaptiveDetection = False + if nAdaptiveDetIter >= self.config.maxAdaptiveDetIter: + inAdaptiveDetection = False + # Final round of detection with positive polarity + adaptiveDetectionConfig.thresholdPolarity = "positive" + if doReEstimageBackgroud: + adaptiveDetectionConfig.reEstimateBackground = True + adaptiveDetectionTask = SourceDetectionTask(config=adaptiveDetectionConfig) + self.log.info("Perfomring final round of detection with threshold %.2f and multiplier %.1f", + adaptiveDetectionConfig.thresholdValue, + adaptiveDetectionConfig.includeThresholdMultiplier) + detRes = adaptiveDetectionTask.run(table=table, exposure=exposure, doSmooth=True, + backgroundToPhotometricRatio=backgroundToPhotometricRatio) + return Struct( + detections=detRes, + thresholdValue=adaptiveDetectionConfig.thresholdValue, + includeThresholdMultiplier=adaptiveDetectionConfig.includeThresholdMultiplier, + ) diff --git a/python/lsst/meas/algorithms/dynamicDetection.py b/python/lsst/meas/algorithms/dynamicDetection.py index 47ed76f7d..27fc9f810 100644 --- a/python/lsst/meas/algorithms/dynamicDetection.py +++ b/python/lsst/meas/algorithms/dynamicDetection.py @@ -3,14 +3,11 @@ "DynamicDetectionConfig", "DynamicDetectionTask", "InsufficientSourcesError", - "AdaptiveThresholdDetectionConfig", - "AdaptiveThresholdDetectionTask", ] import numpy as np -from lsst.pex.config import Field, ConfigurableField, Config, DictField, FieldValidationError -import lsst.pipe.base as pipeBase +from lsst.pex.config 
import Field, ConfigurableField from .detection import SourceDetectionConfig, SourceDetectionTask from .skyObjects import SkyObjectsTask @@ -19,6 +16,7 @@ from lsst.afw.geom import makeCdMatrix, makeSkyWcs, SpanSet from lsst.afw.table import SourceCatalog, SourceTable from lsst.meas.base import ForcedMeasurementTask +from lsst.pipe.base import Struct import lsst.afw.image import lsst.afw.math @@ -321,7 +319,7 @@ def calculateThreshold(self, exposure, seed, sigma=None, minFractionSourcesFacto medianError = np.median(catalog["base_PsfFlux_instFluxErr"][good]) if wcsIsNone: exposure.setWcs(None) - return pipeBase.Struct(multiplicative=medianError/stdevMeas, additive=bgMedian) + return Struct(multiplicative=medianError/stdevMeas, additive=bgMedian) def detectFootprints(self, exposure, doSmooth=True, sigma=None, clearMask=True, expId=None, background=None, backgroundToPhotometricRatio=None): @@ -635,332 +633,6 @@ def _computeBrightDetectionMask(self, maskedImage, convolveResults): return brightDetectedMask -class AdaptiveThresholdDetectionConfig(Config): - """Configuration for AdaptiveThresholdDetectionTask - """ - maxAdaptiveDetIter = Field(dtype=int, default=20, - doc="Maximum number of adaptive threshold detection iterations.") - maxNumPeakPerBand = DictField( - keytype=str, - itemtype=int, - default={ - "u": 3000, - "g": 3000, - "r": 3000, - "i": 5000, - "z": 5000, - "y": 3000, - "fallback": 5000, - }, - doc="Maximum number of peaks per band. If the current band for the exposure " - "is not included as a key in this dict, the value associated with the " - "\"fallback\" key will be used.", - ) - minFootprint = Field(dtype=int, default=15, - doc="Minimum number of footprints considered sufficient to exit the " - "iteration loop. This should be larger than or equal to ``minIsolated`` as " - "it does not take into account whether any given footprint is multi-peaked " - "(i.e. 
blended thus not providing any isolated sources for PSF modeling.)") - maxPeakToFootRatio = Field(dtype=float, default=800.0, - doc="Maximum ratio of the number of peaks per footprint considered " - "sufficient to exit the iteration loop. This helps guard against " - "large contiguous footprints leaving no isolated sources for " - "inclusion in the PSF modeling.") - minIsolated = Field(dtype=int, default=6, - doc="Minimum number of single-peaked (i.e. isolated) footprints " - "considered sufficient to exit the iteration loop. This should be " - "larger than the minimum number of sources desired for PSF modeling " - "since some may be rejected by the source selector.") - sufficientIsolated = Field(dtype=int, default=100, - doc="Number of single-peaked (isolated) footprints considered " - "sufficient to exit the iteration loop. Must be larger than " - "``minIsolated``.") - sufficientFractionIsolated = Field(dtype=float, default=0.45, - doc="Fraction of single-peaked (isolated) footprints considered " - "sufficient to exit the iteration loop.") - - def validate(self): - super().validate() - if "fallback" not in self.maxNumPeakPerBand: - msg = ("Must include a \"fallback\" key in the config.maxNumPeakPerBand config dict. 
" - f"It is currenly: {self.maxNumPeakPerBand}.") - raise FieldValidationError(self.__class__.maxNumPeakPerBand, self, msg) - if self.minFootprint < self.minIsolated: - msg = (f"The config.minFootprint (= {self.minFootprint}) must be >= that of " - f"config.minIsolated (= {self.minIsolated}).") - raise FieldValidationError(self.__class__.minFootprint, self, msg) - if self.sufficientIsolated < self.minIsolated: - msg = (f"The config.sufficientIsolated (= {self.sufficientIsolated}) must be >= that of " - f"config.minIsolated (= {self.minIsolated}).") - raise FieldValidationError(self.__class__.sufficientIsolated, self, msg) - - -class AdaptiveThresholdDetectionTask(pipeBase.Task): - """Detection of sources on an image using an adaptive scheme for - the detection threshold. - """ - ConfigClass = AdaptiveThresholdDetectionConfig - _DefaultName = "adaptiveThresholdDetection" - - def __init__(self, *args, **kwargs): - pipeBase.Task.__init__(self, *args, **kwargs) - - def run(self, table, exposure, initialThreshold=None, initialThresholdMultiplier=2.0, - doReEstimageBackgroud=True, backgroundToPhotometricRatio=None): - """Perform detection with an adaptive threshold detection scheme - conditioned to maximize the likelihood of a succuessful PSF model fit - for any given "scene". - - In particular, we'd like to be able to handle different scenes, from - sparsely populated ones through very crowded ones, and possibily high - fill-factor nebulosity along the way, with a single pipeline (and set - of configs). This requires some flexibility in setting the detection - thresholds in order to detect enough sources suitable for PSF modeling - (e.g. crowded fields require higher thresholds to ensure the detections - don't end up overlapping into a single or very small number of blended - footprints. - - We first detect sources using the default threshold and multiplier. 
- Then, cycling through a series of criteria based on the DETECTED mask - planes (number of footprints, number of peaks, number of isolated - footprints, number of peaks-per-footrpint, etc.) conditioned to identify - a "Goldilocks Zone" where we have enough isolated peaks from which to - measure the PSF, we iterate while adjusting the detection thresholds - in the appropriate direction until all criteria are met (or the maximum - number of iterations is reached). - - Parameters - ---------- - table : `lsst.afw.table.SourceTable` - Table object that will be used to create the SourceCatalog. - exposure : `lsst.afw.image.Exposure` - Exposure to process; DETECTED mask plane will be set in-place. - initialThreshold : `float`, optional - Initial threshold for detection of PSF sources. - initialThresholdMultiplier : `float`, optional - Initial threshold for detection of PSF sources. - doReEstimageBackgroud: `bool`, optional - Re-estimate the background during the final detection stage? - backgroundToPhotometricRatio : `lsst.afw.image.Image`, optional - Image to convert photometric-flattened image to - background-flattened image if ``reEstimateBackground`` is `True` - and exposure has been photometric-flattened, and - ``config.doApplyFlatBackgroundRatio`` is `True`. - - Returns - ------- - results : `lsst.pipe.base.Struct` - The adaptive threshold detection results as a struct with - attributes: - - ``detections`` - Results of the final round of detection as a struch with - attributes: - - ``sources`` - Detected sources on the exposure - (`lsst.afw.table.SourceCatalog`). - ``positive`` - Positive polarity footprints - (`lsst.afw.detection.FootprintSet` or `None`). - ``negative`` - Negative polarity footprints - (`lsst.afw.detection.FootprintSet` or `None`). - ``numPos`` - Number of footprints in positive or 0 if detection polarity was - negative (`int`). - ``numNeg`` - Number of footprints in negative or 0 if detection polarity was - positive (`int`). 
- ``background`` - Re-estimated background. `None` if - ``reEstimateBackground==False`` - (`lsst.afw.math.BackgroundList`). - ``factor`` - Multiplication factor applied to the configured detection - threshold. (`float`). - ``thresholdValue`` - The final threshold value used to the configure the final round - of detection (`float`). - ``includeThresholdMultiplier`` - The final multiplication factor applied to the configured detection - threshold. (`float`). - """ - band = "fallback" - if exposure.filter is not None: - if exposure.filter.hasBandLabel(): - band = exposure.filter.bandLabel - if band in self.config.maxNumPeakPerBand: - maxNumPeak = self.config.maxNumPeakPerBand[band] - else: - maxNumPeak = self.config.maxNumPeakPerBand["fallback"] - - # Set up and configure the adaptive detection task on first iteration. - inAdaptiveDetection = True - nAdaptiveDetIter = 0 - thresholdFactor = 1.0 - if nAdaptiveDetIter == 0: - if initialThreshold is None: - maxSn = float(np.nanmax(exposure.image.array/np.sqrt(exposure.variance.array))) - adaptiveDetThreshold = min(maxSn, 5.0) - else: - adaptiveDetThreshold = initialThreshold - adaptiveDetectionConfig = SourceDetectionConfig() - adaptiveDetectionConfig.thresholdValue = adaptiveDetThreshold - adaptiveDetectionConfig.includeThresholdMultiplier = initialThresholdMultiplier - adaptiveDetectionConfig.reEstimateBackground = False - adaptiveDetectionConfig.doTempWideBackground = True - adaptiveDetectionConfig.tempWideBackground.binSize = 512 - adaptiveDetectionConfig.thresholdPolarity = "both" - self.log.info("Using adaptive detection with thresholdValue = %.2f and multiplier = %.1f", - adaptiveDetectionConfig.thresholdValue, - adaptiveDetectionConfig.includeThresholdMultiplier) - adaptiveDetectionTask = SourceDetectionTask(config=adaptiveDetectionConfig) - - maxNumNegFactor = 1.0 - while inAdaptiveDetection: - inAdaptiveDetection = False - nAdaptiveDetIter += 1 - detRes = adaptiveDetectionTask.run(table=table, exposure=exposure, 
doSmooth=True) - sourceCat = detRes.sources - nFootprint = len(sourceCat) - nPeak = 0 - nPosPeak = detRes.numPosPeaks - nNegPeak = detRes.numNegPeaks # Often detected in high nebulosity scenes. - maxNumNegPeak = max(15, int(0.025*nPosPeak))*maxNumNegFactor - nIsolated = 0 - nPeakPerSrcMax = 0 - maxNumPeakPerSrcMax = 0.2*maxNumPeak - for src in sourceCat: - nPeakSrc = len(src.getFootprint().getPeaks()) - if nPeakSrc == 1: - nIsolated += 1 - nPeak += nPeakSrc - nPeakPerSrcMax = max(nPeakPerSrcMax, nPeakSrc) - if nFootprint > 0.0: - fractionIsolated = nIsolated/nFootprint - avgPeakPerFoot = nPeak/nFootprint - else: - fractionIsolated = float("nan") - avgPeakPerFoot = float("nan") - self.log.info("In adaptive detection iter %d: nFootprints = %d, nPosPeak = %d (max is %d), " - "nNegPeak = %d (max is %d), nPeak/nFoot = %.1f (max is %.1f), " - "nPeakPerkSrcMax = %d (max is %d), nIsolated = %d, fractionIsolated = %.2f", - nAdaptiveDetIter, nFootprint, nPeak, maxNumPeak, nNegPeak, maxNumNegPeak, - avgPeakPerFoot, self.config.maxPeakToFootRatio, nPeakPerSrcMax, - maxNumPeakPerSrcMax, nIsolated, fractionIsolated) - if (nIsolated > self.config.sufficientIsolated - and fractionIsolated > self.config.sufficientFractionIsolated - and (nAdaptiveDetIter > 1 or self.config.maxAdaptiveDetIter == 1)): - if ((nIsolated > 5.0*self.config.sufficientIsolated and nPeak < 2.5*maxNumPeak - and nNegPeak < 100.0*maxNumNegPeak) - or (nNegPeak < 5.0*maxNumNegPeak and nPeak < 1.2*maxNumPeak)): - self.log.info("Sufficient isolated footprints (%d > %d) and fraction of isolated " - "footprints (%.2f > %.2f) for PSF modeling. Exiting adaptive detection " - "at iter: %d.", - nIsolated, self.config.sufficientIsolated, fractionIsolated, - self.config.sufficientFractionIsolated, nAdaptiveDetIter) - inAdaptiveDetection = False - continue - - if nFootprint == 0 or nPosPeak == 0: - thresholdFactor = 0.25 - maxNumNegFactor *= 10 - self.log.warning("Adaptive threshold increase went too far (nFootprint = 0). 
" - "Decreasing threshold to %.2f and rerunning.", - thresholdFactor*adaptiveDetectionConfig.thresholdValue) - adaptiveDetectionConfig.thresholdValue = ( - thresholdFactor*adaptiveDetectionConfig.thresholdValue) - adaptiveDetectionTask = SourceDetectionTask(config=adaptiveDetectionConfig) - inAdaptiveDetection = False if nAdaptiveDetIter >= self.config.maxAdaptiveDetIter else True - continue - - if ((nPeak/nFootprint > self.config.maxPeakToFootRatio and nIsolated < self.config.minIsolated) - or nNegPeak > maxNumNegPeak): - if nNegPeak > 2*maxNumNegPeak: - thresholdFactor = 1.5 - else: - thresholdFactor = 1.25 - thresholdFactor *= adaptiveDetectionConfig.includeThresholdMultiplier - adaptiveDetectionConfig.includeThresholdMultiplier = 1.0 - self.log.warning("Adaptive detection iter %d: catalog had nPeak/nFootprint = " - "%.1f (max is %.1f) and %d negative peaks (max is %d). " - "Increasing threshold to %.2f and setting multiplier " - "to %.1f and rerunning.", - nAdaptiveDetIter, nPeak/nFootprint, self.config.maxPeakToFootRatio, - nNegPeak, maxNumNegPeak, - thresholdFactor*adaptiveDetectionConfig.thresholdValue, - adaptiveDetectionConfig.includeThresholdMultiplier) - adaptiveDetectionConfig.thresholdValue = ( - thresholdFactor*adaptiveDetectionConfig.thresholdValue) - adaptiveDetectionTask = SourceDetectionTask(config=adaptiveDetectionConfig) - inAdaptiveDetection = False if nAdaptiveDetIter >= self.config.maxAdaptiveDetIter else True - continue - - if (nPeak > maxNumPeak or nPeakPerSrcMax > maxNumPeakPerSrcMax - or nFootprint <= self.config.minFootprint): - if nPeak > maxNumPeak or nPeakPerSrcMax > 0.25*maxNumPeak: - if nAdaptiveDetIter < 0.5*self.config.maxAdaptiveDetIter: - if nPeak > 3*maxNumPeak or nPeakPerSrcMax > maxNumPeak: - thresholdFactor = 1.7 - elif nPeak > 2*maxNumPeak or nPeakPerSrcMax > 0.5*maxNumPeak: - thresholdFactor = 1.4 - else: - thresholdFactor = 1.2 - else: - thresholdFactor = 1.2 - thresholdFactor *= 
adaptiveDetectionConfig.includeThresholdMultiplier - newThresholdMultiplier = max(1.0, 0.5*adaptiveDetectionConfig.includeThresholdMultiplier) - adaptiveDetectionConfig.includeThresholdMultiplier = newThresholdMultiplier - adaptiveDetectionConfig.thresholdValue = ( - thresholdFactor*adaptiveDetectionConfig.thresholdValue) - self.log.warning("Adaptive detection iter %d catalog had nPeak = %d (max = %d) " - "and nPeakPerSrcMax = %d (max = %d). Increasing threshold to %.2f " - "and setting multiplier to %.1f and rerunning.", - nAdaptiveDetIter, nPeak, maxNumPeak, nPeakPerSrcMax, maxNumPeakPerSrcMax, - adaptiveDetectionConfig.thresholdValue, - adaptiveDetectionConfig.includeThresholdMultiplier) - adaptiveDetectionTask = SourceDetectionTask(config=adaptiveDetectionConfig) - inAdaptiveDetection = ( - False if nAdaptiveDetIter >= self.config.maxAdaptiveDetIter else True - ) - continue - - if nFootprint <= self.config.minFootprint: - maxNumNegFactor *= 10 # Allow more -ve detections at this point. - thresholdFactor = min(0.85, 0.4*np.log10(10*(nFootprint + 1))) - self.log.warning("Adaptive detection iter %d catalog had only %d footprints. 
" - "Lowering threshold to %.2f and increasing the allowance for " - "negative detections and rerunning.", - nAdaptiveDetIter, nFootprint, - thresholdFactor*adaptiveDetectionConfig.thresholdValue) - adaptiveDetectionTask = SourceDetectionTask(config=adaptiveDetectionConfig) - adaptiveDetectionConfig.thresholdValue = ( - thresholdFactor*adaptiveDetectionConfig.thresholdValue - ) - adaptiveDetectionTask = SourceDetectionTask(config=adaptiveDetectionConfig) - inAdaptiveDetection = True - else: - inAdaptiveDetection = False - if nAdaptiveDetIter >= self.config.maxAdaptiveDetIter: - inAdaptiveDetection = False - # Final round of detection with positive polarity - adaptiveDetectionConfig.thresholdPolarity = "positive" - if doReEstimageBackgroud: - adaptiveDetectionConfig.reEstimateBackground = True - adaptiveDetectionTask = SourceDetectionTask(config=adaptiveDetectionConfig) - self.log.info("Perfomring final round of detection with threshold %.2f and multiplier %.1f", - adaptiveDetectionConfig.thresholdValue, - adaptiveDetectionConfig.includeThresholdMultiplier) - detRes = adaptiveDetectionTask.run(table=table, exposure=exposure, doSmooth=True, - backgroundToPhotometricRatio=backgroundToPhotometricRatio) - return pipeBase.Struct( - detections=detRes, - thresholdValue=adaptiveDetectionConfig.thresholdValue, - includeThresholdMultiplier=adaptiveDetectionConfig.includeThresholdMultiplier, - ) - - def countMaskedPixels(maskedIm, maskPlane): """Count the number of pixels in a given mask plane. 
diff --git a/tests/test_adaptiveThresholdDetection.py b/tests/test_adaptiveThresholdDetection.py index bdae32bb1..9812ba752 100644 --- a/tests/test_adaptiveThresholdDetection.py +++ b/tests/test_adaptiveThresholdDetection.py @@ -5,7 +5,7 @@ from lsst.afw.geom import makeCdMatrix, makeSkyWcs from lsst.afw.table import SourceTable from lsst.geom import Box2I, Extent2I, Point2D, Point2I, SpherePoint, degrees -from lsst.meas.algorithms import AdaptiveThresholdDetectionTask +from lsst.meas.algorithms.adaptive_thresholds import AdaptiveThresholdDetectionTask from lsst.meas.algorithms.testUtils import plantSources From 7e30cc65608317eb2dd3608c5f367a4b6ea49360 Mon Sep 17 00:00:00 2001 From: Jim Bosch Date: Fri, 31 Oct 2025 10:58:39 -0400 Subject: [PATCH 11/17] Fix some typos. --- python/lsst/meas/algorithms/adaptive_thresholds.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/python/lsst/meas/algorithms/adaptive_thresholds.py b/python/lsst/meas/algorithms/adaptive_thresholds.py index da3505cb5..93f76dff7 100644 --- a/python/lsst/meas/algorithms/adaptive_thresholds.py +++ b/python/lsst/meas/algorithms/adaptive_thresholds.py @@ -105,13 +105,13 @@ def __init__(self, *args, **kwargs): Task.__init__(self, *args, **kwargs) def run(self, table, exposure, initialThreshold=None, initialThresholdMultiplier=2.0, - doReEstimageBackgroud=True, backgroundToPhotometricRatio=None): + doReEstimateBackground=True, backgroundToPhotometricRatio=None): """Perform detection with an adaptive threshold detection scheme - conditioned to maximize the likelihood of a succuessful PSF model fit + conditioned to maximize the likelihood of a successful PSF model fit for any given "scene". 
In particular, we'd like to be able to handle different scenes, from - sparsely populated ones through very crowded ones, and possibily high + sparsely populated ones through very crowded ones, and possibly high fill-factor nebulosity along the way, with a single pipeline (and set of configs). This requires some flexibility in setting the detection thresholds in order to detect enough sources suitable for PSF modeling @@ -138,7 +138,7 @@ def run(self, table, exposure, initialThreshold=None, initialThresholdMultiplier Initial threshold for detection of PSF sources. initialThresholdMultiplier : `float`, optional Initial threshold for detection of PSF sources. - doReEstimageBackgroud: `bool`, optional + doReEstimateBackground: `bool`, optional Re-estimate the background during the final detection stage? backgroundToPhotometricRatio : `lsst.afw.image.Image`, optional Image to convert photometric-flattened image to @@ -345,7 +345,7 @@ def run(self, table, exposure, initialThreshold=None, initialThresholdMultiplier inAdaptiveDetection = False # Final round of detection with positive polarity adaptiveDetectionConfig.thresholdPolarity = "positive" - if doReEstimageBackgroud: + if doReEstimateBackground: adaptiveDetectionConfig.reEstimateBackground = True adaptiveDetectionTask = SourceDetectionTask(config=adaptiveDetectionConfig) self.log.info("Perfomring final round of detection with threshold %.2f and multiplier %.1f", From 1bebe17696db59128aa49268621c948488f98e35 Mon Sep 17 00:00:00 2001 From: Jim Bosch Date: Fri, 31 Oct 2025 11:06:23 -0400 Subject: [PATCH 12/17] Drop pre-deprecated testing config options. 
--- python/lsst/meas/algorithms/adaptive_thresholds.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/python/lsst/meas/algorithms/adaptive_thresholds.py b/python/lsst/meas/algorithms/adaptive_thresholds.py index 93f76dff7..841930f09 100644 --- a/python/lsst/meas/algorithms/adaptive_thresholds.py +++ b/python/lsst/meas/algorithms/adaptive_thresholds.py @@ -37,8 +37,6 @@ class AdaptiveThresholdDetectionConfig(Config): """ maxAdaptiveDetIter = Field(dtype=int, default=20, doc="Maximum number of adaptive threshold detection iterations.") - maxDynDetIter = Field(dtype=int, default=20, - doc="DEPRECATED...just including for config reading of prev runs...") maxNumPeakPerBand = DictField( keytype=str, itemtype=int, From 7002cb2adeb02acf890f7ef76195a02b35053af8 Mon Sep 17 00:00:00 2001 From: Jim Bosch Date: Fri, 31 Oct 2025 11:09:27 -0400 Subject: [PATCH 13/17] Drop support for bg reestimation in adaptive det task. --- .../meas/algorithms/adaptive_thresholds.py | 19 ++++--------------- 1 file changed, 4 insertions(+), 15 deletions(-) diff --git a/python/lsst/meas/algorithms/adaptive_thresholds.py b/python/lsst/meas/algorithms/adaptive_thresholds.py index 841930f09..e77f701b9 100644 --- a/python/lsst/meas/algorithms/adaptive_thresholds.py +++ b/python/lsst/meas/algorithms/adaptive_thresholds.py @@ -102,8 +102,7 @@ class AdaptiveThresholdDetectionTask(Task): def __init__(self, *args, **kwargs): Task.__init__(self, *args, **kwargs) - def run(self, table, exposure, initialThreshold=None, initialThresholdMultiplier=2.0, - doReEstimateBackground=True, backgroundToPhotometricRatio=None): + def run(self, table, exposure, initialThreshold=None, initialThresholdMultiplier=2.0): """Perform detection with an adaptive threshold detection scheme conditioned to maximize the likelihood of a successful PSF model fit for any given "scene". @@ -136,13 +135,6 @@ def run(self, table, exposure, initialThreshold=None, initialThresholdMultiplier Initial threshold for detection of PSF sources. 
initialThresholdMultiplier : `float`, optional Initial threshold for detection of PSF sources. - doReEstimateBackground: `bool`, optional - Re-estimate the background during the final detection stage? - backgroundToPhotometricRatio : `lsst.afw.image.Image`, optional - Image to convert photometric-flattened image to - background-flattened image if ``reEstimateBackground`` is `True` - and exposure has been photometric-flattened, and - ``config.doApplyFlatBackgroundRatio`` is `True`. Returns ------- @@ -170,9 +162,8 @@ def run(self, table, exposure, initialThreshold=None, initialThresholdMultiplier Number of footprints in negative or 0 if detection polarity was positive (`int`). ``background`` - Re-estimated background. `None` if - ``reEstimateBackground==False`` - (`lsst.afw.math.BackgroundList`). + Always `None`; provided for compatibility with + `SourceDetectionTask`. ``factor`` Multiplication factor applied to the configured detection threshold. (`float`). @@ -343,14 +334,12 @@ def run(self, table, exposure, initialThreshold=None, initialThresholdMultiplier inAdaptiveDetection = False # Final round of detection with positive polarity adaptiveDetectionConfig.thresholdPolarity = "positive" - if doReEstimateBackground: - adaptiveDetectionConfig.reEstimateBackground = True adaptiveDetectionTask = SourceDetectionTask(config=adaptiveDetectionConfig) self.log.info("Perfomring final round of detection with threshold %.2f and multiplier %.1f", adaptiveDetectionConfig.thresholdValue, adaptiveDetectionConfig.includeThresholdMultiplier) detRes = adaptiveDetectionTask.run(table=table, exposure=exposure, doSmooth=True, - backgroundToPhotometricRatio=backgroundToPhotometricRatio) + backgroundToPhotometricRatio=None) return Struct( detections=detRes, thresholdValue=adaptiveDetectionConfig.thresholdValue, From 7a03e69dce5fe8148c6d201232316e67862edb54 Mon Sep 17 00:00:00 2001 From: Jim Bosch Date: Sat, 1 Nov 2025 08:40:16 -0400 Subject: [PATCH 14/17] Make AdaptiveThresholdDetection 
compatible with SourceDetectionTask. ...at least as far as the usage in CalibrateImageTask. This includes: - Nesting the baseline detection configuration. - Modifying the run arguments to accept only those expected by SourceDetectionTask.run (possibly since the initial threshold and multiplier are now nested in that baseline configuration). - Modifying the result struct to add new attributes to the struct returned by SourceDetectionTask instead of nesting that within a new struct. --- .../meas/algorithms/adaptive_thresholds.py | 107 +++++++----------- tests/test_adaptiveThresholdDetection.py | 6 +- 2 files changed, 49 insertions(+), 64 deletions(-) diff --git a/python/lsst/meas/algorithms/adaptive_thresholds.py b/python/lsst/meas/algorithms/adaptive_thresholds.py index e77f701b9..94070528d 100644 --- a/python/lsst/meas/algorithms/adaptive_thresholds.py +++ b/python/lsst/meas/algorithms/adaptive_thresholds.py @@ -26,8 +26,8 @@ import numpy as np -from lsst.pex.config import Field, Config, DictField, FieldValidationError -from lsst.pipe.base import Struct, Task +from lsst.pex.config import Field, Config, ConfigField, DictField, FieldValidationError +from lsst.pipe.base import Task from .detection import SourceDetectionConfig, SourceDetectionTask @@ -75,12 +75,25 @@ class AdaptiveThresholdDetectionConfig(Config): sufficientFractionIsolated = Field(dtype=float, default=0.45, doc="Fraction of single-peaked (isolated) footprints considered " "sufficient to exit the iteration loop.") + baseline = ConfigField( + "Baseline configuration for SourceDetectionTask in the absence of any iteration. 
" + "All options other than thresholdPolarity, thresholdValue, and includeThresholdMultiplier " + "are held fixed at these values.", + SourceDetectionConfig, + ) + + def setDefaults(self): + self.baseline.reEstimateBackground = False + self.baseline.doTempWideBackground = True + self.baseline.tempWideBackground.binSize = 512 + self.baseline.thresholdPolarity = "positive" # for schema and final run. + self.baseline.includeThresholdMultiplier = 2.0 def validate(self): super().validate() if "fallback" not in self.maxNumPeakPerBand: msg = ("Must include a \"fallback\" key in the config.maxNumPeakPerBand config dict. " - f"It is currenly: {self.maxNumPeakPerBand}.") + f"It is currently: {self.maxNumPeakPerBand}.") raise FieldValidationError(self.__class__.maxNumPeakPerBand, self, msg) if self.minFootprint < self.minIsolated: msg = (f"The config.minFootprint (= {self.minFootprint}) must be >= that of " @@ -90,6 +103,11 @@ def validate(self): msg = (f"The config.sufficientIsolated (= {self.sufficientIsolated}) must be >= that of " f"config.minIsolated (= {self.minIsolated}).") raise FieldValidationError(self.__class__.sufficientIsolated, self, msg) + if self.baseline.reEstimateBackground: + raise FieldValidationError( + self.__class__.baseline, self, + "Baseline detection configuration must not include background re-estimation." + ) class AdaptiveThresholdDetectionTask(Task): @@ -97,12 +115,15 @@ class AdaptiveThresholdDetectionTask(Task): the detection threshold. """ ConfigClass = AdaptiveThresholdDetectionConfig - _DefaultName = "adaptiveThresholdDetection" + _DefaultName = "detection" - def __init__(self, *args, **kwargs): - Task.__init__(self, *args, **kwargs) + def __init__(self, schema=None, **kwargs): + super().__init__(**kwargs) + # We make a baseline SourceDetectionTask only to set up the schema. 
+ if schema is not None: + SourceDetectionTask(config=self.config.baseline, schema=schema) - def run(self, table, exposure, initialThreshold=None, initialThresholdMultiplier=2.0): + def run(self, table, exposure, **kwargs): """Perform detection with an adaptive threshold detection scheme conditioned to maximize the likelihood of a successful PSF model fit for any given "scene". @@ -131,42 +152,15 @@ def run(self, table, exposure, initialThreshold=None, initialThresholdMultiplier Table object that will be used to create the SourceCatalog. exposure : `lsst.afw.image.Exposure` Exposure to process; DETECTED mask plane will be set in-place. - initialThreshold : `float`, optional - Initial threshold for detection of PSF sources. - initialThresholdMultiplier : `float`, optional - Initial threshold for detection of PSF sources. + **kwargs + Forwarded to internal runs of `SourceDetectionTask`. Returns ------- results : `lsst.pipe.base.Struct` - The adaptive threshold detection results as a struct with - attributes: + The adaptive threshold detection results. Most fields are directly + produced by `SourceDetectionTask.run`. Additional fields include: - ``detections`` - Results of the final round of detection as a struch with - attributes: - - ``sources`` - Detected sources on the exposure - (`lsst.afw.table.SourceCatalog`). - ``positive`` - Positive polarity footprints - (`lsst.afw.detection.FootprintSet` or `None`). - ``negative`` - Negative polarity footprints - (`lsst.afw.detection.FootprintSet` or `None`). - ``numPos`` - Number of footprints in positive or 0 if detection polarity was - negative (`int`). - ``numNeg`` - Number of footprints in negative or 0 if detection polarity was - positive (`int`). - ``background`` - Always `None`; provided for compatibility with - `SourceDetectionTask`. - ``factor`` - Multiplication factor applied to the configured detection - threshold. (`float`). 
``thresholdValue`` The final threshold value used to the configure the final round of detection (`float`). @@ -187,29 +181,18 @@ def run(self, table, exposure, initialThreshold=None, initialThresholdMultiplier inAdaptiveDetection = True nAdaptiveDetIter = 0 thresholdFactor = 1.0 - if nAdaptiveDetIter == 0: - if initialThreshold is None: - maxSn = float(np.nanmax(exposure.image.array/np.sqrt(exposure.variance.array))) - adaptiveDetThreshold = min(maxSn, 5.0) - else: - adaptiveDetThreshold = initialThreshold - adaptiveDetectionConfig = SourceDetectionConfig() - adaptiveDetectionConfig.thresholdValue = adaptiveDetThreshold - adaptiveDetectionConfig.includeThresholdMultiplier = initialThresholdMultiplier - adaptiveDetectionConfig.reEstimateBackground = False - adaptiveDetectionConfig.doTempWideBackground = True - adaptiveDetectionConfig.tempWideBackground.binSize = 512 - adaptiveDetectionConfig.thresholdPolarity = "both" - self.log.info("Using adaptive detection with thresholdValue = %.2f and multiplier = %.1f", - adaptiveDetectionConfig.thresholdValue, - adaptiveDetectionConfig.includeThresholdMultiplier) - adaptiveDetectionTask = SourceDetectionTask(config=adaptiveDetectionConfig) + adaptiveDetectionConfig = self.config.baseline.copy() + adaptiveDetectionConfig.thresholdPolarity = "both" + self.log.info("Using adaptive detection with thresholdValue = %.2f and multiplier = %.1f", + adaptiveDetectionConfig.thresholdValue, + adaptiveDetectionConfig.includeThresholdMultiplier) + adaptiveDetectionTask = SourceDetectionTask(config=adaptiveDetectionConfig) maxNumNegFactor = 1.0 while inAdaptiveDetection: inAdaptiveDetection = False nAdaptiveDetIter += 1 - detRes = adaptiveDetectionTask.run(table=table, exposure=exposure, doSmooth=True) + detRes = adaptiveDetectionTask.run(table=table, exposure=exposure, doSmooth=True, **kwargs) sourceCat = detRes.sources nFootprint = len(sourceCat) nPeak = 0 @@ -338,10 +321,8 @@ def run(self, table, exposure, initialThreshold=None, 
initialThresholdMultiplier self.log.info("Perfomring final round of detection with threshold %.2f and multiplier %.1f", adaptiveDetectionConfig.thresholdValue, adaptiveDetectionConfig.includeThresholdMultiplier) - detRes = adaptiveDetectionTask.run(table=table, exposure=exposure, doSmooth=True, - backgroundToPhotometricRatio=None) - return Struct( - detections=detRes, - thresholdValue=adaptiveDetectionConfig.thresholdValue, - includeThresholdMultiplier=adaptiveDetectionConfig.includeThresholdMultiplier, - ) + detections = adaptiveDetectionTask.run(table=table, exposure=exposure, doSmooth=True, **kwargs) + detections.thresholdValue = adaptiveDetectionConfig.thresholdValue + detections.includeThresholdMultiplier = adaptiveDetectionConfig.includeThresholdMultiplier + return detections + diff --git a/tests/test_adaptiveThresholdDetection.py b/tests/test_adaptiveThresholdDetection.py index 9812ba752..69c2e53f1 100644 --- a/tests/test_adaptiveThresholdDetection.py +++ b/tests/test_adaptiveThresholdDetection.py @@ -44,10 +44,14 @@ def tearDown(self): del self.config def check(self, expectFactor, initialThreshold=None): + if initialThreshold is None: + maxSn = float(np.nanmax(self.exposure.image.array/np.sqrt(self.exposure.variance.array))) + initialThreshold = min(maxSn, 5.0) schema = SourceTable.makeMinimalSchema() table = SourceTable.make(schema) + self.config.baseline.thresholdValue = initialThreshold task = AdaptiveThresholdDetectionTask(config=self.config) - results = task.run(table, self.exposure, initialThreshold=initialThreshold) + results = task.run(table, self.exposure) self.assertFloatsAlmostEqual(results.thresholdValue, expectFactor, rtol=self.rtol) def testUncrowded(self): From be4be725cdeb2dffd311b4a719cb52bd22e62462 Mon Sep 17 00:00:00 2001 From: Jim Bosch Date: Sun, 2 Nov 2025 10:58:45 -0500 Subject: [PATCH 15/17] Simplify adaptive threshold iteration tracking. This should not change the behavior at all. 
--- .../meas/algorithms/adaptive_thresholds.py | 20 ++++--------------- 1 file changed, 4 insertions(+), 16 deletions(-) diff --git a/python/lsst/meas/algorithms/adaptive_thresholds.py b/python/lsst/meas/algorithms/adaptive_thresholds.py index 94070528d..2454aee71 100644 --- a/python/lsst/meas/algorithms/adaptive_thresholds.py +++ b/python/lsst/meas/algorithms/adaptive_thresholds.py @@ -178,8 +178,6 @@ def run(self, table, exposure, **kwargs): maxNumPeak = self.config.maxNumPeakPerBand["fallback"] # Set up and configure the adaptive detection task on first iteration. - inAdaptiveDetection = True - nAdaptiveDetIter = 0 thresholdFactor = 1.0 adaptiveDetectionConfig = self.config.baseline.copy() adaptiveDetectionConfig.thresholdPolarity = "both" @@ -189,9 +187,8 @@ def run(self, table, exposure, **kwargs): adaptiveDetectionTask = SourceDetectionTask(config=adaptiveDetectionConfig) maxNumNegFactor = 1.0 - while inAdaptiveDetection: - inAdaptiveDetection = False - nAdaptiveDetIter += 1 + # We use a 1-indexed iteration variable just to make logs intuitive. 
+ for nAdaptiveDetIter in range(1, self.config.maxAdaptiveDetIter + 1): detRes = adaptiveDetectionTask.run(table=table, exposure=exposure, doSmooth=True, **kwargs) sourceCat = detRes.sources nFootprint = len(sourceCat) @@ -231,8 +228,7 @@ def run(self, table, exposure, **kwargs): "at iter: %d.", nIsolated, self.config.sufficientIsolated, fractionIsolated, self.config.sufficientFractionIsolated, nAdaptiveDetIter) - inAdaptiveDetection = False - continue + break if nFootprint == 0 or nPosPeak == 0: thresholdFactor = 0.25 @@ -243,7 +239,6 @@ def run(self, table, exposure, **kwargs): adaptiveDetectionConfig.thresholdValue = ( thresholdFactor*adaptiveDetectionConfig.thresholdValue) adaptiveDetectionTask = SourceDetectionTask(config=adaptiveDetectionConfig) - inAdaptiveDetection = False if nAdaptiveDetIter >= self.config.maxAdaptiveDetIter else True continue if ((nPeak/nFootprint > self.config.maxPeakToFootRatio and nIsolated < self.config.minIsolated) @@ -265,7 +260,6 @@ def run(self, table, exposure, **kwargs): adaptiveDetectionConfig.thresholdValue = ( thresholdFactor*adaptiveDetectionConfig.thresholdValue) adaptiveDetectionTask = SourceDetectionTask(config=adaptiveDetectionConfig) - inAdaptiveDetection = False if nAdaptiveDetIter >= self.config.maxAdaptiveDetIter else True continue if (nPeak > maxNumPeak or nPeakPerSrcMax > maxNumPeakPerSrcMax @@ -292,9 +286,6 @@ def run(self, table, exposure, **kwargs): adaptiveDetectionConfig.thresholdValue, adaptiveDetectionConfig.includeThresholdMultiplier) adaptiveDetectionTask = SourceDetectionTask(config=adaptiveDetectionConfig) - inAdaptiveDetection = ( - False if nAdaptiveDetIter >= self.config.maxAdaptiveDetIter else True - ) continue if nFootprint <= self.config.minFootprint: @@ -310,11 +301,8 @@ def run(self, table, exposure, **kwargs): thresholdFactor*adaptiveDetectionConfig.thresholdValue ) adaptiveDetectionTask = SourceDetectionTask(config=adaptiveDetectionConfig) - inAdaptiveDetection = True else: - 
inAdaptiveDetection = False - if nAdaptiveDetIter >= self.config.maxAdaptiveDetIter: - inAdaptiveDetection = False + break # Final round of detection with positive polarity adaptiveDetectionConfig.thresholdPolarity = "positive" adaptiveDetectionTask = SourceDetectionTask(config=adaptiveDetectionConfig) From 1f35effdb00b3782daaed992dfe5a40c7434c6b3 Mon Sep 17 00:00:00 2001 From: Jim Bosch Date: Mon, 3 Nov 2025 09:57:50 -0500 Subject: [PATCH 16/17] Move adaptive background subtraction to new task here. --- .../meas/algorithms/adaptive_thresholds.py | 265 +++++++++++++++++- 1 file changed, 264 insertions(+), 1 deletion(-) diff --git a/python/lsst/meas/algorithms/adaptive_thresholds.py b/python/lsst/meas/algorithms/adaptive_thresholds.py index 2454aee71..bd2d4b1bd 100644 --- a/python/lsst/meas/algorithms/adaptive_thresholds.py +++ b/python/lsst/meas/algorithms/adaptive_thresholds.py @@ -22,14 +22,22 @@ __all__ = [ "AdaptiveThresholdDetectionConfig", "AdaptiveThresholdDetectionTask", + "AdaptiveThresholdBackgroundConfig", + "AdaptiveThresholdBackgroundTask", ] +from contextlib import contextmanager + import numpy as np -from lsst.pex.config import Field, Config, ConfigField, DictField, FieldValidationError +from lsst.pex.config import Field, Config, ConfigField, DictField, FieldValidationError, ListField from lsst.pipe.base import Task +from lsst.afw.geom import SpanSet +from lsst.afw.image import Mask +from lsst.afw.math import BackgroundList from .detection import SourceDetectionConfig, SourceDetectionTask +from .subtractBackground import SubtractBackgroundConfig, SubtractBackgroundTask class AdaptiveThresholdDetectionConfig(Config): @@ -314,3 +322,258 @@ def run(self, table, exposure, **kwargs): detections.includeThresholdMultiplier = adaptiveDetectionConfig.includeThresholdMultiplier return detections + + +class AdaptiveThresholdBackgroundConfig(SubtractBackgroundConfig): + detectedFractionBadMaskPlanes = ListField( + "Mask planes to ignore when computing the 
detected fraction.", dtype=str, + default=["BAD", "EDGE", "NO_DATA"] + ) + minDetFracForFinalBg = Field( + "Minimum detected fraction for the final background.", + dtype=float, default=0.02 + ) + maxDetFracForFinalBg = Field( + "Maximum detected fraction for the final background.", + dtype=float, default=0.93 + ) + + +class AdaptiveThresholdBackgroundTask(SubtractBackgroundTask): + """A background subtraction task that does its own masking of detected + sources, using an adaptive scheme that iterates until bounds on the mask + fraction are satisfied. + + Notes + ----- + This task is only designed for use on detector images, as it is aware of + amplifier geometry (to deal with the fact that some amps have much higher + noise than others, and hence very different detected-mask fractions for the + same detection threshold. + """ + + ConfigClass = AdaptiveThresholdBackgroundConfig + _DETECTED_MASK_PLANES = ("DETECTED", "DETECTED_NEGATIVE") + + def run(self, exposure, background=None, stats=True, statsKeys=None, backgroundToPhotometricRatio=None): + # Restore the previously measured background and remeasure it + # using an adaptive threshold detection iteration to ensure a + # "Goldilocks Zone" for the fraction of detected pixels. + if not background: + background = BackgroundList() + median_background = 0.0 + else: + median_background = np.median(background.getImage().array) + self.log.warning("Original median_background = %.2f", median_background) + # TODO: apply backgroundToPhotometricRatio here! + exposure.image.array += background.getImage().array + + with self._restore_mask_when_done(exposure) as original_mask: + self._dilate_original_mask(exposure, original_mask) + self._set_adaptive_detection_mask(exposure, median_background) + # Do not pass the original background in, since we want to wholly + # replace it. 
+ return super().run(exposure=exposure, stats=stats, statsKeys=statsKeys, + backgroundToPhotometricRatio=backgroundToPhotometricRatio) + + def _dilate_original_mask(self, exposure, original_mask): + nPixToDilate = 10 + detected_fraction_orig = self._compute_mask_fraction(exposure.mask) + # Dilate the current detected mask planes and don't clear + # them in the detection step. + inDilating = True + while inDilating: + dilatedMask = original_mask.clone() + for maskName in self._DETECTED_MASK_PLANES: + # Compute the grown detection mask plane using SpanSet + detectedMaskBit = dilatedMask.getPlaneBitMask(maskName) + detectedMaskSpanSet = SpanSet.fromMask(dilatedMask, detectedMaskBit) + detectedMaskSpanSet = detectedMaskSpanSet.dilated(nPixToDilate) + detectedMaskSpanSet = detectedMaskSpanSet.clippedTo(dilatedMask.getBBox()) + # Clear the detected mask plane + detectedMask = dilatedMask.getMaskPlane(maskName) + dilatedMask.clearMaskPlane(detectedMask) + # Set the mask plane to the dilated one + detectedMaskSpanSet.setMask(dilatedMask, detectedMaskBit) + + detected_fraction_dilated = self._compute_mask_fraction(dilatedMask) + if detected_fraction_dilated < self.config.maxDetFracForFinalBg or nPixToDilate == 1: + inDilating = False + else: + nPixToDilate -= 1 + exposure.mask = dilatedMask + self.log.warning("detected_fraction_orig = %.3f detected_fraction_dilated = %.3f", + detected_fraction_orig, detected_fraction_dilated) + n_above_max_per_amp = -99 + highest_detected_fraction_per_amp = float("nan") + doCheckPerAmpDetFraction = True + if doCheckPerAmpDetFraction: # detected_fraction < maxDetFracForFinalBg: + n_above_max_per_amp, highest_detected_fraction_per_amp, no_zero_det_amps = \ + self._compute_per_amp_fraction(exposure, detected_fraction_dilated) + self.log.warning("Dilated mask: n_above_max_per_amp = %d, " + "highest_detected_fraction_per_amp = %.3f", + n_above_max_per_amp, highest_detected_fraction_per_amp) + + def _set_adaptive_detection_mask(self, exposure, 
median_background): + inBackgroundDet = True + detected_fraction = 1.0 + maxIter = 40 + nIter = 0 + nFootprintTemp = 1e12 + starBackgroundDetectionConfig = SourceDetectionConfig() + starBackgroundDetectionConfig.doTempLocalBackground = False + starBackgroundDetectionConfig.nSigmaToGrow = 70.0 + starBackgroundDetectionConfig.reEstimateBackground = False + starBackgroundDetectionConfig.includeThresholdMultiplier = 1.0 + starBackgroundDetectionConfig.thresholdValue = max(2.0, 0.2*median_background) + starBackgroundDetectionConfig.thresholdType = "pixel_stdev" # "stdev" + + n_above_max_per_amp = -99 + highest_detected_fraction_per_amp = float("nan") + doCheckPerAmpDetFraction = True + + while inBackgroundDet: + currentThresh = starBackgroundDetectionConfig.thresholdValue + if detected_fraction > self.config.maxDetFracForFinalBg: + starBackgroundDetectionConfig.thresholdValue = 1.07*currentThresh + if nFootprintTemp < 3 and detected_fraction > 0.9*self.config.maxDetFracForFinalBg: + starBackgroundDetectionConfig.thresholdValue = 1.2*currentThresh + if n_above_max_per_amp > 1: + starBackgroundDetectionConfig.thresholdValue = 1.1*currentThresh + if detected_fraction < self.config.minDetFracForFinalBg: + starBackgroundDetectionConfig.thresholdValue = 0.8*currentThresh + starBackgroundDetectionTask = SourceDetectionTask( + config=starBackgroundDetectionConfig) + tempDetections = starBackgroundDetectionTask.detectFootprints( + exposure=exposure, clearMask=True) + exposure.mask |= dilatedMask + nFootprintTemp = ( + (len(tempDetections.positive.getFootprints()) if tempDetections is not None else 0) + + (len(tempDetections.negative.getFootprints()) if tempDetections.negative is not None else 0) + ) + detected_fraction = self._compute_mask_fraction(exposure.mask) + self.log.info("nIter = %d, thresh = %.2f: Fraction of pixels marked as DETECTED or " + "DETECTED_NEGATIVE in star_background_detection = %.3f " + "(max is %.3f; min is %.3f)", + nIter, 
starBackgroundDetectionConfig.thresholdValue, + detected_fraction, self.config.maxDetFracForFinalBg, self.config.minDetFracForFinalBg) + + n_amp = len(exposure.detector.getAmplifiers()) + if doCheckPerAmpDetFraction: # detected_fraction < maxDetFracForFinalBg: + n_above_max_per_amp, highest_detected_fraction_per_amp, no_zero_det_amps = \ + self._compute_per_amp_fraction(exposure, detected_fraction) + + if not no_zero_det_amps: + starBackgroundDetectionConfig.thresholdValue = 0.95*currentThresh + nIter += 1 + if nIter > maxIter: + inBackgroundDet = False + + if (detected_fraction < self.config.maxDetFracForFinalBg and detected_fraction > self.config.minDetFracForFinalBg + and n_above_max_per_amp < int(0.75*n_amp) + and no_zero_det_amps): + if (n_above_max_per_amp < max(1, int(0.15*n_amp)) + or detected_fraction < 0.85*self.config.maxDetFracForFinalBg): + inBackgroundDet = False + else: + self.log.warning("Making small tweak....") + starBackgroundDetectionConfig.thresholdValue = 1.05*currentThresh + self.log.warning("n_above_max_per_amp = %d (abs max is %d)", n_above_max_per_amp, int(0.75*n_amp)) + + self.log.info("Fraction of pixels marked as DETECTED or DETECTED_NEGATIVE is now %.5f " + "(highest per amp section = %.5f)", + detected_fraction, highest_detected_fraction_per_amp) + + if detected_fraction > self.config.maxDetFracForFinalBg: + exposure.mask = dilatedMask + self.log.warning("Final fraction of pixels marked as DETECTED or DETECTED_NEGATIVE " + "was too large in star_background_detection = %.3f (max = %.3f). " + "Reverting to dilated mask from PSF detection...", + detected_fraction, self.config.maxDetFracForFinalBg) + + def _compute_mask_fraction(self, mask): + """Evaluate the fraction of masked pixels in a (set of) mask plane(s). + + Parameters + ---------- + mask : `lsst.afw.image.Mask` + The mask on which to evaluate the fraction. 
+ + Returns + ------- + detected_fraction : `float` + The calculated fraction of masked pixels + """ + bad_pixel_mask = Mask.getPlaneBitMask(self.config.detectedFractionBadMaskPlanes) + n_good_pix = np.sum(mask.array & bad_pixel_mask == 0) + if n_good_pix == 0: + detected_fraction = float("nan") + return detected_fraction + detected_pixel_mask = Mask.getPlaneBitMask(self._DETECTED_MASK_PLANES) + n_detected_pix = np.sum((mask.array & detected_pixel_mask != 0) + & (mask.array & bad_pixel_mask == 0)) + detected_fraction = n_detected_pix/n_good_pix + return detected_fraction + + def _compute_per_amp_fraction(self, exposure, detected_fraction): + """Evaluate the maximum per-amplifier fraction of masked pixels. + + Parameters + ---------- + exposure : `lsst.afw.image.ExposureF` + The exposure on which to compute the per-amp masked fraction. + detected_fraction : `float` + The current detected_fraction of the detected mask planes for the + full detector. + + Returns + ------- + n_above_max_per_amp : `int` + The number of amplifiers with masked fractions above a maximum + value (set by the current full-detector ``detected_fraction``). + highest_detected_fraction_per_amp : `float` + The highest value of the per-amplifier fraction of masked pixels. + no_zero_det_amps : `bool` + A boolean representing whether any of the amplifiers has zero + masked pixels. + """ + highest_detected_fraction_per_amp = -9.99 + n_above_max_per_amp = 0 + n_no_zero_det_amps = 0 + no_zero_det_amps = True + amps = exposure.detector.getAmplifiers() + if amps is not None: + for ia, amp in enumerate(amps): + amp_bbox = amp.getBBox() + exp_bbox = exposure.getBBox() + if not exp_bbox.contains(amp_bbox): + self.log.info("Bounding box of amplifier (%s) does not fit in exposure's " + "bounding box (%s). 
Skipping...", amp_bbox, exp_bbox) + continue + sub_image = exposure.subset(amp.getBBox()) + detected_fraction_amp = self._compute_mask_fraction(sub_image.mask) + self.log.debug("Current detected fraction for amplifier %s = %.3f", + amp.getName(), detected_fraction_amp) + if detected_fraction_amp < 0.002: + n_no_zero_det_amps += 1 + if n_no_zero_det_amps > 2: + no_zero_det_amps = False + break + highest_detected_fraction_per_amp = max(detected_fraction_amp, + highest_detected_fraction_per_amp) + if highest_detected_fraction_per_amp > min(0.998, max(0.8, 3.0*detected_fraction)): + n_above_max_per_amp += 1 + if n_above_max_per_amp > 2: + break + else: + self.log.info("No amplifier object for detector %d, so skipping per-amp " + "detection fraction checks.", exposure.detector.getId()) + return n_above_max_per_amp, highest_detected_fraction_per_amp, no_zero_det_amps + + @contextmanager + def _restore_mask_when_done(self, exposure): + original_mask = exposure.mask.clone() + try: + yield original_mask + finally: + exposure.mask = original_mask From 7f241a66f2623cfff867bdaadb054a9aabf963c1 Mon Sep 17 00:00:00 2001 From: Jim Bosch Date: Mon, 3 Nov 2025 11:01:30 -0500 Subject: [PATCH 17/17] Cleanups for the new adaptive background task. - Replace "are we looping" variables with 'break' and 'for x in range'. - Use snake_case consistently for internal variables, but keep camelCase for configs, for consistency with SubtractBackgroundTask configs. - Move some magic numbers (not all) to config fields. 
--- .../meas/algorithms/adaptive_thresholds.py | 131 ++++++++++-------- 1 file changed, 70 insertions(+), 61 deletions(-) diff --git a/python/lsst/meas/algorithms/adaptive_thresholds.py b/python/lsst/meas/algorithms/adaptive_thresholds.py index bd2d4b1bd..06a8d069f 100644 --- a/python/lsst/meas/algorithms/adaptive_thresholds.py +++ b/python/lsst/meas/algorithms/adaptive_thresholds.py @@ -30,7 +30,7 @@ import numpy as np -from lsst.pex.config import Field, Config, ConfigField, DictField, FieldValidationError, ListField +from lsst.pex.config import Field, Config, ConfigField, DictField, FieldValidationError, ListField, RangeField from lsst.pipe.base import Task from lsst.afw.geom import SpanSet @@ -327,17 +327,42 @@ def run(self, table, exposure, **kwargs): class AdaptiveThresholdBackgroundConfig(SubtractBackgroundConfig): detectedFractionBadMaskPlanes = ListField( "Mask planes to ignore when computing the detected fraction.", dtype=str, - default=["BAD", "EDGE", "NO_DATA"] + default=["BAD", "EDGE", "NO_DATA"], ) minDetFracForFinalBg = Field( "Minimum detected fraction for the final background.", - dtype=float, default=0.02 + dtype=float, default=0.02, ) maxDetFracForFinalBg = Field( "Maximum detected fraction for the final background.", - dtype=float, default=0.93 + dtype=float, default=0.93, + ) + initialDilateRadius = RangeField( + "Number of pixels to dilate the original mask by.", + dtype=int, default=10, min=1, + ) + doCheckPerAmpDetectionFraction = Field( + "Whether to check per-amplifier detection fractions.", + dtype=bool, default=True, + ) + maxDetectionIterationCount = Field( + "Maximum number of adaptive detection iterations.", + dtype=int, default=39, # Original code had 40, but iterated from [0, n] inclusive. 
+ ) + detection = ConfigField( + "Baseline configuration for SourceDetectionTask prior to iteration.", + SourceDetectionConfig, ) + def setDefaults(self): + super().setDefaults() + self.detection.doTempLocalBackground = False + self.detection.nSigmaToGrow = 70.0 + self.detection.reEstimateBackground = False + self.detection.includeThresholdMultiplier = 1.0 + self.detection.thresholdValue = 2.0 # Actually used only as a floor. + self.detection.thresholdType = "pixel_stdev" + class AdaptiveThresholdBackgroundTask(SubtractBackgroundTask): """A background subtraction task that does its own masking of detected @@ -369,85 +394,72 @@ def run(self, exposure, background=None, stats=True, statsKeys=None, backgroundT exposure.image.array += background.getImage().array with self._restore_mask_when_done(exposure) as original_mask: - self._dilate_original_mask(exposure, original_mask) - self._set_adaptive_detection_mask(exposure, median_background) + dilated_mask = self._dilate_original_mask(exposure, original_mask) + self._set_adaptive_detection_mask(exposure, median_background, dilated_mask) # Do not pass the original background in, since we want to wholly # replace it. return super().run(exposure=exposure, stats=stats, statsKeys=statsKeys, backgroundToPhotometricRatio=backgroundToPhotometricRatio) def _dilate_original_mask(self, exposure, original_mask): - nPixToDilate = 10 detected_fraction_orig = self._compute_mask_fraction(exposure.mask) # Dilate the current detected mask planes and don't clear # them in the detection step. 
- inDilating = True - while inDilating: - dilatedMask = original_mask.clone() - for maskName in self._DETECTED_MASK_PLANES: + for n_pix_to_dilate in range(self.config.initialDilateRadius, 0, -1): + dilated_mask = original_mask.clone() + for mask_name in self._DETECTED_MASK_PLANES: # Compute the grown detection mask plane using SpanSet - detectedMaskBit = dilatedMask.getPlaneBitMask(maskName) - detectedMaskSpanSet = SpanSet.fromMask(dilatedMask, detectedMaskBit) - detectedMaskSpanSet = detectedMaskSpanSet.dilated(nPixToDilate) - detectedMaskSpanSet = detectedMaskSpanSet.clippedTo(dilatedMask.getBBox()) + detected_mask_bit = dilated_mask.getPlaneBitMask(mask_name) + detected_mask_span_set = SpanSet.fromMask(dilated_mask, detected_mask_bit) + detected_mask_span_set = detected_mask_span_set.dilated(n_pix_to_dilate) + detected_mask_span_set = detected_mask_span_set.clippedTo(dilated_mask.getBBox()) # Clear the detected mask plane - detectedMask = dilatedMask.getMaskPlane(maskName) - dilatedMask.clearMaskPlane(detectedMask) + detectedMask = dilated_mask.getMaskPlane(mask_name) + dilated_mask.clearMaskPlane(detectedMask) # Set the mask plane to the dilated one - detectedMaskSpanSet.setMask(dilatedMask, detectedMaskBit) + detected_mask_span_set.setMask(dilated_mask, detected_mask_bit) - detected_fraction_dilated = self._compute_mask_fraction(dilatedMask) - if detected_fraction_dilated < self.config.maxDetFracForFinalBg or nPixToDilate == 1: - inDilating = False - else: - nPixToDilate -= 1 - exposure.mask = dilatedMask + detected_fraction_dilated = self._compute_mask_fraction(dilated_mask) + if detected_fraction_dilated < self.config.maxDetFracForFinalBg or n_pix_to_dilate == 1: + break + exposure.mask = dilated_mask self.log.warning("detected_fraction_orig = %.3f detected_fraction_dilated = %.3f", detected_fraction_orig, detected_fraction_dilated) n_above_max_per_amp = -99 highest_detected_fraction_per_amp = float("nan") - doCheckPerAmpDetFraction = True - if 
doCheckPerAmpDetFraction: # detected_fraction < maxDetFracForFinalBg: + if self.config.doCheckPerAmpDetectionFraction: n_above_max_per_amp, highest_detected_fraction_per_amp, no_zero_det_amps = \ self._compute_per_amp_fraction(exposure, detected_fraction_dilated) self.log.warning("Dilated mask: n_above_max_per_amp = %d, " "highest_detected_fraction_per_amp = %.3f", n_above_max_per_amp, highest_detected_fraction_per_amp) + return dilated_mask - def _set_adaptive_detection_mask(self, exposure, median_background): - inBackgroundDet = True + def _set_adaptive_detection_mask(self, exposure, median_background, dilated_mask): detected_fraction = 1.0 - maxIter = 40 - nIter = 0 - nFootprintTemp = 1e12 - starBackgroundDetectionConfig = SourceDetectionConfig() - starBackgroundDetectionConfig.doTempLocalBackground = False - starBackgroundDetectionConfig.nSigmaToGrow = 70.0 - starBackgroundDetectionConfig.reEstimateBackground = False - starBackgroundDetectionConfig.includeThresholdMultiplier = 1.0 - starBackgroundDetectionConfig.thresholdValue = max(2.0, 0.2*median_background) - starBackgroundDetectionConfig.thresholdType = "pixel_stdev" # "stdev" + n_footprint_tmp = 1e12 + detection_config = self.config.detection.copy() + detection_config.thresholdValue = max(detection_config.thresholdValue, 0.2*median_background) + detection_config.thresholdType = "pixel_stdev" # "stdev" + no_zero_det_amps = 0 n_above_max_per_amp = -99 highest_detected_fraction_per_amp = float("nan") - doCheckPerAmpDetFraction = True - while inBackgroundDet: - currentThresh = starBackgroundDetectionConfig.thresholdValue + for n_iter in range(0, self.config.maxDetectionIterationCount): + current_threshold = detection_config.thresholdValue if detected_fraction > self.config.maxDetFracForFinalBg: - starBackgroundDetectionConfig.thresholdValue = 1.07*currentThresh - if nFootprintTemp < 3 and detected_fraction > 0.9*self.config.maxDetFracForFinalBg: - starBackgroundDetectionConfig.thresholdValue = 
1.2*currentThresh + detection_config.thresholdValue = 1.07*current_threshold + if n_footprint_tmp < 3 and detected_fraction > 0.9*self.config.maxDetFracForFinalBg: + detection_config.thresholdValue = 1.2*current_threshold if n_above_max_per_amp > 1: - starBackgroundDetectionConfig.thresholdValue = 1.1*currentThresh + detection_config.thresholdValue = 1.1*current_threshold if detected_fraction < self.config.minDetFracForFinalBg: - starBackgroundDetectionConfig.thresholdValue = 0.8*currentThresh - starBackgroundDetectionTask = SourceDetectionTask( - config=starBackgroundDetectionConfig) - tempDetections = starBackgroundDetectionTask.detectFootprints( - exposure=exposure, clearMask=True) - exposure.mask |= dilatedMask - nFootprintTemp = ( + detection_config.thresholdValue = 0.8*current_threshold + detection_task = SourceDetectionTask(config=detection_config) + tempDetections = detection_task.detectFootprints(exposure=exposure, clearMask=True) + exposure.mask |= dilated_mask + n_footprint_tmp = ( (len(tempDetections.positive.getFootprints()) if tempDetections is not None else 0) + (len(tempDetections.negative.getFootprints()) if tempDetections.negative is not None else 0) ) @@ -455,29 +467,26 @@ def _set_adaptive_detection_mask(self, exposure, median_background): self.log.info("nIter = %d, thresh = %.2f: Fraction of pixels marked as DETECTED or " "DETECTED_NEGATIVE in star_background_detection = %.3f " "(max is %.3f; min is %.3f)", - nIter, starBackgroundDetectionConfig.thresholdValue, + n_iter, detection_config.thresholdValue, detected_fraction, self.config.maxDetFracForFinalBg, self.config.minDetFracForFinalBg) n_amp = len(exposure.detector.getAmplifiers()) - if doCheckPerAmpDetFraction: # detected_fraction < maxDetFracForFinalBg: + if self.config.doCheckPerAmpDetectionFraction: n_above_max_per_amp, highest_detected_fraction_per_amp, no_zero_det_amps = \ self._compute_per_amp_fraction(exposure, detected_fraction) if not no_zero_det_amps: - 
starBackgroundDetectionConfig.thresholdValue = 0.95*currentThresh - nIter += 1 - if nIter > maxIter: - inBackgroundDet = False + detection_config.thresholdValue = 0.95*current_threshold if (detected_fraction < self.config.maxDetFracForFinalBg and detected_fraction > self.config.minDetFracForFinalBg and n_above_max_per_amp < int(0.75*n_amp) and no_zero_det_amps): if (n_above_max_per_amp < max(1, int(0.15*n_amp)) or detected_fraction < 0.85*self.config.maxDetFracForFinalBg): - inBackgroundDet = False + break else: self.log.warning("Making small tweak....") - starBackgroundDetectionConfig.thresholdValue = 1.05*currentThresh + detection_config.thresholdValue = 1.05*current_threshold self.log.warning("n_above_max_per_amp = %d (abs max is %d)", n_above_max_per_amp, int(0.75*n_amp)) self.log.info("Fraction of pixels marked as DETECTED or DETECTED_NEGATIVE is now %.5f " @@ -485,7 +494,7 @@ def _set_adaptive_detection_mask(self, exposure, median_background): detected_fraction, highest_detected_fraction_per_amp) if detected_fraction > self.config.maxDetFracForFinalBg: - exposure.mask = dilatedMask + exposure.mask = dilated_mask self.log.warning("Final fraction of pixels marked as DETECTED or DETECTED_NEGATIVE " "was too large in star_background_detection = %.3f (max = %.3f). " "Reverting to dilated mask from PSF detection...",