diff --git a/news/add-convergence-flag.rst b/news/add-convergence-flag.rst new file mode 100644 index 0000000..e4776b7 --- /dev/null +++ b/news/add-convergence-flag.rst @@ -0,0 +1,25 @@ +**Added:** + +* SNMFOptimizer.converged_ attribute to indicate whether the optimization + successfully reached the convergence tolerance (True) or stopped because the + maximum number of iterations was reached (False). + +**Changed:** + +* + +**Deprecated:** + +* + +**Removed:** + +* + +**Fixed:** + +* + +**Security:** + +* diff --git a/src/diffpy/stretched_nmf/snmf_class.py b/src/diffpy/stretched_nmf/snmf_class.py index d5f6603..35692b9 100644 --- a/src/diffpy/stretched_nmf/snmf_class.py +++ b/src/diffpy/stretched_nmf/snmf_class.py @@ -210,6 +210,7 @@ def fit(self, rho=0, eta=0, reset=True): the output of the previous fit() as their input. """ + self.converged_ = False if reset: self.components_ = self.init_components.copy() @@ -251,11 +252,12 @@ def fit(self, rho=0, eta=0, reset=True): sparsity_term = self.eta * np.sum( np.sqrt(self.components_) ) # Square root penalty + obj_diff = ( + self.objective_function - regularization_term - sparsity_term + ) print( f"Start, Objective function: {self.objective_function:.5e}" - f", Obj - reg/sparse: {self.objective_function - - regularization_term - - sparsity_term:.5e}" + f", Obj - reg/sparse: {obj_diff:.5e}" ) # Main optimization loop @@ -274,11 +276,12 @@ def fit(self, rho=0, eta=0, reset=True): sparsity_term = self.eta * np.sum( np.sqrt(self.components_) ) # Square root penalty + obj_diff = ( + self.objective_function - regularization_term - sparsity_term + ) print( f"Obj fun: {self.objective_function:.5e}, " - f"Obj - reg/sparse: {self.objective_function - - regularization_term - - sparsity_term:.5e}, " + f"Obj - reg/sparse: {obj_diff:.5e}, " f"Iter: {self.outiter}" ) @@ -294,6 +297,7 @@ def fit(self, rho=0, eta=0, reset=True): self.objective_difference < self.objective_function * self.tol and outiter >= self.min_iter ): + 
self.converged_ = True break self.normalize_results()