diff --git a/docs/explanations/model_diagnostics/degeneracy_hunter.rst b/docs/explanations/model_diagnostics/degeneracy_hunter.rst new file mode 100644 index 0000000000..5af66b7959 --- /dev/null +++ b/docs/explanations/model_diagnostics/degeneracy_hunter.rst @@ -0,0 +1,16 @@ +Degeneracy Hunter +================= + +.. contents:: + :depth: 3 + :local: + +Examples +-------- + +An example of using ``Degeneracy Hunter`` can be found here: https://idaes-examples.readthedocs.io/en/latest/docs/diagnostics/degeneracy_hunter_doc.html + +Introduction +------------ + +TBA diff --git a/docs/explanations/model_diagnostics/index.rst b/docs/explanations/model_diagnostics/index.rst index 0b07cfe24f..d3c3a14e2a 100644 --- a/docs/explanations/model_diagnostics/index.rst +++ b/docs/explanations/model_diagnostics/index.rst @@ -5,6 +5,12 @@ Model Diagnostics Workflow :depth: 3 :local: +.. toctree:: + :maxdepth: 1 + + svd_analysis + degeneracy_hunter + Introduction ------------ diff --git a/docs/explanations/model_diagnostics/svd_analysis.rst b/docs/explanations/model_diagnostics/svd_analysis.rst new file mode 100644 index 0000000000..8d5aefd59d --- /dev/null +++ b/docs/explanations/model_diagnostics/svd_analysis.rst @@ -0,0 +1,8 @@ +SVD Analysis +============ + +.. contents:: + :depth: 3 + :local: + +TBA diff --git a/docs/reference_guides/core/util/diagnostics/degeneracy_hunter.rst b/docs/reference_guides/core/util/diagnostics/degeneracy_hunter.rst index c5ecf06d13..a0f294d376 100644 --- a/docs/reference_guides/core/util/diagnostics/degeneracy_hunter.rst +++ b/docs/reference_guides/core/util/diagnostics/degeneracy_hunter.rst @@ -1,12 +1,6 @@ Degeneracy Hunter ================= -.. note:: - - v2.2: The Degeneracy Hunter tool is being deprecated in favor of the newer Diagnostics Toolbox. - - Over the next few releases the functionality of Degeneracy Hunter will be moved over to the new Diagnostics Toolbox which will also contain a number of other tools for diagnosing model issues. - -.. autoclass:: idaes.core.util.model_diagnostics.DegeneracyHunter +.. autoclass:: idaes.core.util.model_diagnostics.DegeneracyHunter2 :members: diff --git a/docs/reference_guides/core/util/diagnostics/degeneracy_hunter_legacy.rst b/docs/reference_guides/core/util/diagnostics/degeneracy_hunter_legacy.rst new file mode 100644 index 0000000000..83b2101e2f --- /dev/null +++ b/docs/reference_guides/core/util/diagnostics/degeneracy_hunter_legacy.rst @@ -0,0 +1,12 @@ +Degeneracy Hunter (Legacy) +========================== + +.. note:: + + v2.2: The original Degeneracy Hunter tool is being deprecated in favor of the newer Diagnostics Toolbox. + + Over the next few releases the functionality of Degeneracy Hunter will be moved over to the new Diagnostics Toolbox which will also contain a number of other tools for diagnosing model issues. + +.. autoclass:: idaes.core.util.model_diagnostics.DegeneracyHunter + :members: + diff --git a/docs/reference_guides/core/util/diagnostics/svd_toolbox.rst b/docs/reference_guides/core/util/diagnostics/svd_toolbox.rst new file mode 100644 index 0000000000..bdfc5261cf --- /dev/null +++ b/docs/reference_guides/core/util/diagnostics/svd_toolbox.rst @@ -0,0 +1,16 @@ +SVD Toolbox +=========== + +The IDAES SVD Toolbox is an advanced diagnostics tool for helping to identify scaling and degeneracy issues in models using Singular Value Decomposition and is accessible through the ``DiagnosticsToolbox``. Further discussion of how to use and interpret SVD results can be found :ref:`here`. + +.. 
autoclass:: idaes.core.util.model_diagnostics.SVDToolbox + :members: + +SVD Callbacks +------------- + +The SVD Toolbox supports callbacks to select the SVD analysis tool to use. Two callbacks are provided to make use of methods available in Scipy. + +.. automodule:: idaes.core.util.model_diagnostics + :noindex: + :members: svd_dense, svd_sparse diff --git a/docs/reference_guides/core/util/model_diagnostics.rst b/docs/reference_guides/core/util/model_diagnostics.rst index 2ede432576..02e35aa5ec 100644 --- a/docs/reference_guides/core/util/model_diagnostics.rst +++ b/docs/reference_guides/core/util/model_diagnostics.rst @@ -7,12 +7,14 @@ The IDAES toolset contains a number of utility functions which can be useful for :maxdepth: 1 diagnostics/diagnostics_toolbox + diagnostics/svd_toolbox diagnostics/degeneracy_hunter + diagnostics/degeneracy_hunter_legacy Other Methods ^^^^^^^^^^^^^ .. automodule:: idaes.core.util.model_diagnostics - :exclude-members: DegeneracyHunter, DiagnosticsToolbox + :exclude-members: DegeneracyHunter, DiagnosticsToolbox, SVDToolbox, DegeneracyHunter2, svd_dense, svd_sparse :members: diff --git a/idaes/core/util/model_diagnostics.py b/idaes/core/util/model_diagnostics.py index 92127f7c2e..6400d219a3 100644 --- a/idaes/core/util/model_diagnostics.py +++ b/idaes/core/util/model_diagnostics.py @@ -18,7 +18,8 @@ __author__ = "Alexander Dowling, Douglas Allan, Andrew Lee" from operator import itemgetter -from sys import stdout +import sys +from inspect import signature from math import log import numpy as np @@ -40,14 +41,22 @@ Var, ) from pyomo.core.base.block import _BlockData +from pyomo.core.base.var import _VarData +from pyomo.core.base.constraint import _ConstraintData from pyomo.common.collections import ComponentSet -from pyomo.common.config import ConfigDict, ConfigValue, document_kwargs_from_configdict +from pyomo.common.config import ( + ConfigDict, + ConfigValue, + document_kwargs_from_configdict, + PositiveInt, +) from pyomo.util.check_units import identify_inconsistent_units from pyomo.contrib.incidence_analysis import IncidenceGraphInterface from pyomo.core.expr.visitor import identify_variables from pyomo.contrib.pynumero.interfaces.pyomo_nlp import PyomoNLP from pyomo.contrib.pynumero.asl import AmplInterface from pyomo.common.deprecation import deprecation_warning +from pyomo.common.errors import PyomoException from idaes.core.util.model_statistics import ( activated_blocks_set, @@ -79,8 +88,75 @@ MAX_STR_LENGTH = 84 TAB = " " * 4 +# Constants for Degeneracy Hunter +YTOL = 0.9 +MMULT = 0.99 + # TODO: Add suggested steps to cautions - how? + +def svd_callback_validator(val): + """Domain validator for SVD callbacks + + Args: + val : value to be checked + + Returns: + TypeError if val is not a valid callback + """ + if callable(val): + sig = signature(val) + if len(sig.parameters) >= 2: + return val + + _log.error( + f"SVD callback {val} must be a callable which takes at least two arguments." + ) + raise ValueError( + "SVD callback must be a callable which takes at least two arguments." 
+ ) + + +def svd_dense(jacobian, number_singular_values): + """ + Callback for performing SVD analysis using scipy.linalg.svd + + Args: + jacobian: Jacobian to be analysed + number_singular_values: number of singular values to compute + + Returns: + u, s and v numpy arrays + + """ + u, s, vT = svd(jacobian.todense(), full_matrices=False) + # Reorder singular values and vectors so that the singular + # values are from least to greatest + u = np.flip(u[:, -number_singular_values:], axis=1) + s = np.flip(s[-number_singular_values:], axis=0) + vT = np.flip(vT[-number_singular_values:, :], axis=0) + + return u, s, vT.transpose() + + +def svd_sparse(jacobian, number_singular_values): + """ + Callback for performing SVD analysis using scipy.sparse.linalg.svds + + Args: + jacobian: Jacobian to be analysed + number_singular_values: number of singular values to compute + + Returns: + u, s and v numpy arrays + + """ + u, s, vT = svds(jacobian, k=number_singular_values, which="SM") + + print(u, s, vT, number_singular_values) + return u, s, vT.transpose() + + CONFIG = ConfigDict() CONFIG.declare( "variable_bounds_absolute_tolerance", @@ -177,6 +253,95 @@ ) +SVDCONFIG = ConfigDict() +SVDCONFIG.declare( + "number_of_smallest_singular_values", + ConfigValue( + domain=PositiveInt, + description="Number of smallest singular values to compute", + ), +) +SVDCONFIG.declare( + "svd_callback", + ConfigValue( + default=svd_dense, + domain=svd_callback_validator, + description="Callback to SVD method of choice (default = svd_dense)", + doc="Callback to SVD method of choice (default = svd_dense). " + "Callbacks should take the Jacobian and number of singular values " + "to compute as options, plus any method specific arguments, and should " + "return the u, s and v matrices as numpy arrays.", + ), +) +SVDCONFIG.declare( + "svd_callback_arguments", + ConfigValue( + default=None, + domain=dict, + description="Optional arguments to pass to SVD callback (default = None)", + ), +) +SVDCONFIG.declare( + "singular_value_tolerance", + ConfigValue( + default=1e-6, + domain=float, + description="Tolerance for defining a small singular value", + ), +) +SVDCONFIG.declare( + "size_cutoff_in_singular_vector", + ConfigValue( + default=0.1, + domain=float, + description="Size below which to ignore constraints and variables in " + "the singular vector", + ), +) + + +DHCONFIG = ConfigDict() +DHCONFIG.declare( + "solver", + ConfigValue( + default="scip", + domain=str, + description="MILP solver to use for finding irreducible degenerate sets.", + ), +) +DHCONFIG.declare( + "solver_options", + ConfigValue( + domain=None, + description="Options to pass to MILP solver.", + ), +) +DHCONFIG.declare( + "M", # TODO: Need better name + ConfigValue( + default=1e5, + domain=float, + description="Maximum value for nu in MILP models.", + ), +) +DHCONFIG.declare( + "m_small", # TODO: Need better name + ConfigValue( + default=1e-5, + domain=float, + description="Smallest value for nu to be considered non-zero in MILP models.", + ), +) +DHCONFIG.declare( + "trivial_constraint_tolerance", + ConfigValue( + default=1e-6, + domain=float, + description="Tolerance for identifying non-zero rows in Jacobian.", + ), +) + + @document_kwargs_from_configdict(CONFIG) class DiagnosticsToolbox: """ @@ -241,7 +406,7 @@ def model(self): """ return self._model - def display_external_variables(self, stream=stdout): + def display_external_variables(self, stream=None): """ Prints a list of variables that appear within activated Constraints in the model but are not 
contained within the model themselves. @@ -253,6 +418,9 @@ def display_external_variables(self, stream=stdout): None """ + if stream is None: + stream = sys.stdout + ext_vars = [] for v in variables_in_activated_constraints_set(self._model): if not _var_in_block(v, self._model): @@ -266,7 +434,7 @@ def display_external_variables(self, stream=stdout): footer="=", ) - def display_unused_variables(self, stream=stdout): + def display_unused_variables(self, stream=None): """ Prints a list of variables that do not appear in any activated Constraints. @@ -277,6 +445,9 @@ def display_unused_variables(self, stream=stdout): None """ + if stream is None: + stream = sys.stdout + _write_report_section( stream=stream, lines_list=variables_not_in_activated_constraints_set(self._model), @@ -285,7 +456,7 @@ def display_unused_variables(self, stream=stdout): footer="=", ) - def display_variables_fixed_to_zero(self, stream=stdout): + def display_variables_fixed_to_zero(self, stream=None): """ Prints a list of variables that are fixed to an absolute value of 0. @@ -296,6 +467,9 @@ def display_variables_fixed_to_zero(self, stream=stdout): None """ + if stream is None: + stream = sys.stdout + _write_report_section( stream=stream, lines_list=_vars_fixed_to_zero(self._model), @@ -304,7 +478,7 @@ def display_variables_fixed_to_zero(self, stream=stdout): footer="=", ) - def display_variables_at_or_outside_bounds(self, stream=stdout): + def display_variables_at_or_outside_bounds(self, stream=None): """ Prints a list of variables with values that fall at or outside the bounds on the variable. @@ -316,6 +490,9 @@ def display_variables_at_or_outside_bounds(self, stream=stdout): None """ + if stream is None: + stream = sys.stdout + _write_report_section( stream=stream, lines_list=[ @@ -325,12 +502,13 @@ def display_variables_at_or_outside_bounds(self, stream=stdout): tolerance=self.config.variable_bounds_violation_tolerance, ) ], - title="The following variable(s) have values at or outside their bounds:", + title=f"The following variable(s) have values at or outside their bounds " + f"(tol={self.config.variable_bounds_violation_tolerance:.1E}):", header="=", footer="=", ) - def display_variables_with_none_value(self, stream=stdout): + def display_variables_with_none_value(self, stream=None): """ Prints a list of variables with a value of None. @@ -341,6 +519,9 @@ def display_variables_with_none_value(self, stream=stdout): None """ + if stream is None: + stream = sys.stdout + _write_report_section( stream=stream, lines_list=_vars_with_none_value(self._model), @@ -349,7 +530,7 @@ def display_variables_with_none_value(self, stream=stdout): footer="=", ) - def display_variables_with_value_near_zero(self, stream=stdout): + def display_variables_with_value_near_zero(self, stream=None): """ Prints a list of variables with a value close to zero. 
The tolerance for determining what is close to zero can be set in the class configuration @@ -362,6 +543,9 @@ def display_variables_with_value_near_zero(self, stream=stdout): None """ + if stream is None: + stream = sys.stdout + _write_report_section( stream=stream, lines_list=[ @@ -370,12 +554,13 @@ def display_variables_with_value_near_zero(self, stream=stdout): self._model, self.config.variable_zero_value_tolerance ) ], - title="The following variable(s) have a value close to zero:", + title=f"The following variable(s) have a value close to zero " + f"(tol={self.config.variable_zero_value_tolerance:.1E}):", header="=", footer="=", ) - def display_variables_with_extreme_values(self, stream=stdout): + def display_variables_with_extreme_values(self, stream=None): """ Prints a list of variables with extreme values. @@ -388,6 +573,9 @@ def display_variables_with_extreme_values(self, stream=stdout): None """ + if stream is None: + stream = sys.stdout + _write_report_section( stream=stream, lines_list=[ @@ -399,12 +587,14 @@ def display_variables_with_extreme_values(self, stream=stdout): zero=self.config.variable_zero_value_tolerance, ) ], - title="The following variable(s) have extreme values:", + title=f"The following variable(s) have extreme values " + f"(<{self.config.variable_small_value_tolerance:.1E} or " + f"> {self.config.variable_large_value_tolerance:.1E}):", header="=", footer="=", ) - def display_variables_near_bounds(self, stream=stdout): + def display_variables_near_bounds(self, stream=None): """ Prints a list of variables with values close to their bounds. Tolerance can be set in the class configuration options. @@ -416,6 +606,9 @@ def display_variables_near_bounds(self, stream=stdout): None """ + if stream is None: + stream = sys.stdout + _write_report_section( stream=stream, lines_list=[ @@ -426,12 +619,14 @@ def display_variables_near_bounds(self, stream=stdout): rel_tol=self.config.variable_bounds_relative_tolerance, ) ], - title="The following variable(s) have values close to their bounds:", + title=f"The following variable(s) have values close to their bounds " + f"(abs={self.config.variable_bounds_absolute_tolerance:.1E}, " + f"rel={self.config.variable_bounds_relative_tolerance:.1E}):", header="=", footer="=", ) - def display_components_with_inconsistent_units(self, stream=stdout): + def display_components_with_inconsistent_units(self, stream=None): """ Prints a list of all Constraints, Expressions and Objectives in the model with inconsistent units of measurement. @@ -443,6 +638,9 @@ def display_components_with_inconsistent_units(self, stream=stdout): None """ + if stream is None: + stream = sys.stdout + _write_report_section( stream=stream, lines_list=identify_inconsistent_units(self._model), @@ -453,7 +651,7 @@ def display_components_with_inconsistent_units(self, stream=stdout): footer="=", ) - def display_constraints_with_large_residuals(self, stream=stdout): + def display_constraints_with_large_residuals(self, stream=None): """ Prints a list of Constraints with residuals greater than a specified tolerance. Tolerance can be set in the class configuration options. 
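# A minimal usage sketch of the stream handling introduced in these hunks: the
# display methods now default to stream=None and resolve sys.stdout at call
# time, so a report can also be captured in memory by passing any writable
# text stream. The tiny model below is an assumed example, not from the patch.
from io import StringIO

from pyomo.environ import ConcreteModel, Constraint, Var
from idaes.core.util.model_diagnostics import DiagnosticsToolbox

m = ConcreteModel()
m.v1 = Var(initialize=1.0)
m.v2 = Var(initialize=2.0)
m.c1 = Constraint(expr=m.v1 + m.v2 == 10)  # residual of 7 at the initial point
m.c2 = Constraint(expr=m.v1 - m.v2 == -1)  # satisfied at the initial point

dt = DiagnosticsToolbox(model=m)
buffer = StringIO()
dt.display_constraints_with_large_residuals(stream=buffer)
print(buffer.getvalue())  # title now includes the tolerance, e.g. "(>1.0E-05)"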
@@ -465,12 +663,24 @@ def display_constraints_with_large_residuals(self, stream=stdout): None """ + if stream is None: + stream = sys.stdout + + lrdict = large_residuals_set( + self._model, + tol=self.config.constraint_residual_tolerance, + return_residual_values=True, + ) + + lrs = [] + for k, v in lrdict.items(): + lrs.append(f"{k.name}: {v:.5E}") + _write_report_section( stream=stream, - lines_list=large_residuals_set( - self._model, tol=self.config.constraint_residual_tolerance - ), - title="The following constraint(s) have large residuals:", + lines_list=lrs, + title=f"The following constraint(s) have large residuals " + f"(>{self.config.constraint_residual_tolerance:.1E}):", header="=", footer="=", ) @@ -501,7 +711,7 @@ def get_dulmage_mendelsohn_partition(self): return uc_vblocks, uc_cblocks, oc_vblocks, oc_cblocks - def display_underconstrained_set(self, stream=stdout): + def display_underconstrained_set(self, stream=None): """ Prints the variables and constraints in the under-constrained sub-problem from a Dulmage-Mendelsohn partitioning. @@ -516,6 +726,9 @@ def display_underconstrained_set(self, stream=stdout): None """ + if stream is None: + stream = sys.stdout + uc_vblocks, uc_cblocks, _, _ = self.get_dulmage_mendelsohn_partition() stream.write("=" * MAX_STR_LENGTH + "\n") @@ -534,7 +747,7 @@ def display_underconstrained_set(self, stream=stdout): stream.write("=" * MAX_STR_LENGTH + "\n") - def display_overconstrained_set(self, stream=stdout): + def display_overconstrained_set(self, stream=None): """ Prints the variables and constraints in the over-constrained sub-problem from a Dulmage-Mendelsohn partitioning. @@ -549,6 +762,9 @@ def display_overconstrained_set(self, stream=stdout): None """ + if stream is None: + stream = sys.stdout + _, _, oc_vblocks, oc_cblocks = self.get_dulmage_mendelsohn_partition() stream.write("=" * MAX_STR_LENGTH + "\n") @@ -567,7 +783,7 @@ def display_overconstrained_set(self, stream=stdout): stream.write("=" * MAX_STR_LENGTH + "\n") - def display_variables_with_extreme_jacobians(self, stream=stdout): + def display_variables_with_extreme_jacobians(self, stream=None): """ Prints the variables associated with columns in the Jacobian with extreme L2 norms. This often indicates poorly scaled variables. @@ -581,6 +797,9 @@ def display_variables_with_extreme_jacobians(self, stream=stdout): None """ + if stream is None: + stream = sys.stdout + xjc = extreme_jacobian_columns( m=self._model, scaled=False, @@ -592,12 +811,14 @@ def display_variables_with_extreme_jacobians(self, stream=stdout): _write_report_section( stream=stream, lines_list=[f"{i[1].name}: {i[0]:.3E}" for i in xjc], - title="The following variable(s) are associated with extreme Jacobian values:", + title=f"The following variable(s) are associated with extreme Jacobian values " + f"(<{self.config.jacobian_small_value_caution:.1E} or" + f">{self.config.jacobian_large_value_caution:.1E}):", header="=", footer="=", ) - def display_constraints_with_extreme_jacobians(self, stream=stdout): + def display_constraints_with_extreme_jacobians(self, stream=None): """ Prints the constraints associated with rows in the Jacobian with extreme L2 norms. This often indicates poorly scaled constraints. 
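# An illustrative sketch (not the IDAES implementation) of the check whose
# thresholds now appear in these report titles: a variable or constraint is
# flagged when the L2 norm of its Jacobian column or row falls below
# jacobian_small_value_caution or above jacobian_large_value_caution.
import numpy as np
from scipy.sparse import csr_matrix

jac = csr_matrix(np.array([[1.0e10, 0.0, 1.0], [1.0e-8, 2.0, 3.0]]))
col_norms = np.sqrt(np.asarray(jac.power(2).sum(axis=0)).ravel())  # per variable
row_norms = np.sqrt(np.asarray(jac.power(2).sum(axis=1)).ravel())  # per constraint

small, large = 1.0e-4, 1.0e4  # default caution thresholds from the config block
extreme_cols = [j for j, n in enumerate(col_norms) if n < small or n > large]
extreme_rows = [i for i, n in enumerate(row_norms) if n < small or n > large]
print(extreme_cols, extreme_rows)  # -> [0] [0]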
@@ -611,6 +832,9 @@ def display_constraints_with_extreme_jacobians(self, stream=stdout): None """ + if stream is None: + stream = sys.stdout + xjr = extreme_jacobian_rows( m=self._model, scaled=False, @@ -622,12 +846,14 @@ def display_constraints_with_extreme_jacobians(self, stream=stdout): _write_report_section( stream=stream, lines_list=[f"{i[1].name}: {i[0]:.3E}" for i in xjr], - title="The following constraint(s) are associated with extreme Jacobian values:", + title="The following constraint(s) are associated with extreme Jacobian values " + f"(<{self.config.jacobian_small_value_caution:.1E} or" + f">{self.config.jacobian_large_value_caution:.1E}):", header="=", footer="=", ) - def display_extreme_jacobian_entries(self, stream=stdout): + def display_extreme_jacobian_entries(self, stream=None): """ Prints variables and constraints associated with entries in the Jacobian with extreme values. This can be indicative of poor scaling, especially for isolated terms (e.g. @@ -642,6 +868,9 @@ def display_extreme_jacobian_entries(self, stream=stdout): None """ + if stream is None: + stream = sys.stdout + xje = extreme_jacobian_entries( m=self._model, scaled=False, @@ -654,7 +883,9 @@ def display_extreme_jacobian_entries(self, stream=stdout): _write_report_section( stream=stream, lines_list=[f"{i[1].name}, {i[2].name}: {i[0]:.3E}" for i in xje], - title="The following constraint(s) and variable(s) are associated with extreme Jacobian\nvalues:", + title="The following constraint(s) and variable(s) are associated with extreme " + f"Jacobian\nvalues (<{self.config.jacobian_small_value_caution:.1E} or" + f">{self.config.jacobian_large_value_caution:.1E}):", header="=", footer="=", ) @@ -763,7 +994,8 @@ def _collect_numerical_warnings(self, jac=None, nlp=None): if len(large_residuals) == 1: cstring = "Constraint" warnings.append( - f"WARNING: {len(large_residuals)} {cstring} with large residuals" + f"WARNING: {len(large_residuals)} {cstring} with large residuals " + f"(>{self.config.constraint_residual_tolerance:.1E})" ) next_steps.append( self.display_constraints_with_large_residuals.__name__ + "()" @@ -778,7 +1010,8 @@ def _collect_numerical_warnings(self, jac=None, nlp=None): if len(violated_bounds) == 1: cstring = "Variable" warnings.append( - f"WARNING: {len(violated_bounds)} {cstring} at or outside bounds" + f"WARNING: {len(violated_bounds)} {cstring} at or outside bounds " + f"(tol={self.config.variable_bounds_violation_tolerance:.1E})" ) next_steps.append( self.display_variables_at_or_outside_bounds.__name__ + "()" @@ -796,7 +1029,9 @@ def _collect_numerical_warnings(self, jac=None, nlp=None): if len(jac_col) == 1: cstring = "Variable" warnings.append( - f"WARNING: {len(jac_col)} {cstring} with extreme Jacobian values" + f"WARNING: {len(jac_col)} {cstring} with extreme Jacobian values " + f"(<{self.config.jacobian_small_value_warning:.1E} or " + f">{self.config.jacobian_large_value_warning:.1E})" ) next_steps.append( self.display_variables_with_extreme_jacobians.__name__ + "()" @@ -813,7 +1048,9 @@ def _collect_numerical_warnings(self, jac=None, nlp=None): if len(jac_row) == 1: cstring = "Constraint" warnings.append( - f"WARNING: {len(jac_row)} {cstring} with extreme Jacobian values" + f"WARNING: {len(jac_row)} {cstring} with extreme Jacobian values " + f"(<{self.config.jacobian_small_value_warning:.1E} or " + f">{self.config.jacobian_large_value_warning:.1E})" ) next_steps.append( self.display_constraints_with_extreme_jacobians.__name__ + "()" @@ -845,7 +1082,9 @@ def 
_collect_numerical_cautions(self, jac=None, nlp=None): if len(near_bounds) == 1: cstring = "Variable" cautions.append( - f"Caution: {len(near_bounds)} {cstring} with value close to their bounds" + f"Caution: {len(near_bounds)} {cstring} with value close to their bounds " + f"(abs={self.config.variable_bounds_absolute_tolerance:.1E}, " + f"rel={self.config.variable_bounds_absolute_tolerance:.1E})" ) # Variables near zero @@ -857,7 +1096,8 @@ def _collect_numerical_cautions(self, jac=None, nlp=None): if len(near_zero) == 1: cstring = "Variable" cautions.append( - f"Caution: {len(near_zero)} {cstring} with value close to zero" + f"Caution: {len(near_zero)} {cstring} with value close to zero " + f"(tol={self.config.variable_zero_value_tolerance:.1E})" ) # Variables with extreme values @@ -871,7 +1111,11 @@ def _collect_numerical_cautions(self, jac=None, nlp=None): cstring = "Variables" if len(xval) == 1: cstring = "Variable" - cautions.append(f"Caution: {len(xval)} {cstring} with extreme value") + cautions.append( + f"Caution: {len(xval)} {cstring} with extreme value " + f"(<{self.config.variable_small_value_tolerance:.1E} or " + f">{self.config.variable_large_value_tolerance:.1E})" + ) # Variables with value None none_value = _vars_with_none_value(self._model) @@ -893,7 +1137,9 @@ def _collect_numerical_cautions(self, jac=None, nlp=None): if len(jac_col) == 1: cstring = "Variable" cautions.append( - f"Caution: {len(jac_col)} {cstring} with extreme Jacobian values" + f"Caution: {len(jac_col)} {cstring} with extreme Jacobian values " + f"(<{self.config.jacobian_small_value_caution:.1E} or " + f">{self.config.jacobian_large_value_caution:.1E})" ) jac_row = extreme_jacobian_rows( @@ -907,7 +1153,9 @@ def _collect_numerical_cautions(self, jac=None, nlp=None): if len(jac_row) == 1: cstring = "Constraint" cautions.append( - f"Caution: {len(jac_row)} {cstring} with extreme Jacobian values" + f"Caution: {len(jac_row)} {cstring} with extreme Jacobian values " + f"(<{self.config.jacobian_small_value_caution:.1E} or " + f">{self.config.jacobian_large_value_caution:.1E})" ) # Extreme Jacobian entries @@ -922,7 +1170,11 @@ def _collect_numerical_cautions(self, jac=None, nlp=None): cstring = "Entries" if len(extreme_jac) == 1: cstring = "Entry" - cautions.append(f"Caution: {len(extreme_jac)} extreme Jacobian {cstring}") + cautions.append( + f"Caution: {len(extreme_jac)} extreme Jacobian {cstring} " + f"(<{self.config.jacobian_small_value_caution:.1E} or " + f">{self.config.jacobian_large_value_caution:.1E})" + ) return cautions @@ -952,7 +1204,7 @@ def assert_no_numerical_warnings(self): if len(warnings) > 0: raise AssertionError(f"Numerical issues found ({len(warnings)}).") - def report_structural_issues(self, stream=stdout): + def report_structural_issues(self, stream=None): """ Generates a summary report of any structural issues identified in the model provided and suggests next steps for debugging the model. @@ -968,6 +1220,9 @@ def report_structural_issues(self, stream=stdout): None """ + if stream is None: + stream = sys.stdout + # Potential evaluation errors # TODO: High Index? stats = _collect_model_statistics(self._model) @@ -997,7 +1252,7 @@ def report_structural_issues(self, stream=stdout): footer="=", ) - def report_numerical_issues(self, stream=stdout): + def report_numerical_issues(self, stream=None): """ Generates a summary report of any numerical issues identified in the model provided and suggest next steps for debugging model. 
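# A hedged sketch of the workflow these hunks wire together: after
# report_numerical_issues(), the suggested next steps now point to
# prepare_svd_toolbox() and prepare_degeneracy_hunter(), which build the new
# SVDToolbox and DegeneracyHunter2 helpers for the same model. The small model
# below is an assumed example; running it needs pynumero's ASL and, for the
# degeneracy hunter step, a MILP solver (default "scip").
from pyomo.environ import ConcreteModel, Constraint, Var
from idaes.core.util.model_diagnostics import DiagnosticsToolbox

m = ConcreteModel()
m.x = Var([1, 2, 3], initialize=1.0)
m.c1 = Constraint(expr=m.x[1] + m.x[2] == 2)
m.c2 = Constraint(expr=2 * m.x[1] + 2 * m.x[2] == 4)  # linearly dependent on c1
m.c3 = Constraint(expr=m.x[3] == 1)

dt = DiagnosticsToolbox(model=m)
dt.report_numerical_issues()             # now suggests the two helpers below

svd = dt.prepare_svd_toolbox()           # returned and stored as dt.svd_toolbox
svd.display_underdetermined_variables_and_constraints()

dh = dt.prepare_degeneracy_hunter()      # returned and stored as dt.degeneracy_hunter
dh.report_irreducible_degenerate_sets()  # should report the {c1, c2} set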
@@ -1012,6 +1267,9 @@ def report_numerical_issues(self, stream=stdout): None """ + if stream is None: + stream = sys.stdout + jac, nlp = get_jacobian(self._model, scaled=False) warnings, next_steps = self._collect_numerical_warnings(jac=jac, nlp=nlp) @@ -1041,10 +1299,705 @@ def report_numerical_issues(self, stream=stdout): lines_list=next_steps, title="Suggested next steps:", line_if_empty=f"If you still have issues converging your model consider:\n" - f"{TAB*2}svd_analysis(TBA)\n{TAB*2}degeneracy_hunter (TBA)", + f"{TAB*2}prepare_svd_toolbox()\n{TAB*2}prepare_degeneracy_hunter()", footer="=", ) + @document_kwargs_from_configdict(SVDCONFIG) + def prepare_svd_toolbox(self, **kwargs): + """ + Create an instance of the SVDToolbox and store as self.svd_toolbox. + + After creating an instance of the toolbox, call + display_underdetermined_variables_and_constraints(). + + Returns: + + Instance of SVDToolbox + + """ + self.svd_toolbox = SVDToolbox(self.model, **kwargs) + + return self.svd_toolbox + + @document_kwargs_from_configdict(DHCONFIG) + def prepare_degeneracy_hunter(self, **kwargs): + """ + Create an instance of the DegeneracyHunter and store as self.degeneracy_hunter. + + After creating an instance of the toolbox, call + report_irreducible_degenerate_sets. + + Returns: + + Instance of DegeneracyHunter + + """ + self.degeneracy_hunter = DegeneracyHunter2(self.model, **kwargs) + + return self.degeneracy_hunter + + +@document_kwargs_from_configdict(SVDCONFIG) +class SVDToolbox: + """ + Toolbox for performing Singular Value Decomposition on the model Jacobian. + + Used help() for more information on available methods. + + Original code by Doug Allan + + Args: + + model: model to be diagnosed. The SVDToolbox does not support indexed Blocks. + + """ + + def __init__(self, model: _BlockData, **kwargs): + # TODO: In future may want to generalise this to accept indexed blocks + # However, for now some of the tools do not support indexed blocks + if not isinstance(model, _BlockData): + raise TypeError( + "model argument must be an instance of a Pyomo BlockData object " + "(either a scalar Block or an element of an indexed Block)." + ) + + self._model = model + self.config = SVDCONFIG(kwargs) + + self.u = None + self.s = None + self.v = None + + # Get Jacobian and NLP + self.jacobian, self.nlp = get_jacobian( + self._model, scaled=False, equality_constraints_only=True + ) + + if self.jacobian.shape[0] < 2: + raise ValueError( + "Model needs at least 2 equality constraints to perform svd_analysis." + ) + + def run_svd_analysis(self): + """ + Perform SVD analysis of the constraint Jacobian + + Args: + + None + + Returns: + + None + + Actions: + Stores SVD results in object + + """ + n_eq = self.jacobian.shape[0] + n_var = self.jacobian.shape[1] + + n_sv = self.config.number_of_smallest_singular_values + if n_sv is None: + # Determine the number of singular values to compute + # The "-1" is needed to avoid an error with svds + n_sv = min(10, min(n_eq, n_var) - 1) + elif n_sv >= min(n_eq, n_var): + raise ValueError( + f"For a {n_eq} by {n_var} system, svd_analysis " + f"can compute at most {min(n_eq, n_var) - 1} " + f"singular values and vectors, but {n_sv} were called for." 
+ ) + + # Get optional arguments for SVD callback + svd_callback_arguments = self.config.svd_callback_arguments + if svd_callback_arguments is None: + svd_callback_arguments = {} + + # Perform SVD + # Recall J is a n_eq x n_var matrix + # Thus U is a n_eq x n_eq matrix + # And V is a n_var x n_var + # (U or V may be smaller in economy mode) + u, s, v = self.config.svd_callback( + self.jacobian, + number_singular_values=n_sv, + **svd_callback_arguments, + ) + + # Save results + self.u = u + self.s = s + self.v = v + + def display_rank_of_equality_constraints(self, stream=None): + """ + Method to display the number of singular values that fall below + tolerance specified in config block. + + Args: + stream: I/O object to write report to (default = stdout) + + Returns: + None + + """ + if stream is None: + stream = sys.stdout + + if self.s is None: + self.run_svd_analysis() + + counter = 0 + for e in self.s: + if e < self.config.singular_value_tolerance: + counter += 1 + + stream.write("=" * MAX_STR_LENGTH + "\n\n") + stream.write( + f"Number of Singular Values less than " + f"{self.config.singular_value_tolerance:.1E} is {counter}\n\n" + ) + stream.write("=" * MAX_STR_LENGTH + "\n") + + def display_underdetermined_variables_and_constraints( + self, singular_values=None, stream=None + ): + """ + Determines constraints and variables associated with the smallest + singular values by having large components in the left and right + singular vectors, respectively, associated with those singular values. + + Args: + singular_values: List of ints representing singular values to display, + as ordered from least to greatest starting from 1 (default show all) + stream: I/O object to write report to (default = stdout) + + Returns: + None + + """ + if stream is None: + stream = sys.stdout + + if self.s is None: + self.run_svd_analysis() + + tol = self.config.size_cutoff_in_singular_vector + + # Get list of equality constraint and variable names + eq_con_list = self.nlp.get_pyomo_equality_constraints() + var_list = self.nlp.get_pyomo_variables() + + if singular_values is None: + singular_values = range(1, len(self.s) + 1) + + stream.write("=" * MAX_STR_LENGTH + "\n") + stream.write( + "Constraints and Variables associated with smallest singular values\n\n" + ) + + for e in singular_values: + # First, make sure values are feasible + if e > len(self.s): + raise ValueError( + f"Cannot display the {e}-th smallest singular value. " + f"Only {len(self.s)} small singular values have been " + "calculated. You can set the number_of_smallest_singular_values " + "config argument and call run_svd_analysis again to get more " + "singular values." + ) + + stream.write(f"{TAB}Smallest Singular Value {e}:\n\n") + stream.write(f"{2 * TAB}Variables:\n\n") + for v in np.where(abs(self.v[:, e - 1]) > tol)[0]: + stream.write(f"{3 * TAB}{var_list[v].name}\n") + + stream.write(f"\n{2 * TAB}Constraints:\n\n") + for c in np.where(abs(self.u[:, e - 1]) > tol)[0]: + stream.write(f"{3 * TAB}{eq_con_list[c].name}\n") + stream.write("\n") + + stream.write("=" * MAX_STR_LENGTH + "\n") + + def display_constraints_including_variable(self, variable, stream=None): + """ + Display all constraints that include the specified variable and the + associated Jacobian coefficient. 
+ + Args: + variable: variable object to get associated constraints for + stream: I/O object to write report to (default = stdout) + + Returns: + None + + """ + if stream is None: + stream = sys.stdout + + # Validate variable argument + if not isinstance(variable, _VarData): + raise TypeError( + f"variable argument must be an instance of a Pyomo _VarData " + f"object (got {variable})." + ) + + # Get list of equality constraint and variable names + eq_con_list = self.nlp.get_pyomo_equality_constraints() + var_list = self.nlp.get_pyomo_variables() + + # Get index of variable in Jacobian + try: + var_idx = var_list.index(variable) + except (ValueError, PyomoException): + raise AttributeError(f"Could not find {variable.name} in model.") + + nonzeroes = self.jacobian[:, var_idx].nonzero() + + # Build a list of all constraints that include var + cons_w_var = [] + for i, r in enumerate(nonzeroes[0]): + cons_w_var.append( + f"{eq_con_list[r].name}: {self.jacobian[(r, nonzeroes[1][i])]}" + ) + + # Write the output + _write_report_section( + stream=stream, + lines_list=cons_w_var, + title=f"The following constraints involve {variable.name}:", + header="=", + footer="=", + ) + + def display_variables_in_constraint(self, constraint, stream=None): + """ + Display all variables that appear in the specified constraint and the + associated Jacobian coefficient. + + Args: + constraint: constraint object to get associated variables for + stream: I/O object to write report to (default = stdout) + + Returns: + None + + """ + if stream is None: + stream = sys.stdout + + # Validate variable argument + if not isinstance(constraint, _ConstraintData): + raise TypeError( + f"constraint argument must be an instance of a Pyomo _ConstraintData " + f"object (got {constraint})." + ) + + # Get list of equality constraint and variable names + eq_con_list = self.nlp.get_pyomo_equality_constraints() + var_list = self.nlp.get_pyomo_variables() + + # Get index of variable in Jacobian + try: + con_idx = eq_con_list.index(constraint) + except ValueError: + raise AttributeError(f"Could not find {constraint.name} in model.") + + nonzeroes = self.jacobian[con_idx, :].nonzero() + + # Build a list of all vars in constraint + vars_in_cons = [] + for i, r in enumerate(nonzeroes[0]): + c = nonzeroes[1][i] + vars_in_cons.append(f"{var_list[c].name}: {self.jacobian[(r, c)]}") + + # Write the output + _write_report_section( + stream=stream, + lines_list=vars_in_cons, + title=f"The following variables are involved in {constraint.name}:", + header="=", + footer="=", + ) + + +# TODO: Rename and redirect once old DegeneracyHunter is removed +@document_kwargs_from_configdict(DHCONFIG) +class DegeneracyHunter2: + """ + Degeneracy Hunter is a tool for identifying Irreducible Degenerate Sets (IDS) in + Pyomo models. + + Original implementation by Alex Dowling. + + Args: + + model: model to be diagnosed. The DegeneracyHunter does not support indexed Blocks. + + """ + + def __init__(self, model, **kwargs): + # TODO: In future may want to generalise this to accept indexed blocks + # However, for now some of the tools do not support indexed blocks + if not isinstance(model, _BlockData): + raise TypeError( + "model argument must be an instance of a Pyomo BlockData object " + "(either a scalar Block or an element of an indexed Block)." 
+ ) + + self._model = model + self.config = DHCONFIG(kwargs) + + # Get Jacobian and NLP + self.jacobian, self.nlp = get_jacobian( + self._model, scaled=False, equality_constraints_only=True + ) + + # Placeholder for solver - deferring construction lets us unit test more easily + self.solver = None + + # Create placeholders for results + self.degenerate_set = {} + self.irreducible_degenerate_sets = [] + + def _get_solver(self): + if self.solver is None: + self.solver = SolverFactory(self.config.solver) + + options = self.config.solver_options + if options is None: + options = {} + + self.solver.options = options + + return self.solver + + def _prepare_candidates_milp(self): + """ + Prepare MILP to find candidate equations for consider for IDS + + Args: + + None + + Returns: + + m_fc: Pyomo model to find candidates + + """ + _log.info("Building MILP model.") + + # Create Pyomo model for irreducible degenerate set + m_dh = ConcreteModel() + + # Create index for constraints + m_dh.C = Set(initialize=range(self.jacobian.shape[0])) + m_dh.V = Set(initialize=range(self.jacobian.shape[1])) + + # Specify minimum size for nu to be considered non-zero + M = self.config.M + m_small = self.config.m_small + + # Add variables + m_dh.nu = Var( + m_dh.C, + bounds=(-M - m_small, M + m_small), + initialize=1.0, + ) + m_dh.y_pos = Var(m_dh.C, domain=Binary) + m_dh.y_neg = Var(m_dh.C, domain=Binary) + m_dh.abs_nu = Var(m_dh.C, bounds=(0, M + m_small)) + + m_dh.pos_xor_neg = Constraint(m_dh.C) + + # Constraint to enforce set is degenerate + if issparse(self.jacobian): + J = self.jacobian.tocsc() + + def eq_degenerate(m_dh, v): + if np.sum(np.abs(J[:, v])) > self.config.trivial_constraint_tolerance: + # Find the columns with non-zero entries + C_ = find(J[:, v])[0] + return sum(J[c, v] * m_dh.nu[c] for c in C_) == 0 + else: + # This variable does not appear in any constraint + return Constraint.Skip + + else: + J = self.jacobian + + def eq_degenerate(m_dh, v): + if np.sum(np.abs(J[:, v])) > self.config.trivial_constraint_tolerance: + return sum(J[c, v] * m_dh.nu[c] for c in m_dh.C) == 0 + else: + # This variable does not appear in any constraint + return Constraint.Skip + + m_dh.degenerate = Constraint(m_dh.V, rule=eq_degenerate) + + # When y_pos = 1, nu >= m_small + # When y_pos = 0, nu >= - m_small + def eq_pos_lower(b, c): + return b.nu[c] >= -m_small + 2 * m_small * b.y_pos[c] + + m_dh.pos_lower = Constraint(m_dh.C, rule=eq_pos_lower) + + # When y_pos = 1, nu <= M + m_small + # When y_pos = 0, nu <= m_small + def eq_pos_upper(b, c): + return b.nu[c] <= M * b.y_pos[c] + m_small + + m_dh.pos_upper = Constraint(m_dh.C, rule=eq_pos_upper) + + # When y_neg = 1, nu <= -m_small + # When y_neg = 0, nu <= m_small + def eq_neg_upper(b, c): + return b.nu[c] <= m_small - 2 * m_small * b.y_neg[c] + + m_dh.neg_upper = Constraint(m_dh.C, rule=eq_neg_upper) + + # When y_neg = 1, nu >= -M - m_small + # When y_neg = 0, nu >= - m_small + def eq_neg_lower(b, c): + return b.nu[c] >= -M * b.y_neg[c] - m_small + + m_dh.neg_lower = Constraint(m_dh.C, rule=eq_neg_lower) + + # Absolute value + def eq_abs_lower(b, c): + return -b.abs_nu[c] <= b.nu[c] + + m_dh.abs_lower = Constraint(m_dh.C, rule=eq_abs_lower) + + def eq_abs_upper(b, c): + return b.nu[c] <= b.abs_nu[c] + + m_dh.abs_upper = Constraint(m_dh.C, rule=eq_abs_upper) + + # At least one constraint must be in the degenerate set + m_dh.degenerate_set_nonempty = Constraint( + expr=sum(m_dh.y_pos[c] + m_dh.y_neg[c] for c in m_dh.C) >= 1 + ) + + # Minimize the L1-norm of nu + 
m_dh.obj = Objective(expr=sum(m_dh.abs_nu[c] for c in m_dh.C)) + + self.candidates_milp = m_dh + + def _identify_candidates(self): + eq_con_list = self.nlp.get_pyomo_equality_constraints() + + for i in self.candidates_milp.C: + # Check if constraint is included + if self.candidates_milp.abs_nu[i]() > self.config.m_small * MMULT: + # If it is, save the value of nu + if eq_con_list is None: + name = i + else: + name = eq_con_list[i] + self.degenerate_set[name] = self.candidates_milp.nu[i]() + + def _solve_candidates_milp(self, tee: bool = False): + """Solve MILP to generate set of candidate equations + + Arguments: + + tee: print solver output (default = False) + + """ + _log.info("Solving Candidates MILP model.") + + results = self._get_solver().solve(self.candidates_milp, tee=tee) + + self.degenerate_set = {} + + if check_optimal_termination(results): + # We found a degenerate set + self._identify_candidates() + else: + _log.debug( + "Solver did not return an optimal termination condition for " + "Candidates MILP. This probably indicates the system is full rank." + ) + + def _prepare_ids_milp(self): + """ + Prepare MILP to compute the irreducible degenerate set + + """ + _log.info("Building MILP model to compute irreducible degenerate set.") + + n_eq = self.jacobian.shape[0] + n_var = self.jacobian.shape[1] + + # Create Pyomo model for irreducible degenerate set + m_dh = ConcreteModel() + + # Create index for constraints + m_dh.C = Set(initialize=range(n_eq)) + m_dh.V = Set(initialize=range(n_var)) + + # Specify minimum size for nu to be considered non-zero + M = self.config.M + + # Add variables + m_dh.nu = Var(m_dh.C, bounds=(-M, M), initialize=1.0) + m_dh.y = Var(m_dh.C, domain=Binary) + + # Constraint to enforce set is degenerate + if issparse(self.jacobian): + J = self.jacobian.tocsc() + + def eq_degenerate(m_dh, v): + # Find the columns with non-zero entries + C = find(J[:, v])[0] + return sum(J[c, v] * m_dh.nu[c] for c in C) == 0 + + else: + J = self.jacobian + + def eq_degenerate(m_dh, v): + return sum(J[c, v] * m_dh.nu[c] for c in m_dh.C) == 0 + + m_dh.degenerate = Constraint(m_dh.V, rule=eq_degenerate) + + def eq_lower(m_dh, c): + return -M * m_dh.y[c] <= m_dh.nu[c] + + m_dh.lower = Constraint(m_dh.C, rule=eq_lower) + + def eq_upper(m_dh, c): + return m_dh.nu[c] <= M * m_dh.y[c] + + m_dh.upper = Constraint(m_dh.C, rule=eq_upper) + + m_dh.obj = Objective(expr=sum(m_dh.y[c] for c in m_dh.C)) + + self.ids_milp = m_dh + + def _get_ids(self): + # Create empty dictionary + ids_ = {} + + eq_con_list = self.nlp.get_pyomo_equality_constraints() + + for i in self.ids_milp.C: + # Check if constraint is included + if self.ids_milp.y[i]() > YTOL: + # If it is, save the value of nu + ids_[eq_con_list[i]] = self.ids_milp.nu[i]() + + return ids_ + + def _solve_ids_milp(self, cons: Constraint, tee: bool = False): + """Solve MILP to check if equation 'cons' is a significant component + in an irreducible degenerate set + + Args: + cons: constraint to consider + tee: Boolean, print solver output (default = False) + + Returns: + ids: dictionary containing the IDS + + """ + _log.info(f"Solving IDS MILP for constraint {cons.name}.") + eq_con_list = self.nlp.get_pyomo_equality_constraints() + cons_idx = eq_con_list.index(cons) + + # Fix weight on candidate equation + self.ids_milp.nu[cons_idx].fix(1.0) + + # Solve MILP + results = self._get_solver().solve(self.ids_milp, tee=tee) + + self.ids_milp.nu[cons_idx].unfix() + + if check_optimal_termination(results): + # We found an irreducible degenerate set + 
return self._get_ids() + else: + raise ValueError( + f"Solver did not return an optimal termination condition for " + f"IDS MILP with constraint {cons.name}." + ) + + def find_irreducible_degenerate_sets(self, tee=False): + """ + Compute irreducible degenerate sets + + Args: + tee: Print solver output logs to screen (default=False) + + """ + + # Solve to find candidate equations + _log.info("Searching for Candidate Equations") + self._prepare_candidates_milp() + self._solve_candidates_milp(tee=tee) + + # Find irreducible degenerate sets + # Check if degenerate_set is not empty + if self.degenerate_set: + + _log.info("Searching for Irreducible Degenerate Sets") + self._prepare_ids_milp() + + # Loop over candidate equations + count = 1 + for k in self.degenerate_set: + print(f"Solving MILP {count} of {len(self.degenerate_set)}.") + _log.info_high(f"Solving MILP {count} of {len(self.degenerate_set)}.") + + # Check if equation is a major element of an IDS + ids_ = self._solve_ids_milp(cons=k, tee=tee) + + if ids_ is not None: + self.irreducible_degenerate_sets.append(ids_) + + count += 1 + else: + _log.debug("No candidate equations found") + + def report_irreducible_degenerate_sets(self, stream=None, tee: bool = False): + """ + Print a report of all the Irreducible Degenerate Sets (IDS) identified in + model. + + Args: + stream: I/O object to write report to (default = stdout) + tee: whether to write solver output logs to screen + + Returns: + None + + """ + if stream is None: + stream = sys.stdout + + self.find_irreducible_degenerate_sets(tee=tee) + + stream.write("=" * MAX_STR_LENGTH + "\n") + stream.write("Irreducible Degenerate Sets\n") + + if self.irreducible_degenerate_sets: + for i, s in enumerate(self.irreducible_degenerate_sets): + stream.write(f"\n{TAB}Irreducible Degenerate Set {i}") + stream.write(f"\n{TAB*2}nu{TAB}Constraint Name") + for k, v in s.items(): + value_string = f"{v:.1f}" + sep = (2 + len(TAB) - len(value_string)) * " " + stream.write(f"\n{TAB*2}{value_string}{sep}{k.name}") + stream.write("\n") + else: + stream.write( + f"\n{TAB}No candidate equations. 
The Jacobian is likely full rank.\n" + ) + + stream.write("\n" + "=" * MAX_STR_LENGTH + "\n") + class DegeneracyHunter: """ diff --git a/idaes/core/util/tests/test_model_diagnostics.py b/idaes/core/util/tests/test_model_diagnostics.py index 7da1b77f2a..e91a69160b 100644 --- a/idaes/core/util/tests/test_model_diagnostics.py +++ b/idaes/core/util/tests/test_model_diagnostics.py @@ -29,10 +29,12 @@ Suffix, TransformationFactory, units, + value, Var, ) from pyomo.common.collections import ComponentSet from pyomo.contrib.pynumero.asl import AmplInterface +from pyomo.contrib.pynumero.interfaces.pyomo_nlp import PyomoNLP import idaes.core.util.scaling as iscale import idaes.logger as idaeslog @@ -49,7 +51,11 @@ # Need to update from idaes.core.util.model_diagnostics import ( DiagnosticsToolbox, + SVDToolbox, DegeneracyHunter, + DegeneracyHunter2, + svd_dense, + svd_sparse, get_valid_range_of_component, set_bounds_from_valid_range, list_components_with_values_outside_valid_range, @@ -66,6 +72,8 @@ __author__ = "Alex Dowling, Douglas Allan, Andrew Lee" +solver_available = SolverFactory("scip").available() + @pytest.fixture def model(): @@ -517,7 +525,7 @@ def test_display_variables_at_or_outside_bounds(self, model): dt.display_variables_at_or_outside_bounds(stream) expected = """==================================================================================== -The following variable(s) have values at or outside their bounds: +The following variable(s) have values at or outside their bounds (tol=0.0E+00): b.v3 (free): value=0.0 bounds=(0, 5) b.v5 (fixed): value=2 bounds=(0, 1) @@ -552,14 +560,13 @@ def test_display_variables_with_value_near_zero(self, model): dt.display_variables_with_value_near_zero(stream) expected = """==================================================================================== -The following variable(s) have a value close to zero: +The following variable(s) have a value close to zero (tol=1.0E-08): b.v3: value=0.0 b.v6: value=0 ==================================================================================== """ - assert stream.getvalue() == expected @pytest.mark.component @@ -570,7 +577,7 @@ def test_display_variables_with_extreme_values(self, model): dt.display_variables_with_extreme_values(stream) expected = """==================================================================================== -The following variable(s) have extreme values: +The following variable(s) have extreme values (<1.0E-04 or > 1.0E+04): b.v7: 1.0000939326524314e-07 @@ -587,7 +594,7 @@ def test_display_variables_near_bounds(self, model): dt.display_variables_near_bounds(stream) expected = """==================================================================================== -The following variable(s) have values close to their bounds: +The following variable(s) have values close to their bounds (abs=1.0E-04, rel=1.0E-04): b.v3: value=0.0 bounds=(0, 5) b.v5: value=2 bounds=(0, 1) @@ -625,9 +632,9 @@ def test_display_constraints_with_large_residuals(self, model): dt.display_constraints_with_large_residuals(stream) expected = """==================================================================================== -The following constraint(s) have large residuals: +The following constraint(s) have large residuals (>1.0E-05): - b.c2 + b.c2: 6.66667E-01 ==================================================================================== """ @@ -830,7 +837,7 @@ def test_display_variables_with_extreme_jacobians(self): dt.display_variables_with_extreme_jacobians(stream) expected = 
"""==================================================================================== -The following variable(s) are associated with extreme Jacobian values: +The following variable(s) are associated with extreme Jacobian values (<1.0E-04 or>1.0E+04): v2: 1.000E+10 v1: 1.000E+08 @@ -858,7 +865,7 @@ def test_display_constraints_with_extreme_jacobians(self): dt.display_constraints_with_extreme_jacobians(stream) expected = """==================================================================================== -The following constraint(s) are associated with extreme Jacobian values: +The following constraint(s) are associated with extreme Jacobian values (<1.0E-04 or>1.0E+04): c3: 1.000E+10 @@ -885,7 +892,7 @@ def test_display_extreme_jacobian_entries(self): expected = """==================================================================================== The following constraint(s) and variable(s) are associated with extreme Jacobian -values: +values (<1.0E-04 or>1.0E+04): c3, v2: 1.000E+10 c2, v3: 1.000E-08 @@ -979,8 +986,8 @@ def test_collect_numerical_warnings(self, model): warnings, next_steps = dt._collect_numerical_warnings() assert len(warnings) == 2 - assert "WARNING: 1 Constraint with large residuals" in warnings - assert "WARNING: 2 Variables at or outside bounds" in warnings + assert "WARNING: 1 Constraint with large residuals (>1.0E-05)" in warnings + assert "WARNING: 2 Variables at or outside bounds (tol=0.0E+00)" in warnings assert len(next_steps) == 2 assert "display_constraints_with_large_residuals()" in next_steps @@ -1019,11 +1026,17 @@ def test_collect_numerical_warnings_jacobian(self): dt = DiagnosticsToolbox(model=model) warnings, next_steps = dt._collect_numerical_warnings() - print(warnings) + assert len(warnings) == 3 - assert "WARNING: 2 Variables with extreme Jacobian values" in warnings - assert "WARNING: 1 Constraint with extreme Jacobian values" in warnings - assert "WARNING: 1 Constraint with large residuals" in warnings + assert ( + "WARNING: 2 Variables with extreme Jacobian values (<1.0E-08 or >1.0E+08)" + in warnings + ) + assert ( + "WARNING: 1 Constraint with extreme Jacobian values (<1.0E-08 or >1.0E+08)" + in warnings + ) + assert "WARNING: 1 Constraint with large residuals (>1.0E-05)" in warnings assert len(next_steps) == 3 assert "display_variables_with_extreme_jacobians()" in next_steps @@ -1035,13 +1048,18 @@ def test_collect_numerical_cautions(self, model): dt = DiagnosticsToolbox(model=model.b) cautions = dt._collect_numerical_cautions() - print(cautions) + assert len(cautions) == 5 - assert "Caution: 3 Variables with value close to their bounds" in cautions - assert "Caution: 2 Variables with value close to zero" in cautions + assert ( + "Caution: 3 Variables with value close to their bounds (abs=1.0E-04, rel=1.0E-04)" + in cautions + ) + assert "Caution: 2 Variables with value close to zero (tol=1.0E-08)" in cautions assert "Caution: 1 Variable with None value" in cautions - assert "Caution: 1 extreme Jacobian Entry" in cautions - assert "Caution: 1 Variable with extreme value" in cautions + assert "Caution: 1 extreme Jacobian Entry (<1.0E-04 or >1.0E+04)" in cautions + assert ( + "Caution: 1 Variable with extreme value (<1.0E-04 or >1.0E+04)" in cautions + ) @pytest.mark.component def test_collect_numerical_cautions_jacobian(self): @@ -1057,12 +1075,18 @@ def test_collect_numerical_cautions_jacobian(self): dt = DiagnosticsToolbox(model=model) cautions = dt._collect_numerical_cautions() - print(cautions) + assert len(cautions) == 4 - assert 
"Caution: 3 Variables with value close to zero" in cautions - assert "Caution: 3 Variables with extreme Jacobian values" in cautions - assert "Caution: 1 Constraint with extreme Jacobian values" in cautions - assert "Caution: 4 extreme Jacobian Entries" in cautions + assert "Caution: 3 Variables with value close to zero (tol=1.0E-08)" in cautions + assert ( + "Caution: 3 Variables with extreme Jacobian values (<1.0E-04 or >1.0E+04)" + in cautions + ) + assert ( + "Caution: 1 Constraint with extreme Jacobian values (<1.0E-04 or >1.0E+04)" + in cautions + ) + assert "Caution: 4 extreme Jacobian Entries (<1.0E-04 or >1.0E+04)" in cautions @pytest.mark.component def test_assert_no_structural_warnings(self, model): @@ -1151,17 +1175,17 @@ def test_report_numerical_issues(self, model): ------------------------------------------------------------------------------------ 2 WARNINGS - WARNING: 1 Constraint with large residuals - WARNING: 2 Variables at or outside bounds + WARNING: 1 Constraint with large residuals (>1.0E-05) + WARNING: 2 Variables at or outside bounds (tol=0.0E+00) ------------------------------------------------------------------------------------ 5 Cautions - Caution: 3 Variables with value close to their bounds - Caution: 2 Variables with value close to zero - Caution: 1 Variable with extreme value + Caution: 3 Variables with value close to their bounds (abs=1.0E-04, rel=1.0E-04) + Caution: 2 Variables with value close to zero (tol=1.0E-08) + Caution: 1 Variable with extreme value (<1.0E-04 or >1.0E+04) Caution: 1 Variable with None value - Caution: 1 extreme Jacobian Entry + Caution: 1 extreme Jacobian Entry (<1.0E-04 or >1.0E+04) ------------------------------------------------------------------------------------ Suggested next steps: @@ -1171,7 +1195,7 @@ def test_report_numerical_issues(self, model): ==================================================================================== """ - print(stream.getvalue()) + assert stream.getvalue() == expected @pytest.mark.component @@ -1198,17 +1222,17 @@ def test_report_numerical_issues_jacobian(self): ------------------------------------------------------------------------------------ 3 WARNINGS - WARNING: 1 Constraint with large residuals - WARNING: 2 Variables with extreme Jacobian values - WARNING: 1 Constraint with extreme Jacobian values + WARNING: 1 Constraint with large residuals (>1.0E-05) + WARNING: 2 Variables with extreme Jacobian values (<1.0E-08 or >1.0E+08) + WARNING: 1 Constraint with extreme Jacobian values (<1.0E-08 or >1.0E+08) ------------------------------------------------------------------------------------ 4 Cautions - Caution: 3 Variables with value close to zero - Caution: 3 Variables with extreme Jacobian values - Caution: 1 Constraint with extreme Jacobian values - Caution: 4 extreme Jacobian Entries + Caution: 3 Variables with value close to zero (tol=1.0E-08) + Caution: 3 Variables with extreme Jacobian values (<1.0E-04 or >1.0E+04) + Caution: 1 Constraint with extreme Jacobian values (<1.0E-04 or >1.0E+04) + Caution: 4 extreme Jacobian Entries (<1.0E-04 or >1.0E+04) ------------------------------------------------------------------------------------ Suggested next steps: @@ -1219,7 +1243,671 @@ def test_report_numerical_issues_jacobian(self): ==================================================================================== """ - print(stream.getvalue()) + + assert stream.getvalue() == expected + + @pytest.mark.skipif( + not AmplInterface.available(), reason="pynumero_ASL is not available" + ) + 
@pytest.mark.integration + def test_prepare_svd_toolbox(self, model): + dt = DiagnosticsToolbox(model=model.b) + svd = dt.prepare_svd_toolbox() + + assert isinstance(svd, SVDToolbox) + + @pytest.mark.skipif( + not AmplInterface.available(), reason="pynumero_ASL is not available" + ) + @pytest.mark.integration + def test_prepare_degeneracy_hunter(self, model): + dt = DiagnosticsToolbox(model=model.b) + dh = dt.prepare_degeneracy_hunter() + + assert isinstance(dh, DegeneracyHunter2) + + +def dummy_callback(arg1): + pass + + +def dummy_callback2(arg1=None, arg2=None): + pass + + +@pytest.mark.skipif( + not AmplInterface.available(), reason="pynumero_ASL is not available" +) +class TestSVDToolbox: + @pytest.mark.unit + def test_svd_callback_domain(self, dummy_problem): + with pytest.raises( + ValueError, + match="SVD callback must be a callable which takes at least two arguments.", + ): + SVDToolbox(dummy_problem, svd_callback="foo") + + with pytest.raises( + ValueError, + match="SVD callback must be a callable which takes at least two arguments.", + ): + SVDToolbox(dummy_problem, svd_callback=dummy_callback) + + svd = SVDToolbox(dummy_problem, svd_callback=dummy_callback2) + assert svd.config.svd_callback is dummy_callback2 + + @pytest.mark.unit + def test_init(self, dummy_problem): + svd = SVDToolbox(dummy_problem) + + assert svd._model is dummy_problem + assert svd.u is None + assert svd.s is None + assert svd.v is None + + # Get Jacobian and NLP + jac = { + (0, 0): 100.0, + (1, 1): 1.0, + (2, 2): 10.0, + (3, 3): 0.1, + (4, 4): 5.0, + } + for i, j in jac.items(): + assert j == svd.jacobian[i] + + assert isinstance(svd.nlp, PyomoNLP) + + @pytest.mark.unit + def test_init_small_model(self): + m = ConcreteModel() + m.v = Var() + m.c = Constraint(expr=m.v == 10) + + with pytest.raises( + ValueError, + match="Model needs at least 2 equality constraints to perform svd_analysis.", + ): + svd = SVDToolbox(m) + + @pytest.mark.unit + def test_run_svd_analysis(self, dummy_problem): + svd = SVDToolbox(dummy_problem) + + assert svd.config.svd_callback is svd_dense + + svd.run_svd_analysis() + + np.testing.assert_array_almost_equal( + svd.u, + np.array( + [[0, 0, 0, 0], [0, 1, 0, 0], [0, 0, 0, 1], [1, 0, 0, 0], [0, 0, 1, 0]] + ), + ) + np.testing.assert_array_almost_equal(svd.s, np.array([0.1, 1, 5, 10])) + np.testing.assert_array_almost_equal( + svd.v, + np.array( + [[0, 0, 0, 1, 0], [0, 1, 0, 0, 0], [0, 0, 0, 0, 1], [0, 0, 1, 0, 0]] + ).T, + ) + + @pytest.mark.unit + def test_run_svd_analysis_sparse(self, dummy_problem): + svd = SVDToolbox(dummy_problem, svd_callback=svd_sparse) + svd.run_svd_analysis() + + # SVD sparse is not consistent with signs - manually iterate and check abs value + for i in range(5): + for j in range(4): + if (i, j) in [(1, 1), (2, 3), (3, 0), (4, 2)]: + assert abs(svd.u[i, j]) == pytest.approx(1, abs=1e-6, rel=1e-6) + else: + assert svd.u[i, j] == pytest.approx(0, abs=1e-6) + + np.testing.assert_array_almost_equal(svd.s, np.array([0.1, 1, 5, 10])) + + for i in range(5): + for j in range(4): + if (i, j) in [(1, 1), (2, 3), (3, 0), (4, 2)]: + assert abs(svd.v[i, j]) == pytest.approx(1, abs=1e-6, rel=1e-6) + else: + assert svd.v[i, j] == pytest.approx(0, abs=1e-6) + + @pytest.mark.unit + def test_run_svd_analysis_sparse_limit(self, dummy_problem): + svd = SVDToolbox( + dummy_problem, svd_callback=svd_sparse, number_of_smallest_singular_values=2 + ) + svd.run_svd_analysis() + + # SVD sparse is not consistent with signs - manually iterate and check abs value + for i in range(5): + 
for j in range(2):
+                if (i, j) in [(1, 1), (3, 0)]:
+                    assert abs(svd.u[i, j]) == pytest.approx(1, abs=1e-6, rel=1e-6)
+                else:
+                    assert svd.u[i, j] == pytest.approx(0, abs=1e-6)
+
+        np.testing.assert_array_almost_equal(svd.s, np.array([0.1, 1]))
+
+        for i in range(5):
+            for j in range(2):
+                if (i, j) in [(1, 1), (3, 0)]:
+                    assert abs(svd.v[i, j]) == pytest.approx(1, abs=1e-6, rel=1e-6)
+                else:
+                    assert svd.v[i, j] == pytest.approx(0, abs=1e-6)
+
+    @pytest.mark.unit
+    def test_display_rank_of_equality_constraints(self, dummy_problem):
+        svd = SVDToolbox(dummy_problem)
+
+        stream = StringIO()
+        svd.display_rank_of_equality_constraints(stream=stream)
+
+        expected = """====================================================================================
+
+Number of Singular Values less than 1.0E-6 is 0
+
+====================================================================================
+"""
+
+        assert stream.getvalue() == expected
+
+    @pytest.mark.unit
+    def test_display_rank_of_equality_constraints_tolerance(self, dummy_problem):
+        svd = SVDToolbox(dummy_problem, singular_value_tolerance=1)
+
+        stream = StringIO()
+        svd.display_rank_of_equality_constraints(stream=stream)
+
+        expected = """====================================================================================
+
+Number of Singular Values less than 1.0E+00 is 1
+
+====================================================================================
+"""
+
+        assert stream.getvalue() == expected
+
+    @pytest.mark.unit
+    def test_display_underdetermined_variables_and_constraints(self, dummy_problem):
+        svd = SVDToolbox(dummy_problem)
+
+        stream = StringIO()
+        svd.display_underdetermined_variables_and_constraints(stream=stream)
+
+        expected = """====================================================================================
+Constraints and Variables associated with smallest singular values
+
+    Smallest Singular Value 1:
+
+        Variables:
+
+            x[3]
+
+        Constraints:
+
+            dummy_eqn[3]
+
+    Smallest Singular Value 2:
+
+        Variables:
+
+            x[1]
+
+        Constraints:
+
+            dummy_eqn[1]
+
+    Smallest Singular Value 3:
+
+        Variables:
+
+            x[4]
+
+        Constraints:
+
+            dummy_eqn[4]
+
+    Smallest Singular Value 4:
+
+        Variables:
+
+            x[2]
+
+        Constraints:
+
+            dummy_eqn[2]
+
+====================================================================================
+"""
+
+        assert stream.getvalue() == expected
+
+    @pytest.mark.unit
+    def test_display_underdetermined_variables_and_constraints_specific(
+        self, dummy_problem
+    ):
+        svd = SVDToolbox(dummy_problem)
+
+        stream = StringIO()
+        svd.display_underdetermined_variables_and_constraints(
+            singular_values=[1], stream=stream
+        )
+
+        expected = """====================================================================================
+Constraints and Variables associated with smallest singular values
+
+    Smallest Singular Value 1:
+
+        Variables:
+
+            x[3]
+
+        Constraints:
+
+            dummy_eqn[3]
+
+====================================================================================
+"""
+
+        assert stream.getvalue() == expected
+
+    @pytest.mark.unit
+    def test_display_underdetermined_variables_and_constraints_cutoff(
+        self, dummy_problem
+    ):
+        svd = SVDToolbox(dummy_problem, size_cutoff_in_singular_vector=1)
+
+        stream = StringIO()
+        svd.display_underdetermined_variables_and_constraints(stream=stream)
+
+        expected = """====================================================================================
+Constraints and Variables associated with smallest singular values
+
+    Smallest Singular Value 1:
+
+        Variables:
+
+
+        Constraints:
+
+
+    Smallest Singular Value 2:
+
+        Variables:
+
+
+        Constraints:
+
+
+    Smallest Singular Value 3:
+
+        Variables:
+
+
+        Constraints:
+
+
+    Smallest Singular Value 4:
+
+        Variables:
+
+
+        Constraints:
+
+
+====================================================================================
+"""
+
+        assert stream.getvalue() == expected
+
+    @pytest.mark.unit
+    def test_display_constraints_including_variable(self):
+        m = ConcreteModel()
+        m.s = Set(initialize=[1, 2, 3, 4])
+        m.v = Var(m.s)
+
+        m.c1 = Constraint(expr=m.v[1] + 2 * m.v[2] == 10)
+        m.c2 = Constraint(expr=3 * m.v[2] + 4 * m.v[3] == 20)
+        m.c3 = Constraint(expr=5 * m.v[3] + 6 * m.v[4] == 30)
+        m.c4 = Constraint(expr=7 * m.v[4] + 8 * m.v[1] == 40)
+
+        svd = SVDToolbox(m)
+
+        stream = StringIO()
+        svd.display_constraints_including_variable(variable=m.v[1], stream=stream)
+
+        expected = """====================================================================================
+The following constraints involve v[1]:
+
+    c1: 1.0
+    c4: 8.0
+
+====================================================================================
+"""
+
+        assert stream.getvalue() == expected
+
+    @pytest.mark.unit
+    def test_display_constraints_including_variable_invalid(self):
+        m = ConcreteModel()
+        m.s = Set(initialize=[1, 2, 3, 4])
+        m.v = Var(m.s)
+
+        m.c1 = Constraint(expr=m.v[1] + 2 * m.v[2] == 10)
+        m.c2 = Constraint(expr=3 * m.v[2] + 4 * m.v[3] == 20)
+        m.c3 = Constraint(expr=5 * m.v[3] + 6 * m.v[4] == 30)
+        m.c4 = Constraint(expr=7 * m.v[4] + 8 * m.v[1] == 40)
+
+        svd = SVDToolbox(m)
+
+        with pytest.raises(
+            TypeError,
+            match="variable argument must be an instance of a Pyomo _VarData "
+            "object \(got foo\).",
+        ):
+            svd.display_constraints_including_variable(variable="foo")
+
+    @pytest.mark.unit
+    def test_display_constraints_including_variable_not_in_model(self):
+        m = ConcreteModel()
+        m.s = Set(initialize=[1, 2, 3, 4])
+        m.v = Var(m.s)
+        m2 = ConcreteModel()
+        m2.y = Var()
+
+        m.c1 = Constraint(expr=m.v[1] + 2 * m.v[2] == 10)
+        m.c2 = Constraint(expr=3 * m.v[2] + 4 * m.v[3] == 20)
+        m.c3 = Constraint(expr=5 * m.v[3] + 6 * m.v[4] == 30)
+        m.c4 = Constraint(expr=7 * m.v[4] + 8 * m.v[1] == 40)
+
+        svd = SVDToolbox(m)
+
+        with pytest.raises(AttributeError, match="Could not find y in model."):
+            svd.display_constraints_including_variable(variable=m2.y)
+
+    @pytest.mark.unit
+    def test_display_variables_in_constraint(self):
+        m = ConcreteModel()
+        m.s = Set(initialize=[1, 2, 3, 4])
+        m.v = Var(m.s)
+
+        m.c1 = Constraint(expr=m.v[1] + 2 * m.v[2] == 10)
+        m.c2 = Constraint(expr=3 * m.v[2] + 4 * m.v[3] == 20)
+        m.c3 = Constraint(expr=5 * m.v[3] + 6 * m.v[4] == 30)
+        m.c4 = Constraint(expr=7 * m.v[4] + 8 * m.v[1] == 40)
+
+        svd = SVDToolbox(m)
+
+        stream = StringIO()
+        svd.display_variables_in_constraint(constraint=m.c1, stream=stream)
+
+        expected = """====================================================================================
+The following variables are involved in c1:
+
+    v[1]: 1.0
+    v[2]: 2.0
+
+====================================================================================
+"""
+
+        assert stream.getvalue() == expected
+
+    @pytest.mark.unit
+    def test_display_variables_in_constraint_invalid(self):
+        m = ConcreteModel()
+        m.s = Set(initialize=[1, 2, 3, 4])
+        m.v = Var(m.s)
+
+        m.c1 = Constraint(expr=m.v[1] + 2 * m.v[2] == 10)
+        m.c2 = Constraint(expr=3 * m.v[2] + 4 * m.v[3] == 20)
+        m.c3 = Constraint(expr=5 * m.v[3] + 6 * m.v[4] == 30)
+        m.c4 = Constraint(expr=7 * m.v[4] + 8 * m.v[1] == 40)
+
+        svd = SVDToolbox(m)
+
+        with pytest.raises(
+            TypeError,
match="constraint argument must be an instance of a Pyomo _ConstraintData " + "object \(got foo\).", + ): + svd.display_variables_in_constraint(constraint="foo") + + @pytest.mark.unit + def test_display_variables_in_constraint_no_in_model(self): + m = ConcreteModel() + m.s = Set(initialize=[1, 2, 3, 4]) + m.v = Var(m.s) + + m.c1 = Constraint(expr=m.v[1] + 2 * m.v[2] == 10) + m.c2 = Constraint(expr=3 * m.v[2] + 4 * m.v[3] == 20) + m.c3 = Constraint(expr=5 * m.v[3] + 6 * m.v[4] == 30) + m.c4 = Constraint(expr=7 * m.v[4] + 8 * m.v[1] == 40) + + c6 = Constraint(expr=m.v[1] == m.v[2]) + + svd = SVDToolbox(m) + + with pytest.raises( + AttributeError, match="Could not find AbstractScalarConstraint in model." + ): + svd.display_variables_in_constraint(constraint=c6) + + +@pytest.mark.skipif( + not AmplInterface.available(), reason="pynumero_ASL is not available" +) +class TestDegeneracyHunter: + @pytest.fixture + def model(self): + m = ConcreteModel() + + m.I = Set(initialize=[i for i in range(1, 4)]) + + m.x = Var(m.I, bounds=(0, 5), initialize=1.0) + + m.con1 = Constraint(expr=m.x[1] + m.x[2] >= 1) + m.con2 = Constraint(expr=m.x[1] + m.x[2] + m.x[3] == 1) + m.con3 = Constraint(expr=m.x[2] - 2 * m.x[3] <= 1) + m.con4 = Constraint(expr=m.x[1] + m.x[3] >= 1) + + m.con5 = Constraint(expr=m.x[1] + m.x[2] + m.x[3] == 1) + + m.obj = Objective(expr=sum(m.x[i] for i in m.I)) + + return m + + @pytest.mark.unit + def test_init(self, model): + dh = DegeneracyHunter2(model) + + assert dh._model is model + + # Get Jacobian and NLP + jac = { + (0, 0): 1.0, + (0, 1): 1.0, + (0, 2): 1.0, + (1, 0): 1.0, + (1, 1): 1.0, + (1, 2): 1.0, + } + + for i, j in jac.items(): + assert j == dh.jacobian[i] + + assert isinstance(dh.nlp, PyomoNLP) + + assert dh.degenerate_set == {} + assert dh.irreducible_degenerate_sets == [] + + @pytest.mark.unit + def test_get_solver(self, model): + dh = DegeneracyHunter2(model, solver="ipopt", solver_options={"maxiter": 50}) + + solver = dh._get_solver() + + assert solver.options == {"maxiter": 50} + + @pytest.mark.unit + def test_prepare_candidates_milp(self, model): + dh = DegeneracyHunter2(model) + dh._prepare_candidates_milp() + + assert isinstance(dh.candidates_milp, ConcreteModel) + + @pytest.mark.unit + def test_identify_candidates(self, model): + dh = DegeneracyHunter2(model) + dh._prepare_candidates_milp() + + dh.candidates_milp.nu[0].set_value(-1e-05) + dh.candidates_milp.nu[1].set_value(1e-05) + + dh.candidates_milp.y_pos[0].set_value(0) + dh.candidates_milp.y_pos[1].set_value(1) + + dh.candidates_milp.y_neg[0].set_value(-0) + dh.candidates_milp.y_neg[1].set_value(-0) + + dh.candidates_milp.abs_nu[0].set_value(1e-05) + dh.candidates_milp.abs_nu[1].set_value(1e-05) + + dh._identify_candidates() + + assert dh.degenerate_set == { + model.con2: -1e-05, + model.con5: 1e-05, + } + + @pytest.mark.solver + @pytest.mark.component + @pytest.mark.skipif(not solver_available, reason="SCIP is not available") + def test_solve_candidates_milp(self, model): + dh = DegeneracyHunter2(model) + dh._prepare_candidates_milp() + dh._solve_candidates_milp() + + assert value(dh.candidates_milp.nu[0]) == pytest.approx(-1e-05, rel=1e-5) + assert value(dh.candidates_milp.nu[1]) == pytest.approx(1e-05, rel=1e-5) + + assert value(dh.candidates_milp.y_pos[0]) == pytest.approx(0, abs=1e-5) + assert value(dh.candidates_milp.y_pos[1]) == pytest.approx(1, rel=1e-5) + + assert value(dh.candidates_milp.y_neg[0]) == pytest.approx(-0, abs=1e-5) + assert value(dh.candidates_milp.y_neg[1]) == pytest.approx(-0, abs=1e-5) 
+
+        assert value(dh.candidates_milp.abs_nu[0]) == pytest.approx(1e-05, rel=1e-5)
+        assert value(dh.candidates_milp.abs_nu[1]) == pytest.approx(1e-05, rel=1e-5)
+
+        assert dh.degenerate_set == {
+            model.con2: -1e-05,
+            model.con5: 1e-05,
+        }
+
+    @pytest.mark.unit
+    def test_prepare_ids_milp(self, model):
+        dh = DegeneracyHunter2(model)
+        dh._prepare_ids_milp()
+
+        assert isinstance(dh.ids_milp, ConcreteModel)
+
+    @pytest.mark.unit
+    def test_get_ids(self, model):
+        dh = DegeneracyHunter2(model)
+        dh._prepare_ids_milp()
+
+        dh.ids_milp.nu[0].set_value(1)
+        dh.ids_milp.nu[1].set_value(-1)
+
+        dh.ids_milp.y[0].set_value(1)
+        dh.ids_milp.y[1].set_value(1)
+
+        ids_ = dh._get_ids()
+
+        assert ids_ == {
+            model.con2: 1,
+            model.con5: -1,
+        }
+
+    @pytest.mark.solver
+    @pytest.mark.component
+    @pytest.mark.skipif(not solver_available, reason="SCIP is not available")
+    def test_solve_ids_milp(self, model):
+        dh = DegeneracyHunter2(model)
+        dh._prepare_ids_milp()
+        ids_ = dh._solve_ids_milp(cons=model.con2)
+
+        assert ids_ == {
+            model.con2: 1,
+            model.con5: -1,
+        }
+
+        assert value(dh.ids_milp.nu[0]) == pytest.approx(1, rel=1e-5)
+        assert value(dh.ids_milp.nu[1]) == pytest.approx(-1, rel=1e-5)
+
+        assert value(dh.ids_milp.y[0]) == pytest.approx(1, rel=1e-5)
+        assert value(dh.ids_milp.y[1]) == pytest.approx(1, rel=1e-5)
+
+    @pytest.mark.solver
+    @pytest.mark.component
+    @pytest.mark.skipif(not solver_available, reason="SCIP is not available")
+    def test_find_irreducible_degenerate_sets(self, model):
+        dh = DegeneracyHunter2(model)
+        dh.find_irreducible_degenerate_sets()
+
+        assert dh.irreducible_degenerate_sets == [
+            {model.con2: 1, model.con5: -1},
+            {model.con5: 1, model.con2: -1},
+        ]
+
+    @pytest.mark.solver
+    @pytest.mark.component
+    @pytest.mark.skipif(not solver_available, reason="SCIP is not available")
+    def test_report_irreducible_degenerate_sets(self, model):
+        stream = StringIO()
+
+        dh = DegeneracyHunter2(model)
+        dh.report_irreducible_degenerate_sets(stream=stream)
+
+        expected = """====================================================================================
+Irreducible Degenerate Sets
+
+    Irreducible Degenerate Set 0
+        nu    Constraint Name
+        1.0    con2
+        -1.0    con5
+
+    Irreducible Degenerate Set 1
+        nu    Constraint Name
+        -1.0    con2
+        1.0    con5
+
+====================================================================================
+"""
+
+        assert stream.getvalue() == expected
+
+    @pytest.mark.solver
+    @pytest.mark.component
+    @pytest.mark.skipif(not solver_available, reason="SCIP is not available")
+    def test_report_irreducible_degenerate_sets_none(self, model):
+        stream = StringIO()
+
+        # Delete degenerate constraint
+        model.del_component(model.con5)
+
+        dh = DegeneracyHunter2(model)
+        dh.report_irreducible_degenerate_sets(stream=stream)
+
+        expected = """====================================================================================
+Irreducible Degenerate Sets
+
+    No candidate equations. The Jacobian is likely full rank.
+
+====================================================================================
+"""
+        assert stream.getvalue() == expected